diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 87e0f60..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,110 +0,0 @@ -# ────────────────────────────────────────────────────────────── -# f2a CI — Lint, build, test on every push / PR -# Based on CocoRoF/googer proven workflow pattern. -# -# Optimised for speed: -# - Rust target + registry cached (Swatinem/rust-cache) -# - pip cached -# - lint & test run in parallel -# - matrix trimmed: full OS × Python only on main; PR = Linux-only -# ────────────────────────────────────────────────────────────── -name: CI - -on: - push: - branches: [main] - pull_request: - branches: [main] - -env: - CARGO_INCREMENTAL: "1" - CARGO_NET_RETRY: "10" - RUSTUP_MAX_RETRIES: "10" - -jobs: - # ── Version consistency check (fast, no build) ──────────── - check-versions: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Verify pyproject.toml == Cargo.toml versions - run: | - PY_VER=$(grep -oP '^version\s*=\s*"\K[^"]+' pyproject.toml) - RS_VER=$(grep -oP '^version\s*=\s*"\K[^"]+' Cargo.toml) - echo "pyproject.toml = $PY_VER" - echo "Cargo.toml = $RS_VER" - if [ "$PY_VER" != "$RS_VER" ]; then - echo "::error::Version mismatch! pyproject.toml=$PY_VER vs Cargo.toml=$RS_VER — update both files." 
- exit 1 - fi - - # ── Lint (Rust + Python) ────────────────────────────────── - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@stable - with: - components: clippy, rustfmt - - - name: Rust cache - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true - - - name: cargo fmt --check - run: cargo fmt --all -- --check - - - name: cargo clippy - run: cargo clippy --all-targets -- -D warnings - - # ── Test (cross-platform × multi-Python) ────────────────── - test: - # Run in parallel with lint (no 'needs') - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.10", "3.12"] - include: - # Spot-check other platforms with one Python version - - os: macos-14 - python-version: "3.12" - - os: windows-latest - python-version: "3.12" - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@stable - - - name: Rust cache - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.os }}-py${{ matrix.python-version }} - cache-on-failure: true - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Pip cache - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: pip-${{ matrix.os }}-py${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }} - restore-keys: | - pip-${{ matrix.os }}-py${{ matrix.python-version }}- - - - name: Install package and test deps - run: | - python -m pip install --upgrade pip - python -m pip install .[dev] - - - name: Run tests - run: python -m pytest -v --tb=short diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml new file mode 100644 index 0000000..f20156e --- /dev/null +++ b/.github/workflows/pages.yml @@ -0,0 +1,43 @@ +# ────────────────────────────────────────────────────────────── +# GitHub Pages — Serve sample HTML reports +# 
Deploys the sample/ folder so reports can be viewed in-browser +# ────────────────────────────────────────────────────────────── +name: Deploy Sample Reports to Pages + +on: + push: + branches: [main] + paths: [sample/**] + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: pages + cancel-in-progress: true + +jobs: + deploy: + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - uses: actions/checkout@v5 + + - name: Setup Pages + uses: actions/configure-pages@v5 + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: sample/ + + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 + with: + enablement: true diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 022789e..c429466 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,73 +1,83 @@ # ────────────────────────────────────────────────────────────── -# f2a — Cross-platform wheel build & PyPI deploy -# -# Trigger : push to the `deploy` branch (only when pyproject.toml changes) -# Pipeline : -# 1. check-version — skip if version already on PyPI -# 2. test — pytest across Python 3.10–3.13 -# 3. build-wheels — maturin native wheels (5 targets × 4 Pythons) -# 4. build-sdist — source distribution -# 5. publish — upload to PyPI via Trusted Publisher (OIDC) -# -# Based on CocoRoF/googer proven workflow pattern. +# PyPI auto-deploy workflow +# Trigger: push to the deploy branch +# Condition: only deploy if not yet published on PyPI, or version bumped # ────────────────────────────────────────────────────────────── -name: Build & Publish to PyPI +name: Publish to PyPI on: push: branches: - deploy - paths: - - "pyproject.toml" jobs: - # ── 1. 
Version gate ─────────────────────────────────────── + # ── Step 1: Version check ───────────────────────────────── check-version: runs-on: ubuntu-latest outputs: - version_changed: ${{ steps.check.outputs.changed }} - new_version: ${{ steps.check.outputs.new_version }} + should_publish: ${{ steps.decide.outputs.should_publish }} + local_version: ${{ steps.local.outputs.version }} steps: - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 with: - fetch-depth: 2 + python-version: "3.12" - - name: Check version change - id: check + - name: Read local version + id: local run: | - NEW_VERSION=$(grep -oP '^version\s*=\s*"\K[^"]+' pyproject.toml) - echo "new_version=$NEW_VERSION" >> "$GITHUB_OUTPUT" - - # Verify Cargo.toml version matches - CARGO_VERSION=$(grep -oP '^version\s*=\s*"\K[^"]+' Cargo.toml) - if [ "$NEW_VERSION" != "$CARGO_VERSION" ]; then - echo "::error::Version mismatch! pyproject.toml=$NEW_VERSION vs Cargo.toml=$CARGO_VERSION" - exit 1 - fi + VERSION=$(python -c "import tomllib; print(tomllib.load(open('pyproject.toml','rb'))['project']['version'])") + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "📦 Local version: $VERSION" - # Compare with previous commit - OLD_VERSION=$(git show HEAD~1:pyproject.toml 2>/dev/null | grep -oP '^version\s*=\s*"\K[^"]+' || echo "") - - echo "Old version: $OLD_VERSION" - echo "New version: $NEW_VERSION" + - name: Check PyPI for existing version + id: pypi + run: | + LOCAL="${{ steps.local.outputs.version }}" + # PyPI JSON API — returns 404 if the package does not exist + HTTP_CODE=$(curl -s -o /tmp/pypi.json -w "%{http_code}" \ + "https://pypi.org/pypi/f2a/json") + + if [ "$HTTP_CODE" = "404" ]; then + echo "pypi_version=NONE" >> "$GITHUB_OUTPUT" + echo "🆕 Package not yet on PyPI" + else + PYPI_VER=$(python -c " + import json, pathlib + data = json.loads(pathlib.Path('/tmp/pypi.json').read_text()) + print(data['info']['version']) + ") + echo "pypi_version=$PYPI_VER" >> "$GITHUB_OUTPUT" 
+ echo "📡 PyPI version: $PYPI_VER" + fi - if [ -z "$OLD_VERSION" ] || [ "$OLD_VERSION" != "$NEW_VERSION" ]; then - echo "changed=true" >> "$GITHUB_OUTPUT" - echo "✅ Version changed: $OLD_VERSION -> $NEW_VERSION" + - name: Decide whether to publish + id: decide + run: | + LOCAL="${{ steps.local.outputs.version }}" + PYPI="${{ steps.pypi.outputs.pypi_version }}" + + if [ "$PYPI" = "NONE" ]; then + echo "should_publish=true" >> "$GITHUB_OUTPUT" + echo "✅ First publish — will deploy $LOCAL" + elif [ "$LOCAL" != "$PYPI" ]; then + echo "should_publish=true" >> "$GITHUB_OUTPUT" + echo "✅ Version bumped ($PYPI → $LOCAL) — will deploy" else - echo "changed=false" >> "$GITHUB_OUTPUT" - echo "⏭️ Version unchanged, skipping publish." + echo "should_publish=false" >> "$GITHUB_OUTPUT" + echo "⏭️ Version $LOCAL already on PyPI — skipping" fi - # ── 2. Test ─────────────────────────────────────────────── + # ── Step 2: Tests ───────────────────────────────────────── test: needs: check-version - if: needs.check-version.outputs.version_changed == 'true' + if: needs.check-version.outputs.should_publish == 'true' runs-on: ubuntu-latest strategy: - fail-fast: false matrix: - python-version: ["3.10", "3.13"] + python-version: ["3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 @@ -76,123 +86,41 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@stable - - - name: Rust cache - uses: Swatinem/rust-cache@v2 - with: - key: publish-py${{ matrix.python-version }} - cache-on-failure: true - - - name: Pip cache - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: pip-publish-py${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }} - restore-keys: | - pip-publish-py${{ matrix.python-version }}- - - - name: Install package and test deps + - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install .[dev] + pip install -e ".[dev]" + pip install beautifulsoup4 
html5lib - name: Run tests - run: python -m pytest -v --tb=short - - # ── 3. Build wheels (maturin) ───────────────────────────── - build-wheels: - needs: [check-version, test] - if: needs.check-version.outputs.version_changed == 'true' - strategy: - fail-fast: false - matrix: - include: - # ── Linux x86_64 ── - - os: ubuntu-latest - target: x86_64-unknown-linux-gnu - manylinux: auto - # ── Linux aarch64 ── - - os: ubuntu-latest - target: aarch64-unknown-linux-gnu - manylinux: auto - # ── macOS x86_64 (Intel, cross-compiled on ARM) ── - - os: macos-14 - target: x86_64-apple-darwin - manylinux: "off" - # ── macOS aarch64 (Apple Silicon) ── - - os: macos-14 - target: aarch64-apple-darwin - manylinux: "off" - # ── Windows x86_64 ── - - os: windows-latest - target: x86_64-pc-windows-msvc - manylinux: "off" - - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - - name: Build wheels - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.target }} - args: --release --out dist --interpreter 3.10 3.11 3.12 3.13 - manylinux: ${{ matrix.manylinux }} - before-script-linux: | - # Ensure Perl is available for vendored OpenSSL build (openssl-src) - if command -v yum &> /dev/null; then - yum install -y perl-IPC-Cmd perl-core - elif command -v apk &> /dev/null; then - apk add --no-cache perl make - fi - - - name: Upload wheels - uses: actions/upload-artifact@v4 - with: - name: wheels-${{ matrix.target }} - path: dist/*.whl + run: pytest git_action/tests/ -v --tb=short - # ── 4. 
Build sdist ──────────────────────────────────────── - build-sdist: + # ── Step 3: Build & Deploy ──────────────────────────────── + publish: needs: [check-version, test] - if: needs.check-version.outputs.version_changed == 'true' + if: needs.check-version.outputs.should_publish == 'true' runs-on: ubuntu-latest + environment: pypi + permissions: + id-token: write # Trusted Publisher (OIDC) steps: - uses: actions/checkout@v4 - - name: Build sdist - uses: PyO3/maturin-action@v1 - with: - command: sdist - args: --out dist - - - name: Upload sdist - uses: actions/upload-artifact@v4 + - name: Set up Python + uses: actions/setup-python@v5 with: - name: sdist - path: dist/*.tar.gz + python-version: "3.12" - # ── 5. Publish to PyPI ──────────────────────────────────── - publish: - needs: [check-version, build-wheels, build-sdist] - if: needs.check-version.outputs.version_changed == 'true' - runs-on: ubuntu-latest - environment: pypi - permissions: - id-token: write # Required for Trusted Publisher (OIDC) - steps: - - name: Download all artifacts - uses: actions/download-artifact@v4 - with: - path: dist - merge-multiple: true + - name: Install build tools + run: python -m pip install --upgrade pip build - - name: List artifacts - run: ls -lh dist/ + - name: Build package + run: python -m build - name: Publish to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - # Uses Trusted Publisher (OIDC) — no API token needed. - # Register at: https://pypi.org/manage/project/f2a/settings/publishing/ + # Uses Trusted Publisher, so no API token required. + # You must register GitHub Actions as a Trusted Publisher in the PyPI project settings. 
+ # To use a manual token instead, uncomment below: + # with: + # password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.gitignore b/.gitignore index c9c39d9..a75caaf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,6 @@ # f2a — .gitignore -# ── Rust ── -target/ -**/*.rs.bk - -# ── Python ── +# Python __pycache__/ *.py[cod] *$py.class @@ -12,35 +8,41 @@ __pycache__/ dist/ build/ *.egg -*.so -*.pyd -*.pdb -# ── Virtual environments ── +# Virtual environments .venv/ venv/ env/ -# ── IDE ── +# IDE .vscode/ .idea/ *.swp *.swo -# ── OS ── +# OS .DS_Store Thumbs.db -# ── Test & Coverage ── +# Test & Coverage .pytest_cache/ htmlcov/ .coverage coverage.xml -test_data_e2e/ -# ── Output ── +# Manual test folder +test/ + +# Output +examples/output/ +examples/sample_data.csv output/*.html -# ── mypy / ruff ── +# Allow sample reports +!sample/ + +# mypy .mypy_cache/ + +# ruff .ruff_cache/ diff --git a/ADVANCED_ANALYSIS_PLAN.md b/ADVANCED_ANALYSIS_PLAN.md new file mode 100644 index 0000000..374f785 --- /dev/null +++ b/ADVANCED_ANALYSIS_PLAN.md @@ -0,0 +1,371 @@ +# f2a Advanced Analysis Plan + +> **목적**: ML 논문/기법 기반의 고급 분석 기능을 체계적으로 설계하고, HTML 리포트에 2-depth 탭 구조로 제공한다. + +--- + +## 1. 
현재 상태 분석 (As-Is) + +### 1.1 현재 구현된 분석 (Basic Report) + +| 영역 | 기법 | 비고 | +|------|------|------| +| **Descriptive** | count, missing, unique, mean, median, std, SE, CV, MAD, min/max/range, p5/q1/q3/p95, IQR, skewness, kurtosis | 16개 수치 지표 | +| **Distribution** | Shapiro-Wilk, D'Agostino, KS, Anderson-Darling, skew/kurt 분류 | 4개 정규성 검정 | +| **Correlation** | Pearson, Spearman, Kendall, Cramér's V, VIF | 5종 상관 분석 | +| **Missing** | column summary, row distribution, total ratio | 기초 결측 분석 | +| **Outlier** | IQR method, Z-score method | 2종 이상치 탐지 | +| **Categorical** | frequency, entropy, chi-square independence | 범주 분석 | +| **Feature Importance** | variance ranking, mean abs correlation, mutual information | 3종 중요도 | +| **PCA** | StandardScaler + PCA, scree, loadings | 기초 차원축소 | +| **Duplicates** | exact duplicates, column uniqueness | 중복 탐지 | +| **Quality** | completeness, uniqueness, consistency, validity (weighted) | 4차원 품질 | + +### 1.2 현재 HTML 구조 + +``` +Header +├── [1-depth tabs: subset/split 선택] ← 현재 유일한 탭 계층 +│ ├── Overview +│ ├── Data Quality +│ ├── Preprocessing +│ ├── Descriptive Statistics +│ ├── Distribution Analysis +│ ├── Correlation Analysis +│ ├── Missing Data +│ ├── Outlier Detection +│ ├── Categorical Analysis +│ ├── Feature Importance +│ ├── PCA +│ ├── Duplicates +│ └── Warnings +Footer +``` + +--- + +## 2. Advanced 분석 기법 설계 (To-Be) + +### 2.1 새로운 HTML 2-Depth 탭 구조 + +``` +Header +├── [1-depth: subset/split 선택] ← 기존 +│ ├── [2-depth: Basic | Advanced] ← 신규 +│ │ ├── Basic → 기존 모든 섹션 그대로 +│ │ └── Advanced +│ │ ├── A1. Advanced Distribution +│ │ ├── A2. Advanced Correlation +│ │ ├── A3. Clustering Analysis +│ │ ├── A4. Dimensionality Reduction +│ │ ├── A5. Feature Engineering Insights +│ │ ├── A6. Anomaly Detection +│ │ ├── A7. Statistical Tests +│ │ └── A8. Data Profiling Summary +Footer +``` + +--- + +### 2.2 Advanced 탭 상세 설계 + +--- + +#### A1. 
Advanced Distribution Analysis + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Kernel Density Estimation (KDE) bandwidth selection** | Silverman(1986), Scott's rule | `scipy.stats.gaussian_kde`로 최적 bandwidth 자동 추정, KDE curve + histogram overlay | 데이터의 실제 분포 형태를 비모수적으로 파악 | +| **Best-fit distribution matching** | D'Agostino & Stephens(1986) | `scipy.stats`의 주요 분포(norm, lognorm, exponential, gamma, beta, weibull, uniform) 피팅 후 AIC/BIC 비교 | 각 컬럼이 어떤 이론적 분포에 가장 가까운지 자동 식별 | +| **Jarque-Bera test** | Jarque & Bera(1987) | `scipy.stats.jarque_bera` — skewness+kurtosis 기반 정규성 검정 | 기존 4개 검정에 추가, 대표본에 특히 유효 | +| **Power transformation recommendation** | Box-Cox(1964), Yeo-Johnson(2000) | `scipy.stats.boxcox`/`yeojohnson`으로 변환 후 skewness 변화량 측정 | 어떤 변환이 정규성을 개선하는지 자동 추천 | +| **Empirical CDF** | Kolmogorov(1933) | `statsmodels.distributions.empirical_distribution.ECDF` 또는 직접 step plot | 데이터의 누적 분포를 직관적으로 시각화 | + +**시각화:** +- KDE overlay histograms (bandwidth comparison) +- Best-fit distribution overlay plot (데이터 + 최적 분포 곡선) +- Power transformation before/after comparison +- ECDF step plots + +--- + +#### A2. Advanced Correlation Analysis + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Partial correlation** | Fisher(1924) | 다른 변수를 제어한 상태에서의 순수 상관. inverse correlation matrix에서 추출 | 교란 변수 제거한 진정한 관계 파악 | +| **Distance correlation** | Székely et al.(2007) | `dcor` 라이브러리 또는 직접 구현. 
비선형 관계까지 감지 | Pearson이 놓치는 비선형 의존성 탐지 | +| **Mutual Information heatmap** | Shannon(1948), Kraskov et al.(2004) | sklearn `mutual_info_regression`으로 전체 컬럼 쌍 MI 행렬 생성 | 비선형 정보 공유량의 정량적 시각화 | +| **Correlation stability (bootstrap)** | Efron(1979) | 상관계수의 bootstrap 신뢰구간 (95% CI) 계산 | 상관 추정의 신뢰도/안정성 평가 | +| **Correlation network graph** | Graph theory | 상관 threshold 초과 쌍을 node-edge로 시각화 | 변수 간 관계 구조의 직관적 네트워크 파악 | + +**시각화:** +- Partial correlation heatmap +- MI heatmap +- Bootstrap correlation CI forest plot +- Correlation network graph (matplotlib `networkx` layout) + +--- + +#### A3. Clustering Analysis + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **K-Means + Elbow/Silhouette** | MacQueen(1967), Rousseeuw(1987) | sklearn `KMeans` (k=2~10) + inertia elbow + silhouette score → optimal k 자동 결정 | 데이터의 자연 군집 구조 탐색 | +| **DBSCAN** | Ester et al.(1996) | sklearn `DBSCAN` with automated eps (k-distance graph) | 밀도 기반 클러스터링, 노이즈/이상치 자연 분리 | +| **Hierarchical clustering (dendrogram)** | Ward(1963) | `scipy.cluster.hierarchy.linkage` + dendrogram | 계층적 구조 시각화, 적절한 컷 레벨 참고 | +| **Cluster profiling** | — | 군집별 평균/분포 요약 테이블 생성 | 각 군집의 특성 자동 프로파일링 | + +**시각화:** +- Elbow plot + Silhouette score plot +- 2D PCA scatter with cluster labels (color-coded) +- DBSCAN result scatter (noise = gray) +- Dendrogram +- Cluster profile radar/bar chart + +--- + +#### A4. 
Dimensionality Reduction (확장) + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **t-SNE** | van der Maaten & Hinton(2008) | sklearn `TSNE(n_components=2, perplexity=30)` | 고차원 데이터의 2D 비선형 임베딩으로 군집 시각화 | +| **UMAP** | McInnes et al.(2018) | `umap-learn` 라이브러리 (optional dependency) | t-SNE보다 빠르고 전역 구조 보존 | +| **Factor Analysis** | Spearman(1904), Thurstone(1935) | sklearn `FactorAnalysis` — 잠재 요인 추출 + loadings | PCA와 달리 잠재 변수 모델, 해석력 우수 | +| **Explained variance per feature** | Kaiser criterion(1960) | 각 원본 feature가 top-k PC에 기여하는 분산 비율 | feature-level 중요도의 차원축소 관점 제공 | + +**시각화:** +- t-SNE 2D scatter (cluster labels overlay 가능) +- UMAP 2D scatter +- Factor loadings heatmap +- Feature contribution stacked bar chart + +--- + +#### A5. Feature Engineering Insights + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Interaction detection** | Friedman & Popescu(2008) | 수치 컬럼 쌍의 곱/비율 생성 후 분산/상관 분석 | 유망한 interaction feature 자동 발견 | +| **Monotonic relationship detection** | Spearman rho | Spearman vs. Pearson 차이로 비선형 단조성 판별 | 변환이 필요한 비선형 관계 식별 | +| **Binning analysis** | Dougherty et al.(1995) | 수치 컬럼의 equal-width/equal-freq 빈 생성, 빈별 엔트로피 비교 | 이산화 전략 선택 도움 | +| **Cardinality analysis** | — | 범주형 컬럼의 유니크 비율별 인코딩 전략 추천 (one-hot / target / ordinal) | 전처리 파이프라인 설계 자동 가이드 | +| **Target leakage detection** | Kaufman et al.(2012) | 수치 컬럼 중 다른 컬럼과 r>0.99 또는 MI≈max인 쌍 경고 | 데이터 누수 조기 발견 | + +**시각화:** +- Top-N interaction feature 분포 히스토그램 +- Spearman vs Pearson 차이 bar chart +- Encoding strategy recommendation 테이블 + +--- + +#### A6. 
Anomaly Detection (확장) + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Isolation Forest** | Liu et al.(2008) | sklearn `IsolationForest` → anomaly score per row | 다변량 이상치 탐지 (IQR/Z-score는 단변량) | +| **Local Outlier Factor (LOF)** | Breunig et al.(2000) | sklearn `LocalOutlierFactor` → LOF score per row | 밀도 기반 국소 이상치, 군집 밖의 점 탐지 | +| **Mahalanobis distance** | Mahalanobis(1936) | 공분산 기반 다변량 거리, chi-squared 임계값 | 상관 구조를 고려한 다변량 이상치 | +| **Anomaly summary** | — | 다수 방법의 consensus (≥2 방법에서 anomaly → 고확률) | 단일 방법 의존 제거, 견고한 이상치 판정 | + +**시각화:** +- Isolation Forest anomaly score 분포 히스토그램 +- LOF score scatter (2D PCA 공간에서) +- Mahalanobis distance 히스토그램 with chi-squared 임계선 +- Consensus anomaly heatmap (row × method) + +--- + +#### A7. Statistical Tests + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Levene's test (등분산)** | Levene(1960) | 범주별로 수치 컬럼의 분산 동질성 검정 | ANOVA 전제조건 확인 | +| **Kruskal-Wallis test** | Kruskal & Wallis(1952) | 비모수 다집단 중위수 비교 | 비정규 분포에서의 집단 차이 검정 | +| **Mann-Whitney U test** | Mann & Whitney(1947) | 이진 범주와 수치 컬럼 간 비모수 검정 | 두 집단 차이의 비모수 평가 | +| **Chi-square goodness of fit** | Pearson(1900) | 범주형 컬럼의 균등 분포 검정 | 범주 분포의 편향 정도 정량 평가 | +| **Grubbs' test** | Grubbs(1950) | 단일 이상치의 통계적 유의성 검정 | 극단값의 통계적 유의미성 판별 | +| **Stationarity (ADF test)** | Dickey & Fuller(1979) | 시계열 컬럼의 단위근 검정 (`statsmodels.tsa`) | 시계열 정상성 자동 판단 | + +**시각화:** +- Group comparison boxplots (Kruskal-Wallis/Mann-Whitney와 함께) +- Test results summary table with p-values and significance stars +- Levene test bar chart per column + +--- + +#### A8. 
Data Profiling Summary + +| 기법 | 근거 | 구현 계획 | 효과 | +|------|------|-----------|------| +| **Automated insight generation** | AutoEDA literature | 모든 분석 결과를 종합하여 자연어 인사이트 생성 | 비전문가도 핵심 발견 사항을 즉시 파악 | +| **Feature type recommendation** | — | 각 컬럼의 분포/유니크/결측 패턴으로 최적 ML 타입 추천 | ML 파이프라인 설계 가이드 | +| **Dataset complexity scoring** | Ho & Basu(2002) | 차원수, 클래스 수, 불균형도, 상관 구조 → 복잡도 점수 | 데이터셋 난이도의 정량적 평가 | +| **Overall health dashboard** | — | 전체 분석 결과의 1-page 대시보드 (트래픽 라이트 시스템) | 데이터 상태의 즉각적 파악 | + +**시각화:** +- Health score radar chart (6 축: completeness, consistency, outlier ratio, skewness, correlation, duplicates) +- Insight cards (자동 생성된 주요 발견 사항) +- Feature type recommendation table + +--- + +## 3. 기술 의존성 분석 + +### 3.1 새로 필요한 패키지 + +| 패키지 | 용도 | 필수 여부 | 비고 | +|--------|------|-----------|------| +| `scikit-learn` | K-Means, DBSCAN, IsolationForest, LOF, FactorAnalysis, t-SNE, MI | **이미 설치됨** | core dependency | +| `networkx` | Correlation network graph | Optional | `try/except` 처리 | +| `umap-learn` | UMAP 차원축소 | Optional | `try/except` 처리 | +| `statsmodels` | ADF test, ECDF | Optional | `try/except` 처리 | + +**원칙:** `scikit-learn`과 기존 종속성(`scipy`, `numpy`, `pandas`, `matplotlib`, `seaborn`)만으로 A1~A8의 80%+ 구현 가능. `networkx`, `umap-learn`, `statsmodels`는 optional — 없으면 해당 분석을 건너뛰고 "library not available" 메시지 표시. + +### 3.2 성능 고려사항 + +| 기법 | 시간 복잡도 | 대응 전략 | +|------|-------------|-----------| +| t-SNE | O(n²) | n>5000이면 샘플링 후 수행 | +| UMAP | O(n·log(n)) | n>10000이면 샘플링 | +| Isolation Forest | O(n·t·log(n)) | max_samples=min(256, n) 기본 | +| MI 행렬 | O(n·d²) | d>30이면 top-30 컬럼만 | +| Bootstrap CI | O(B·n) | B=1000, n>5000이면 샘플링 | +| K-Means elbow | O(k·n·d·iter) | k=2~10, max_iter=100 | +| Best-fit distribution | O(n·d_count) | 7개 분포만 피팅 | + +--- + +## 4. 구현 계획 + +### Phase 1: 인프라 (2-depth 탭 구조) + +1. **`AnalysisConfig` 확장** — `advanced: bool = True` 플래그 + `AdvancedConfig` sub-dataclass 추가 +2. **HTML generator 2-depth 탭** — 기존 subset 탭 내부에 "Basic / Advanced" 서브탭 도입 +3. 
**`StatsResult` 확장** — `advanced_stats: dict[str, Any]` 필드 추가 +4. **`VizResult` 확장** — advanced plot 메서드 추가 + +### Phase 2: Advanced Stats 모듈 (4개 파일) + +5. **`stats/advanced_distribution.py`** — best_fit, kde_bandwidth, jarque_bera, power_transform, ecdf +6. **`stats/advanced_correlation.py`** — partial_corr, mi_matrix, bootstrap_ci, correlation_network_data +7. **`stats/clustering.py`** — kmeans_analysis, dbscan_analysis, hierarchical, cluster_profiles +8. **`stats/statistical_tests.py`** — levene, kruskal_wallis, mann_whitney, chi_sq_goodness, grubbs, adf_stationarity + +### Phase 3: Advanced Stats 모듈 (3개 파일) + +9. **`stats/advanced_anomaly.py`** — isolation_forest, lof, mahalanobis, consensus_anomaly +10. **`stats/advanced_dimreduction.py`** — tsne, umap, factor_analysis, feature_contribution +11. **`stats/feature_insights.py`** — interaction_detection, monotonic_detection, binning_analysis, cardinality_analysis, leakage_detection + +### Phase 4: Advanced Viz 모듈 (4개 파일) + +12. **`viz/advanced_dist_plots.py`** — best_fit_overlay, power_transform_comparison, ecdf_plot, kde_bandwidth_comparison +13. **`viz/advanced_corr_plots.py`** — partial_corr_heatmap, mi_heatmap, bootstrap_ci_plot, network_graph +14. **`viz/cluster_plots.py`** — elbow_plot, silhouette_plot, cluster_scatter, dendrogram, cluster_profiles_chart +15. **`viz/advanced_anomaly_plots.py`** — isolation_forest_hist, lof_scatter, mahalanobis_hist, consensus_heatmap + +### Phase 5: 통합 + +16. **Analyzer `_compute_advanced_stats()` 추가** — 각 advanced 모듈 호출 +17. **VizResult advanced plot 메서드 추가** — 각 advanced viz 호출 +18. **HTML generator advanced 섹션 빌더** — 8개 advanced 섹션 + 서브탭 +19. **Data Profiling Summary (A8)** — insights 자동 생성 로직 + +### Phase 6: 마무리 + +20. **pyproject.toml 업데이트** — optional deps 추가 +21. **`_METRIC_TIPS` 확장** — advanced 지표 tooltip 추가 +22. **End-to-end 테스트** — 실제 데이터셋으로 전체 리포트 생성 검증 + +--- + +## 5. 
효과성 평가 + +### 5.1 분석 범위 확장 + +| 카테고리 | Basic (현재) | + Advanced | 커버리지 증가 | +|----------|-----------|------------|---------------| +| 정규성/분포 검정 | 4종 | +3종 (JB, 7-dist fitting, power transform) | +75% | +| 상관 분석 | 5종 | +4종 (partial, MI matrix, bootstrap CI, network) | +80% | +| 이상치 탐지 | 2종 (단변량) | +3종 (다변량: IF, LOF, Mahalanobis) | +150% | +| 차원축소 | PCA 1종 | +3종 (t-SNE, UMAP, Factor Analysis) | +300% | +| 군집 분석 | 0종 | +3종 (K-Means, DBSCAN, Hierarchical) | 신규 | +| 통계 검정 | 4종 (정규성) | +6종 (등분산, 비모수, 적합도, Grubbs, ADF) | +150% | +| Feature 공학 | 3종 (중요도) | +5종 (interaction, monotonic, binning, cardinality, leakage) | +167% | +| Data profiling | 품질 점수 | +3종 (insights, type recommendation, complexity) | +300% | + +### 5.2 실무적 가치 + +1. **비선형 관계 탐지**: Pearson/Spearman만으로는 포착 불가능한 비선형 의존성을 MI, distance correlation 으로 발견 +2. **다변량 이상치**: IQR/Z-score는 단변량 — Isolation Forest와 LOF로 변수 간 상호작용 고려한 이상치 탐지 +3. **군집 구조 발견**: 데이터의 자연 그룹을 자동 탐색, ML 모델링 전 데이터 이해도 극대화 +4. **최적 분포 식별**: 각 변수의 이론적 분포를 자동 피팅하여 변환/모델링 전략 결정 +5. **통계적 유의성**: 시각적 차이를 넘어 통계 검정으로 엄밀한 판단 근거 제공 +6. 
**Feature 공학 자동화**: interaction feature, 인코딩 전략, 데이터 누수를 자동 탐지 + +### 5.3 학술적 근거 (Key References) + +| # | 논문/방법 | 연도 | 핵심 기여 | +|---|-----------|------|-----------| +| 1 | Silverman, *Density Estimation for Statistics and Data Analysis* | 1986 | KDE bandwidth selection | +| 2 | Jarque & Bera, *Efficient tests for normality* | 1987 | JB normality test | +| 3 | Box & Cox, *An analysis of transformations* | 1964 | Power transformation | +| 4 | Yeo & Johnson, *A new family of power transformations* | 2000 | 음수 허용 power transform | +| 5 | Székely et al., *Measuring and testing dependence by correlation of distances* | 2007 | Distance correlation | +| 6 | Shannon, *A mathematical theory of communication* | 1948 | Mutual information | +| 7 | Efron, *Bootstrap methods: another look at the jackknife* | 1979 | Bootstrap CI | +| 8 | MacQueen, *Some methods for classification* | 1967 | K-Means | +| 9 | Ester et al., *A density-based algorithm (DBSCAN)* | 1996 | DBSCAN | +| 10 | Rousseeuw, *Silhouettes: a graphical aid* | 1987 | Silhouette score | +| 11 | van der Maaten & Hinton, *Visualizing data using t-SNE* | 2008 | t-SNE | +| 12 | McInnes et al., *UMAP: Uniform manifold approximation* | 2018 | UMAP | +| 13 | Liu et al., *Isolation forest* | 2008 | Isolation Forest | +| 14 | Breunig et al., *LOF: identifying density-based local outliers* | 2000 | LOF | +| 15 | Mahalanobis, *On the generalized distance in statistics* | 1936 | Mahalanobis distance | +| 16 | Fisher, *The distribution of the partial correlation coefficient* | 1924 | Partial correlation | +| 17 | Levene, *Robust tests for equality of variances* | 1960 | Levene's test | +| 18 | Kruskal & Wallis, *Use of ranks in one-criterion variance analysis* | 1952 | KW test | +| 19 | Dickey & Fuller, *Distribution of the estimators* | 1979 | ADF stationarity test | +| 20 | Ho & Basu, *Complexity measures of supervised classification problems* | 2002 | Dataset complexity | + +--- + +## 6. 
2-Depth 탭 UI 설계 + +### 6.1 탭 구조 + +```html + +
+ + +
+ + +
+
+ + +
+
+ +
+
+ +
+
+``` + +### 6.2 Advanced 탭 내부 네비게이션 + +Advanced 탭 내에 섹션 앵커 점프 네비게이션: +``` +[Distribution+] [Correlation+] [Clustering] [Dim. Reduction] +[Feature Eng.] [Anomaly] [Statistical Tests] [Profiling] +``` + +--- + +*Generated by f2a analysis planning system* diff --git a/Cargo.lock b/Cargo.lock deleted file mode 100644 index 54588af..0000000 --- a/Cargo.lock +++ /dev/null @@ -1,2805 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "adler2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" - -[[package]] -name = "ahash" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "getrandom 0.3.4", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "aho-corasick" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" -dependencies = [ - "memchr", -] - -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anyhow" -version = "1.0.102" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" - -[[package]] -name = "approx" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" -dependencies = [ - "num-traits", -] - -[[package]] -name = "ar_archive_writer" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" -dependencies = [ - "object", -] - -[[package]] -name = "argminmax" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f13d10a41ac8d2ec79ee34178d61e6f47a29c2edfe7ef1721c7383b0359e65" -dependencies = [ - "num-traits", -] - -[[package]] -name = "array-init-cursor" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed51fe0f224d1d4ea768be38c51f9f831dee9d05c163c11fba0b8c44387b1fc3" - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "atoi_simd" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a49e05797ca52e312a0c658938b7d00693ef037799ef7187678f212d7684cf" -dependencies = [ - "debug_unsafe", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "bitflags" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" - -[[package]] -name = "brotli" -version = "7.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a334ef7c9e23abf0ce748e8cd309037da93e606ad52eb372e4ce327a0dcfbdfd" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - -[[package]] -name = "bumpalo" -version = "3.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" - -[[package]] -name = "bytemuck" -version = "1.25.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "bytes" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" -dependencies = [ - "serde", -] - -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - -[[package]] -name = "cc" -version = "1.2.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423" -dependencies = [ - "find-msvc-tools", - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "chrono" -version = "0.4.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" -dependencies = [ - "iana-time-zone", - "num-traits", - "windows-link", -] - -[[package]] -name = "chrono-tz" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" -dependencies = [ - "chrono", - "phf", -] - -[[package]] -name = "comfy-table" -version = "7.2.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" -dependencies = [ - "crossterm", - "unicode-segmentation", - "unicode-width", -] - -[[package]] -name = "compact_str" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" -dependencies = [ - "castaway", - "cfg-if", - "itoa", - "rustversion", - "ryu", - "serde", - "static_assertions", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "crossbeam-channel" -version = "0.5.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crossterm" 
-version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" -dependencies = [ - "bitflags", - "crossterm_winapi", - "document-features", - "parking_lot", - "rustix", - "winapi", -] - -[[package]] -name = "crossterm_winapi" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" -dependencies = [ - "winapi", -] - -[[package]] -name = "debug_unsafe" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eed2c4702fa172d1ce21078faa7c5203e69f5394d48cc436d25928394a867a2" - -[[package]] -name = "document-features" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" -dependencies = [ - "litrs", -] - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" - -[[package]] -name = "enum_dispatch" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" -dependencies = [ - "once_cell", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "ethnum" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca81e6b4777c89fd810c25a4be2b1bd93ea034fbe58e6a75216a34c6b82c539b" - -[[package]] -name = "f2a" -version = "1.0.0" -dependencies = [ - "indexmap", - "ndarray", - "polars", - "pyo3", - "rayon", - "serde", - "serde_json", - "statrs", - "thiserror", -] - -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - -[[package]] -name = "fast-float2" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" - -[[package]] -name = "find-msvc-tools" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" - -[[package]] -name = "flate2" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" -dependencies = [ - "miniz_oxide", - "zlib-rs", -] - -[[package]] -name = "float-cmp" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" -dependencies = [ - "num-traits", -] - -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "futures" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" - -[[package]] -name = "futures-executor" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" - -[[package]] -name = "futures-macro" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "futures-sink" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" - -[[package]] -name = "futures-task" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" - -[[package]] -name = "futures-util" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "slab", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "libc", - "r-efi 5.3.0", - "wasip2", -] - -[[package]] -name = "getrandom" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" -dependencies = [ - "cfg-if", - "libc", - "r-efi 6.0.0", - "wasip2", - "wasip3", -] - -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "halfbrown" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8588661a8607108a5ca69cab034063441a0413a0b041c13618a7dd348021ef6f" -dependencies = [ - "hashbrown 0.14.5", - "serde", -] - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", - "allocator-api2", - "rayon", - "serde", -] - -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" 
-dependencies = [ - "allocator-api2", - "equivalent", - "foldhash", - "rayon", - "serde", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "home" -version = "0.5.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.65" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core 0.62.2", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "id-arena" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "equivalent", - "hashbrown 0.16.1", - "serde", - 
"serde_core", -] - -[[package]] -name = "indoc" -version = "2.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" -dependencies = [ - "rustversion", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "jobserver" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" -dependencies = [ - "getrandom 0.3.4", - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "jsonpath_lib_polars_vendor" -version = "0.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4bd9354947622f7471ff713eacaabdb683ccb13bba4edccaab9860abf480b7d" -dependencies = [ - "log", - "serde", - "serde_json", -] - -[[package]] -name = "leb128fmt" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" - -[[package]] -name = "libc" -version = "0.2.183" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" - -[[package]] -name = "libm" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" - -[[package]] -name = "linux-raw-sys" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" - -[[package]] -name = "litrs" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "lz4" -version = "1.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" -dependencies = [ - "lz4-sys", -] - -[[package]] -name = "lz4-sys" -version = "1.11.1+lz4-1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "matrixmultiply" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06de3016e9fae57a36fd14dba131fccf49f74b40b7fbdb472f96e361ec71a08" -dependencies = [ - "autocfg", - "rawpointer", -] - -[[package]] -name = "memchr" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" - -[[package]] -name = "memmap2" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "miniz_oxide" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" -dependencies = [ - "adler2", - "simd-adler32", -] - -[[package]] -name = "mio" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.61.2", -] - -[[package]] -name = "nalgebra" -version = "0.32.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5c17de023a86f59ed79891b2e5d5a94c705dbe904a5b5c9c952ea6221b03e4" -dependencies = [ - "approx", - "matrixmultiply", - "nalgebra-macros", - "num-complex", - "num-rational", - "num-traits", - "rand", - "rand_distr", - "simba", - "typenum", -] - -[[package]] -name = "nalgebra-macros" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ndarray" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882ed72dce9365842bf196bdeedf5055305f11fc8c03dee7bb0194a6cad34841" -dependencies = [ - "matrixmultiply", - "num-complex", - "num-integer", - "num-traits", - "portable-atomic", - "portable-atomic-util", - "rawpointer", - "rayon", -] - -[[package]] -name = "now" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d89e9874397a1f0a52fc1f197a8effd9735223cb2390e9dcc83ac6cd02923d0" -dependencies = [ - "chrono", -] - -[[package]] -name = "ntapi" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c3b335231dfd352ffb0f8017f3b6027a4917f7df785ea2143d8af2adc66980ae" -dependencies = [ - "winapi", -] - -[[package]] -name = "num-complex" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "object" -version = "0.37.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.21.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" - -[[package]] -name = "parking_lot" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - 
"cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "phf" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_shared" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" - -[[package]] -name = "pkg-config" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" - -[[package]] -name = "planus" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc1691dd09e82f428ce8d6310bd6d5da2557c82ff17694d2a32cad7242aea89f" -dependencies = [ - "array-init-cursor", -] - -[[package]] -name = "polars" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72571dde488ecccbe799798bf99ab7308ebdb7cf5d95bcc498dbd5a132f0da4d" -dependencies = [ - "getrandom 0.2.17", - "polars-arrow", - "polars-core", - "polars-error", - "polars-io", - "polars-lazy", - "polars-ops", - "polars-parquet", - "polars-sql", - "polars-time", - 
"polars-utils", - "version_check", -] - -[[package]] -name = "polars-arrow" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6611c758d52e799761cc25900666b71552e6c929d88052811bc9daad4b3321a8" -dependencies = [ - "ahash", - "atoi_simd", - "bytemuck", - "chrono", - "chrono-tz", - "dyn-clone", - "either", - "ethnum", - "getrandom 0.2.17", - "hashbrown 0.15.5", - "itoa", - "lz4", - "num-traits", - "parking_lot", - "polars-arrow-format", - "polars-error", - "polars-schema", - "polars-utils", - "simdutf8", - "streaming-iterator", - "strength_reduce", - "strum_macros", - "version_check", - "zstd", -] - -[[package]] -name = "polars-arrow-format" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b0ef2474af9396b19025b189d96e992311e6a47f90c53cd998b36c4c64b84c" -dependencies = [ - "planus", - "serde", -] - -[[package]] -name = "polars-compute" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "332f2547dbb27599a8ffe68e56159f5996ba03d1dad0382ccb62c109ceacdeb6" -dependencies = [ - "atoi_simd", - "bytemuck", - "chrono", - "either", - "fast-float2", - "itoa", - "num-traits", - "polars-arrow", - "polars-error", - "polars-utils", - "ryu", - "strength_reduce", - "version_check", -] - -[[package]] -name = "polars-core" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "796d06eae7e6e74ed28ea54a8fccc584ebac84e6cf0e1e9ba41ffc807b169a01" -dependencies = [ - "ahash", - "bitflags", - "bytemuck", - "chrono", - "chrono-tz", - "comfy-table", - "either", - "hashbrown 0.14.5", - "hashbrown 0.15.5", - "indexmap", - "itoa", - "num-traits", - "once_cell", - "polars-arrow", - "polars-compute", - "polars-error", - "polars-row", - "polars-schema", - "polars-utils", - "rand", - "rand_distr", - "rayon", - "regex", - "strum_macros", - "thiserror", - "version_check", - "xxhash-rust", -] - -[[package]] -name = 
"polars-error" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d6529cae0d1db5ed690e47de41fac9b35ae0c26d476830c2079f130887b847" -dependencies = [ - "polars-arrow-format", - "regex", - "simdutf8", - "thiserror", -] - -[[package]] -name = "polars-expr" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e639991a8ad4fb12880ab44bcc3cf44a5703df003142334d9caf86d77d77e7" -dependencies = [ - "ahash", - "bitflags", - "hashbrown 0.15.5", - "num-traits", - "once_cell", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-io", - "polars-ops", - "polars-plan", - "polars-row", - "polars-time", - "polars-utils", - "rand", - "rayon", -] - -[[package]] -name = "polars-io" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719a77e94480f6be090512da196e378cbcbeb3584c6fe1134c600aee906e38ab" -dependencies = [ - "ahash", - "async-trait", - "atoi_simd", - "bytes", - "chrono", - "fast-float2", - "futures", - "glob", - "hashbrown 0.15.5", - "home", - "itoa", - "memchr", - "memmap2", - "num-traits", - "once_cell", - "percent-encoding", - "polars-arrow", - "polars-core", - "polars-error", - "polars-json", - "polars-parquet", - "polars-schema", - "polars-time", - "polars-utils", - "rayon", - "regex", - "ryu", - "simd-json", - "simdutf8", - "tokio", - "tokio-util", -] - -[[package]] -name = "polars-json" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e30603ca81e317b66b4caac683a8325a6a82ea0489685dc37e22ae03720def98" -dependencies = [ - "ahash", - "chrono", - "fallible-streaming-iterator", - "hashbrown 0.15.5", - "indexmap", - "itoa", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-error", - "polars-utils", - "ryu", - "simd-json", - "streaming-iterator", -] - -[[package]] -name = "polars-lazy" -version = "0.46.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0a731a672dfc8ac38c1f73c9a4b2ae38d2fc8ac363bfb64c5f3a3e072ffc5ad" -dependencies = [ - "ahash", - "bitflags", - "chrono", - "memchr", - "once_cell", - "polars-arrow", - "polars-core", - "polars-expr", - "polars-io", - "polars-json", - "polars-mem-engine", - "polars-ops", - "polars-pipe", - "polars-plan", - "polars-stream", - "polars-time", - "polars-utils", - "rayon", - "version_check", -] - -[[package]] -name = "polars-mem-engine" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33442189bcbf2e2559aa7914db3835429030a13f4f18e43af5fba9d1b018cf12" -dependencies = [ - "memmap2", - "polars-arrow", - "polars-core", - "polars-error", - "polars-expr", - "polars-io", - "polars-json", - "polars-ops", - "polars-plan", - "polars-time", - "polars-utils", - "rayon", -] - -[[package]] -name = "polars-ops" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbb83218b0c216104f0076cd1a005128be078f958125f3d59b094ee73d78c18e" -dependencies = [ - "ahash", - "argminmax", - "base64", - "bytemuck", - "chrono", - "chrono-tz", - "either", - "hashbrown 0.15.5", - "hex", - "indexmap", - "jsonpath_lib_polars_vendor", - "memchr", - "num-traits", - "once_cell", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-error", - "polars-json", - "polars-schema", - "polars-utils", - "rand", - "rayon", - "regex", - "regex-syntax", - "serde_json", - "strum_macros", - "unicode-normalization", - "unicode-reverse", - "version_check", -] - -[[package]] -name = "polars-parquet" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c60ee85535590a38db6c703a21be4cb25342e40f573f070d1e16f9d84a53ac7" -dependencies = [ - "ahash", - "async-stream", - "base64", - "brotli", - "bytemuck", - "ethnum", - "flate2", - "futures", - "hashbrown 0.15.5", - "lz4", - "num-traits", - "polars-arrow", - 
"polars-compute", - "polars-error", - "polars-parquet-format", - "polars-utils", - "simdutf8", - "snap", - "streaming-decompression", - "zstd", -] - -[[package]] -name = "polars-parquet-format" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c025243dcfe8dbc57e94d9f82eb3bef10b565ab180d5b99bed87fd8aea319ce1" -dependencies = [ - "async-trait", - "futures", -] - -[[package]] -name = "polars-pipe" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d238fb76698f56e51ddfa89b135e4eda56a4767c6e8859eed0ab78386fcd52" -dependencies = [ - "crossbeam-channel", - "crossbeam-queue", - "enum_dispatch", - "futures", - "hashbrown 0.15.5", - "num-traits", - "once_cell", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-expr", - "polars-io", - "polars-ops", - "polars-plan", - "polars-row", - "polars-utils", - "rayon", - "uuid", - "version_check", -] - -[[package]] -name = "polars-plan" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f03533a93aa66127fcb909a87153a3c7cfee6f0ae59f497e73d7736208da54c" -dependencies = [ - "ahash", - "bitflags", - "bytemuck", - "bytes", - "chrono", - "chrono-tz", - "either", - "hashbrown 0.15.5", - "memmap2", - "num-traits", - "once_cell", - "percent-encoding", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-io", - "polars-json", - "polars-ops", - "polars-parquet", - "polars-time", - "polars-utils", - "rayon", - "recursive", - "regex", - "strum_macros", - "version_check", -] - -[[package]] -name = "polars-row" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf47f7409f8e75328d7d034be390842924eb276716d0458607be0bddb8cc839" -dependencies = [ - "bitflags", - "bytemuck", - "polars-arrow", - "polars-compute", - "polars-error", - "polars-utils", -] - -[[package]] -name = "polars-schema" -version = "0.46.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "416621ae82b84466cf4ff36838a9b0aeb4a67e76bd3065edc8c9cb7da19b1bc7" -dependencies = [ - "indexmap", - "polars-error", - "polars-utils", - "version_check", -] - -[[package]] -name = "polars-sql" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edaab553b90aa4d6743bb538978e1982368acb58a94408d7dd3299cad49c7083" -dependencies = [ - "hex", - "polars-core", - "polars-error", - "polars-lazy", - "polars-ops", - "polars-plan", - "polars-time", - "polars-utils", - "rand", - "regex", - "serde", - "sqlparser", -] - -[[package]] -name = "polars-stream" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "498997b656c779610c1496b3d96a59fe569ef22a5b81ccfe5325cb3df8dff2fd" -dependencies = [ - "atomic-waker", - "crossbeam-deque", - "crossbeam-utils", - "futures", - "memmap2", - "parking_lot", - "pin-project-lite", - "polars-core", - "polars-error", - "polars-expr", - "polars-io", - "polars-mem-engine", - "polars-ops", - "polars-parquet", - "polars-plan", - "polars-utils", - "rand", - "rayon", - "recursive", - "slotmap", - "tokio", - "version_check", -] - -[[package]] -name = "polars-time" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d192efbdab516d28b3fab1709a969e3385bd5cda050b7c9aa9e2502a01fda879" -dependencies = [ - "atoi_simd", - "bytemuck", - "chrono", - "chrono-tz", - "now", - "num-traits", - "once_cell", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-error", - "polars-ops", - "polars-utils", - "rayon", - "regex", - "strum_macros", -] - -[[package]] -name = "polars-utils" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f6c8166a4a7fbc15b87c81645ed9e1f0651ff2e8c96cafc40ac5bf43441a10" -dependencies = [ - "ahash", - "bytemuck", - "bytes", - "compact_str", - "hashbrown 0.15.5", - "indexmap", - "libc", 
- "memmap2", - "num-traits", - "once_cell", - "polars-error", - "rand", - "raw-cpuid", - "rayon", - "stacker", - "sysinfo", - "version_check", -] - -[[package]] -name = "portable-atomic" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" - -[[package]] -name = "portable-atomic-util" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "091397be61a01d4be58e7841595bd4bfedb15f1cd54977d79b8271e94ed799a3" -dependencies = [ - "portable-atomic", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "prettyplease" -version = "0.2.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" -dependencies = [ - "proc-macro2", - "syn", -] - -[[package]] -name = "proc-macro2" -version = "1.0.106" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "psm" -version = "0.1.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" -dependencies = [ - "ar_archive_writer", - "cc", -] - -[[package]] -name = "pyo3" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f402062616ab18202ae8319da13fa4279883a2b8a9d9f83f20dbade813ce1884" -dependencies = [ - "cfg-if", - "indoc", - "libc", - "memoffset", - "once_cell", - "portable-atomic", - "pyo3-build-config", - "pyo3-ffi", - "pyo3-macros", - "unindent", -] - -[[package]] -name = "pyo3-build-config" -version 
= "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b14b5775b5ff446dd1056212d778012cbe8a0fbffd368029fd9e25b514479c38" -dependencies = [ - "once_cell", - "target-lexicon", -] - -[[package]] -name = "pyo3-ffi" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab5bcf04a2cdcbb50c7d6105de943f543f9ed92af55818fd17b660390fc8636" -dependencies = [ - "libc", - "pyo3-build-config", -] - -[[package]] -name = "pyo3-macros" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fd24d897903a9e6d80b968368a34e1525aeb719d568dba8b3d4bfa5dc67d453" -dependencies = [ - "proc-macro2", - "pyo3-macros-backend", - "quote", - "syn", -] - -[[package]] -name = "pyo3-macros-backend" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c011a03ba1e50152b4b394b479826cad97e7a21eb52df179cd91ac411cbfbe" -dependencies = [ - "heck", - "proc-macro2", - "pyo3-build-config", - "quote", - "syn", -] - -[[package]] -name = "quote" -version = "1.0.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "r-efi" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.17", -] - -[[package]] -name = "rand_distr" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" -dependencies = [ - "num-traits", - "rand", -] - -[[package]] -name = "raw-cpuid" -version = "11.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" -dependencies = [ - "bitflags", -] - -[[package]] -name = "rawpointer" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" - -[[package]] -name = "rayon" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "recursive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0786a43debb760f491b1bc0269fe5e84155353c67482b9e60d0cfb596054b43e" -dependencies = [ - "recursive-proc-macro-impl", - "stacker", -] - -[[package]] -name = "recursive-proc-macro-impl" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "regex" -version = "1.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.8.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" - -[[package]] -name = "rustix" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.61.2", -] - -[[package]] -name = 
"rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "ryu" -version = "1.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" - -[[package]] -name = "safe_arch" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "indexmap", - 
"itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "simba" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae" -dependencies = [ - "approx", - "num-complex", - "num-traits", - "paste", - "wide", -] - -[[package]] -name = "simd-adler32" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" - -[[package]] -name = "simd-json" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2bcf6c6e164e81bc7a5d49fc6988b3d515d9e8c07457d7b74ffb9324b9cd40" -dependencies = [ - "ahash", - "getrandom 0.2.17", - "halfbrown", - "once_cell", - "ref-cast", - "serde", - "serde_json", - "simdutf8", - "value-trait", -] - -[[package]] -name = "simdutf8" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" - -[[package]] -name = "siphasher" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" - -[[package]] -name = "slab" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" - -[[package]] -name = "slotmap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdd58c3c93c3d278ca835519292445cb4b0d4dc59ccfdf7ceadaab3f8aeb4038" -dependencies = [ - "version_check", -] - -[[package]] -name = "smallvec" -version = "1.15.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "socket2" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "sqlparser" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05a528114c392209b3264855ad491fcce534b94a38771b0a0b97a79379275ce8" -dependencies = [ - "log", -] - -[[package]] -name = "stacker" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" -dependencies = [ - "cc", - "cfg-if", - "libc", - "psm", - "windows-sys 0.59.0", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "statrs" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f697a07e4606a0a25c044de247e583a330dbb1731d11bc7350b81f48ad567255" -dependencies = [ - "approx", - "nalgebra", - "num-traits", - "rand", -] - -[[package]] -name = "streaming-decompression" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf6cc3b19bfb128a8ad11026086e31d3ce9ad23f8ea37354b31383a187c44cf3" -dependencies = [ - "fallible-streaming-iterator", -] - -[[package]] -name = "streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520" - -[[package]] -name = "strength_reduce" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" - -[[package]] -name = "strum_macros" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn", -] - -[[package]] -name = "syn" -version = "2.0.117" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "sysinfo" -version = "0.33.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" -dependencies = [ - "core-foundation-sys", - "libc", - "memchr", - "ntapi", - "windows", -] - -[[package]] -name = "target-lexicon" -version = "0.12.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tinyvec" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" -dependencies = [ - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "windows-sys 0.61.2", -] - -[[package]] -name = "tokio-util" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "unicode-ident" -version = "1.0.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" - -[[package]] -name = "unicode-normalization" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-reverse" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6f4888ebc23094adfb574fdca9fdc891826287a6397d2cd28802ffd6f20c76" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-width" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "unindent" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" - -[[package]] -name = "uuid" -version = "1.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" -dependencies = [ - "getrandom 0.4.2", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "value-trait" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9170e001f458781e92711d2ad666110f153e4e50bfd5cbd02db6547625714187" -dependencies = [ - "float-cmp", - "halfbrown", - "itoa", - "ryu", -] - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasip3" -version = "0.4.0+wasi-0.3.0-rc-2026-01-06" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-encoder" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" -dependencies = [ - "leb128fmt", - "wasmparser", -] - -[[package]] -name = "wasm-metadata" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" -dependencies = [ - "anyhow", - "indexmap", - "wasm-encoder", - "wasmparser", -] - -[[package]] -name = "wasmparser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" -dependencies = [ - "bitflags", - "hashbrown 0.15.5", - "indexmap", - "semver", -] - -[[package]] -name = "wide" -version = "0.7.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce5da8ecb62bcd8ec8b7ea19f69a51275e91299be594ea5cc6ef7819e16cd03" -dependencies = [ - "bytemuck", - "safe_arch", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" -dependencies = [ - "windows-core 0.57.0", - "windows-targets", -] - -[[package]] -name = "windows-core" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" -dependencies = [ - "windows-implement 0.57.0", - "windows-interface 0.57.0", - "windows-result 0.1.2", - "windows-targets", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement 0.60.2", - "windows-interface 
0.59.3", - "windows-link", - "windows-result 0.4.1", - "windows-strings", -] - -[[package]] -name = "windows-implement" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-interface" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-result" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" -dependencies = [ - "wit-bindgen-rust-macro", -] - -[[package]] -name = "wit-bindgen-core" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" -dependencies = [ - "anyhow", - "heck", - "wit-parser", -] - -[[package]] -name = "wit-bindgen-rust" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" -dependencies = [ - "anyhow", - "heck", - "indexmap", - "prettyplease", - "syn", - "wasm-metadata", - "wit-bindgen-core", - "wit-component", -] - -[[package]] -name = "wit-bindgen-rust-macro" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" -dependencies = [ - "anyhow", - "prettyplease", - "proc-macro2", - "quote", - "syn", - "wit-bindgen-core", - "wit-bindgen-rust", -] - -[[package]] -name = "wit-component" -version = "0.244.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" -dependencies = [ - "anyhow", - "bitflags", - "indexmap", - "log", - "serde", - "serde_derive", - "serde_json", - "wasm-encoder", - "wasm-metadata", - "wasmparser", - "wit-parser", -] - -[[package]] -name = "wit-parser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" -dependencies = [ - "anyhow", - "id-arena", - "indexmap", - "log", - "semver", - "serde", - "serde_derive", - "serde_json", - "unicode-xid", - "wasmparser", -] - -[[package]] -name = "xxhash-rust" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" - -[[package]] -name = "zerocopy" -version = "0.8.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "zlib-rs" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be3d40e40a133f9c916ee3f9f4fa2d9d63435b5fbe1bfc6d9dae0aa0ada1513" - -[[package]] -name = "zmij" -version = "1.0.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - 
"zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.16+zstd.1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index 1971cc0..0000000 --- a/Cargo.toml +++ /dev/null @@ -1,68 +0,0 @@ -[package] -name = "f2a" -version = "1.0.3" -edition = "2021" -description = "f2a computation core -- Rust engine with PyO3 bindings" -license = "Apache-2.0" -readme = "README.md" - -[lib] -name = "_core" -crate-type = ["cdylib"] - -[dependencies] -# PyO3 – Python bindings -pyo3 = { version = "0.22", features = ["extension-module"] } - -# DataFrame / Arrow -polars = { version = "0.46", features = [ - "csv", - "parquet", - "json", - "lazy", - "strings", - "temporal", - "dtype-struct", - "rank", - "round_series", - "abs", - "log", - "mode", - "is_unique", - "is_in", - "cum_agg", - "interpolate", - "ipc", -] } - -# Numerical computing -ndarray = { version = "0.16", features = ["rayon"] } -# ndarray-linalg removed — all linear algebra is hand-implemented -# (power iteration, Gaussian elimination, etc.) 
to avoid BLAS build complexity - -# Statistics -statrs = "0.17" - -# Parallel iteration -rayon = "1.10" - -# Error handling -thiserror = "2" - -# Serialization -serde = { version = "1", features = ["derive"] } -serde_json = "1" - -# Utilities -indexmap = { version = "2", features = ["serde"] } - -[profile.release] -opt-level = 3 - -# Faster dev/test builds (used by `pip install .` which defaults to debug) -[profile.dev] -opt-level = 1 # enough optimisation to keep tests realistic -debug = false # skip DWARF → faster link -lto = "fat" -codegen-units = 1 -strip = true diff --git a/ENHANCEMENT_PLAN.md b/ENHANCEMENT_PLAN.md new file mode 100644 index 0000000..ac96f43 --- /dev/null +++ b/ENHANCEMENT_PLAN.md @@ -0,0 +1,665 @@ +# f2a Enhancement Master Plan — v2.0 Masterpiece + +> **Date**: 2026-03-16 +> **Status**: Design Complete → Implementation Phase +> **Goal**: f2a를 단순 통계 덤프 도구에서 **자동 인사이트 생성 + 다차원 교차분석 + ML 준비도 평가** 를 갖춘 최고 수준의 EDA 엔진으로 고도화한다. + +--- + +## 0. 현재 상태 진단 (Diagnosis) + +### 0.1 현재 강점 +| 영역 | 구현 상태 | 비고 | +|------|-----------|------| +| 데이터 로더 | ★★★★★ | 24+ 포맷, HuggingFace 다중 subset 자동 탐색 | +| 기초 통계 | ★★★★★ | 16개 기술통계, 4개 정규성 검정, 5종 상관분석 | +| 고급 통계 | ★★★★☆ | 7종 분포 피팅, 부분상관, MI 행렬, IF/LOF/Mahalanobis 등 | +| 시각화 | ★★★★☆ | 12종 Plotter, 30+ 차트 유형, base64 인라인 | +| HTML 리포트 | ★★★★☆ | 2-depth 탭, 드래그 스크롤, 툴팁, 모달, i18n 6개 언어 | +| 전처리 | ★★★★☆ | 상수/고결측/ID성/혼합타입/무한값 자동 탐지·정제 | + +### 0.2 핵심 약점 (Gap Analysis) + +| # | 약점 | 영향도 | 현재 상태 | +|---|------|--------|-----------| +| **G1** | **자동 인사이트 엔진 부재** | ★★★★★ | 보고서가 수치/차트만 나열 — "그래서 뭐가 중요한데?" 
대답 불가 | +| **G2** | **교차분석(cross-dimensional) 부재** | ★★★★★ | 각 분석(상관/이상치/클러스터/분포)이 고립적 수행 — 연계 패턴 미탐지 | +| **G3** | **Data Profile 탭이 stub** | ★★★★☆ | 7개 숫자 카드만 렌더링, 컬럼별 프로파일/샘플/메모리 분석 없음 | +| **G4** | **Dim Reduction / Feature Insights / Stat Tests 차트 누락** | ★★★★☆ | 섹션 빌더에 chart_keys 미등록 → 생성된 Figure도 미표시 | +| **G5** | **통계 검정 시맨틱 오류** | ★★★☆☆ | Kruskal-Wallis가 범주형 그룹 변수 없이 수치 컬럼 간 비교 | +| **G6** | **다중검정 보정 없음** | ★★★☆☆ | 쌍별 검정(Levene, Mann-Whitney)에서 Bonferroni/FDR 미적용 | +| **G7** | **효과 크기(Effect Size) 미제공** | ★★★☆☆ | p-value만 보고, Cohen's d / η² / Cramér's V 없음 | +| **G8** | **컬럼 역할 자동 분류 없음** | ★★★☆☆ | ID/타겟/피처/시간/텍스트 역할 미추론 | +| **G9** | **ML 준비도 평가 없음** | ★★★☆☆ | "이 데이터가 ML에 바로 쓸 수 있나?" 판단 기능 없음 | +| **G10** | **Health Radar 차트 없음** | ★★☆☆☆ | 수평 바만 존재, 방사형 종합 대시보드 미구현 | + +--- + +## 1. Enhancement 아키텍처 설계 + +### 1.1 신규 모듈 구조 (추가 파일) + +``` +f2a/ +├── stats/ +│ ├── insight_engine.py ← [신규] 자동 인사이트 생성 엔진 +│ ├── cross_analysis.py ← [신규] 교차 분석 모듈 +│ ├── column_role.py ← [신규] 컬럼 역할 자동 분류 +│ └── ml_readiness.py ← [신규] ML 준비도 평가 +│ +├── viz/ +│ ├── insight_plots.py ← [신규] 인사이트 시각화 +│ ├── cross_plots.py ← [신규] 교차 분석 시각화 +│ └── dimreduction_plots.py ← [신규] 차원축소 전용 시각화 +│ +├── core/ +│ └── analyzer.py ← [수정] 새 모듈 통합 + VizResult 확장 +│ +└── report/ + └── generator.py ← [수정] 인사이트 패널, 교차분석 섹션, 누락 차트 등록 +``` + +### 1.2 수정 파일 + +| 파일 | 변경 내용 | +|------|-----------| +| `core/config.py` | 새 분석 토글 추가 (insight_engine, cross_analysis, column_role, ml_readiness) | +| `core/analyzer.py` | `_compute_stats()` 확장, VizResult에 새 plot 메서드, `_compute_advanced_stats()` 확장 | +| `stats/statistical_tests.py` | Kruskal-Wallis 시맨틱 수정, 다중검정 보정 추가, 효과 크기 추가 | +| `stats/quality.py` | consistency() 성능 개선, 새 차원(timeliness, conformity) 추가 | +| `report/generator.py` | Data Profile 강화, 누락 차트 등록, 인사이트 패널, 교차분석 섹션, Health Radar | +| `report/i18n.py` | 새 섹션/인사이트 번역 키 추가 | + +--- + +## 2. 
Enhancement 상세 설계 + +### Phase 1: 자동 인사이트 엔진 (Insight Engine) — `stats/insight_engine.py` + +> **목적**: 모든 분석 결과를 종합하여 우선순위화된 자연어 인사이트를 자동 생성한다. + +#### 2.1.1 인사이트 타입 분류 + +| 타입 | 아이콘 | 설명 | 예시 | +|------|--------|------|------| +| `FINDING` | 🔍 | 주목할 만한 데이터 패턴/사실 발견 | "column 'price'는 강한 오른쪽 꼬리(skew=2.3)를 가지며, log 변환으로 정규성이 크게 개선됩니다" | +| `WARNING` | ⚠️ | 데이터 품질/이상 경고 | "3개 컬럼 쌍에서 r>0.95의 다중공선성이 탐지되었습니다. VIF 기반 제거를 검토하세요" | +| `RECOMMENDATION` | 💡 | 데이터 전처리/모델링 제안 | "'age' 컬럼에 5.2%의 결측이 있습니다. 분포가 정규에 가까우므로 평균 대체가 적합합니다" | +| `OPPORTUNITY` | 🚀 | 활용 가능한 패턴/기회 | "K-Means(k=3)에서 뚜렷한 3개 군집이 형성됩니다. 군집별 특성 분석으로 세분화 전략 수립이 가능합니다" | + +#### 2.1.2 인사이트 생성 규칙 (Rules Engine) + +각 분석 모듈의 결과에서 다음 규칙들을 체계적으로 적용: + +**분포 기반 인사이트:** +```python +class DistributionInsightRules: + """분포 분석에서 인사이트를 추출하는 규칙 엔진.""" + + rules = [ + # (조건 함수, 인사이트 생성 함수, 심각도/우선순위) + (lambda col: abs(col.skewness) > 2.0, + "극단적 비대칭: '{col}'의 skewness={val:.2f}. {transform} 변환 권장", + "high"), + (lambda col: col.kurtosis > 7.0, + "극단적 첨도: '{col}'에 heavy tail ({val:.1f}). 이상치가 통계량을 왜곡할 수 있음", + "high"), + (lambda col: col.is_normal and col.cv < 0.1, + "'{col}'는 정규분포이며 변동성이 매우 낮음 (CV={val:.3f}). 안정적 특성", + "low"), + (lambda col: not col.is_normal and col.best_fit != 'norm', + "'{col}'는 {best_fit} 분포에 가장 적합 (AIC 기준). 
파라미터 변환 시 유용", + "medium"), + ] +``` + +**상관 기반 인사이트:** +- 다중공선성 탐지 → VIF>10 또는 |r|>0.9인 쌍 식별 + 제거 후보 추천 +- 비선형 의존성 → MI가 높으나 Pearson이 낮은 쌍 (MI/max(MI) > 0.5 & |r| < 0.3) +- 교란 변수 의심 → 편상관에서 크게 감소하는 쌍 (|partial_r - r| > 0.3) +- 안정적 상관 → bootstrap CI 폭이 좁은 쌍 (CI_width < 0.1) + +**클러스터 기반 인사이트:** +- 최적 k 추천 근거 (실루엣 스코어 + 엘보우) +- 군집별 핵심 차별화 특성 상위 3개 +- 소수 군집(< 전체의 5%) → 이상치 군집 의심, 별도 분석 권고 +- 군집 간 크기 편차 → 불균형 정도와 대응 전략 + +**이상치 기반 인사이트:** +- 다변량 합의(consensus ≥ 2/3) 이상치 비율 및 특성 +- 단변량 vs 다변량 이상치 불일치 → 변수 간 상호작용 이상치 +- 이상치 제거 전후 통계량 변화 추정 + +**결측 기반 인사이트:** +- 결측 패턴(MCAR/MAR/MNAR) 진단 + 대체 전략 추천 +- 컬럼 간 결측 상관(함께 결측인 컬럼 쌍) → 체계적 결측 의심 +- 결측률 구간별 카운트 (0%, 0-5%, 5-20%, 20-50%, 50%+) + +#### 2.1.3 인사이트 우선순위화 + +```python +@dataclass +class Insight: + type: InsightType # FINDING | WARNING | RECOMMENDATION | OPPORTUNITY + severity: str # critical | high | medium | low + category: str # distribution | correlation | cluster | anomaly | missing | quality | feature + title: str # 한줄 제목 + description: str # 상세 설명 + affected_columns: list[str] # 관련 컬럼 + evidence: dict[str, Any] # 근거 데이터 + action_items: list[str] # 구체적 조치 항목 + priority_score: float # 0~1 산출 점수 (정렬용) + +class InsightEngine: + """모든 분석 결과를 종합하여 인사이트를 생성·우선순위화한다.""" + + def generate(self, stats: StatsResult, schema: DataSchema) -> list[Insight]: + """모든 규칙 엔진을 실행하고 인사이트를 우선순위 역순으로 정렬하여 반환.""" + + def _score_priority(self, insight: Insight) -> float: + """심각도 × 영향범위(affected_columns 수) × 실행가능성 가중으로 점수 산정.""" +``` + +#### 2.1.4 HTML 렌더링 + +``` +┌──────────────────────────────────────────────────────────────┐ +│ 📊 Key Insights [Show All / Top 10] │ +├──────────────────────────────────────────────────────────────┤ +│ 🔴 CRITICAL (2) │ +│ ┌──────────────────────────────────────────────────────────┐│ +│ │ ⚠️ 3 column pairs show multicollinearity (VIF>10) ││ +│ │ → col_a ↔ col_b (r=0.97), col_c ↔ col_d (r=0.95) ││ +│ │ 💡 Action: Consider removing one from each pair ││ +│ 
└──────────────────────────────────────────────────────────┘│ +│ 🟡 HIGH (5) │ +│ ┌──────────────────────────────────────────────────────────┐│ +│ │ 🔍 Column 'price' follows lognormal distribution ││ +│ │ → skew=2.3, best-fit: lognorm (AIC=-1234) ││ +│ │ 💡 Action: Apply log transform before modeling ││ +│ └──────────────────────────────────────────────────────────┘│ +│ 🟢 MEDIUM (8) │ ⚪ LOW (12) │ +└──────────────────────────────────────────────────────────────┘ +``` + +**인사이트 패널 위치**: 각 subset의 Basic 탭 Overview 섹션 바로 아래, 모든 분석 섹션보다 앞에 위치. + +--- + +### Phase 2: 교차 분석 (Cross-Dimensional Analysis) — `stats/cross_analysis.py` + +> **목적**: 개별 분석을 넘어 분석 차원 간 교차점에서 발현하는 복합 패턴을 탐지한다. + +#### 2.2.1 교차 분석 유형 + +| # | 교차 축 | 기법 | 근거 | 산출물 | +|---|---------|------|------|--------| +| **X1** | 이상치 × 클러스터 | 클러스터별 이상치 분포 불균형 분석 | 특정 군집에 이상치 집중 → 해당 군집이 에러 데이터일 수 있음 | cluster_id별 anomaly_rate 테이블 + 막대 차트 | +| **X2** | 결측 × 상관 | 결측 여부를 더미(0/1)로 변환 후 다른 컬럼과의 상관 | 결측이 무작위(MCAR)가 아닌 체계적(MAR/MNAR) 여부 판단 | missing_indicator ↔ features 상관행렬 | +| **X3** | 분포 × 이상치 | 분포 꼬리 형태별 이상치 탐지 방법 적합성 | Heavy-tail 분포에서 IQR 방법은 과탐지 → Mahalanobis/IF 우선 권고 | 컬럼별 권장 이상치 탐지 방법 테이블 | +| **X4** | 클러스터 × 상관 | 군집별 상관 구조 비교 (within-cluster correlation) | Simpson's paradox 탐지: 전체-수준과 군집-수준 상관이 역전되는 경우 | 군집별 상관행렬 + 전체 상관 대비 차이 히트맵 | +| **X5** | 특성중요도 × 결측 | 중요 특성에서의 결측률 교차 확인 | 가장 중요한 컬럼에 결측이 많으면 심각한 정보 손실 | 중요도 vs 결측률 scatter + 경고 | +| **X6** | 차원축소 × 클러스터 | t-SNE/UMAP 임베딩 공간에서 클러스터 레이블 오버레이 | 클러스터 분리도의 시각적 확인, 클러스터 경계 명확성 | 2D scatter (색상=클러스터, 마커=이상치) | + +#### 2.2.2 Simpson's Paradox 탐지기 (X4 상세) + +```python +class SimpsonParadoxDetector: + """ + 전체 데이터에서의 상관 방향과 군집 내 상관 방향이 반전되는 + Simpson's Paradox를 자동 탐지한다. 
+ + 근거: Simpson(1951), Blyth(1972) + """ + + def detect( + self, + df: pd.DataFrame, + cluster_labels: np.ndarray, + numeric_cols: list[str], + ) -> pd.DataFrame: + """ + Returns DataFrame: + col_a, col_b, overall_corr, cluster_corrs (dict), + is_paradox (bool), paradox_strength (float) + """ +``` + +#### 2.2.3 결측-상관 분석기 (X2 상세) + +```python +class MissingCorrelationAnalyzer: + """ + 각 컬럼의 결측 여부를 이진 지시자(indicator)로 변환한 뒤, + 원본 수치 컬럼들과의 상관을 계산하여 결측의 체계성을 진단한다. + + 높은 상관 → MAR (Missing At Random): 다른 변수 값에 의존하는 결측 + 낮은 상관 → MCAR (Missing Completely At Random): 완전 무작위 결측 + + 근거: Little & Rubin (2002), *Statistical Analysis with Missing Data* + """ + + def analyze(self, df: pd.DataFrame) -> dict: + """ + Returns: + missing_indicator_corr: DataFrame — 결측 지시자 × 수치 컬럼 상관 행렬 + mar_suspects: list[dict] — |corr| > 0.3인 (결측컬럼, 상관컬럼, 상관계수) 쌍 + mcar_test_result: dict — Little's MCAR test 결과 (가능한 경우) + imputation_strategy: dict — 컬럼별 추천 대체 전략 + """ +``` + +--- + +### Phase 3: 컬럼 역할 자동 분류 — `stats/column_role.py` + +> **목적**: 각 컬럼이 데이터셋 내에서 어떤 역할(ID, 타겟, 피처, 시간, 텍스트 등)을 하는지 자동 추론한다. 
+ +#### 2.3.1 역할 분류 체계 + +| 역할 | 탐지 기준 | 의미 | 후속 조치 | +|------|-----------|------|-----------| +| `ID` | 유니크율 > 95%, 이름에 id/key/index 포함, 단조증가 패턴 | 개체 식별자 → ML에서 제거 필요 | "제거 권장" 인사이트 | +| `TIMESTAMP` | datetime 타입 or 단조증가 정수 + 이름에 time/date/ts 포함 | 시간 인덱스 → 시계열 분석 가능 | "시계열 분석 활성화" 인사이트 | +| `CATEGORICAL_FEATURE` | 기존 categorical 타입 + 유니크율 < 5% | 범주형 입력 변수 | 인코딩 전략 추천 | +| `ORDINAL_FEATURE` | 정수형 + 유니크 값이 연속적 + 이름에 level/grade/rating 포함 | 순서형 입력 변수 | 레이블 인코딩 추천 | +| `NUMERIC_FEATURE` | 기존 numeric 타입 + ID/TIMESTAMP 아닌 것 | 연속형 입력 변수 | 스케일링/정규화 추천 | +| `TEXT` | 기존 text 타입 | 자유 텍스트 → NLP 파이프라인 필요 | 텍스트 최소 프로파일(평균 길이, 어휘 크기) | +| `BINARY` | 유니크 값이 정확히 2개 | 이진 변수 | 클래스 균형 확인 | +| `CONSTANT` | 유니크 값이 1개 | 정보 없음 → 제거 필요 | "제거 필수" 인사이트 | +| `TARGET_CANDIDATE` | 이름에 target/label/y/class 포함 + 낮은 cardinality | 잠재적 타겟 변수 후보 | ML 문맥에서 활용 | + +#### 2.3.2 구현 + +```python +@dataclass +class ColumnRole: + column: str + primary_role: str # 위 역할 중 하나 + confidence: float # 0~1 확신도 + secondary_role: str | None # 보조 역할 (예: ORDINAL이면서 TARGET_CANDIDATE) + properties: dict[str, Any] # 역할 판단 근거 증빙 + +class ColumnRoleClassifier: + """모든 컬럼의 역할을 자동 추론한다.""" + + def classify(self, df: pd.DataFrame, schema: DataSchema) -> list[ColumnRole]: + """각 컬럼에 대해 역할을 판정하고 확신도를 산출한다.""" + + def summary(self) -> pd.DataFrame: + """컬럼 × 역할 요약 테이블을 반환한다.""" +``` + +--- + +### Phase 4: ML 준비도 평가 — `stats/ml_readiness.py` + +> **목적**: 데이터셋이 ML 파이프라인에 투입되기 전에 얼마나 준비되어 있는지를 다차원으로 평가한다. 
+ +#### 2.4.1 평가 차원 + +| 차원 | 세부 지표 | 가중치 | 근거 | +|------|-----------|--------|------| +| **완전성 (Completeness)** | 전체 결측률, 고결측 컬럼 비율, 행 단위 결측 분포 | 25% | 결측이 많으면 대체/제거 전처리 필수 | +| **일관성 (Consistency)** | 타입 혼합 컬럼 수, 이상 범위 값 비율, id-like 컬럼 비율 | 15% | 타입 불일치/이상값은 모델 오류 유발 | +| **균형성 (Balance)** | 범주 불균형(Gini), 이상치 비율, 클래스 비율(타겟 존재 시) | 15% | 불균형 데이터는 편향된 학습 유발 | +| **정보성 (Informativeness)** | 상수 컬럼 비율, 중복 행 비율, 평균 MI, 분산 분포 | 20% | 정보 없는 피처는 노이즈만 추가 | +| **독립성 (Independence)** | 다중공선성(VIF>10 비율), 완전 상관 쌍 수, 평균 상관 | 15% | 높은 공선성은 모델 불안정 유발 | +| **규모성 (Scale)** | 행 수 대비 컬럼 수(차원의 저주), 유효 피처 수 vs 표본 수 | 10% | n << p 상황은 과적합 위험 | + +#### 2.4.2 종합 점수 및 등급 + +``` +ML Readiness Score: 78.5 / 100 +Grade: B+ (Good — minor preprocessing needed) + +┌─────────────────────────────────────────┐ +│ Completeness ████████████░░ 85% │ +│ Consistency ██████████████ 95% │ +│ Balance ██████████░░░░ 72% │ +│ Informativeness ███████████░░░ 80% │ +│ Independence ██████░░░░░░░░ 50% │ ← 주의 필요 +│ Scale █████████████░ 90% │ +└─────────────────────────────────────────┘ + +Blocking Issues (must fix before ML): + ⛔ 2 columns have >50% missing — drop or impute + ⛔ VIF > 100 detected for 'col_x' — remove or combine + +Improvement Suggestions: + 💡 Apply log transform to 3 skewed features + 💡 Consider SMOTE for class imbalance (minority: 8%) + 💡 Remove 2 constant columns +``` + +#### 2.4.3 구현 + +```python +@dataclass +class ReadinessScore: + overall: float # 0~100 + grade: str # A+, A, B+, B, C+, C, D, F + dimensions: dict[str, float] # 각 차원별 0~100 + blocking_issues: list[str] # 반드시 해결해야 할 문제 + suggestions: list[str] # 권장 개선 사항 + details: dict[str, Any] # 상세 근거 데이터 + +class MLReadinessEvaluator: + """데이터셋의 ML 준비도를 다차원으로 평가한다.""" + + def evaluate( + self, df, schema, stats: StatsResult + ) -> ReadinessScore: + """이전 분석 결과를 활용하여 ML 준비도를 산정한다.""" +``` + +--- + +### Phase 5: 기존 모듈 개선 + +#### 2.5.1 statistical_tests.py 개선 + +| 개선 항목 | 현재 | 변경 | 근거 | +|-----------|------|------|------| +| **Kruskal-Wallis 
시맨틱** | 수치 컬럼 간 비교 (의미 없음) | 범주형 변수를 그룹 변수로 활용하여 수치 컬럼의 그룹 간 차이 검정 | 올바른 통계적 사용법 | +| **다중검정 보정** | 없음 | Benjamini-Hochberg FDR 보정 (모든 쌍별 검정에 적용) | Benjamini & Hochberg(1995) | +| **효과 크기** | 없음 | Cohen's d (연속), Cramér's V (범주), η² (ANOVA/KW), rank-biserial r (MW) | 실무적 유의미성 판단에 필수 | +| **Bonferroni-adjusted significance** | 없음 | adjusted_p 컬럼 추가 + significance star 업데이트 | 대량 다중비교 시 Type I 오류 통제 | + +#### 2.5.2 quality.py 개선 + +| 개선 항목 | 현재 | 변경 | +|-----------|------|------| +| **consistency()** | `series.apply(type).nunique()` — O(n) 느림 | `series.dtype` 기반 빠른 검사 + 도메인 규칙 (ex: 음수 나이, 미래 날짜) | +| **새 차원: Timeliness** | 없음 | datetime 컬럼의 최신성(recency) + 시간 범위 적절성 평가 | +| **새 차원: Conformity** | 없음 | 값 범위, 패턴 일치(regex), 도메인 규격 준수율 평가 | +| **컬럼 품질 세분화** | 4차원 집계 | 각 차원별 컬럼 레벨 점수 표시 | + +#### 2.5.3 Data Profile 섹션 강화 (`report/generator.py`) + +현재 7개 숫자 카드만 표시하는 Data Profile을 다음으로 확장: + +| 구성요소 | 내용 | +|----------|------| +| **Dataset Overview Cards** | 행 수, 컬럼 수, 메모리, 결측률, 중복률, 수치/범주 비율 | +| **Column Role Table** | 각 컬럼의 역할(ID/Feature/Target/Time), 타입, 유니크, 결측률, 샘플값 3개 | +| **ML Readiness Dashboard** | 6차원 레이더 차트 + 종합 점수/등급 + blocking issues + suggestions | +| **Health Radar Chart** | 방사형 차트: completeness, consistency, outlier_ratio(반전), skewness_balance, correlation_health, duplicate_freedom | +| **Type Distribution Donut** | 컬럼 타입 비율 도넛 차트 (numeric/categorical/text/datetime/boolean) | +| **Memory Breakdown** | 컬럼별 메모리 사용량 수평 막대 (Top-10 heavy columns) | +| **Sample Data Preview** | 첫 5행 + 마지막 5행 (민감 데이터 마스킹 옵션) | + +#### 2.5.4 누락된 차트 등록 (report/generator.py) + +| 섹션 | 누락 차트 | 등록 방법 | +|------|-----------|-----------| +| **Dim. 
Reduction** | t-SNE scatter, UMAP scatter, Factor loadings heatmap, Feature contribution bar | `chart_keys` 맵에 추가 + VizResult에 해당 plot 메서드 구현 | +| **Feature Insights** | Interaction strength bar, Monotonic gap scatter, Binning comparison, Cardinality distribution | `chart_keys` 맵에 추가 + `viz/insight_plots.py` 구현 | +| **Statistical Tests** | Group comparison boxplots, p-value summary bar, Effect size forest plot | `chart_keys` 맵에 추가 + 기존 plotter 확장 | +| **Cross Analysis** | 이상치×클러스터 bar, 결측×상관 heatmap, Simpson paradox highlight, 중요도×결측 scatter, 통합 2D scatter | `viz/cross_plots.py`에서 구현 | + +--- + +### Phase 6: 시각화 확장 + +#### 2.6.1 신규 시각화 모듈 + +**`viz/insight_plots.py`:** +| 차트 | 용도 | 기법 | +|------|------|------| +| `insight_severity_bar()` | 인사이트 심각도별 개수 bar chart | 수평 막대, 색상 코딩 | +| `interaction_strength_bar()` | 상호작용 세기 상위 N개 | 수평 막대 | +| `monotonic_gap_scatter()` | Pearson vs Spearman 차이 scatter | X=Pearson, Y=Spearman, 대각선 기준 | +| `binning_comparison()` | Equal-width vs equal-freq 엔트로피 비교 | 병렬 막대 | +| `cardinality_distribution()` | 컬럼별 cardinality 분포 | 히스토그램 + 인코딩 전략 색상 | + +**`viz/cross_plots.py`:** +| 차트 | 용도 | +|------|------| +| `anomaly_by_cluster_bar()` | 군집별 이상치 비율 막대 | +| `missing_correlation_heatmap()` | 결측 지시자 × 수치 컬럼 상관 히트맵 | +| `simpson_paradox_highlight()` | 전체 vs 군집별 상관 비교 scatter (방향 반전 강조) | +| `importance_vs_missing_scatter()` | X=중요도, Y=결측률, 크기=유니크 수 | +| `unified_2d_scatter()` | t-SNE/UMAP 2D에 클러스터 색상 + 이상치 마커 통합 | + +**`viz/dimreduction_plots.py`:** +| 차트 | 용도 | +|------|------| +| `tsne_scatter()` | t-SNE 2D scatter (클러스터 라벨 오버레이 지원) | +| `umap_scatter()` | UMAP 2D scatter | +| `factor_loadings_heatmap()` | Factor Analysis loadings 히트맵 | +| `feature_contribution_bar()` | PCA 기반 feature 기여도 bar chart | + +#### 2.6.2 Health Radar Chart + +```python +def health_radar_chart(quality_scores: dict, ml_readiness: ReadinessScore) -> plt.Figure: + """ + 6축 방사형 차트: + - Completeness (결측 기반) + - Consistency (타입 일관성) + - Outlier Freedom (1 - 이상치 비율) + - 
Distribution Health (정규성/대칭성) + - Correlation Health (다중공선성 없음 정도) + - Duplicate Freedom (1 - 중복률) + + 중앙에 종합 점수 표시, 각 축에 0~100 스케일. + """ +``` + +--- + +## 3. 교차분석 섹션의 HTML 배치 + +Advanced 서브탭에 2개 신규 탭 추가: + +``` +[Basic] [Distribution+] [Correlation+] [Clustering] [Dim. Reduction] +[Feature Insights] [Anomaly+] [Stat Tests] [Cross Analysis ★] [Data Profile ★] +``` + +**Cross Analysis** 탭은 Phase 2의 6개 교차 분석을 포함: +``` +Cross Analysis +├── Outlier × Cluster Distribution +├── Missing × Correlation (MAR Detection) +├── Distribution × Outlier Method Fitness +├── Cluster × Correlation (Simpson's Paradox Check) +├── Feature Importance × Missing Rate +└── Unified 2D Embedding (t-SNE/UMAP + Cluster + Anomaly overlay) +``` + +**Data Profile** 탭은 기존 7카드 → 풍부한 대시보드: +``` +Data Profile ★ (Enhanced) +├── Overview Cards (확장) +├── Column Roles Table (신규) +├── ML Readiness Dashboard (신규) +│ ├── Radar Chart (6 dimensions) +│ ├── Score & Grade +│ ├── Blocking Issues +│ └── Suggestions +├── Health Radar Chart (신규) +├── Type Distribution Donut (신규) +├── Memory Breakdown Chart (신규) +└── Sample Data Preview (신규) +``` + +--- + +## 4. 인사이트 패널의 HTML 배치 + +**위치**: Basic 탭의 Overview 바로 아래 (모든 분석 섹션보다 앞) + +``` +Basic Tab +├── Overview +├── ★ Key Insights Panel ★ ← 신규 위치 +│ ├── Executive Summary (1~2문장 총평) +│ ├── Critical Issues (접힘가능) +│ ├── Key Findings (접힘가능) +│ ├── Recommendations (접힘가능) +│ └── Opportunities (접힘가능) +├── Data Quality +├── Preprocessing +├── Descriptive Statistics +│ ... +``` + +--- + +## 5. 
구현 순서 및 의존성 그래프 + +``` +Phase 1: Insight Engine ─────────────────────────┐ + stats/insight_engine.py │ + (depends on: 기존 모든 stats 결과) │ + │ +Phase 2: Cross Analysis ───────────────────────┐ │ + stats/cross_analysis.py │ │ + (depends on: clustering, anomaly, correlation,│ │ + missing, feature_importance, dimreduction) │ │ + │ │ +Phase 3: Column Role ──────────────────────────┐│ │ + stats/column_role.py ││ │ + (depends on: schema, descriptive) ││ │ + ││ │ +Phase 4: ML Readiness ─────────────────────────┤│ │ + stats/ml_readiness.py ││ │ + (depends on: quality, column_role, stats) ││ │ + ││ │ +Phase 5: 기존 모듈 개선 ──────────────────────── ││ │ + stats/statistical_tests.py (패치) ││ │ + stats/quality.py (패치) ││ │ + ││ │ +Phase 6: 시각화 확장 ── ─────────────────────── ┤│ │ + viz/insight_plots.py ││ │ + viz/cross_plots.py ││ │ + viz/dimreduction_plots.py ││ │ + ↓↓ ↓ +Phase 7: 통합 ──────────────────────────────────────┘ + core/config.py (토글 추가) + core/analyzer.py (새 모듈 호출 + VizResult 확장) + report/generator.py (인사이트 패널, 교차분석 섹션, Data Profile 강화, 누락 차트) + report/i18n.py (번역 키 추가) +``` + +**병렬 가능**: Phase 1·2·3은 서로 독립적으로 구현 가능 +**순차 필수**: Phase 4(ML Readiness)는 Phase 3(Column Role) 완료 후 시작 +**최종 통합**: Phase 7은 모든 모듈 완성 후 + +--- + +## 6. 기술 의존성 + +### 6.1 신규 패키지 필요 여부 + +| 필요 기능 | 패키지 | 필수여부 | 비고 | +|-----------|--------|----------|------| +| FDR 보정 | `scipy.stats` (이미 있음) | 이미 설치 | `scipy.stats.false_discovery_control` 또는 직접 Benjamini-Hochberg 구현 | +| 효과 크기 계산 | 직접 구현 | 새 코드 | Cohen's d, η², rank-biserial r — 수식이 단순하므로 외부 패키지 불필요 | +| Radar chart | `matplotlib` (이미 있음) | 이미 설치 | polar projection subplot으로 구현 | +| Donut chart | `matplotlib` (이미 있음) | 이미 설치 | `pie()` with `wedgeprops` | +| 교차분석 | `numpy`, `pandas`, `scipy` (이미 있음) | 이미 설치 | 기존 의존성만으로 충분 | + +**결론: 새로운 외부 패키지 추가 불필요.** 기존 scipy + numpy + pandas + matplotlib + sklearn으로 100% 구현 가능. 
+ +### 6.2 성능 고려사항 + +| 신규 분석 | 복잡도 | 대응 전략 | +|-----------|--------|-----------| +| Insight Engine | O(1) — 이미 계산된 결과에서 규칙 적용 | 규칙 평가는 마이크로초 단위, 성능 무관 | +| Cross Analysis: 결측×상관 | O(n·d) | 기존 결측 분석 + 상관 결과 재활용 | +| Cross Analysis: Simpson's Paradox | O(k·d²) per cluster | k=max(10), d=max(15) → 무시 가능 | +| Column Role Classification | O(d) | 컬럼 수만큼, 룰 기반이므로 즉시 | +| ML Readiness | O(1) | 이미 계산된 통계량 조합 | +| Health Radar | O(1) | 단일 차트 렌더링 | +| 누락 차트 등록 | 기존과 동일 | 이미 VizResult에 존재하는 것을 등록만 | + +**총 추가 분석 시간 예측**: 기존 Advanced 분석 대비 **+5~10%** 이내 (대부분 기존 결과 재활용) + +--- + +## 7. 효과성 평가 + +### 7.1 Before vs After + +| 카테고리 | Before (현재) | After (Enhancement) | 변화 | +|----------|--------------|-------------------|----- | +| 자동 인사이트 | 0 | 4타입 × ~40개 규칙 → 5~30개 인사이트/데이터셋 | **신규** | +| 교차 분석 | 0 | 6종 교차 분석 | **신규** | +| 컬럼 역할 분류 | 0 (타입만 추론) | 9종 역할 자동 분류 | **신규** | +| ML 준비도 | 0 | 6차원 평가 + 등급 + blocking issues | **신규** | +| 통계 검정 엄밀성 | p-value만 | + 효과 크기 + 다중검정 보정 | **+200%** | +| Data Profile | 7 카드 | 종합 대시보드 (7섹션) | **+700%** | +| 시각화 | 30+ 차트 | +15종 추가 | **+50%** | +| 보고서 섹션 | 21 섹션 | +2 탭 (Cross Analysis, Data Profile 강화) | **+10%** | + +### 7.2 사용자 경험 향상 + +| 사용자 질문 | Before | After | +|------------|--------|-------| +| "이 데이터에서 뭐가 중요해?" | 직접 표/차트 해석 | 자동 인사이트 → 핵심 발견 즉시 파악 | +| "ML에 바로 쓸 수 있어?" | 품질 점수만 참고 | ML Readiness 등급 + blocking issues 목록 | +| "이상치가 특정 그룹에 집중되나?" | 개별 이상치/클러스터 결과 따로 확인 | 교차분석 → 군집별 이상치 분포 한눈에 | +| "결측이 무작위인가?" | 결측률만 표시 | 결측×상관 분석 → MAR/MCAR 자동 진단 | +| "어떤 전처리가 필요해?" | 경고 목록 참고 | 인사이트 엔진 → 구체적 조치 항목 목록 | +| "Simpson's paradox는 없나?" 
| 확인 불가 | 교차분석 → 자동 탐지 + 시각화 | + +### 7.3 분석 깊이 비교 (표준 EDA 도구 대비) + +| 기능 | pandas-profiling | sweetviz | f2a v1 (현재) | f2a v2 (Enhancement) | +|------|-----------------|----------|--------------|---------------------| +| 기술 통계 | ★★★★ | ★★★ | ★★★★★ | ★★★★★ | +| 분포 분석 | ★★★ | ★★★ | ★★★★★ | ★★★★★ | +| 상관 분석 | ★★★ | ★★★★ | ★★★★★ | ★★★★★ | +| 이상치 탐지 | ★★ | ★★ | ★★★★★ | ★★★★★ | +| 클러스터링 | ✗ | ✗ | ★★★★ | ★★★★ | +| 차원축소 | ✗ | ✗ | ★★★★ | ★★★★★ | +| 교차분석 | ✗ | ✗ | ✗ | ★★★★ | +| 자동 인사이트 | ★★ | ★★★ | ✗ | ★★★★★ | +| ML 준비도 | ✗ | ✗ | ✗ | ★★★★ | +| 컬럼 역할 분류 | ✗ | ✗ | ✗ | ★★★★ | +| 통계 검정 엄밀성 | ★★ | ★ | ★★★ | ★★★★★ | +| i18n | ✗ | ✗ | ★★★★★ | ★★★★★ | +| HuggingFace 지원 | ✗ | ✗ | ★★★★★ | ★★★★★ | + +--- + +## 8. 파일별 구현 명세 (Summary) + +| # | 파일 | 동작 | 예상 LOC | 의존성 | +|---|------|------|---------|--------| +| 1 | `stats/insight_engine.py` | 신규 | ~500 | StatsResult, DataSchema | +| 2 | `stats/cross_analysis.py` | 신규 | ~450 | numpy, pandas, scipy | +| 3 | `stats/column_role.py` | 신규 | ~250 | schema, descriptive stats | +| 4 | `stats/ml_readiness.py` | 신규 | ~350 | quality, column_role, StatsResult | +| 5 | `stats/statistical_tests.py` | 수정 | +~120 | scipy.stats | +| 6 | `stats/quality.py` | 수정 | +~80 | pandas | +| 7 | `viz/insight_plots.py` | 신규 | ~300 | matplotlib | +| 8 | `viz/cross_plots.py` | 신규 | ~350 | matplotlib, numpy | +| 9 | `viz/dimreduction_plots.py` | 신규 | ~200 | matplotlib | +| 10 | `core/config.py` | 수정 | +~30 | — | +| 11 | `core/analyzer.py` | 수정 | +~200 | 새 모듈 import | +| 12 | `report/generator.py` | 수정 | +~400 | HTML/CSS/JS | +| 13 | `report/i18n.py` | 수정 | +~200 | — | +| **합계** | | | **~3,430 LOC** | | + +--- + +## 9. 
검증 계획 + +| 검증 항목 | 방법 | 기준 | +|-----------|------|------| +| 인사이트 품질 | 합성 데이터(정규/비정규/결측/이상치) + lerobot/roboturk 실데이터 | 알려진 패턴이 인사이트로 탐지되는지 | +| 교차분석 정확성 | Simpson's paradox 합성 데이터 | 반전 탐지 100% | +| ML 준비도 유효성 | 품질 좋은/나쁜 데이터셋 대비 | 점수 차이가 직관과 일치 | +| 통계 검정 보정 | 시뮬레이션(H0 하 1000회) | FDR ≤ 0.05 | +| 기존 테스트 통과 | `pytest git_action/tests/` | 전체 통과 | +| HTML 렌더링 | 생성된 리포트 브라우저 확인 | 모든 섹션 올바르게 표시 | +| 성능 | 10만행 × 50컬럼 | 전체 분석 < 120초 | + +--- + +*이 계획은 f2a를 단순 통계 리포트 생성기에서 **인텔리전트 데이터 분석 엔진**으로 진화시키기 위한 로드맵이다.* +*모든 추가 기능은 학술적 근거 위에 실무적 가치를 제공하며, 기존 의존성만으로 구현 가능하다.* diff --git a/LICENSE b/LICENSE index a930586..f247997 100644 --- a/LICENSE +++ b/LICENSE @@ -1,200 +1,21 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to the Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by the Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding any notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. 
Please also get an in-depth - understanding of the Apache License by reading the FAQ at - http://www.apache.org/foundation/license-faq.html - - Copyright 2026 CocoRoF - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +MIT License + +Copyright (c) 2026 f2a contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 0000000..caa8a3f --- /dev/null +++ b/PLAN.md @@ -0,0 +1,375 @@ +# f2a (File to Analysis) — Technical Design Document + +> **Version**: 0.1.0 (Draft) +> **Date**: 2026-03-13 +> **Status**: Design Phase + +--- + +## 1. Project Overview + +**f2a** is a Python library that takes various data sources (local files, Hugging Face datasets, etc.) +and automatically performs **Descriptive Statistics** analysis and **Visualization**. + +### 1.1 Core Goals +- **One-click Analysis**: Full descriptive statistics + visualization from a single file path or HuggingFace URL +- **Diverse Input Support**: CSV, JSON, Parquet, Excel, TSV, Hugging Face `datasets` +- **Rich Statistics**: Summary statistics, distribution analysis, correlation analysis, missing data analysis +- **Automatic Visualization**: Histograms, boxplots, correlation heatmaps, missing data maps, etc. +- **Report Generation**: Automatically produce HTML reports from analysis results + +### 1.2 Usage Scenario + +```python +import f2a + +# Analyze a local file +report = f2a.analyze("data/sales.csv") +report.show() # Print summary to console +report.to_html("out/") # Save HTML report + +# Analyze a Hugging Face dataset +report = f2a.analyze("hf://imdb") +report.show() + +# Detailed access +report.stats.summary() # Summary statistics DataFrame +report.stats.correlation() # Correlation matrix +report.viz.plot_distributions() # Distribution plots +``` + +--- + +## 2. 
Architecture + +### 2.1 Layered Structure + +``` +┌─────────────────────────────────────────────┐ +│ Public API │ +│ f2a.analyze() / f2a.load() │ +├─────────────────────────────────────────────┤ +│ Core Orchestrator │ +│ Analyzer (pipeline control) │ +├──────────┬──────────┬──────────┬────────────┤ +│ Loader │ Stats │ Viz │ Reporter │ +│ Data │ Stat │ Visual │ Report │ +│ Loading │ Analysis │ ization │ Generation │ +├──────────┴──────────┴──────────┴────────────┤ +│ Utilities │ +│ Type Inference · Validation · Logging │ +└─────────────────────────────────────────────┘ +``` + +### 2.2 Directory Structure + +``` +f2a/ +├── pyproject.toml # Build config (PEP 621) +├── README.md # Project introduction +├── PLAN.md # This document +├── LICENSE # MIT License +│ +├── src/ +│ └── f2a/ +│ ├── __init__.py # Public API exports +│ ├── _version.py # Version management +│ │ +│ ├── core/ +│ │ ├── __init__.py +│ │ ├── loader.py # File/HF data loading +│ │ ├── analyzer.py # Analysis orchestrator +│ │ └── schema.py # Column type inference & schema +│ │ +│ ├── stats/ +│ │ ├── __init__.py +│ │ ├── descriptive.py # Descriptive stats (mean, median, variance, etc.) 
+│ │ ├── distribution.py # Distribution analysis (skewness, kurtosis, normality) +│ │ ├── correlation.py # Correlation analysis +│ │ └── missing.py # Missing data analysis +│ │ +│ ├── viz/ +│ │ ├── __init__.py +│ │ ├── theme.py # Visualization theme/style +│ │ ├── plots.py # Basic plots (histogram, bar, box) +│ │ ├── dist_plots.py # Distribution visualization +│ │ ├── corr_plots.py # Correlation visualization +│ │ └── missing_plots.py # Missing data visualization +│ │ +│ ├── report/ +│ │ ├── __init__.py +│ │ ├── generator.py # Report generation engine +│ │ └── templates/ # HTML templates +│ │ └── base.html +│ │ +│ └── utils/ +│ ├── __init__.py +│ ├── type_inference.py # Automatic data type inference +│ ├── validators.py # Input validation +│ └── logging.py # Logging configuration +│ +├── tests/ +│ ├── __init__.py +│ ├── conftest.py # pytest fixtures +│ ├── test_loader.py +│ ├── test_descriptive.py +│ ├── test_correlation.py +│ ├── test_viz.py +│ └── test_report.py +│ +└── examples/ + ├── quickstart.py # Quick start example + └── huggingface_demo.py # HF dataset example +``` + +--- + +## 3. Core Module Design + +### 3.1 Loader (`core/loader.py`) + +Automatically detects the data source and converts it uniformly to a `pandas.DataFrame`. 
+ +| Input Type | Detection Method | Conversion Method | +|---|---|---| +| CSV / TSV | Extension `.csv`, `.tsv` | `pd.read_csv()` | +| JSON / JSONL | Extension `.json`, `.jsonl` | `pd.read_json()` | +| Parquet | Extension `.parquet` | `pd.read_parquet()` | +| Excel | Extension `.xlsx`, `.xls` | `pd.read_excel()` | +| HuggingFace | `hf://` prefix or `org/dataset` pattern | `datasets.load_dataset()` → `.to_pandas()` | + +**Core Interface:** +```python +class DataLoader: + def load(self, source: str, **kwargs) -> pd.DataFrame: + """Analyze source string and route to appropriate loader""" + + def _detect_source_type(self, source: str) -> SourceType: + """Auto-detect source type""" +``` + +### 3.2 Stats (`stats/`) + +#### 3.2.1 Descriptive Statistics (`descriptive.py`) + +| Statistic | Numeric | Categorical | +|---|---|---| +| count / unique | ✅ | ✅ | +| mean / median | ✅ | — | +| std / variance | ✅ | — | +| min / max / range | ✅ | — | +| Q1, Q3, IQR | ✅ | — | +| top / freq | — | ✅ | +| mode | ✅ | ✅ | + +#### 3.2.2 Distribution Analysis (`distribution.py`) + +- **Skewness** & **Kurtosis** +- **Normality Tests**: Shapiro-Wilk (n ≤ 5000), D'Agostino-Pearson +- **Quantile Table**: 5%, 10%, 25%, 50%, 75%, 90%, 95% + +#### 3.2.3 Correlation Analysis (`correlation.py`) + +- **Pearson** correlation (numeric-numeric) +- **Spearman** rank correlation (numeric-numeric, nonlinear) +- **Cramér's V** (categorical-categorical) +- Multicollinearity warning (|r| > 0.9) + +#### 3.2.4 Missing Data Analysis (`missing.py`) + +- Column-wise missing ratio +- Missing pattern analysis (MCAR / MAR hints) +- Row-wise missing distribution + +### 3.3 Viz (`viz/`) + +| Chart Type | Target | Module | +|---|---|---| +| Histogram + KDE | Numeric columns | `dist_plots.py` | +| Boxplot | Numeric columns | `plots.py` | +| Bar chart (frequency) | Categorical columns | `plots.py` | +| Correlation heatmap | Numeric column pairs | `corr_plots.py` | +| Pairplot | Top N numeric columns | `corr_plots.py` | +| 
Missing data matrix | All columns | `missing_plots.py` | +| Violin plot | Numeric columns | `dist_plots.py` | + +**Visualization Theme**: Unified style management in `viz/theme.py` (color palette, font size, etc.) + +### 3.4 Report (`report/`) + +Generates comprehensive HTML reports from analysis results. + +**Report Structure:** +1. **Overview Section**: Dataset name, row/column counts, memory usage +2. **Variable Summary**: Column types, missing ratios, key statistics +3. **Distribution Section**: Distribution visualization for each column +4. **Correlation Section**: Correlation heatmap + key correlated pairs +5. **Missing Data Section**: Missing pattern visualization +6. **Warnings Section**: Outliers, high correlation, high missing ratios, etc. + +--- + +## 4. Data Flow + +``` +Input (file path / HF URL) + │ + ▼ + ┌─────────┐ + │ Loader │ ──→ pd.DataFrame + └────┬─────┘ + │ + ▼ + ┌──────────┐ + │ Schema │ ──→ Column type inference (numeric/categorical/text/datetime) + └────┬─────┘ + │ + ├──→ Stats.descriptive() ──→ StatResult + ├──→ Stats.distribution() ──→ StatResult + ├──→ Stats.correlation() ──→ StatResult + └──→ Stats.missing() ──→ StatResult + │ + ▼ + ┌────────────┐ + │ Viz Engine │ ──→ matplotlib Figure objects + └─────┬──────┘ + │ + ▼ + ┌───────────┐ + │ Reporter │ ──→ AnalysisReport + └───────────┘ + │ + ├──→ .show() (console output) + ├──→ .to_html() (HTML file) + └──→ .to_dict() (programmatic access) +``` + +--- + +## 5. 
Dependencies + +### 5.1 Required (Core) + +| Package | Version | Purpose | +|---|---|---| +| `pandas` | ≥ 2.0 | DataFrame core | +| `numpy` | ≥ 1.24 | Numeric operations | +| `matplotlib` | ≥ 3.7 | Basic visualization | +| `seaborn` | ≥ 0.13 | Statistical visualization | +| `scipy` | ≥ 1.11 | Statistical tests | + +### 5.2 Optional + +| Package | Purpose | Extras Name | +|---|---|---| +| `datasets` | HuggingFace dataset loading | `[hf]` | +| `openpyxl` | Excel file support | `[excel]` | +| `pyarrow` | Parquet file support | `[parquet]` | +| `rich` | Console output formatting | `[rich]` | +| `jinja2` | HTML report templates | `[report]` | + +### 5.3 Install Commands + +```bash +# Basic install +pip install f2a + +# With HuggingFace support +pip install f2a[hf] + +# All features +pip install f2a[all] +``` + +--- + +## 6. Development Roadmap + +### Phase 1 — Foundation (v0.1.0) ← **Current** +- [x] Project structure setup (pyproject.toml, directories) +- [x] Basic Loader (CSV, JSON) +- [x] Descriptive statistics module (descriptive.py) +- [x] Basic visualization (histograms, boxplots) +- [x] Console output (show) + +### Phase 2 — Expansion (v0.2.0) +- [ ] HuggingFace dataset loader +- [ ] Correlation analysis & heatmap +- [ ] Missing data analysis & visualization +- [ ] HTML report generation + +### Phase 3 — Enhancement (v0.3.0) +- [ ] Distribution analysis (normality tests, etc.) +- [ ] Large dataset support (chunk loading) +- [ ] Interactive visualization (plotly option) +- [ ] CLI interface + +### Phase 4 — Stabilization (v1.0.0) +- [ ] API stabilization & documentation +- [ ] Comprehensive test coverage > 80% +- [ ] PyPI deployment +- [ ] Tutorials & example notebooks + +--- + +## 7. Coding Conventions + +- **Python**: 3.10+ +- **Style**: PEP 8, Black formatter, isort +- **Type Hints**: Required for all public APIs +- **Docstrings**: Google style +- **Testing**: pytest, unit testing principles +- **Linting**: ruff + +--- + +## 8. 
Core Class Design + +### 8.1 AnalysisReport + +```python +@dataclass +class AnalysisReport: + """Top-level container for analysis results""" + dataset_name: str + shape: tuple[int, int] + schema: DataSchema + stats: StatsResult + figures: dict[str, Figure] + warnings: list[str] + + def show(self) -> None: ... + def to_html(self, output_dir: str) -> Path: ... + def to_dict(self) -> dict: ... +``` + +### 8.2 StatsResult + +```python +@dataclass +class StatsResult: + """Container for statistical analysis results""" + summary: pd.DataFrame # Summary statistics + correlation_matrix: pd.DataFrame # Correlation matrix + missing_info: pd.DataFrame # Missing data info + distribution_info: pd.DataFrame # Distribution info + + def get_numeric_summary(self) -> pd.DataFrame: ... + def get_categorical_summary(self) -> pd.DataFrame: ... +``` + +--- + +## 9. Error Handling Strategy + +| Scenario | Handling | +|---|---| +| File not found | `FileNotFoundError` with clear message | +| Unsupported format | `UnsupportedFormatError` (custom) | +| HF dataset load failure | `DataLoadError` (custom) + cause chaining | +| Empty dataset | `EmptyDataError` (custom) | +| No numeric columns | Warning log + skip relevant analysis | + +--- + +*This document is continuously updated as the project progresses.* diff --git a/README.md b/README.md index da18a7e..213b85a 100644 --- a/README.md +++ b/README.md @@ -1,84 +1,460 @@ -# f2a +# f2a — File to Analysis -> **File to Analysis** — Automatically perform statistical analysis from any data source. +> **One line of code → Full statistical analysis + interactive HTML report.** +> 24+ file formats, HuggingFace datasets, 6 languages, 20+ analysis modules, 50+ visualizations. -`f2a` is a high-performance data analysis library that provides a simple -Python API while running all compute-heavy operations in native Rust via -[PyO3](https://pyo3.rs) and [maturin](https://www.maturin.rs). 
+[![PyPI](https://img.shields.io/pypi/v/f2a?color=blue)](https://pypi.org/project/f2a/) +[![Python](https://img.shields.io/pypi/pyversions/f2a)](https://pypi.org/project/f2a/) +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](LICENSE) +[![Tests](https://img.shields.io/badge/tests-88%20passed-brightgreen)]() -## Architecture +

+ f2a Overview Report +

+

+ f2a Clustering Analysis +

+ +

Generated from f2a.analyze("lerobot/roboturk") — a single line of code.

+ +--- + +## Live Sample Report + +> 📊 **[View Sample Report (lerobot/roboturk)](https://cocoRoF.github.io/f2a/lerobot_roboturk_20260317_090024_report.html)** ← GitHub Pages (recommended) +> +> A fully self-contained interactive HTML report generated from the [lerobot/roboturk](https://huggingface.co/datasets/lerobot/roboturk) dataset. +> +> **Alternative:** [Download raw HTML](https://raw.githubusercontent.com/CocoRoF/f2a/main/sample/lerobot_roboturk_20260317_090024_report.html) and open in your browser. + +--- + +## Installation + +```bash +pip install f2a ``` -┌─────────────────────────────────────────┐ -│ Python API Layer │ -│ f2a.analyze() / AnalysisConfig │ -│ Report generation (Jinja2 HTML) │ -│ Visualization (matplotlib / seaborn) │ -└──────────────┬──────────────────────────┘ - │ PyO3 FFI -┌──────────────▼──────────────────────────┐ -│ Rust Core (_core) │ -│ Data loading (polars) │ -│ Schema inference & preprocessing │ -│ 21 statistical analysis modules │ -│ Parallel computation (rayon) │ -└─────────────────────────────────────────┘ + +For advanced analyses (UMAP, ADF tests): + +```bash +pip install f2a[advanced] ``` -### What runs in Rust +--- -| Layer | Modules | -|---|---| -| **Core** | Loader (CSV/TSV/Parquet/JSON/JSONL), Schema inference, Preprocessor, Analyzer orchestration | -| **Basic Stats** | Descriptive, Correlation, Distribution, Missing, Outlier, Categorical, Duplicates, Quality, Feature Importance, PCA | -| **Advanced Stats** | Statistical Tests, Clustering, Anomaly Detection, Advanced Correlation, Advanced Distribution, Dimensionality Reduction, Feature Insights, Insight Engine, Column Role, Cross Analysis, ML Readiness | +## Quick Start -### What stays in Python +```python +import f2a -| Layer | Reason | -|---|---| -| **Visualization** | matplotlib/seaborn — no Rust equivalent worth the effort | -| **HTML Report** | Jinja2 templating is inherently Python | -| **i18n** | String-heavy, low compute | +# ── Local files 
────────────────────────────────────── +report = f2a.analyze("data/sales.csv") +report.show() # Print summary to console +report.to_html("output/") # Save interactive HTML report -## Quick Start +# ── HuggingFace datasets ──────────────────────────── +report = f2a.analyze("https://huggingface.co/datasets/imdb") +report = f2a.analyze("hf://imdb") +report = f2a.analyze("imdb") # org/dataset pattern auto-detected + +# ── Access results ─────────────────────────────────── +report.stats.summary # Descriptive statistics (DataFrame) +report.stats.correlation_matrix # Correlation matrix +report.stats.advanced_stats # Advanced analysis results +report.schema.columns # Column type information +report.to_dict() # Everything as a dictionary +``` + +--- + +## Example: Analyzing a HuggingFace Dataset ```python import f2a -report = f2a.analyze("data.csv") -report.show() # Rich console summary -report.to_html("output/") # Self-contained HTML report -report.get("quality") # Dict access to any section +report = f2a.analyze("https://huggingface.co/datasets/lerobot/roboturk") ``` -## Installation +``` +shape: (187507, 11) | subsets: 1 + default/train: (187507, 11) +``` -```bash -pip install f2a +```python +report.show() ``` -### Building from Source +``` +╔══════════════════════════════════════════════════════════╗ +║ f2a Analysis Report — lerobot/roboturk ║ +╠══════════════════════════════════════════════════════════╣ +║ Rows: 187,507 · Columns: 11 ║ +║ Numeric: 9 · Categorical: 0 · Text: 0 ║ +║ Datetime: 0 · Boolean: 0 ║ +╚══════════════════════════════════════════════════════════╝ +``` -```bash -# Prerequisites: Rust toolchain, Python >=3.10 -pip install maturin +```python +# Save interactive HTML report (2.5 MB self-contained file) +path = report.to_html("output/") +print(path) +# → output/lerobot_roboturk_20260317_090024_report.html +``` -# Development build (editable) -maturin develop --release +> 📊 **[View this report 
live](https://cocoRoF.github.io/f2a/lerobot_roboturk_20260317_090024_report.html)** -# Build wheel -maturin build --release +```python +# Access statistics programmatically +report.stats.summary +# timestamp episode_index frame_index ... +# count 187507.00 187507.00 187507.00 ... +# mean ... ... ... ... +# std ... ... ... ... + +report.stats.correlation_matrix +# timestamp episode_index frame_index ... +# timestamp 1.000000 0.978193 0.054412 ... +# episode_index 0.978193 1.000000 -0.003887 ... + +# Advanced analysis results +report.stats.advanced_stats.keys() +# dict_keys(['advanced_distribution', 'advanced_correlation', 'clustering', +# 'dimreduction', 'feature_insights', 'advanced_anomaly', ...]) ``` -## Supported Formats +--- + +## Multi-Subset HuggingFace Datasets + +Datasets with multiple configs and splits are **automatically discovered and analyzed**. + +```python +report = f2a.analyze("FINAL-Bench/ALL-Bench-Leaderboard") +print(f"Total: {report.shape[0]} rows across {len(report.subsets)} subsets") -CSV, TSV, JSON, JSONL, Parquet — plus optional extras for Excel, SPSS, SAS, HDF5, ODF, and more: +for s in report.subsets: + print(f" {s.subset}/{s.split}: {s.shape}") + +# Load specific subset +report = f2a.analyze("FINAL-Bench/ALL-Bench-Leaderboard", config="agent", split="train") +``` + +The HTML report generates **tabbed navigation** — each subset/split gets its own analysis page. 
+ +--- + +## HTML Report Features + +`report.to_html()` generates a **single self-contained HTML file** (no external dependencies) with: + +### 📑 Two-Depth Tab Navigation + +``` +[Subset/Split Tabs] + └── [Basic] | [Advanced] + ├── Basic: 13 analysis sections + └── Advanced: 10 advanced analysis sections +``` + +### 🎯 Interactive Elements + +| Feature | Description | +|---|---| +| **Metric Tooltips** | Hover any table header to see a detailed explanation of the metric | +| **Method Info Modals** | Click the ⓘ button on each section to see a detailed beginner-friendly explanation | +| **Image Zoom Modal** | Click any chart to view full-size with zoom/pan/drag support | +| **Draggable Tables** | Wide tables support horizontal drag-scrolling with sticky first column | +| **6-Language i18n** | English, Korean, Chinese, Japanese, German, French — switch in the header | +| **Dark/Light Theme** | Automatic system preference detection + manual toggle | +| **Responsive Layout** | Works on desktop, tablet, and mobile | + +### 📖 Beginner-Friendly Descriptions + +Every section and every metric includes: +- **Detailed modal descriptions** with HTML formatting, examples, and analogies +- **Beginner tips** (초심자 팁 / Anfänger-Tipp / Conseil débutant / 初心者向けヒント / 初学者提示) +- **Interpretation guidance** — what does this number actually mean? 
+- All descriptions are **fully translated** into 6 languages (not machine-translated placeholders) + +--- + +## Analysis Modules + +### Basic Analysis (13 sections) + +| Section | Key Metrics | +|---|---| +| **Overview** | Row/column count, type distribution, memory usage | +| **Data Quality** | Completeness, uniqueness, consistency, validity (0–100%) | +| **Preprocessing** | Applied steps, before/after comparison | +| **Descriptive Statistics** | Mean, median, std, SE, CV, MAD, min/max, quartiles, IQR, skewness, kurtosis | +| **Distribution Analysis** | Shapiro-Wilk, D'Agostino, KS, Anderson-Darling normality tests | +| **Correlation Analysis** | Pearson, Spearman, Kendall matrices, Cramér's V, VIF | +| **Missing Data** | Per-column missing ratio, row distribution, pattern analysis | +| **Outlier Detection** | IQR method, Z-score method, per-column outlier stats | +| **Categorical Analysis** | Frequency, entropy, normalized entropy, chi-square independence | +| **Feature Importance** | Variance ranking, mean absolute correlation, mutual information | +| **PCA** | Explained variance, scree plot, loadings heatmap, biplot | +| **Duplicates** | Exact duplicate rows, column-wise uniqueness | +| **Warnings** | High correlation, high missing ratio, constant columns | + +### Advanced Analysis (10 sections) + +| Section | Techniques | +|---|---| +| **Advanced Distribution** | Best-fit distribution selection (7 candidates), power transform analysis, Jarque-Bera test, ECDF, KDE bandwidth optimization | +| **Advanced Correlation** | Partial correlation, mutual information matrix, bootstrap confidence intervals, correlation network graph | +| **Clustering** | K-Means (elbow method), DBSCAN, hierarchical clustering (dendrogram), cluster profiling | +| **Dimensionality Reduction** | t-SNE, UMAP (optional), Factor Analysis | +| **Feature Insights** | Interaction detection, monotonic relationships, optimal binning, cardinality analysis, data leakage detection | +| **Anomaly 
Detection** | Isolation Forest, Local Outlier Factor (LOF), Mahalanobis distance, ensemble consensus | +| **Statistical Tests** | Levene, Kruskal-Wallis, Mann-Whitney U, chi-square goodness-of-fit, Grubbs test, ADF stationarity | +| **Insight Engine** | Auto-generated prioritized natural-language insights | +| **Cross Analysis** | Outlier × cluster intersection, Simpson's paradox detection | +| **ML Readiness** | Multi-dimensional ML-readiness scoring, encoding recommendations, data type suggestions | + +--- + +## Visualizations (50+) + +| Category | Charts | +|---|---| +| **Distribution** | Histogram + KDE, boxplots, violin plots, Q-Q plots | +| **Correlation** | Heatmap (Pearson/Spearman/Kendall), partial correlation heatmap, MI heatmap, bootstrap CI plot, network graph | +| **Missing** | Missing matrix, bar chart, heatmap | +| **Outlier** | Box plots with outlier markers, scatter plots | +| **Categorical** | Bar charts, frequency tables | +| **PCA** | Scree plot, cumulative variance, loadings heatmap, biplot | +| **Clustering** | Elbow curve, silhouette plot, cluster scatter, dendrogram, cluster profiles | +| **Advanced Distribution** | ECDF, power transform comparison, KDE bandwidth grid | +| **Dimensionality Reduction** | t-SNE scatter, Factor Analysis loadings | +| **Anomaly** | Isolation Forest scores, LOF scores, Mahalanobis distances, consensus heatmap | +| **Quality** | Radar chart (4 dimensions), per-column quality bars | +| **Insights** | Insight summary cards, cross-analysis Venn diagrams | + +All charts are **inline base64 PNG** — no external image files needed. 
+ +--- + +## Supported Formats (24+) + +| Category | Formats | +|---|---| +| **Delimited** | `.csv` `.tsv` `.txt` `.dat` `.tab` `.fwf` | +| **JSON** | `.json` `.jsonl` `.ndjson` | +| **Spreadsheet** | `.xlsx` `.xls` `.xlsm` `.xlsb` | +| **OpenDocument** | `.ods` | +| **Columnar** | `.parquet` `.pq` `.feather` `.ftr` `.arrow` `.ipc` `.orc` | +| **HDF5** | `.hdf` `.hdf5` `.h5` | +| **Statistical** | `.dta` (Stata) `.sas7bdat` `.xpt` (SAS) `.sav` `.zsav` (SPSS) | +| **Database** | `.sqlite` `.sqlite3` `.db` `.duckdb` | +| **Pickle** | `.pkl` `.pickle` | +| **Markup** | `.xml` `.html` `.htm` | +| **HuggingFace** | `hf://` URL, full URL, or `org/dataset` pattern | + +--- + +## Configuration + +```python +from f2a import AnalysisConfig + +# ── Preset configs ─────────────────────────────────── +config = AnalysisConfig.fast() # Skip PCA, feature importance, advanced +config = AnalysisConfig.minimal() # Descriptive + missing only +config = AnalysisConfig.basic_only() # All basic on, all advanced off + +# ── Custom config ──────────────────────────────────── +config = AnalysisConfig( + advanced=True, + clustering=True, + advanced_anomaly=True, + statistical_tests=True, + insight_engine=True, + cross_analysis=True, + ml_readiness=True, + outlier_method="zscore", # "iqr" (default) or "zscore" + outlier_threshold=3.0, # Z-score cutoff + correlation_threshold=0.9, # High-correlation warning threshold + pca_max_components=10, + max_cluster_k=10, # Max K for K-Means elbow search + tsne_perplexity=30.0, + bootstrap_iterations=1000, + max_sample_for_advanced=5000, # Subsample for expensive analyses +) + +report = f2a.analyze("data.csv", config=config) +``` + +### Config Options + +| Option | Default | Description | +|---|---|---| +| `descriptive` | `True` | Basic descriptive statistics | +| `distribution` | `True` | Distribution & normality tests | +| `correlation` | `True` | Correlation matrices | +| `outlier` | `True` | Outlier detection | +| `categorical` | `True` | Categorical 
variable analysis | +| `feature_importance` | `True` | Feature importance ranking | +| `pca` | `True` | PCA analysis | +| `duplicates` | `True` | Duplicate detection | +| `quality_score` | `True` | Data quality scoring | +| `advanced` | `True` | Master toggle for all advanced analyses | +| `advanced_distribution` | `True` | Best-fit distribution, ECDF, power transform | +| `advanced_correlation` | `True` | Partial correlation, MI matrix, bootstrap CI | +| `clustering` | `True` | K-Means, DBSCAN, hierarchical | +| `advanced_dimreduction` | `True` | t-SNE, UMAP, Factor Analysis | +| `feature_insights` | `True` | Interaction & leakage detection | +| `advanced_anomaly` | `True` | Isolation Forest, LOF, Mahalanobis | +| `statistical_tests` | `True` | Levene, Kruskal-Wallis, Grubbs, ADF | +| `insight_engine` | `True` | Auto-generated insights | +| `cross_analysis` | `True` | Cross-dimensional analysis | +| `column_role` | `True` | Column role detection | +| `ml_readiness` | `True` | ML readiness scoring | + +--- + +## API Reference + +### `f2a.analyze(source, **kwargs) → AnalysisReport` + +| Parameter | Type | Description | +|---|---|---| +| `source` | `str` | File path, URL, or HuggingFace dataset identifier | +| `config` | `AnalysisConfig` | Analysis configuration (optional) | +| `config` | `str` | HuggingFace dataset config/subset name (optional) | +| `split` | `str` | HuggingFace dataset split name (optional) | + +### `AnalysisReport` + +| Attribute / Method | Type | Description | +|---|---|---| +| `.shape` | `tuple[int, int]` | `(total_rows, columns)` | +| `.schema` | `SchemaInfo` | Column types and metadata | +| `.stats` | `StatsResult` | All statistical results | +| `.stats.summary` | `DataFrame` | Descriptive statistics table | +| `.stats.correlation_matrix` | `DataFrame` | Correlation matrix | +| `.stats.advanced_stats` | `dict` | Advanced analysis results | +| `.subsets` | `list[SubsetReport]` | Per-subset results (multi-subset HF datasets) | +| `.warnings` | 
`list[str]` | Analysis warnings | +| `.show()` | — | Print summary to console | +| `.to_html(output_dir)` | `Path` | Save interactive HTML report | +| `.to_dict()` | `dict` | Export all results as dictionary | + +--- + +## Project Structure + +``` +f2a/ +├── __init__.py # Public API: analyze(), AnalysisConfig +├── _version.py +├── core/ +│ ├── analyzer.py # Main analysis orchestrator +│ ├── config.py # AnalysisConfig dataclass +│ ├── loader.py # 24+ format data loader +│ ├── preprocessor.py # Data preprocessing pipeline +│ └── schema.py # Schema inference +├── stats/ # 20 analysis modules +│ ├── descriptive.py # Mean, median, std, quartiles, etc. +│ ├── distribution.py # Normality tests, skew/kurtosis +│ ├── correlation.py # Pearson, Spearman, Kendall, VIF +│ ├── missing.py # Missing data analysis +│ ├── outlier.py # IQR / Z-score outlier detection +│ ├── categorical.py # Frequency, entropy, chi-square +│ ├── feature_importance.py +│ ├── pca_analysis.py +│ ├── duplicates.py +│ ├── quality.py # 4-dimension quality scoring +│ ├── advanced_distribution.py +│ ├── advanced_correlation.py +│ ├── advanced_anomaly.py # Isolation Forest, LOF, Mahalanobis +│ ├── advanced_dimreduction.py # t-SNE, UMAP, Factor Analysis +│ ├── clustering.py # K-Means, DBSCAN, hierarchical +│ ├── feature_insights.py # Interaction, leakage detection +│ ├── statistical_tests.py # Levene, KW, Mann-Whitney, ADF +│ ├── insight_engine.py # Auto insight generation +│ ├── cross_analysis.py # Cross-dimensional analysis +│ ├── column_role.py # Column role inference +│ └── ml_readiness.py # ML readiness scoring +├── viz/ # 15 visualization modules +│ ├── plots.py # Base plot utilities +│ ├── theme.py # Consistent theming +│ ├── dist_plots.py +│ ├── corr_plots.py +│ ├── missing_plots.py +│ ├── outlier_plots.py +│ ├── categorical_plots.py +│ ├── pca_plots.py +│ ├── quality_plots.py +│ ├── cluster_plots.py +│ ├── advanced_dist_plots.py +│ ├── advanced_corr_plots.py +│ ├── advanced_anomaly_plots.py +│ ├── 
dimreduction_plots.py +│ ├── insight_plots.py +│ └── cross_plots.py +├── report/ +│ ├── generator.py # HTML report generator +│ └── i18n.py # 6-language translations +└── utils/ + ├── exceptions.py + ├── logging.py + ├── type_inference.py + └── validators.py +``` + +--- + +## Internationalization (i18n) + +The HTML report supports **6 languages** with a language selector in the header: + +| Language | Code | Description Quality | +|---|---|---| +| 🇺🇸 English | `en` | Full detailed descriptions with beginner tips | +| 🇰🇷 Korean | `ko` | Full detailed descriptions with 초심자 팁 | +| 🇨🇳 Chinese | `zh` | Full detailed descriptions with 初学者提示 | +| 🇯🇵 Japanese | `ja` | Full detailed descriptions with 初心者向けヒント | +| 🇩🇪 German | `de` | Full detailed descriptions with Anfänger-Tipp | +| 🇫🇷 French | `fr` | Full detailed descriptions with Conseil débutant | + +Each language includes: +- **~120 metric tooltip translations** — hover any table header +- **~50 section modal descriptions** — click the ⓘ button on each section +- All UI labels, buttons, and messages + +--- + +## Requirements + +- **Python** ≥ 3.10 +- **Core**: pandas, numpy, matplotlib, seaborn, scipy, scikit-learn +- **Formats**: datasets (HuggingFace), openpyxl, pyarrow, pyreadstat, tables, odfpy, lxml, duckdb +- **UI**: rich, jinja2 +- **Optional**: networkx, umap-learn, statsmodels (install with `pip install f2a[advanced]`) + +--- + +## Development ```bash -pip install f2a[io] +# Clone and install +git clone https://github.com/CocoRoF/f2a.git +cd f2a +pip install -e ".[dev]" + +# Run tests (88 tests) +pytest git_action/tests/ -q + +# Lint +ruff check f2a/ ``` +--- + ## License -Apache-2.0 +MIT License — See [LICENSE](LICENSE) for details. diff --git a/f2a/__init__.py b/f2a/__init__.py new file mode 100644 index 0000000..a3415de --- /dev/null +++ b/f2a/__init__.py @@ -0,0 +1,16 @@ +"""f2a — File to Analysis. 
+ +A library that automatically performs descriptive statistical analysis +and visualization from various data sources. + +Usage: + >>> import f2a + >>> report = f2a.analyze("data.csv") + >>> report.show() +""" + +from f2a._version import __version__ +from f2a.core.analyzer import analyze +from f2a.core.config import AnalysisConfig + +__all__ = ["__version__", "analyze", "AnalysisConfig"] diff --git a/f2a/_version.py b/f2a/_version.py new file mode 100644 index 0000000..bfde9e9 --- /dev/null +++ b/f2a/_version.py @@ -0,0 +1,8 @@ +"""Version information for f2a.""" + +try: + from importlib.metadata import version as _get_version + + __version__: str = _get_version("f2a") +except Exception: + __version__ = "0.1.0" diff --git a/f2a/core/__init__.py b/f2a/core/__init__.py new file mode 100644 index 0000000..7d4aa4a --- /dev/null +++ b/f2a/core/__init__.py @@ -0,0 +1,18 @@ +"""Core module — data loading, analysis orchestration, and schema inference.""" + +from f2a.core.analyzer import Analyzer, analyze +from f2a.core.config import AnalysisConfig +from f2a.core.loader import DataLoader +from f2a.core.preprocessor import Preprocessor, PreprocessingResult +from f2a.core.schema import DataSchema, infer_schema + +__all__ = [ + "AnalysisConfig", + "Analyzer", + "DataLoader", + "DataSchema", + "Preprocessor", + "PreprocessingResult", + "analyze", + "infer_schema", +] diff --git a/f2a/core/analyzer.py b/f2a/core/analyzer.py new file mode 100644 index 0000000..e0973a9 --- /dev/null +++ b/f2a/core/analyzer.py @@ -0,0 +1,1609 @@ +"""Analysis orchestrator — coordinates the entire analysis pipeline. + +This module connects preprocessing, statistical analysis, visualization, +and report generation into a single ``analyze()`` entry point. 
+""" + +from __future__ import annotations + +import re +import time +from dataclasses import dataclass, field +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +import matplotlib +matplotlib.use("Agg") # non-interactive backend +import matplotlib.pyplot as plt +import pandas as pd + +from f2a.core.config import AnalysisConfig +from f2a.core.loader import DataLoader +from f2a.core.preprocessor import Preprocessor, PreprocessingResult +from f2a.core.schema import DataSchema, infer_schema +from f2a.stats.categorical import CategoricalStats +from f2a.stats.correlation import CorrelationStats +from f2a.stats.descriptive import DescriptiveStats +from f2a.stats.distribution import DistributionStats +from f2a.stats.duplicates import DuplicateStats +from f2a.stats.feature_importance import FeatureImportanceStats +from f2a.stats.missing import MissingStats +from f2a.stats.outlier import OutlierStats +from f2a.stats.pca_analysis import PCAStats +from f2a.stats.quality import QualityStats +from f2a.utils.logging import get_logger +from f2a.utils.validators import validate_source + +logger = get_logger(__name__) + + +# ===================================================================== +# Result containers +# ===================================================================== + +@dataclass +class StatsResult: + """Container for ALL statistical analysis results.""" + + # Descriptive + summary: pd.DataFrame = field(default_factory=pd.DataFrame) + numeric_summary: pd.DataFrame = field(default_factory=pd.DataFrame) + categorical_summary: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Correlation + correlation_matrix: pd.DataFrame = field(default_factory=pd.DataFrame) + spearman_matrix: pd.DataFrame = field(default_factory=pd.DataFrame) + cramers_v_matrix: pd.DataFrame = field(default_factory=pd.DataFrame) + vif_table: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Missing + missing_info: pd.DataFrame = 
field(default_factory=pd.DataFrame) + + # Distribution + distribution_info: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Outlier + outlier_summary: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Categorical analysis + categorical_analysis: pd.DataFrame = field(default_factory=pd.DataFrame) + chi_square_matrix: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Feature importance + feature_importance: pd.DataFrame = field(default_factory=pd.DataFrame) + + # PCA + pca_variance: pd.DataFrame = field(default_factory=pd.DataFrame) + pca_loadings: pd.DataFrame = field(default_factory=pd.DataFrame) + pca_summary: dict[str, Any] = field(default_factory=dict) + + # Duplicates + duplicate_stats: dict[str, Any] = field(default_factory=dict) + + # Quality + quality_scores: dict[str, Any] = field(default_factory=dict) + quality_by_column: pd.DataFrame = field(default_factory=pd.DataFrame) + + # Preprocessing + preprocessing: PreprocessingResult | None = None + + # Advanced analysis + advanced_stats: dict[str, Any] = field(default_factory=dict) + + def get_numeric_summary(self) -> pd.DataFrame: + return self.numeric_summary + + def get_categorical_summary(self) -> pd.DataFrame: + return self.categorical_summary + + +@dataclass +class VizResult: + """Container for lazy visualization generation.""" + + _df: pd.DataFrame + _schema: DataSchema + _config: AnalysisConfig = field(default_factory=AnalysisConfig) + _stats: StatsResult = field(default_factory=StatsResult) + _figures: dict[str, plt.Figure] = field(default_factory=dict) + + # -- Core plots ------------------------------------------------------- + + def plot_distributions(self) -> plt.Figure: + from f2a.viz.plots import BasicPlotter + p = BasicPlotter(self._df, self._schema) + fig = p.histograms(columns=self._schema.numeric_columns[:self._config.max_plot_columns]) + self._figures["distributions"] = fig + return fig + + def plot_boxplots(self) -> plt.Figure: + from f2a.viz.plots import BasicPlotter 
+ p = BasicPlotter(self._df, self._schema) + fig = p.boxplots(columns=self._schema.numeric_columns[:self._config.max_plot_columns]) + self._figures["boxplots"] = fig + return fig + + def plot_bar_charts(self) -> plt.Figure: + from f2a.viz.plots import BasicPlotter + p = BasicPlotter(self._df, self._schema) + fig = p.bar_charts(columns=self._schema.categorical_columns[:self._config.max_plot_columns]) + self._figures["bar_charts"] = fig + return fig + + def plot_correlation(self, method: str = "pearson") -> plt.Figure: + from f2a.viz.corr_plots import CorrelationPlotter + p = CorrelationPlotter(self._df, self._schema) + fig = p.heatmap(method=method) + self._figures[f"correlation_{method}"] = fig + return fig + + def plot_missing(self) -> plt.Figure: + from f2a.viz.missing_plots import MissingPlotter + p = MissingPlotter(self._df, self._schema) + fig = p.bar() + self._figures["missing_bar"] = fig + return fig + + def plot_missing_matrix(self) -> plt.Figure: + from f2a.viz.missing_plots import MissingPlotter + p = MissingPlotter(self._df, self._schema) + fig = p.matrix() + self._figures["missing_matrix"] = fig + return fig + + # -- Distribution plots ----------------------------------------------- + + def plot_violins(self) -> plt.Figure: + from f2a.viz.dist_plots import DistributionPlotter + p = DistributionPlotter(self._df, self._schema) + fig = p.violin_plots(columns=self._schema.numeric_columns[:self._config.max_plot_columns]) + self._figures["violins"] = fig + return fig + + def plot_qq(self) -> plt.Figure: + from f2a.viz.dist_plots import DistributionPlotter + p = DistributionPlotter(self._df, self._schema) + fig = p.qq_plots(columns=self._schema.numeric_columns[:self._config.max_plot_columns]) + self._figures["qq"] = fig + return fig + + # -- Outlier plots ---------------------------------------------------- + + def plot_outliers(self) -> plt.Figure: + from f2a.viz.outlier_plots import OutlierPlotter + p = OutlierPlotter(self._df, self._schema) + fig = 
p.box_strip(columns=self._schema.numeric_columns[:self._config.max_plot_columns]) + self._figures["outliers"] = fig + return fig + + # -- Categorical plots ------------------------------------------------ + + def plot_categorical_frequency(self) -> plt.Figure: + from f2a.viz.categorical_plots import CategoricalPlotter + p = CategoricalPlotter(self._df, self._schema) + fig = p.frequency_bars( + columns=self._schema.categorical_columns[:self._config.max_plot_columns], + top_n=self._config.max_categories, + ) + self._figures["categorical_freq"] = fig + return fig + + def plot_chi_square_heatmap(self) -> plt.Figure: + from f2a.viz.categorical_plots import CategoricalPlotter + p = CategoricalPlotter(self._df, self._schema) + fig = p.chi_square_heatmap(self._stats.chi_square_matrix) + self._figures["chi_square"] = fig + return fig + + # -- PCA plots -------------------------------------------------------- + + def plot_pca_scree(self) -> plt.Figure: + from f2a.viz.pca_plots import PCAPlotter + p = PCAPlotter() + fig = p.scree_plot(self._stats.pca_variance) + self._figures["pca_scree"] = fig + return fig + + def plot_pca_loadings(self) -> plt.Figure: + from f2a.viz.pca_plots import PCAPlotter + p = PCAPlotter() + fig = p.loadings_heatmap(self._stats.pca_loadings) + self._figures["pca_loadings"] = fig + return fig + + # -- Quality / Feature importance plots -------------------------------- + + def plot_quality(self) -> plt.Figure: + from f2a.viz.quality_plots import QualityPlotter + p = QualityPlotter() + fig = p.dimension_bar(self._stats.quality_scores) + self._figures["quality"] = fig + return fig + + def plot_column_quality(self) -> plt.Figure: + from f2a.viz.quality_plots import QualityPlotter + p = QualityPlotter() + fig = p.column_quality_heatmap(self._stats.quality_by_column) + self._figures["column_quality"] = fig + return fig + + def plot_feature_importance(self) -> plt.Figure: + from f2a.viz.quality_plots import QualityPlotter + p = QualityPlotter() + fig = 
p.feature_importance_bar(self._stats.feature_importance) + self._figures["feature_importance"] = fig + return fig + + # -- Advanced plots --------------------------------------------------- + + def plot_best_fit_overlay(self) -> plt.Figure: + from f2a.viz.advanced_dist_plots import AdvancedDistPlotter + p = AdvancedDistPlotter() + bf = self._stats.advanced_stats.get("advanced_distribution", {}).get("best_fit") + if bf is None or bf.empty: + return None # type: ignore[return-value] + fig = p.best_fit_overlay(self._df, bf) + self._figures["best_fit_overlay"] = fig + return fig + + def plot_ecdf(self) -> plt.Figure: + from f2a.viz.advanced_dist_plots import AdvancedDistPlotter + p = AdvancedDistPlotter() + ecdf_data = self._stats.advanced_stats.get("ecdf_data", {}) + if not ecdf_data: + return None # type: ignore[return-value] + fig = p.ecdf_plot(ecdf_data) + self._figures["ecdf"] = fig + return fig + + def plot_power_transform(self) -> plt.Figure: + from f2a.viz.advanced_dist_plots import AdvancedDistPlotter + p = AdvancedDistPlotter() + pt = self._stats.advanced_stats.get("advanced_distribution", {}).get("power_transform") + if pt is None or pt.empty: + return None # type: ignore[return-value] + fig = p.power_transform_plot(self._df, pt) + self._figures["power_transform"] = fig + return fig + + def plot_jarque_bera(self) -> plt.Figure: + from f2a.viz.advanced_dist_plots import AdvancedDistPlotter + p = AdvancedDistPlotter() + jb = self._stats.advanced_stats.get("advanced_distribution", {}).get("jarque_bera") + if jb is None or jb.empty: + return None # type: ignore[return-value] + fig = p.jarque_bera_summary(jb) + self._figures["jarque_bera"] = fig + return fig + + def plot_partial_correlation(self) -> plt.Figure: + from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter + p = AdvancedCorrPlotter() + pcorr = self._stats.advanced_stats.get("advanced_correlation", {}).get("partial_correlation") + if pcorr is None or pcorr.empty: + return None # type: 
ignore[return-value] + fig = p.partial_correlation_heatmap(pcorr) + self._figures["partial_correlation"] = fig + return fig + + def plot_mi_heatmap(self) -> plt.Figure: + from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter + p = AdvancedCorrPlotter() + mi = self._stats.advanced_stats.get("advanced_correlation", {}).get("mutual_information") + if mi is None or mi.empty: + return None # type: ignore[return-value] + fig = p.mi_heatmap(mi) + self._figures["mi_heatmap"] = fig + return fig + + def plot_bootstrap_ci(self) -> plt.Figure: + from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter + p = AdvancedCorrPlotter() + bci = self._stats.advanced_stats.get("advanced_correlation", {}).get("bootstrap_ci") + if bci is None or bci.empty: + return None # type: ignore[return-value] + fig = p.bootstrap_ci_plot(bci) + self._figures["bootstrap_ci"] = fig + return fig + + def plot_correlation_network(self) -> plt.Figure: + from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter + p = AdvancedCorrPlotter() + net = self._stats.advanced_stats.get("advanced_correlation", {}).get("network") + if not net or not net.get("edges"): + return None # type: ignore[return-value] + fig = p.correlation_network(net) + self._figures["correlation_network"] = fig + return fig + + def plot_distance_correlation(self) -> plt.Figure: + from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter + p = AdvancedCorrPlotter() + dc = self._stats.advanced_stats.get("advanced_correlation", {}).get("distance_correlation") + if dc is None or dc.empty: + return None # type: ignore[return-value] + fig = p.distance_correlation_heatmap(dc) + self._figures["distance_correlation"] = fig + return fig + + def plot_elbow_silhouette(self) -> plt.Figure: + from f2a.viz.cluster_plots import ClusterPlotter + p = ClusterPlotter() + km = self._stats.advanced_stats.get("clustering", {}).get("kmeans") + if not km: + return None # type: ignore[return-value] + fig = p.elbow_silhouette(km) + 
self._figures["elbow_silhouette"] = fig + return fig + + def plot_cluster_scatter(self) -> plt.Figure: + from f2a.viz.cluster_plots import ClusterPlotter + p = ClusterPlotter() + km = self._stats.advanced_stats.get("clustering", {}).get("kmeans") + if not km: + return None # type: ignore[return-value] + fig = p.cluster_scatter_2d( + self._df, self._schema.numeric_columns, km, + ) + self._figures["cluster_scatter"] = fig + return fig + + def plot_dendrogram(self) -> plt.Figure: + from f2a.viz.cluster_plots import ClusterPlotter + p = ClusterPlotter() + hc = self._stats.advanced_stats.get("clustering", {}).get("hierarchical") + if not hc: + return None # type: ignore[return-value] + fig = p.dendrogram(hc) + self._figures["dendrogram"] = fig + return fig + + def plot_cluster_profiles(self) -> plt.Figure: + from f2a.viz.cluster_plots import ClusterPlotter + p = ClusterPlotter() + profiles = self._stats.advanced_stats.get("clustering", {}).get("profiles") + if profiles is None or profiles.empty: + return None # type: ignore[return-value] + fig = p.cluster_profile_heatmap(profiles) + self._figures["cluster_profiles"] = fig + return fig + + def plot_anomaly_scatter(self) -> plt.Figure: + from f2a.viz.advanced_anomaly_plots import AdvancedAnomalyPlotter + p = AdvancedAnomalyPlotter() + iso = self._stats.advanced_stats.get("advanced_anomaly_full", {}).get("isolation_forest") + if not iso: + return None # type: ignore[return-value] + fig = p.anomaly_scatter_2d( + self._df, self._schema.numeric_columns, iso, + ) + self._figures["anomaly_scatter"] = fig + return fig + + def plot_mahalanobis_hist(self) -> plt.Figure: + from f2a.viz.advanced_anomaly_plots import AdvancedAnomalyPlotter + p = AdvancedAnomalyPlotter() + maha = self._stats.advanced_stats.get("advanced_anomaly_full", {}).get("mahalanobis") + if not maha: + return None # type: ignore[return-value] + fig = p.mahalanobis_histogram(maha) + self._figures["mahalanobis_hist"] = fig + return fig + + def 
plot_consensus_comparison(self) -> plt.Figure: + from f2a.viz.advanced_anomaly_plots import AdvancedAnomalyPlotter + p = AdvancedAnomalyPlotter() + cons = self._stats.advanced_stats.get("advanced_anomaly", {}).get("consensus") + if not cons: + return None # type: ignore[return-value] + fig = p.consensus_comparison(cons) + self._figures["consensus_comparison"] = fig + return fig + + # -- Insight plots (enhancement) -------------------------------------- + + def plot_insight_severity(self) -> plt.Figure: + from f2a.viz.insight_plots import InsightPlotter + p = InsightPlotter() + insights = self._stats.advanced_stats.get("insights", {}).get("all_insights", []) + if not insights: + return None # type: ignore[return-value] + fig = p.severity_bar(insights) + self._figures["insight_severity"] = fig + return fig + + def plot_insight_categories(self) -> plt.Figure: + from f2a.viz.insight_plots import InsightPlotter + p = InsightPlotter() + insights = self._stats.advanced_stats.get("insights", {}).get("all_insights", []) + if not insights: + return None # type: ignore[return-value] + fig = p.category_treemap(insights) + self._figures["insight_categories"] = fig + return fig + + def plot_top_insights(self) -> plt.Figure: + from f2a.viz.insight_plots import InsightPlotter + p = InsightPlotter() + insights = self._stats.advanced_stats.get("insights", {}).get("all_insights", []) + if not insights: + return None # type: ignore[return-value] + fig = p.top_insights_table(insights) + self._figures["top_insights"] = fig + return fig + + def plot_action_items(self) -> plt.Figure: + from f2a.viz.insight_plots import InsightPlotter + p = InsightPlotter() + insights = self._stats.advanced_stats.get("insights", {}).get("all_insights", []) + if not insights: + return None # type: ignore[return-value] + fig = p.action_items_chart(insights) + self._figures["action_items"] = fig + return fig + + # -- Cross-analysis plots (enhancement) -------------------------------- + + def 
plot_anomaly_by_cluster(self) -> plt.Figure: + from f2a.viz.cross_plots import CrossPlotter + p = CrossPlotter() + ca = self._stats.advanced_stats.get("cross_analysis", {}).get("outlier_by_cluster") + if not ca: + return None # type: ignore[return-value] + fig = p.anomaly_by_cluster_bar(ca) + self._figures["anomaly_by_cluster"] = fig + return fig + + def plot_missing_correlation_cross(self) -> plt.Figure: + from f2a.viz.cross_plots import CrossPlotter + p = CrossPlotter() + ca = self._stats.advanced_stats.get("cross_analysis", {}).get("missing_correlation") + if not ca: + return None # type: ignore[return-value] + fig = p.missing_correlation_heatmap(ca) + self._figures["missing_correlation"] = fig + return fig + + def plot_simpson_paradox(self) -> plt.Figure: + from f2a.viz.cross_plots import CrossPlotter + p = CrossPlotter() + ca = self._stats.advanced_stats.get("cross_analysis", {}).get("simpson_paradox") + if not ca: + return None # type: ignore[return-value] + fig = p.simpson_paradox_scatter(ca) + self._figures["simpson_paradox"] = fig + return fig + + def plot_importance_vs_missing(self) -> plt.Figure: + from f2a.viz.cross_plots import CrossPlotter + p = CrossPlotter() + ca = self._stats.advanced_stats.get("cross_analysis", {}).get("importance_vs_missing") + if not ca: + return None # type: ignore[return-value] + fig = p.importance_vs_missing_scatter(ca) + self._figures["importance_vs_missing"] = fig + return fig + + def plot_unified_embedding(self) -> plt.Figure: + from f2a.viz.cross_plots import CrossPlotter + p = CrossPlotter() + ca = self._stats.advanced_stats.get("cross_analysis", {}).get("unified_2d_embedding") + if not ca: + return None # type: ignore[return-value] + fig = p.unified_2d_scatter(ca) + self._figures["unified_embedding"] = fig + return fig + + # -- Dim-reduction plots (enhancement) -------------------------------- + + def plot_tsne(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() 
+ fig = p.tsne_scatter( + self._df, self._schema.numeric_columns[:20], + perplexity=self._config.tsne_perplexity, + max_sample=self._config.max_sample_for_advanced, + ) + self._figures["tsne"] = fig + return fig + + def plot_umap(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() + fig = p.umap_scatter( + self._df, self._schema.numeric_columns[:20], + max_sample=self._config.max_sample_for_advanced, + ) + self._figures["umap"] = fig + return fig + + def plot_explained_variance_curve(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() + pca_data = self._stats.pca_summary or {} + ev_df = pca_data.get("explained_variance_df", self._stats.pca_variance) + if ev_df is None or (isinstance(ev_df, pd.DataFrame) and ev_df.empty): + return None # type: ignore[return-value] + fig = p.explained_variance_curve({"explained_variance_df": ev_df}) + self._figures["explained_variance_curve"] = fig + return fig + + def plot_factor_loadings_heatmap(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() + if self._stats.pca_loadings.empty: + return None # type: ignore[return-value] + fig = p.factor_loadings_heatmap({"loadings_df": self._stats.pca_loadings}) + self._figures["factor_loadings"] = fig + return fig + + def plot_feature_contribution(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() + if self._stats.pca_loadings.empty: + return None # type: ignore[return-value] + fig = p.feature_contribution_bar({"loadings_df": self._stats.pca_loadings}) + self._figures["feature_contribution"] = fig + return fig + + def plot_biplot(self) -> plt.Figure: + from f2a.viz.dimreduction_plots import DimReductionPlotter + p = DimReductionPlotter() + num_cols = self._schema.numeric_columns[:20] + if len(num_cols) < 2: + return None # type: ignore[return-value] + fig = 
p.biplot( + self._df, num_cols, + max_sample=self._config.max_sample_for_advanced, + ) + self._figures["biplot"] = fig + return fig + + +# ===================================================================== +# Subset / Analysis Report +# ===================================================================== + +@dataclass +class SubsetReport: + """Analysis results for a single subset/split partition.""" + + subset: str + split: str + shape: tuple[int, int] + schema: DataSchema + stats: StatsResult + viz: VizResult + warnings: list[str] = field(default_factory=list) + + +@dataclass +class AnalysisReport: + """Top-level container for analysis results. + + Attributes: + dataset_name: Dataset name. + shape: ``(rows, columns)`` tuple. + schema: Data schema. + stats: Statistical analysis results. + viz: Visualization access object. + warnings: List of warnings. + subsets: Per-subset/split reports (empty for single partition). + config: The :class:`AnalysisConfig` used. + """ + + dataset_name: str + shape: tuple[int, int] + schema: DataSchema + stats: StatsResult + viz: VizResult + warnings: list[str] = field(default_factory=list) + subsets: list[SubsetReport] = field(default_factory=list) + config: AnalysisConfig = field(default_factory=AnalysisConfig) + analysis_started_at: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat(timespec="seconds")) + analysis_duration_sec: float = 0.0 + + # -- Console output --------------------------------------------------- + + def show(self) -> None: + """Print analysis summary to console.""" + sep = "=" * 60 + print(sep) + print(f" f2a Analysis Report: {self.dataset_name}") + print(sep) + + if self.subsets: + print(f"\n Total Rows: {self.shape[0]:,} | Subsets: {len(self.subsets)}") + for sr in self.subsets: + print(f"\n{'-' * 60}") + print(f" [{sr.subset} / {sr.split}] {sr.shape[0]:,} rows x {sr.shape[1]} cols") + print(f" Memory: {sr.schema.memory_usage_mb} MB") + print(f" Numeric: {len(sr.schema.numeric_columns)} 
| " + f"Categorical: {len(sr.schema.categorical_columns)} | " + f"Text: {len(sr.schema.text_columns)} | " + f"Datetime: {len(sr.schema.datetime_columns)}") + if not sr.stats.summary.empty: + print() + print(sr.stats.summary.to_string()) + if sr.warnings: + print("\n Warnings:") + for w in sr.warnings: + print(f" - {w}") + else: + print(f"\n Rows: {self.shape[0]:,} | Columns: {self.shape[1]}") + print(f" Memory: {self.schema.memory_usage_mb} MB") + print(f"\n Numeric: {len(self.schema.numeric_columns)}") + print(f" Categorical: {len(self.schema.categorical_columns)}") + print(f" Text: {len(self.schema.text_columns)}") + print(f" Datetime: {len(self.schema.datetime_columns)}") + + if self.stats.quality_scores: + qs = self.stats.quality_scores + print(f"\n Data Quality: {qs.get('overall', 0) * 100:.1f}%") + + if self.stats.preprocessing: + pp = self.stats.preprocessing + n_issues = ( + len(pp.constant_columns) + len(pp.high_missing_columns) + + len(pp.id_like_columns) + pp.duplicate_rows_count + + len(pp.mixed_type_columns) + len(pp.infinite_value_columns) + ) + print(f" Preprocessing: {len(pp.cleaning_log)} steps, {n_issues} issues found") + + print(f"\n{'-' * 60}") + print(" Summary Statistics:") + if not self.stats.summary.empty: + print(self.stats.summary.to_string()) + + if not self.stats.outlier_summary.empty: + total_outliers = self.stats.outlier_summary.get("outlier_count", pd.Series()).sum() + if total_outliers > 0: + print(f"\n Outliers detected: {int(total_outliers)} total across numeric columns") + + if self.stats.pca_summary: + ps = self.stats.pca_summary + print(f"\n PCA: {ps.get('components_for_90pct', '?')} components explain 90% variance") + + if self.warnings: + print(f"\n{'-' * 60}") + print(" Warnings:") + for w in self.warnings: + print(f" - {w}") + + print(sep) + + # -- HTML report ------------------------------------------------------ + + def to_html(self, output_dir: str = ".") -> Path: + """Generate and save an HTML report. 
+ + Args: + output_dir: Output directory path. + + Returns: + Path to the saved HTML file. + """ + from f2a.report.generator import ReportGenerator + + generator = ReportGenerator() + safe_name = re.sub(r'[<>:"/\\|?*]', "_", self.dataset_name) + safe_name = safe_name.strip(". ")[:120] or "report" + ts = datetime.now().strftime("%Y%m%d_%H%M%S") + output_path = Path(output_dir) / f"{safe_name}_{ts}_report.html" + + if self.subsets: + subset_sections = self._build_subset_sections() + generator.save_html_multi( + output_path=output_path, + dataset_name=self.dataset_name, + sections=subset_sections, + config=self.config, + analysis_started_at=self.analysis_started_at, + analysis_duration_sec=self.analysis_duration_sec, + ) + else: + report_data = self._build_single_report_data() + generator.save_html(output_path=output_path, **report_data) + + return output_path + + def _build_single_report_data(self) -> dict[str, Any]: + figures = self._generate_figures(self.viz, self.stats, self.config) + return { + "dataset_name": self.dataset_name, + "schema_summary": self.schema.summary_dict(), + "stats": self.stats, + "figures": figures, + "warnings": self.warnings, + "config": self.config, + "analysis_started_at": self.analysis_started_at, + "analysis_duration_sec": self.analysis_duration_sec, + } + + def _build_subset_sections(self) -> list[dict[str, Any]]: + sections: list[dict[str, Any]] = [] + for sr in self.subsets: + figures = self._generate_figures(sr.viz, sr.stats, self.config) + sections.append({ + "subset": sr.subset, + "split": sr.split, + "schema_summary": sr.schema.summary_dict(), + "stats": sr.stats, + "figures": figures, + "warnings": sr.warnings, + }) + return sections + + @staticmethod + def _has_data(obj: Any) -> bool: + """Check if an object represents non-empty data (safe for DataFrames).""" + if obj is None: + return False + if isinstance(obj, pd.DataFrame): + return not obj.empty + if isinstance(obj, pd.Series): + return not obj.empty + if isinstance(obj, 
(dict, list)): + return len(obj) > 0 + return bool(obj) + + @staticmethod + def _generate_figures( + viz: VizResult, + stats: StatsResult, + config: AnalysisConfig, + ) -> dict[str, plt.Figure]: + """Generate all configured figures, catching individual failures.""" + figures: dict[str, plt.Figure] = {} + _hd = AnalysisReport._has_data + + if not config.visualizations: + return figures + + plot_attempts: list[tuple[str, Any, bool]] = [ + ("Distribution Histograms", viz.plot_distributions, config.descriptive), + ("Boxplots", viz.plot_boxplots, config.descriptive), + ("Violin Plots", viz.plot_violins, config.distribution), + ("Q-Q Plots", viz.plot_qq, config.distribution), + ("Correlation Heatmap (Pearson)", lambda: viz.plot_correlation("pearson"), config.correlation), + ("Correlation Heatmap (Spearman)", lambda: viz.plot_correlation("spearman"), config.correlation), + ("Missing Data", viz.plot_missing, True), + ("Missing Data Matrix", viz.plot_missing_matrix, True), + ("Outlier Detection", viz.plot_outliers, config.outlier), + ("Categorical Frequency", viz.plot_categorical_frequency, config.categorical), + ( + "Chi-Square Heatmap", + viz.plot_chi_square_heatmap, + config.categorical and not stats.chi_square_matrix.empty, + ), + ( + "PCA Scree Plot", + viz.plot_pca_scree, + config.pca and not stats.pca_variance.empty, + ), + ( + "PCA Loadings", + viz.plot_pca_loadings, + config.pca and not stats.pca_loadings.empty, + ), + ( + "Data Quality Scores", + viz.plot_quality, + config.quality_score and bool(stats.quality_scores), + ), + ( + "Column Quality", + viz.plot_column_quality, + config.quality_score and not stats.quality_by_column.empty, + ), + ( + "Feature Importance", + viz.plot_feature_importance, + config.feature_importance and not stats.feature_importance.empty, + ), + ] + + # -- Advanced plots ----------------------------------------------- + if config.advanced: + adv = stats.advanced_stats + adv_attempts: list[tuple[str, Any, bool]] = [ + # A1. 
Advanced Distribution + ( + "Best-Fit Distribution Overlay", + viz.plot_best_fit_overlay, + config.advanced_distribution + and bool(adv.get("advanced_distribution", {}).get("best_fit") is not None), + ), + ( + "ECDF Plot", + viz.plot_ecdf, + config.advanced_distribution and bool(adv.get("ecdf_data")), + ), + ( + "Power Transform Comparison", + viz.plot_power_transform, + config.advanced_distribution + and bool(adv.get("advanced_distribution", {}).get("power_transform") is not None), + ), + ( + "Jarque-Bera Normality Test", + viz.plot_jarque_bera, + config.advanced_distribution + and bool(adv.get("advanced_distribution", {}).get("jarque_bera") is not None), + ), + # A2. Advanced Correlation + ( + "Partial Correlation Heatmap", + viz.plot_partial_correlation, + config.advanced_correlation + and bool(adv.get("advanced_correlation", {}).get("partial_correlation") is not None), + ), + ( + "Mutual Information Heatmap", + viz.plot_mi_heatmap, + config.advanced_correlation + and bool(adv.get("advanced_correlation", {}).get("mutual_information") is not None), + ), + ( + "Bootstrap Correlation CI", + viz.plot_bootstrap_ci, + config.advanced_correlation + and bool(adv.get("advanced_correlation", {}).get("bootstrap_ci") is not None), + ), + ( + "Correlation Network", + viz.plot_correlation_network, + config.advanced_correlation + and bool(adv.get("advanced_correlation", {}).get("network")), + ), + ( + "Distance Correlation Heatmap", + viz.plot_distance_correlation, + config.advanced_correlation + and bool(adv.get("advanced_correlation", {}).get("distance_correlation") is not None), + ), + # A3. 
Clustering + ( + "Elbow & Silhouette", + viz.plot_elbow_silhouette, + config.clustering and bool(adv.get("clustering", {}).get("kmeans")), + ), + ( + "Cluster Scatter", + viz.plot_cluster_scatter, + config.clustering and bool(adv.get("clustering", {}).get("kmeans")), + ), + ( + "Dendrogram", + viz.plot_dendrogram, + config.clustering and bool(adv.get("clustering", {}).get("hierarchical")), + ), + ( + "Cluster Profiles", + viz.plot_cluster_profiles, + config.clustering and bool(adv.get("clustering", {}).get("profiles") is not None), + ), + # A6. Advanced Anomaly + ( + "Anomaly Scatter", + viz.plot_anomaly_scatter, + config.advanced_anomaly + and bool(adv.get("advanced_anomaly_full", {}).get("isolation_forest")), + ), + ( + "Mahalanobis Distance", + viz.plot_mahalanobis_hist, + config.advanced_anomaly + and bool(adv.get("advanced_anomaly_full", {}).get("mahalanobis")), + ), + ( + "Consensus Anomaly Comparison", + viz.plot_consensus_comparison, + config.advanced_anomaly + and bool(adv.get("advanced_anomaly", {}).get("consensus")), + ), + # Enhancement: Insight Engine plots + ( + "Insight Severity Distribution", + viz.plot_insight_severity, + config.insight_engine + and bool(adv.get("insights", {}).get("all_insights")), + ), + ( + "Insight Categories", + viz.plot_insight_categories, + config.insight_engine + and bool(adv.get("insights", {}).get("all_insights")), + ), + ( + "Top Insights", + viz.plot_top_insights, + config.insight_engine + and bool(adv.get("insights", {}).get("all_insights")), + ), + ( + "Action Items Summary", + viz.plot_action_items, + config.insight_engine + and bool(adv.get("insights", {}).get("all_insights")), + ), + # Enhancement: Cross-Analysis plots + ( + "Anomaly by Cluster", + viz.plot_anomaly_by_cluster, + config.cross_analysis + and _hd(adv.get("cross_analysis", {}).get("outlier_by_cluster")), + ), + ( + "Missing Correlation (Cross)", + viz.plot_missing_correlation_cross, + config.cross_analysis + and _hd(adv.get("cross_analysis", 
{}).get("missing_correlation")), + ), + ( + "Simpson's Paradox", + viz.plot_simpson_paradox, + config.cross_analysis + and _hd(adv.get("cross_analysis", {}).get("simpson_paradox")), + ), + ( + "Importance vs Missing", + viz.plot_importance_vs_missing, + config.cross_analysis + and _hd(adv.get("cross_analysis", {}).get("importance_vs_missing")), + ), + ( + "Unified 2D Embedding", + viz.plot_unified_embedding, + config.cross_analysis + and _hd(adv.get("cross_analysis", {}).get("unified_2d_embedding")), + ), + # Enhancement: Dim-reduction plots + ( + "t-SNE Scatter", + viz.plot_tsne, + config.advanced_dimreduction + and len(stats._schema.numeric_columns if hasattr(stats, '_schema') else []) >= 2, + ), + ( + "PCA Biplot", + viz.plot_biplot, + config.advanced_dimreduction + and len(viz._schema.numeric_columns) >= 2, + ), + ( + "Explained Variance Curve", + viz.plot_explained_variance_curve, + config.pca and not stats.pca_variance.empty, + ), + ( + "Factor Loadings Heatmap", + viz.plot_factor_loadings_heatmap, + config.pca and not stats.pca_loadings.empty, + ), + ( + "Feature Contribution per PC", + viz.plot_feature_contribution, + config.pca and not stats.pca_loadings.empty, + ), + ] + plot_attempts.extend(adv_attempts) + + for name, fn, condition in plot_attempts: + if not condition: + continue + try: + fig = fn() + if fig is not None: + figures[name] = fig + except Exception as exc: + logger.debug("Figure '%s' skipped: %s", name, exc) + + return figures + + # -- Dict export ------------------------------------------------------- + + def to_dict(self) -> dict[str, Any]: + """Return analysis results as a dictionary.""" + result: dict[str, Any] = { + "dataset_name": self.dataset_name, + "shape": self.shape, + "schema": self.schema.summary_dict(), + "stats_summary": self.stats.summary.to_dict() if not self.stats.summary.empty else {}, + "correlation_matrix": ( + self.stats.correlation_matrix.to_dict() + if not self.stats.correlation_matrix.empty else {} + ), + 
"outlier_summary": ( + self.stats.outlier_summary.to_dict() + if not self.stats.outlier_summary.empty else {} + ), + "quality_scores": self.stats.quality_scores, + "pca_summary": self.stats.pca_summary, + "duplicate_stats": self.stats.duplicate_stats, + "warnings": self.warnings, + } + if self.subsets: + result["subsets"] = [ + { + "subset": sr.subset, + "split": sr.split, + "shape": sr.shape, + "schema": sr.schema.summary_dict(), + "stats_summary": sr.stats.summary.to_dict() if not sr.stats.summary.empty else {}, + "quality_scores": sr.stats.quality_scores, + "warnings": sr.warnings, + } + for sr in self.subsets + ] + return result + + +# ===================================================================== +# Analyzer +# ===================================================================== + +class Analyzer: + """Orchestrate the full analysis pipeline. + + Example:: + + analyzer = Analyzer() + report = analyzer.run("data.csv") + report.show() + """ + + def __init__(self) -> None: + self._loader = DataLoader() + + def run( + self, + source: str, + config: AnalysisConfig | None = None, + **kwargs: Any, + ) -> AnalysisReport: + """Execute the full analysis pipeline. + + Args: + source: Data source (file path or HuggingFace address). + config: Analysis configuration. Defaults to all-on. + **kwargs: Additional arguments passed to the loader. + + Returns: + :class:`AnalysisReport` instance. + """ + config = config or AnalysisConfig() + source = validate_source(source) + logger.info("Analysis started: %s", source) + + # 1. Load data + df = self._loader.load(source, **kwargs) + + # 2. 
Check for multi-subset HuggingFace data + has_partitions = "__subset__" in df.columns and "__split__" in df.columns + + if has_partitions: + return self._run_multi_subset(source, df, config) + + return self._run_single(source, df, config) + + # -- Single partition -------------------------------------------------- + + def _run_single( + self, source: str, df: pd.DataFrame, config: AnalysisConfig, + ) -> AnalysisReport: + t0 = time.perf_counter() + started_at = datetime.now(timezone.utc).isoformat(timespec="seconds") + + schema = infer_schema(df) + logger.info("Schema inference complete: %s", schema.summary_dict()) + + warnings: list[str] = [] + stats = self._compute_stats(df, schema, warnings, config) + + viz_df = stats.preprocessing.cleaned_df if stats.preprocessing else df + viz_schema = infer_schema(viz_df) if stats.preprocessing else schema + + dataset_name = ( + Path(source).stem + if "/" not in source or "://" not in source + else source + ) + viz = VizResult(_df=viz_df, _schema=viz_schema, _config=config, _stats=stats) + + elapsed = round(time.perf_counter() - t0, 2) + report = AnalysisReport( + dataset_name=dataset_name, + shape=(len(df), len(df.columns)), + schema=schema, + stats=stats, + viz=viz, + warnings=warnings, + config=config, + analysis_started_at=started_at, + analysis_duration_sec=elapsed, + ) + logger.info("Analysis complete: %s (%.2fs)", source, elapsed) + return report + + # -- Multi-subset ------------------------------------------------------ + + def _run_multi_subset( + self, source: str, df: pd.DataFrame, config: AnalysisConfig, + ) -> AnalysisReport: + t0 = time.perf_counter() + started_at = datetime.now(timezone.utc).isoformat(timespec="seconds") + + groups = df.groupby(["__subset__", "__split__"], sort=False) + + subset_reports: list[SubsetReport] = [] + all_warnings: list[str] = [] + + for (subset_name, split_name), group_df in groups: + part_df = group_df.drop(columns=["__subset__", "__split__"]).reset_index(drop=True) + + schema = 
infer_schema(part_df) + warnings: list[str] = [] + stats = self._compute_stats(part_df, schema, warnings, config) + + viz_df = stats.preprocessing.cleaned_df if stats.preprocessing else part_df + viz_schema = infer_schema(viz_df) if stats.preprocessing else schema + viz = VizResult(_df=viz_df, _schema=viz_schema, _config=config, _stats=stats) + + sr = SubsetReport( + subset=str(subset_name), + split=str(split_name), + shape=(len(part_df), len(part_df.columns)), + schema=schema, + stats=stats, + viz=viz, + warnings=warnings, + ) + subset_reports.append(sr) + all_warnings.extend(f"[{subset_name}/{split_name}] {w}" for w in warnings) + logger.info( + "Subset analysis complete: %s/%s (%d rows x %d cols)", + subset_name, split_name, len(part_df), len(part_df.columns), + ) + + first = subset_reports[0] + total_rows = sum(sr.shape[0] for sr in subset_reports) + + elapsed = round(time.perf_counter() - t0, 2) + report = AnalysisReport( + dataset_name=source, + shape=(total_rows, first.shape[1]), + schema=first.schema, + stats=first.stats, + viz=first.viz, + warnings=all_warnings, + subsets=subset_reports, + config=config, + analysis_started_at=started_at, + analysis_duration_sec=elapsed, + ) + logger.info( + "Multi-subset analysis complete: %s (%d subsets, %d total rows, %.2fs)", + source, len(subset_reports), total_rows, elapsed, + ) + return report + + # -- Stats computation ------------------------------------------------- + + def _compute_stats( + self, + df: pd.DataFrame, + schema: DataSchema, + warnings: list[str], + config: AnalysisConfig, + ) -> StatsResult: + """Perform all configured statistical analyses.""" + result = StatsResult() + + # 0. 
Preprocessing + analysis_df = df + if config.preprocessing: + try: + pp = Preprocessor(df, schema) + result.preprocessing = pp.run() + analysis_df = result.preprocessing.cleaned_df + schema = infer_schema(analysis_df) + + for log_entry in result.preprocessing.cleaning_log: + logger.info("Preprocessing: %s", log_entry) + if result.preprocessing.high_missing_columns: + for item in result.preprocessing.high_missing_columns: + warnings.append( + f"High missing ratio: {item['column']} " + f"({item['missing_ratio'] * 100:.1f}%)" + ) + if result.preprocessing.id_like_columns: + warnings.append( + f"ID-like columns detected: " + f"{', '.join(result.preprocessing.id_like_columns[:5])}" + ) + except Exception as exc: + logger.warning("Preprocessing failed: %s", exc) + + # 1. Descriptive statistics + if config.descriptive: + try: + desc = DescriptiveStats(analysis_df, schema) + result.summary = desc.summary() + result.numeric_summary = desc.numeric_summary() + result.categorical_summary = desc.categorical_summary() + except Exception as exc: + logger.warning("Descriptive stats failed: %s", exc) + + # 2. Distribution analysis + if config.distribution: + try: + dist = DistributionStats(analysis_df, schema) + result.distribution_info = dist.analyze() + except Exception as exc: + logger.warning("Distribution analysis failed: %s", exc) + + # 3. Correlation analysis + if config.correlation: + try: + corr = CorrelationStats(analysis_df, schema) + result.correlation_matrix = corr.pearson() + result.spearman_matrix = corr.spearman() + result.cramers_v_matrix = corr.cramers_v_matrix() + + try: + result.vif_table = corr.vif() + except Exception: + pass + + high_corrs = corr.high_correlations(threshold=config.correlation_threshold) + for col_a, col_b, val in high_corrs: + warnings.append(f"High correlation: {col_a} <-> {col_b} (r={val})") + except Exception as exc: + logger.warning("Correlation analysis failed: %s", exc) + + # 4. 
Missing data analysis (always run) + try: + miss = MissingStats(analysis_df, schema) + result.missing_info = miss.column_summary() + total_missing = miss.total_missing_ratio() + if total_missing > 0.1: + warnings.append( + f"Overall missing ratio is high: {total_missing * 100:.1f}%" + ) + except Exception as exc: + logger.warning("Missing data analysis failed: %s", exc) + + # 5. Outlier detection + if config.outlier: + try: + out = OutlierStats(analysis_df, schema) + kw: dict[str, Any] = {} + if config.outlier_method == "iqr": + kw["multiplier"] = config.outlier_threshold + else: + kw["threshold"] = config.outlier_threshold + result.outlier_summary = out.summary(method=config.outlier_method, **kw) + + if not result.outlier_summary.empty and "outlier_%" in result.outlier_summary.columns: + for col_name, row in result.outlier_summary.iterrows(): + if row.get("outlier_%", 0) > 10: + warnings.append( + f"High outlier ratio in '{col_name}': {row['outlier_%']:.1f}%" + ) + except Exception as exc: + logger.warning("Outlier detection failed: %s", exc) + + # 6. Categorical analysis + if config.categorical: + try: + cat = CategoricalStats(analysis_df, schema) + result.categorical_analysis = cat.summary() + result.chi_square_matrix = cat.chi_square_matrix() + except Exception as exc: + logger.warning("Categorical analysis failed: %s", exc) + + # 7. Feature importance + if config.feature_importance: + try: + fi = FeatureImportanceStats(analysis_df, schema) + result.feature_importance = fi.variance_ranking() + except Exception as exc: + logger.warning("Feature importance failed: %s", exc) + + # 8. PCA + if config.pca: + try: + pca = PCAStats( + analysis_df, schema, max_components=config.pca_max_components, + ) + result.pca_variance = pca.variance_explained() + result.pca_loadings = pca.loadings() + result.pca_summary = pca.summary() + except Exception as exc: + logger.warning("PCA analysis failed: %s", exc) + + # 9. 
Duplicates + if config.duplicates: + try: + dup = DuplicateStats(analysis_df, schema) + result.duplicate_stats = dup.summary() + except Exception as exc: + logger.warning("Duplicate detection failed: %s", exc) + + # 10. Quality score + if config.quality_score: + try: + qs = QualityStats(analysis_df, schema) + result.quality_scores = qs.summary() + result.quality_by_column = qs.column_quality() + except Exception as exc: + logger.warning("Quality scoring failed: %s", exc) + + # 11. Advanced analyses + if config.advanced: + self._compute_advanced_stats(analysis_df, schema, result, config) + + return result + + # -- Advanced stats computation ---------------------------------------- + + def _compute_advanced_stats( + self, + df: pd.DataFrame, + schema: DataSchema, + result: StatsResult, + config: AnalysisConfig, + ) -> None: + """Compute advanced analysis modules and populate result.advanced_stats.""" + adv = result.advanced_stats + + # A1. Advanced Distribution + if config.advanced_distribution: + try: + from f2a.stats.advanced_distribution import AdvancedDistributionStats + ad = AdvancedDistributionStats( + df, schema, + n_fits=config.n_distribution_fits, + max_sample=config.max_sample_for_advanced, + ) + adv["advanced_distribution"] = ad.summary() + adv["ecdf_data"] = ad.ecdf() + except Exception as exc: + logger.debug("Advanced distribution failed: %s", exc) + + # A2. Advanced Correlation + if config.advanced_correlation: + try: + from f2a.stats.advanced_correlation import AdvancedCorrelationStats + ac = AdvancedCorrelationStats( + df, schema, + bootstrap_iterations=config.bootstrap_iterations, + max_sample=config.max_sample_for_advanced, + ) + adv["advanced_correlation"] = ac.summary() + except Exception as exc: + logger.debug("Advanced correlation failed: %s", exc) + + # A3. 
Clustering + if config.clustering: + try: + from f2a.stats.clustering import ClusteringStats + cl = ClusteringStats( + df, schema, + max_k=config.max_cluster_k, + max_sample=config.max_sample_for_advanced, + ) + adv["clustering"] = cl.summary() + except Exception as exc: + logger.debug("Clustering failed: %s", exc) + + # A4. Dimensionality Reduction + if config.advanced_dimreduction: + try: + from f2a.stats.advanced_dimreduction import AdvancedDimReductionStats + dr = AdvancedDimReductionStats( + df, schema, + tsne_perplexity=config.tsne_perplexity, + max_sample=config.max_sample_for_advanced, + ) + adv["dimreduction"] = dr.summary() + except Exception as exc: + logger.debug("Dimensionality reduction failed: %s", exc) + + # A5. Feature Insights + if config.feature_insights: + try: + from f2a.stats.feature_insights import FeatureInsightsStats + fi = FeatureInsightsStats( + df, schema, + max_sample=config.max_sample_for_advanced, + ) + adv["feature_insights"] = fi.summary() + except Exception as exc: + logger.debug("Feature insights failed: %s", exc) + + # A6. Advanced Anomaly Detection + if config.advanced_anomaly: + try: + from f2a.stats.advanced_anomaly import AdvancedAnomalyStats + aa = AdvancedAnomalyStats( + df, schema, + max_sample=config.max_sample_for_advanced, + ) + stripped, full = aa.summary_full() + adv["advanced_anomaly"] = stripped + adv["advanced_anomaly_full"] = full + except Exception as exc: + logger.debug("Advanced anomaly detection failed: %s", exc) + + # A7. Statistical Tests + if config.statistical_tests: + try: + from f2a.stats.statistical_tests import StatisticalTests + st = StatisticalTests(df, schema) + adv["statistical_tests"] = st.summary() + except Exception as exc: + logger.debug("Statistical tests failed: %s", exc) + + # A8. 
Data Profiling (aggregated summary) + if config.data_profiling: + try: + profile: dict[str, Any] = { + "n_rows": len(df), + "n_cols": len(df.columns), + "memory_mb": round(df.memory_usage(deep=True).sum() / 1024 / 1024, 2), + "numeric_ratio": round( + len(schema.numeric_columns) / max(len(df.columns), 1), 3, + ), + "categorical_ratio": round( + len(schema.categorical_columns) / max(len(df.columns), 1), 3, + ), + "missing_ratio": round( + df.isnull().sum().sum() / max(df.size, 1), 4, + ), + "duplicate_row_ratio": round( + df.duplicated().sum() / max(len(df), 1), 4, + ), + } + adv["data_profiling"] = profile + except Exception as exc: + logger.debug("Data profiling failed: %s", exc) + + # A9. Insight Engine (v2 enhancement) + if config.insight_engine: + try: + from f2a.stats.insight_engine import InsightEngine + ie = InsightEngine(result, schema) + insights = ie.generate() + adv["insights"] = { + "all_insights": [ + { + "type": i.type.value, + "severity": i.severity.value, + "category": i.category, + "title": i.title, + "description": i.description, + "affected_columns": i.affected_columns, + "evidence": i.evidence, + "action_items": i.action_items, + "priority_score": i.priority_score, + } + for i in insights + ], + "summary": ie.summary_dict(), + "executive_summary": ie.executive_summary(), + } + except Exception as exc: + logger.debug("Insight engine failed: %s", exc) + + # A10. 
Cross Analysis (v2 enhancement) + if config.cross_analysis: + try: + from f2a.stats.cross_analysis import CrossAnalysis + ca = CrossAnalysis(df, schema, result) + cross_results: dict[str, Any] = {} + + try: + cross_results["outlier_by_cluster"] = ca.outlier_by_cluster() + except Exception: + pass + try: + cross_results["missing_correlation"] = ca.missing_correlation() + except Exception: + pass + try: + cross_results["distribution_outlier_fitness"] = ca.distribution_outlier_fitness() + except Exception: + pass + try: + cross_results["simpson_paradox"] = ca.simpson_paradox() + except Exception: + pass + try: + cross_results["importance_vs_missing"] = ca.importance_vs_missing() + except Exception: + pass + try: + cross_results["unified_2d_embedding"] = ca.unified_2d_embedding() + except Exception: + pass + + adv["cross_analysis"] = cross_results + except Exception as exc: + logger.debug("Cross analysis failed: %s", exc) + + # A11. Column Role Classification (v2 enhancement) + if config.column_role: + try: + from f2a.stats.column_role import ColumnRoleClassifier + crc = ColumnRoleClassifier(df, schema) + roles = crc.classify() + adv["column_roles"] = { + "roles": [ + { + "column": r.column, + "primary_role": r.primary_role, + "confidence": r.confidence, + "secondary_role": r.secondary_role, + "properties": r.properties, + } + for r in roles + ], + "summary_df": crc.summary(), + } + except Exception as exc: + logger.debug("Column role classification failed: %s", exc) + + # A12. 
ML Readiness (v2 enhancement) + if config.ml_readiness: + try: + from f2a.stats.ml_readiness import MLReadinessEvaluator + roles_df = adv.get("column_roles", {}).get("summary_df") + mle = MLReadinessEvaluator(df, schema, result, column_roles=roles_df) + readiness = mle.evaluate() + adv["ml_readiness"] = { + "overall": readiness.overall, + "grade": readiness.grade, + "dimensions": readiness.dimensions, + "blocking_issues": readiness.blocking_issues, + "suggestions": readiness.suggestions, + "details": readiness.details, + } + except Exception as exc: + logger.debug("ML readiness evaluation failed: %s", exc) + + +# ===================================================================== +# Public entry point +# ===================================================================== + +def analyze( + source: str, + config: AnalysisConfig | None = None, + **kwargs: Any, +) -> AnalysisReport: + """Analyze a data source and return a comprehensive report. + + This function is the main entry point for ``f2a``. + + Args: + source: File path or HuggingFace dataset address. + config: :class:`AnalysisConfig` to control which analyses run. + Defaults to all analyses enabled. + **kwargs: Additional arguments passed to the data loader. + + Returns: + :class:`AnalysisReport` with statistics, visualization, and report + generation capabilities. + + Example:: + + import f2a + report = f2a.analyze("sales.csv") + report.show() + report.to_html("output/") + """ + analyzer = Analyzer() + return analyzer.run(source, config=config, **kwargs) diff --git a/f2a/core/config.py b/f2a/core/config.py new file mode 100644 index 0000000..e9aeb91 --- /dev/null +++ b/f2a/core/config.py @@ -0,0 +1,155 @@ +"""Analysis configuration module. + +Provides :class:`AnalysisConfig` to control which analysis steps are executed. +All steps are enabled by default. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass +class AnalysisConfig: + """Configuration for the f2a analysis pipeline. + + All analysis steps are enabled by default. Set individual flags to + ``False`` to skip specific analyses. + + Example:: + + import f2a + from f2a import AnalysisConfig + + # Run only descriptive stats and correlation + config = AnalysisConfig( + distribution=False, + outlier=False, + categorical=False, + feature_importance=False, + pca=False, + duplicates=False, + ) + report = f2a.analyze("data.csv", config=config) + """ + + # ── Analysis toggles ────────────────────────────────── + preprocessing: bool = True + descriptive: bool = True + distribution: bool = True + correlation: bool = True + outlier: bool = True + categorical: bool = True + feature_importance: bool = True + pca: bool = True + duplicates: bool = True + quality_score: bool = True + + # ── Visualization toggle ────────────────────────────── + visualizations: bool = True + + # ── Sub-options ─────────────────────────────────────── + outlier_method: str = "iqr" + """``"iqr"`` (default) or ``"zscore"``.""" + + outlier_threshold: float = 1.5 + """IQR multiplier (default 1.5) or z-score cutoff (use 3.0 with zscore).""" + + correlation_threshold: float = 0.9 + """Absolute correlation coefficient threshold for high-correlation warnings.""" + + pca_max_components: int = 10 + """Maximum number of PCA components to compute.""" + + max_categories: int = 50 + """Maximum categories to display in categorical charts.""" + + max_plot_columns: int = 20 + """Maximum columns per plot grid (prevents overly large figures).""" + + # ── Advanced analysis ───────────────────────────────── + advanced: bool = True + """Enable the Advanced analysis tab (clustering, anomaly, etc.).""" + + advanced_distribution: bool = True + """Best-fit distribution, power transform, Jarque-Bera, ECDF.""" + + advanced_correlation: bool = True + """Partial correlation, MI 
matrix, bootstrap CI, network graph.""" + + clustering: bool = True + """K-Means, DBSCAN, hierarchical clustering.""" + + advanced_dimreduction: bool = True + """t-SNE, UMAP (optional), Factor Analysis.""" + + feature_insights: bool = True + """Interaction, monotonic, binning, cardinality, leakage detection.""" + + advanced_anomaly: bool = True + """Isolation Forest, LOF, Mahalanobis, consensus.""" + + statistical_tests: bool = True + """Levene, Kruskal-Wallis, Mann-Whitney, goodness-of-fit, Grubbs.""" + + data_profiling: bool = True + """Automated insights, type recommendation, health dashboard.""" + + # ── Enhancement modules (v2) ────────────────────────── + insight_engine: bool = True + """Auto-generate prioritised natural-language insights.""" + + cross_analysis: bool = True + """Cross-dimensional analysis (outlier × cluster, Simpson, etc.).""" + + column_role: bool = True + """Auto-detect column semantic roles (ID, target, feature, …).""" + + ml_readiness: bool = True + """Multi-dimensional ML-readiness scoring.""" + + # ── Advanced sub-options ────────────────────────────── + max_cluster_k: int = 10 + """Maximum k for K-Means elbow search.""" + + tsne_perplexity: float = 30.0 + """t-SNE perplexity parameter.""" + + bootstrap_iterations: int = 1000 + """Number of bootstrap resamples for correlation CI.""" + + max_sample_for_advanced: int = 5000 + """Max rows sampled for expensive advanced analyses (t-SNE, UMAP, etc.).""" + + n_distribution_fits: int = 7 + """Number of candidate distributions to fit.""" + + @staticmethod + def minimal() -> "AnalysisConfig": + """Return a config with only core analyses (descriptive + missing).""" + return AnalysisConfig( + preprocessing=False, + distribution=False, + correlation=False, + outlier=False, + categorical=False, + feature_importance=False, + pca=False, + duplicates=False, + quality_score=False, + advanced=False, + ) + + @staticmethod + def fast() -> "AnalysisConfig": + """Return a config that skips expensive analyses 
(PCA, feature importance, advanced).""" + return AnalysisConfig( + pca=False, + feature_importance=False, + advanced=False, + ) + + @staticmethod + def basic_only() -> "AnalysisConfig": + """Return a config with all Basic analyses on, all Advanced off.""" + return AnalysisConfig(advanced=False) diff --git a/f2a/core/loader.py b/f2a/core/loader.py new file mode 100644 index 0000000..4f51729 --- /dev/null +++ b/f2a/core/loader.py @@ -0,0 +1,728 @@ +"""Data loading module — loads DataFrames from various sources. + +Supported formats: + - **Delimited text**: CSV, TSV, TXT (auto-detect), DAT, TAB, FWF (fixed-width) + - **JSON family**: JSON, JSONL, NDJSON + - **Spreadsheets**: XLSX, XLS, XLSM, XLSB, ODS + - **Binary/columnar**: Parquet, Feather, Arrow IPC, ORC, HDF5, Pickle + - **Statistical packages**: SAS (.sas7bdat, .xpt), Stata (.dta), SPSS (.sav, .zsav, .por) + - **Databases**: SQLite, DuckDB + - **Markup**: XML, HTML (tables) + - **Remote**: HTTP/HTTPS URL (auto-routing by extension) + - **Platforms**: HuggingFace Datasets (hf://...) +""" + +from __future__ import annotations + +import csv +import io +from pathlib import Path +from typing import Any + +import pandas as pd + +from f2a.utils.exceptions import DataLoadError, EmptyDataError, UnsupportedFormatError +from f2a.utils.logging import get_logger +from f2a.utils.validators import HF_PREFIXES, HF_URL_PATTERN, URL_PREFIXES, detect_source_type + +logger = get_logger(__name__) + + +class DataLoader: + """Load ``pd.DataFrame`` from various data sources. + + Automatically detects the format from the input string (file path, URL, + HuggingFace address, etc.) and selects the appropriate loader. 
+ + Example: + >>> loader = DataLoader() + >>> df = loader.load("data.csv") + >>> df = loader.load("hf://imdb", split="train") + >>> df = loader.load("https://example.com/data.parquet") + >>> df = loader.load("results.db", table="experiments") + """ + + # ── Source type → loader method mapping ──────────────────── + # Register new formats here; they will be auto-routed. + _LOADER_REGISTRY: dict[str, str] = { + # Delimited text + "csv": "_load_csv", + "tsv": "_load_tsv", + "delimited": "_load_delimited", + "fwf": "_load_fwf", + # JSON family + "json": "_load_json", + "jsonl": "_load_jsonl", + # Spreadsheets + "excel": "_load_excel", + "ods": "_load_ods", + # Binary / columnar + "parquet": "_load_parquet", + "feather": "_load_feather", + "arrow_ipc": "_load_arrow_ipc", + "orc": "_load_orc", + "hdf5": "_load_hdf5", + "pickle": "_load_pickle", + # Statistical packages + "sas": "_load_sas", + "sas_xport": "_load_sas_xport", + "stata": "_load_stata", + "spss": "_load_spss", + # Databases + "sqlite": "_load_sqlite", + "duckdb": "_load_duckdb", + # Markup + "xml": "_load_xml", + "html": "_load_html", + # Remote/URL + "url_auto": "_load_url_auto", + # HuggingFace + "hf": "_load_huggingface", + } + + def load(self, source: str, **kwargs: Any) -> pd.DataFrame: + """Analyze the source string and call the appropriate loader. + + Args: + source: File path, URL, or HuggingFace dataset address. + **kwargs: Additional arguments passed to the loader. + + Returns: + Loaded DataFrame. + + Raises: + UnsupportedFormatError: Unsupported format. + DataLoadError: Error during loading. + EmptyDataError: Loaded result is an empty DataFrame. 
+ """ + source_type = detect_source_type(source) + logger.info("Source type detected: %s → %s", source, source_type) + + method_name = self._LOADER_REGISTRY.get(source_type) + if method_name is None: + raise UnsupportedFormatError(source, detected=source_type) + + loader_fn = getattr(self, method_name, None) + if loader_fn is None: + raise UnsupportedFormatError(source, detected=source_type) + + try: + df = loader_fn(source, **kwargs) + except (UnsupportedFormatError, DataLoadError, EmptyDataError): + raise + except Exception as exc: + raise DataLoadError(source, reason=str(exc)) from exc + + if df is None or df.empty: + raise EmptyDataError(source) + + logger.info("Loading complete: %d rows × %d cols (%s)", len(df), len(df.columns), source_type) + return df + + @classmethod + def supported_formats(cls) -> list[str]: + """Return list of supported source types.""" + return sorted(cls._LOADER_REGISTRY.keys()) + + # ================================================================ + # Delimited text (CSV / TSV / auto-detect) + # ================================================================ + + @staticmethod + def _load_csv(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a CSV file.""" + kwargs.setdefault("encoding", "utf-8") + try: + return pd.read_csv(source, **kwargs) + except UnicodeDecodeError: + kwargs["encoding"] = "cp949" # fallback for Korean CSV + return pd.read_csv(source, **kwargs) + + @staticmethod + def _load_tsv(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a TSV file.""" + kwargs.setdefault("sep", "\t") + return pd.read_csv(source, **kwargs) + + @staticmethod + def _load_delimited(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a text file with auto-detected delimiter. + + Uses ``csv.Sniffer`` to infer the delimiter; falls back to common delimiters. 
+ """ + if "sep" in kwargs or "delimiter" in kwargs: + return pd.read_csv(source, **kwargs) + + # Step 1: auto-detect delimiter with csv.Sniffer + try: + with open(source, "r", encoding="utf-8", errors="replace") as f: + sample = f.read(8192) + dialect = csv.Sniffer().sniff(sample, delimiters=",\t|;: ") + kwargs["sep"] = dialect.delimiter + logger.info("Delimiter auto-detected: %r", dialect.delimiter) + return pd.read_csv(source, **kwargs) + except csv.Error: + pass + + # Step 2: try common delimiters sequentially + for sep in [",", "\t", ";", "|", " "]: + try: + df = pd.read_csv(source, sep=sep, nrows=5, **kwargs) + if len(df.columns) > 1: + logger.info("Delimiter confirmed: %r", sep) + return pd.read_csv(source, sep=sep, **kwargs) + except Exception: + continue + + # Last resort: load as single column + return pd.read_csv(source, **kwargs) + + @staticmethod + def _load_fwf(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a fixed-width format (FWF) file.""" + return pd.read_fwf(source, **kwargs) + + # ================================================================ + # JSON family + # ================================================================ + + @staticmethod + def _load_json(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a JSON file (array or records).""" + try: + return pd.read_json(source, **kwargs) + except ValueError: + # Attempt normalize for nested JSON + import json + + with open(source, "r", encoding="utf-8") as f: + data = json.load(f) + if isinstance(data, list): + return pd.json_normalize(data) + elif isinstance(data, dict): + # Look for a key containing an array of records + for key, val in data.items(): + if isinstance(val, list) and len(val) > 0 and isinstance(val[0], dict): + logger.info("Nested JSON key detected: %s", key) + return pd.json_normalize(val) + return pd.json_normalize(data) + raise + + @staticmethod + def _load_jsonl(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a JSONL / NDJSON file.""" + 
        kwargs.setdefault("lines", True)
        return pd.read_json(source, **kwargs)

    # ================================================================
    # Spreadsheets
    # ================================================================

    @staticmethod
    def _load_excel(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an Excel file (.xlsx, .xls, .xlsm, .xlsb).

        If multiple sheets exist and ``sheet_name`` is not specified,
        the first sheet is loaded with a warning.
        """
        try:
            import openpyxl  # noqa: F401
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'openpyxl' for Excel support: pip install f2a[excel]",
            ) from exc

        # xlsb files require a dedicated engine
        if Path(source).suffix.lower() == ".xlsb":
            try:
                import pyxlsb  # noqa: F401
                # setdefault: a caller-supplied engine kwarg wins over "pyxlsb"
                kwargs.setdefault("engine", "pyxlsb")
            except ImportError as exc:
                raise DataLoadError(
                    source,
                    reason="Install 'pyxlsb' for xlsb support: pip install pyxlsb",
                ) from exc

        result = pd.read_excel(source, **kwargs)

        # read_excel returns a dict when multiple sheets exist (sheet_name=None)
        if isinstance(result, dict):
            sheet_names = list(result.keys())
            logger.warning(
                "%d sheets found: %s — using first sheet '%s'.",
                len(sheet_names),
                sheet_names,
                sheet_names[0],
            )
            return result[sheet_names[0]]
        return result

    @staticmethod
    def _load_ods(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an ODS (OpenDocument Spreadsheet) file."""
        try:
            import odf  # noqa: F401
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'odfpy' for ODS support: pip install odfpy",
            ) from exc
        kwargs.setdefault("engine", "odf")
        return pd.read_excel(source, **kwargs)

    # ================================================================
    # Binary / columnar formats
    # ================================================================

    @staticmethod
    def _load_parquet(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a Parquet file."""
        # pandas raises ImportError lazily when no parquet engine is installed
        try:
            return pd.read_parquet(source, **kwargs)
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'pyarrow' for Parquet support: pip install f2a[parquet]",
            ) from exc

    @staticmethod
    def _load_feather(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a Feather (Arrow IPC v2) file."""
        try:
            return pd.read_feather(source, **kwargs)
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'pyarrow' for Feather support: pip install f2a[parquet]",
            ) from exc

    @staticmethod
    def _load_arrow_ipc(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an Apache Arrow IPC file."""
        try:
            # 'pa' is imported only to prove pyarrow is available; the work
            # below uses the ipc submodule.
            import pyarrow as pa
            import pyarrow.ipc as ipc
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'pyarrow' for Arrow IPC support: pip install f2a[parquet]",
            ) from exc

        with open(source, "rb") as f:
            reader = ipc.open_file(f)
            table = reader.read_all()
        return table.to_pandas(**kwargs)

    @staticmethod
    def _load_orc(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an ORC file."""
        try:
            return pd.read_orc(source, **kwargs)
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'pyarrow' for ORC support: pip install f2a[parquet]",
            ) from exc

    @staticmethod
    def _load_hdf5(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an HDF5 file."""
        try:
            import tables  # noqa: F401
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'tables' for HDF5 support: pip install tables",
            ) from exc

        key = kwargs.pop("key", None)
        # NOTE(review): truthiness check — a falsy key (e.g. "") falls through
        # to auto-discovery below; confirm that is intended.
        if key:
            return pd.read_hdf(source, key=key, **kwargs)

        # If no key specified, use the first key
        with pd.HDFStore(source, mode="r") as store:
            keys = store.keys()
            if not keys:
                raise DataLoadError(source, reason="No datasets found in HDF5 file.")
            if len(keys) > 1:
                logger.warning(
                    "HDF5 contains %d keys: %s — using first key '%s'.",
                    len(keys),
                    keys,
                    keys[0],
                )
            # NOTE(review): read_hdf re-opens the file while the read-only
            # HDFStore is still open — presumably fine for mode="r"; confirm.
            return pd.read_hdf(source, key=keys[0], **kwargs)

    @staticmethod
    def _load_pickle(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a Pickle file.

        Warning:
            Only use pickle with trusted sources.
        """
        # SECURITY: unpickling executes arbitrary code; only the warning below
        # guards against untrusted input.
        logger.warning("Loading pickle: verify this is a trusted source — %s", source)
        return pd.read_pickle(source, **kwargs)

    # ================================================================
    # Statistical package formats
    # ================================================================

    @staticmethod
    def _load_sas(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a SAS data file (.sas7bdat)."""
        kwargs.setdefault("format", "sas7bdat")
        return pd.read_sas(source, **kwargs)

    @staticmethod
    def _load_sas_xport(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a SAS Transport file (.xpt)."""
        kwargs.setdefault("format", "xport")
        return pd.read_sas(source, **kwargs)

    @staticmethod
    def _load_stata(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a Stata file (.dta)."""
        return pd.read_stata(source, **kwargs)

    @staticmethod
    def _load_spss(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an SPSS file (.sav, .zsav, .por)."""
        try:
            import pyreadstat  # noqa: F401
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'pyreadstat' for SPSS support: pip install pyreadstat",
            ) from exc
        return pd.read_spss(source, **kwargs)

    # ================================================================
    # Databases
    # ================================================================

    @staticmethod
    def _load_sqlite(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a table from a SQLite database.

        Args:
            source: .db / .sqlite file path.
            **kwargs:
                table (str): Table name to load. Defaults to the first table.
                query (str): Direct SQL query. Takes precedence over ``table``.
+ """ + import sqlite3 + + table = kwargs.pop("table", None) + query = kwargs.pop("query", None) + conn = sqlite3.connect(source) + + try: + if query: + return pd.read_sql_query(query, conn, **kwargs) + + # Query table list + tables = pd.read_sql_query( + "SELECT name FROM sqlite_master WHERE type='table'", conn + )["name"].tolist() + + if not tables: + raise DataLoadError(source, reason="No tables found in SQLite database.") + + if table is None: + table = tables[0] + if len(tables) > 1: + logger.warning( + "SQLite contains %d tables: %s — using '%s'.", + len(tables), + tables, + table, + ) + + if table not in tables: + raise DataLoadError( + source, + reason=f"Table '{table}' not found. Available: {tables}", + ) + + return pd.read_sql_query(f'SELECT * FROM "{table}"', conn, **kwargs) + finally: + conn.close() + + @staticmethod + def _load_duckdb(source: str, **kwargs: Any) -> pd.DataFrame: + """Load a table from a DuckDB database.""" + try: + import duckdb + except ImportError as exc: + raise DataLoadError( + source, + reason="Install 'duckdb' for DuckDB support: pip install duckdb", + ) from exc + + table = kwargs.pop("table", None) + query = kwargs.pop("query", None) + conn = duckdb.connect(source, read_only=True) + + try: + if query: + return conn.execute(query).fetchdf() + + tables = conn.execute("SHOW TABLES").fetchdf() + table_names = tables.iloc[:, 0].tolist() if not tables.empty else [] + + if not table_names: + raise DataLoadError(source, reason="No tables found in DuckDB database.") + + if table is None: + table = table_names[0] + if len(table_names) > 1: + logger.warning( + "DuckDB contains %d tables: %s — using '%s'.", + len(table_names), + table_names, + table, + ) + + return conn.execute(f'SELECT * FROM "{table}"').fetchdf() + finally: + conn.close() + + # ================================================================ + # Markup (XML / HTML) + # ================================================================ + + @staticmethod + def 
_load_xml(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load an XML file."""
        try:
            import lxml  # noqa: F401
        except ImportError:
            # Fallback parser is only forced when lxml is absent; otherwise
            # pandas' default (lxml) is used.
            logger.info("lxml not installed — using built-in etree parser.")
            kwargs.setdefault("parser", "etree")
        return pd.read_xml(source, **kwargs)

    @staticmethod
    def _load_html(source: str, **kwargs: Any) -> pd.DataFrame:
        """Extract tables from an HTML file.

        Returns the largest table if multiple tables are found.
        """
        try:
            import lxml  # noqa: F401
        except ImportError:
            logger.info("lxml not installed — using bs4 (html.parser).")
            kwargs.setdefault("flavor", "bs4")

        # table_index is an f2a extension, stripped before pd.read_html sees it
        table_index = kwargs.pop("table_index", None)
        tables = pd.read_html(source, **kwargs)

        if not tables:
            raise DataLoadError(source, reason="No tables found in HTML file.")

        if table_index is not None:
            # NOTE(review): a negative table_index passes this bound check and
            # indexes from the end (Python semantics) — confirm intended.
            if table_index >= len(tables):
                raise DataLoadError(
                    source,
                    reason=f"table_index={table_index} out of range (total {len(tables)} tables)",
                )
            return tables[table_index]

        # Select the largest table
        if len(tables) > 1:
            # size = rows x columns, per table
            sizes = [(i, len(t) * len(t.columns)) for i, t in enumerate(tables)]
            best_idx = max(sizes, key=lambda x: x[1])[0]
            logger.warning(
                "Found %d tables in HTML — using largest table #%d.",
                len(tables),
                best_idx,
            )
            return tables[best_idx]

        return tables[0]

    # ================================================================
    # URL (remote files)
    # ================================================================

    def _load_url_auto(self, source: str, **kwargs: Any) -> pd.DataFrame:
        """Download and load a file from a URL.

        Infers format by analyzing Content-Type header and URL path.
+ """ + import tempfile + from urllib.parse import urlparse + from urllib.request import urlopen, Request + + logger.info("Starting URL download: %s", source) + + req = Request(source, headers={"User-Agent": "f2a/0.1"}) + with urlopen(req, timeout=60) as resp: + content_type = resp.headers.get("Content-Type", "").lower() + data = resp.read() + + # Infer format from Content-Type + ct_map = { + "text/csv": "csv", + "text/tab-separated-values": "tsv", + "application/json": "json", + "application/x-ndjson": "jsonl", + "application/vnd.apache.parquet": "parquet", + "application/vnd.openxmlformats": "excel", + "application/vnd.ms-excel": "excel", + "text/xml": "xml", + "application/xml": "xml", + "text/html": "html", + } + + detected_type: str | None = None + for ct_key, fmt in ct_map.items(): + if ct_key in content_type: + detected_type = fmt + break + + if detected_type is None: + # Check URL path extension + from f2a.utils.validators import SUPPORTED_EXTENSIONS + + path_ext = Path(urlparse(source).path).suffix.lower() + detected_type = SUPPORTED_EXTENSIONS.get(path_ext, "csv") + + # Save to temp file and re-load with the appropriate loader + suffix_map = { + "csv": ".csv", + "tsv": ".tsv", + "json": ".json", + "jsonl": ".jsonl", + "parquet": ".parquet", + "excel": ".xlsx", + "xml": ".xml", + "html": ".html", + } + suffix = suffix_map.get(detected_type, ".tmp") + + with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp: + tmp.write(data) + tmp_path = tmp.name + + logger.info("URL data → temp file (%s): %s", detected_type, tmp_path) + + method_name = self._LOADER_REGISTRY.get(detected_type) + if method_name and hasattr(self, method_name): + try: + return getattr(self, method_name)(tmp_path, **kwargs) + finally: + Path(tmp_path).unlink(missing_ok=True) + + # Default: try as CSV + try: + return self._load_csv(tmp_path, **kwargs) + finally: + Path(tmp_path).unlink(missing_ok=True) + + # ================================================================ + # 
HuggingFace Datasets
    # ================================================================

    @staticmethod
    def _load_huggingface(source: str, **kwargs: Any) -> pd.DataFrame:
        """Load a HuggingFace dataset.

        When neither ``config`` nor ``split`` is specified, all available
        configs × splits are discovered and concatenated into a single
        DataFrame with extra ``__subset__`` and ``__split__`` columns so the
        caller can distinguish each partition.

        To load only one specific partition, pass ``config`` and/or
        ``split`` explicitly.
        """
        try:
            from datasets import get_dataset_config_names, get_dataset_split_names, load_dataset
        except ImportError as exc:
            raise DataLoadError(
                source,
                reason="Install 'datasets' for HuggingFace support: pip install f2a[hf]",
            ) from exc

        # Extract dataset name from various formats
        dataset_name = source

        # HuggingFace URL: https://huggingface.co/datasets/org/name[/viewer/config[/split]]
        hf_match = HF_URL_PATTERN.match(dataset_name)
        if hf_match:
            dataset_name = hf_match.group("dataset")
            # Extract config/split from /viewer/... path if present.
            # URL-derived values are injected into kwargs (unless the caller
            # already supplied them) and popped back out below.
            url_config = hf_match.group("config")
            url_split = hf_match.group("split")
            if url_config and "config" not in kwargs:
                kwargs["config"] = url_config
            if url_split and "split" not in kwargs:
                kwargs["split"] = url_split
        else:
            # hf:// or huggingface:// prefix
            for prefix in HF_PREFIXES:
                if dataset_name.startswith(prefix):
                    dataset_name = dataset_name[len(prefix) :]
                    break

        # Strip trailing slashes
        dataset_name = dataset_name.rstrip("/")

        config = kwargs.pop("config", None)
        split = kwargs.pop("split", None)

        # --- explicit single-partition mode ---
        if config is not None or split is not None:
            split = split or "train"
            try:
                if config:
                    ds = load_dataset(dataset_name, config, split=split, **kwargs)
                else:
                    ds = load_dataset(dataset_name, split=split, **kwargs)
                return ds.to_pandas()
            except Exception as exc:
                raise DataLoadError(source, reason=str(exc)) from exc

        # --- auto-discover all configs × splits ---
        # Discovery failures degrade to a single unnamed config / "train"
        # split rather than aborting the load.
        try:
            configs = get_dataset_config_names(dataset_name)
        except Exception:
            configs = [None]

        if not configs:
            configs = [None]

        frames: list[pd.DataFrame] = []
        for cfg in configs:
            try:
                if cfg is not None:
                    splits = get_dataset_split_names(dataset_name, cfg)
                else:
                    splits = get_dataset_split_names(dataset_name)
            except Exception:
                splits = ["train"]

            for sp in splits:
                # Per-partition failures are logged and skipped; only a total
                # failure (no frames at all) raises below.
                try:
                    if cfg is not None:
                        ds = load_dataset(dataset_name, cfg, split=sp, **kwargs)
                    else:
                        ds = load_dataset(dataset_name, split=sp, **kwargs)
                    df_part = ds.to_pandas()
                    df_part["__subset__"] = cfg or "default"
                    df_part["__split__"] = sp
                    frames.append(df_part)
                    logger.info(
                        "HF partition loaded: config=%s split=%s (%d rows)",
                        cfg or "default", sp, len(df_part),
                    )
                except Exception as exc:
                    logger.warning(
                        "Failed to load config=%s split=%s: %s",
                        cfg, sp, exc,
                    )

        if not frames:
            raise DataLoadError(source, reason="No loadable configs/splits found.")

        return pd.concat(frames,
ignore_index=True)
diff --git a/f2a/core/preprocessor.py b/f2a/core/preprocessor.py
new file mode 100644
index 0000000..c156b19
--- /dev/null
+++ b/f2a/core/preprocessor.py
@@ -0,0 +1,215 @@
"""Data preprocessing and data-quality detection module.

The :class:`Preprocessor` inspects raw data, detects quality issues, and
produces a lightly-cleaned copy suitable for downstream analysis.

Cleaning is *non-destructive* — the original DataFrame is never mutated.
"""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any

import numpy as np
import pandas as pd

from f2a.core.schema import DataSchema
from f2a.utils.logging import get_logger

logger = get_logger(__name__)


@dataclass
class PreprocessingResult:
    """Results of the preprocessing step."""

    # (rows, cols) before / after cleaning
    original_shape: tuple[int, int] = (0, 0)
    cleaned_shape: tuple[int, int] = (0, 0)

    # Detected issues
    constant_columns: list[str] = field(default_factory=list)
    duplicate_rows_count: int = 0
    high_missing_columns: list[dict[str, Any]] = field(default_factory=list)
    id_like_columns: list[str] = field(default_factory=list)
    mixed_type_columns: list[str] = field(default_factory=list)
    # NOTE(review): highly_correlated_pairs is never populated by run() in
    # this chunk — presumably filled elsewhere; confirm.
    highly_correlated_pairs: list[tuple[str, str, float]] = field(default_factory=list)
    infinite_value_columns: list[str] = field(default_factory=list)

    # Applied transformations
    cleaning_log: list[str] = field(default_factory=list)

    # Quality indicators
    completeness: float = 1.0

    # The cleaned DataFrame (not shown in repr)
    cleaned_df: pd.DataFrame = field(default_factory=pd.DataFrame, repr=False)

    def summary_dict(self) -> dict[str, Any]:
        """Return a concise summary as a dictionary."""
        return {
            "original_rows": self.original_shape[0],
            "original_cols": self.original_shape[1],
            "cleaned_rows": self.cleaned_shape[0],
            "cleaned_cols": self.cleaned_shape[1],
            "constant_columns": len(self.constant_columns),
            "duplicate_rows": self.duplicate_rows_count,
            "high_missing_columns": len(self.high_missing_columns),
            "id_like_columns": len(self.id_like_columns),
            "mixed_type_columns": len(self.mixed_type_columns),
            "infinite_value_columns": len(self.infinite_value_columns),
            "completeness": round(self.completeness, 4),
            "cleaning_steps": len(self.cleaning_log),
        }

    def issues_table(self) -> pd.DataFrame:
        """Return a DataFrame summarising all detected issues."""
        rows: list[dict[str, Any]] = []

        for col in self.constant_columns:
            rows.append({"issue": "Constant column", "column": col, "detail": "Single unique value"})
        for item in self.high_missing_columns:
            rows.append({
                "issue": "High missing ratio",
                "column": item["column"],
                "detail": f"{item['missing_ratio'] * 100:.1f}%",
            })
        for col in self.id_like_columns:
            rows.append({"issue": "ID-like column", "column": col, "detail": "All values unique"})
        for col in self.mixed_type_columns:
            rows.append({"issue": "Mixed types", "column": col, "detail": "Multiple Python types"})
        for col in self.infinite_value_columns:
            rows.append({"issue": "Infinite values", "column": col, "detail": "Contains inf/-inf"})
        if self.duplicate_rows_count > 0:
            rows.append({
                "issue": "Duplicate rows",
                "column": "(all)",
                "detail": f"{self.duplicate_rows_count} rows",
            })

        # Keep a stable column layout even when no issues were found
        return pd.DataFrame(rows) if rows else pd.DataFrame(columns=["issue", "column", "detail"])


class Preprocessor:
    """Analyse and lightly clean a DataFrame for optimal analysis.

    Args:
        df: Raw DataFrame.
        schema: Inferred :class:`DataSchema`.
    """

    def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None:
        self._df = df
        self._schema = schema

    def run(self) -> PreprocessingResult:
        """Execute the full preprocessing pipeline.

        Returns:
            :class:`PreprocessingResult` with cleaned data and issue reports.
        """
        result = PreprocessingResult(
            original_shape=(len(self._df), len(self._df.columns)),
        )

        # Work on a copy — the caller's DataFrame is never mutated.
        df = self._df.copy()

        # 1. Detect & remove constant columns
        result.constant_columns = self._detect_constant_columns(df)
        if result.constant_columns:
            result.cleaning_log.append(
                f"Removed {len(result.constant_columns)} constant column(s): "
                f"{', '.join(result.constant_columns[:5])}"
                + ("..." if len(result.constant_columns) > 5 else "")
            )
            df = df.drop(columns=result.constant_columns)

        # 2. Detect & remove exact duplicate rows
        # (counted AFTER constant columns are dropped, so rows differing only
        # in a dropped column count as duplicates here)
        result.duplicate_rows_count = int(df.duplicated().sum())
        if result.duplicate_rows_count > 0:
            pct = result.duplicate_rows_count / len(df) * 100
            result.cleaning_log.append(
                f"Removed {result.duplicate_rows_count} duplicate row(s) ({pct:.1f}%)"
            )
            df = df.drop_duplicates().reset_index(drop=True)

        # 3. Detect high-missing columns (>= 50 %)
        result.high_missing_columns = self._detect_high_missing(df, threshold=0.5)

        # 4. Detect ID-like columns (all unique values)
        result.id_like_columns = self._detect_id_columns(df)

        # 5. Detect mixed-type columns
        result.mixed_type_columns = self._detect_mixed_types(df)

        # 6. Detect infinite values in numeric columns
        result.infinite_value_columns = self._detect_infinite_values(df)
        if result.infinite_value_columns:
            for col in result.infinite_value_columns:
                count = int(np.isinf(df[col]).sum())
                result.cleaning_log.append(
                    f"Replaced {count} infinite value(s) in '{col}' with NaN"
                )
                df[col] = df[col].replace([np.inf, -np.inf], np.nan)

        # 7. Compute completeness (after cleaning, so inf→NaN replacements
        # from step 6 count as missing)
        total_cells = df.shape[0] * df.shape[1]
        if total_cells > 0:
            result.completeness = 1.0 - float(df.isna().sum().sum() / total_cells)
        else:
            result.completeness = 1.0

        result.cleaned_df = df
        result.cleaned_shape = (len(df), len(df.columns))

        logger.info(
            "Preprocessing complete: %s -> %s (%d steps)",
            result.original_shape,
            result.cleaned_shape,
            len(result.cleaning_log),
        )
        return result

    # ── Internal detectors ──────────────────────────────

    @staticmethod
    def _detect_constant_columns(df: pd.DataFrame) -> list[str]:
        """Find columns that have at most one unique non-null value."""
        return [col for col in df.columns if df[col].nunique(dropna=True) <= 1]

    @staticmethod
    def _detect_high_missing(df: pd.DataFrame, threshold: float = 0.5) -> list[dict[str, Any]]:
        """Find columns where the missing ratio exceeds *threshold*."""
        result: list[dict[str, Any]] = []
        for col in df.columns:
            ratio = float(df[col].isna().mean())
            if ratio >= threshold:
                result.append({"column": col, "missing_ratio": round(ratio, 4)})
        # Worst offenders first
        return sorted(result, key=lambda x: x["missing_ratio"], reverse=True)

    @staticmethod
    def _detect_id_columns(df: pd.DataFrame) -> list[str]:
        """Detect columns where every value is unique (likely an ID)."""
        # Too few rows to distinguish an ID column from coincidence
        if len(df) < 10:
            return []
        return [col for col in df.columns if df[col].nunique() == len(df)]

    @staticmethod
    def _detect_mixed_types(df: pd.DataFrame) -> list[str]:
        """Detect object columns containing more than one Python type."""
        mixed: list[str] = []
        for col in df.columns:
            if df[col].dtype == object:
                non_null = df[col].dropna()
                if len(non_null) > 0 and non_null.apply(type).nunique() > 1:
                    mixed.append(col)
        return mixed

    @staticmethod
    def _detect_infinite_values(df: pd.DataFrame) -> list[str]:
        """Detect numeric columns that contain ``inf`` / ``-inf``."""
        inf_cols: list[str] = []
        # NOTE(review): np.isinf may raise on pandas nullable dtypes (e.g.
        # Int64) which select_dtypes("number") includes — TODO confirm inputs.
        for col in df.select_dtypes(include="number").columns:
            if np.isinf(df[col]).any():
                inf_cols.append(col)
        return inf_cols
diff --git a/f2a/core/schema.py b/f2a/core/schema.py
new file mode 100644
index 0000000..c260fac
--- /dev/null
+++ b/f2a/core/schema.py
@@ -0,0 +1,103 @@
"""Data schema inference and management."""

from __future__ import annotations

from dataclasses import dataclass, field

import pandas as pd

from f2a.utils.type_inference import ColumnType, infer_all_types


@dataclass
class ColumnInfo:
    """Metadata for an individual column."""

    name: str
    dtype: str
    inferred_type: ColumnType
    n_unique: int
    n_missing: int
    missing_ratio: float


@dataclass
class DataSchema:
    """Schema information for an entire DataFrame."""

    n_rows: int
    n_cols: int
    columns: list[ColumnInfo] = field(default_factory=list)
    memory_usage_mb: float = 0.0

    @property
    def numeric_columns(self) -> list[str]:
        """List of numeric column names."""
        return [c.name for c in self.columns if c.inferred_type == ColumnType.NUMERIC]

    @property
    def categorical_columns(self) -> list[str]:
        """List of categorical column names."""
        return [c.name for c in self.columns if c.inferred_type == ColumnType.CATEGORICAL]

    @property
    def text_columns(self) -> list[str]:
        """List of text column names."""
        return [c.name for c in self.columns if c.inferred_type == ColumnType.TEXT]

    @property
    def datetime_columns(self) -> list[str]:
        """List of datetime column names."""
        return [c.name for c in self.columns if c.inferred_type == ColumnType.DATETIME]

    def summary_dict(self) -> dict[str, str | int | float]:
        """Return schema summary as a dictionary."""
        return {
            "rows": self.n_rows,
            "columns": self.n_cols,
            "numeric": len(self.numeric_columns),
            "categorical": len(self.categorical_columns),
            "text": len(self.text_columns),
            "datetime": len(self.datetime_columns),
            "memory_mb": round(self.memory_usage_mb, 2),
        }


def infer_schema(df: pd.DataFrame) -> DataSchema:
    """Infer schema from
a DataFrame.

    Args:
        df: Target DataFrame to analyze.

    Returns:
        Inferred :class:`DataSchema`.
    """
    type_map = infer_all_types(df)
    columns: list[ColumnInfo] = []

    for col in df.columns:
        n_missing = int(df[col].isna().sum())
        try:
            n_unique = int(df[col].nunique())
        except TypeError:
            # Column contains unhashable types (e.g. numpy arrays, lists)
            # — fall back to the non-null count as an upper bound.
            n_unique = len(df[col].dropna())
        columns.append(
            ColumnInfo(
                name=col,
                dtype=str(df[col].dtype),
                inferred_type=type_map[col],
                n_unique=n_unique,
                n_missing=n_missing,
                # Guard against division by zero on an empty frame
                missing_ratio=round(n_missing / len(df), 4) if len(df) > 0 else 0.0,
            )
        )

    # deep=True includes the contents of object columns, not just pointers
    memory_mb = df.memory_usage(deep=True).sum() / (1024 * 1024)

    return DataSchema(
        n_rows=len(df),
        n_cols=len(df.columns),
        columns=columns,
        memory_usage_mb=round(memory_mb, 2),
    )
diff --git a/f2a/report/__init__.py b/f2a/report/__init__.py
new file mode 100644
index 0000000..5e5bf1a
--- /dev/null
+++ b/f2a/report/__init__.py
@@ -0,0 +1,5 @@
"""Report module — analysis report generation."""

from f2a.report.generator import ReportGenerator

__all__ = ["ReportGenerator"]
diff --git a/f2a/report/generator.py b/f2a/report/generator.py
new file mode 100644
index 0000000..62be19a
--- /dev/null
+++ b/f2a/report/generator.py
@@ -0,0 +1,2116 @@
"""HTML report generation module.

Generates comprehensive single-page HTML reports with:
- Sticky navigation bar
- Data quality dashboard
- Preprocessing report
- Descriptive / distribution / correlation / missing / outlier / categorical /
  feature-importance / PCA / duplicate analysis sections
- Inline base64 charts
- Drag-to-scroll tables
"""

from __future__ import annotations

import base64
import html as html_mod
import io
import json
from pathlib import Path
from typing import Any

import matplotlib
# Non-interactive backend — selected before pyplot is imported so no display
# is required (headless rendering).
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import pandas as pd

from f2a.core.config import AnalysisConfig
from f2a.report.i18n import SUPPORTED_LANGUAGES, TRANSLATIONS, DEFAULT_LANG, t, get_method_info_json, get_metric_tips_json
from f2a.utils.logging import get_logger

logger = get_logger(__name__)


# =====================================================================
# Helpers
# =====================================================================

def _fig_to_base64(fig: plt.Figure) -> str:
    """Convert a matplotlib Figure to a base64 PNG string.

    The figure is rendered to an in-memory PNG (100 dpi, tight bounding box)
    and then closed via ``plt.close`` to release its memory.
    """
    buf = io.BytesIO()
    fig.savefig(buf, format="png", bbox_inches="tight", dpi=100)
    buf.seek(0)
    encoded = base64.b64encode(buf.read()).decode("utf-8")
    plt.close(fig)
    return encoded


# -- Metric tooltip descriptions --------------------------------------

_METRIC_TIPS: dict[str, str] = {
    # Descriptive
    "type": "Inferred data type of the column (numeric, categorical, text, datetime, boolean).",
    "count": "Number of non-null values in the column.",
    "missing": "Number of missing (null / NaN) values.",
    "missing_%": "Percentage of missing values = (missing / total rows) x 100.",
    "unique": "Number of distinct values in the column.",
    "mean": "Arithmetic mean = sum of values / count.",
    "median": "Middle value when data is sorted (50th percentile).",
    "std": "Standard deviation -- measures spread around the mean.
Larger = more dispersed.", + "se": "Standard error of the mean = std / sqrtn. Indicates precision of the sample mean.", + "cv": "Coefficient of variation = std / |mean|. Unitless relative measure of variability.", + "mad": "Median Absolute Deviation = median(|xi - median|). Robust measure of spread.", + "min": "Minimum value in the column.", + "max": "Maximum value in the column.", + "range": "Range = max - min. Total spread of the data.", + "p5": "5th percentile -- 5% of data falls below this value.", + "q1": "1st quartile (25th percentile) -- 25% of data falls below this value.", + "q3": "3rd quartile (75th percentile) -- 75% of data falls below this value.", + "p95": "95th percentile -- 95% of data falls below this value.", + "iqr": "Interquartile Range = Q3 - Q1. Middle 50% spread, used for outlier detection.", + "skewness": "Skewness measures distribution asymmetry. 0 = symmetric, >0 = right-skewed, <0 = left-skewed.", + "kurtosis": "Excess kurtosis measures tail heaviness. 0 = normal, >0 = heavy tails, <0 = light tails.", + "top": "Most frequently occurring value in the column.", + "freq": "Frequency count of the most common value.", + # Distribution + "n": "Number of non-null observations used for the distribution test.", + "skew_type": "Interpretation of skewness: symmetric (|s|<0.5), moderate skew (0.5-1), high skew (>1).", + "kurt_type": "Interpretation of kurtosis: mesokurtic (~0), leptokurtic (>1, heavy tails), platykurtic (<-1, light tails).", + "normality_test": "Primary normality test used (Shapiro-Wilk for n<=5000, D'Agostino-Pearson for larger).", + "normality_p": "p-value of the primary normality test. p<0.05 -> likely non-normal.", + "is_normal_0.05": "True if p-value >= 0.05, meaning the null hypothesis of normality is not rejected at alpha=0.05.", + "shapiro_p": "p-value from Shapiro-Wilk test. Best for small-medium samples (n<=5000).", + "dagostino_p": "p-value from D'Agostino-Pearson test. 
Uses skewness + kurtosis, good for n>=20.", + "ks_p": "p-value from Kolmogorov-Smirnov test vs. normal distribution.", + "anderson_stat": "Anderson-Darling test statistic. Higher = stronger evidence against normality.", + "anderson_5pct_cv": "Anderson-Darling 5% critical value. If stat > cv -> reject normality at 5%.", + # Missing + "missing_count": "Number of missing (null) values in this column.", + "missing_ratio": "Fraction of missing values = missing_count / total_rows (0 to 1).", + "dtype": "Pandas dtype of the column.", + # Outlier + "lower_bound": "IQR lower fence = Q1 - k x IQR. Values below this are outliers (default k=1.5).", + "upper_bound": "IQR upper fence = Q3 + k x IQR. Values above this are outliers (default k=1.5).", + "outlier_count": "Number of values falling outside the outlier bounds.", + "outlier_%": "Percentage of outlier values = (outlier_count / total) x 100.", + "min_outlier": "Smallest outlier value detected.", + "max_outlier": "Largest outlier value detected.", + "threshold": "Z-score threshold used. Values with |z| > threshold are outliers.", + "max_zscore": "Maximum absolute z-score found in the column.", + # Categorical + "top_value": "The most frequently occurring category value.", + "top_frequency": "Count of the most frequent category.", + "top_%": "Percentage of the most frequent category = (top_freq / total) x 100.", + "entropy": "Shannon entropy (bits). Higher = more uniform distribution among categories.", + "norm_entropy": "Normalized entropy = entropy / log2(unique). 1.0 = perfectly uniform.", + "max_entropy": "Maximum possible entropy = log2(unique). Achieved when all categories are equally frequent.", + "normalized_entropy": "Same as norm_entropy: entropy / max_entropy. 
1.0 = uniform.", + "unique_values": "Number of distinct category values.", + # Feature importance + "variance": "Variance of the column = mean of squared deviations from mean.", + "mean_abs_corr": "Mean absolute Pearson correlation with all other numeric columns.", + "avg_mutual_info": "Average mutual information with all other columns (uses sklearn).", + # Correlation + "VIF": "Variance Inflation Factor. VIF=1 -> no multicollinearity, >5 -> moderate, >10 -> severe.", + "multicollinearity": "Interpretation of VIF: low (<5), moderate (5-10), or high (>=10).", + # PCA + "variance_ratio": "Proportion of total variance explained by this principal component.", + "cumulative_ratio": "Cumulative proportion of variance explained up to this component.", + "eigenvalue": "Eigenvalue of the covariance matrix for this component. Higher = more variance.", + "n_components": "Total number of principal components computed.", + "total_variance_explained": "Total variance captured by all computed components.", + "components_for_90pct": "Minimum number of components needed to explain >= 90% of variance.", + "top_component_variance": "Variance ratio of the first (most important) principal component.", + # Duplicates + "total_rows": "Total number of rows in the dataset.", + "duplicate_rows": "Number of exact duplicate rows found.", + "unique_rows": "Number of unique (non-duplicate) rows.", + "duplicate_ratio": "Fraction of duplicate rows = duplicate_rows / total_rows.", + "uniqueness_ratio": "Ratio of unique values = unique / total_non_null. 1.0 = all unique.", + "total_non_null": "Number of non-null values used for uniqueness calculation.", + "is_unique_key": "True if every non-null value is unique -- potential primary key.", + # Quality + "completeness": "Fraction of non-missing values = 1 - (missing / total). 1.0 = no missing data.", + "uniqueness": "Ratio of unique values to total non-null values. Higher = more diverse.", + "consistency": "Measures type consistency. 
1.0 = all values match the expected data type.", + "validity": "Fraction of values within expected ranges/formats. 1.0 = all valid.", + "overall": "Weighted quality score = 0.35xcompleteness + 0.25xuniqueness + 0.20xconsistency + 0.20xvalidity.", + "quality_score": "Per-column quality score combining completeness and uniqueness.", + # Common row-index labels + "column": "Column name in the dataset.", + "component": "Principal component identifier (PC1, PC2, ...).", + "value": "Category or discrete value.", + "percentage": "Percentage share of this value = (count / total) x 100.", + # -- Advanced Distribution -- + "best_distribution": "Scipy distribution that best fits the data according to AIC.", + "aic": "Akaike Information Criterion -- lower is better. Penalises complexity.", + "bic": "Bayesian Information Criterion -- lower is better. More conservative than AIC.", + "ks_statistic": "Kolmogorov-Smirnov statistic measuring max CDF deviation from the fitted distribution.", + "jarque_bera_stat": "Jarque-Bera test statistic. Large values indicate non-normality.", + "jb_p_value": "p-value of the Jarque-Bera test. 
p < 0.05 -> reject normality.", + "recommended_transform": "Power transform recommended to make the column more normal (Box-Cox or Yeo-Johnson).", + "original_skew": "Skewness of the original (untransformed) column.", + "transformed_skew": "Skewness after applying the recommended power transform.", + "bandwidth_silverman": "Kernel bandwidth via Silverman's rule for KDE estimation.", + "bandwidth_scott": "Kernel bandwidth via Scott's rule for KDE estimation.", + # -- Advanced Correlation -- + "partial_corr": "Partial correlation -- Pearson correlation after removing confounding effects of other variables.", + "mutual_information": "Mutual information (bits) -- measures non-linear dependency between two variables.", + "ci_lower": "Lower bound of the 95% bootstrap confidence interval for the correlation.", + "ci_upper": "Upper bound of the 95% bootstrap confidence interval for the correlation.", + "distance_corr": "Szekely distance correlation -- captures non-linear dependencies (0 = independent, 1 = dependent).", + # -- Clustering -- + "optimal_k": "Best number of clusters determined by silhouette score analysis.", + "best_silhouette": "Highest mean silhouette score across evaluated k values (-1 to 1, higher = better separation).", + "inertia": "Within-cluster sum of squares (WCSS). Lower = tighter clusters.", + "n_clusters_dbscan": "Number of clusters found by DBSCAN (excludes noise).", + "noise_ratio": "Fraction of points labelled as noise by DBSCAN.", + "eps": "DBSCAN epsilon -- neighbourhood radius auto-estimated from k-distance plot.", + # -- Dimensionality Reduction -- + "kl_divergence": "Kullback-Leibler divergence of the t-SNE embedding. Lower = better fit.", + "tsne_perplexity": "Perplexity parameter for t-SNE (balances local vs. 
global structure).", + "n_factors": "Number of latent factors retained via Kaiser criterion (eigenvalue > 1).", + "factor_loading": "Correlation between an observed variable and a latent factor.", + "noise_variance": "Estimated noise (uniqueness) for each variable in Factor Analysis.", + # -- Feature Insights -- + "interaction_strength": "Pearson correlation between a product-interaction term and the top feature.", + "monotonic_gap": "Gap between Pearson and Spearman correlations -- large gap -> non-linear monotonic relationship.", + "entropy_equal_width": "Shannon entropy of equal-width binning. Lower = more concentrated distribution.", + "entropy_equal_freq": "Shannon entropy of equal-frequency binning. Lower = more concentrated.", + "cardinality": "Number of unique values in a categorical column.", + "encoding_rec": "Recommended encoding strategy based on cardinality analysis.", + "leakage_risk": "Risk level (low/medium/high) that a feature may leak target information.", + # -- Advanced Anomaly -- + "anomaly_score_if": "Isolation Forest anomaly score. More negative = more anomalous.", + "lof_score": "Local Outlier Factor minus-score. More negative = more anomalous.", + "mahalanobis_dist": "Mahalanobis distance from the data centroid. Larger = more unusual.", + "consensus_flag": "True if >= 2 out of 3 anomaly methods agree the point is anomalous.", + # -- Statistical Tests -- + "levene_stat": "Levene test statistic for equality of variances.", + "levene_p": "p-value of Levene's test. p < 0.05 -> variances are significantly different.", + "kw_stat": "Kruskal-Wallis H statistic -- non-parametric one-way ANOVA.", + "kw_p": "p-value of Kruskal-Wallis test. p < 0.05 -> at least one group differs.", + "mw_stat": "Mann-Whitney U statistic -- non-parametric two-sample rank test.", + "mw_p": "p-value of Mann-Whitney U test.", + "chi2_stat": "Chi-square goodness-of-fit statistic vs. 
uniform distribution.", + "chi2_p": "p-value of chi-square goodness-of-fit test.", + "grubbs_stat": "Grubbs test statistic for detecting a single outlier.", + "grubbs_p": "p-value of Grubbs test.", + "adf_stat": "Augmented Dickey-Fuller test statistic for stationarity.", + "adf_p": "p-value of the ADF test. p < 0.05 -> series is stationary.", + # -- Data Profiling -- + "numeric_ratio": "Fraction of columns that are numeric.", + "categorical_ratio": "Fraction of columns that are categorical.", + "duplicate_row_ratio": "Fraction of rows that are exact duplicates.", +} + + +def _df_to_html(df: pd.DataFrame, max_rows: int = 100) -> str: + """Convert a DataFrame to an HTML table with tooltip annotations.""" + if df.empty: + return "

No data available

" + + sub = df.head(max_rows) + # Build table manually to inject data-tip attributes + parts: list[str] = [''] + + # Header row + parts.append("") + # Index header + idx_name = sub.index.name or "" + tip = _METRIC_TIPS.get(idx_name, "") + tip_attr = f' data-tip="{tip}"' if tip else "" + key_attr = f' data-tip-key="{idx_name}"' if tip else "" + parts.append(f"{idx_name}") + for col in sub.columns: + col_str = str(col) + tip = _METRIC_TIPS.get(col_str, "") + tip_attr = f' data-tip="{tip}"' if tip else "" + key_attr = f' data-tip-key="{col_str}"' if tip else "" + parts.append(f"{col}") + parts.append("") + + # Body rows + parts.append("") + for idx_val, row in sub.iterrows(): + parts.append("") + # Index cell -- row identifier + parts.append(f"") + for col in sub.columns: + val = row[col] + col_str = str(col) + col_tip = _METRIC_TIPS.get(col_str, "") + # Format the display value + if isinstance(val, float): + display = f"{val:.4f}" + else: + display = str(val) if pd.notna(val) else "NaN" + tip_attr = f' data-tip="{col_tip}"' if col_tip else "" + key_attr = f' data-tip-key="{col_str}"' if col_tip else "" + parts.append(f"{html_mod.escape(display)}") + parts.append("") + parts.append("
{html_mod.escape(str(idx_val))}
") + return "\n".join(parts) + + +def _dict_to_cards(d: dict[str, Any], fmt: str = ",.0f") -> str: + """Convert a dict to stat-card HTML elements with tooltips.""" + # Keys that represent [0,1] ratios and should be displayed as percentages + _RATIO_KEYS = { + "anomaly_ratio", "noise_ratio", "consensus_ratio", "missing_ratio", + "duplicate_row_ratio", "numeric_ratio", "categorical_ratio", + "total_variance_explained", + } + cards: list[str] = [] + for key, val in d.items(): + if isinstance(val, float): + if key in _RATIO_KEYS and 0 <= val <= 1: + display = f"{val * 100:.1f}%" + else: + display = f"{val:{fmt}}" + elif isinstance(val, int): + display = f"{val:,}" + else: + display = str(val) + label = key.replace("_", " ").title() + tip = _METRIC_TIPS.get(key, "") + tip_attr = f' data-tip="{tip}"' if tip else "" + key_attr = f' data-tip-key="{key}"' if tip else "" + cards.append( + f'
{display}
' + f'
{label}
' + ) + return "\n".join(cards) + + +# ===================================================================== +# CSS / JS constants +# ===================================================================== + +_CSS = """ +* { margin: 0; padding: 0; box-sizing: border-box; } +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; + line-height: 1.6; color: #333; background: #f5f7fa; margin: 0; +} +/* Header */ +.header { + background: linear-gradient(135deg, #2c3e50, #3498db); + color: #fff; padding: 30px 40px; +} +.header h1 { font-size: 1.8em; margin-bottom: 4px; } +.header p { font-size: 1.05em; opacity: 0.9; } +/* Top nav */ +.topnav { + background: #fff; border-bottom: 1px solid #dde; padding: 8px 20px; + position: sticky; top: 0; z-index: 100; + display: flex; flex-wrap: wrap; gap: 4px; align-items: center; + box-shadow: 0 1px 3px rgba(0,0,0,0.05); +} +.topnav a { + padding: 5px 14px; border-radius: 20px; text-decoration: none; + color: #666; font-size: 0.82em; transition: all 0.2s; white-space: nowrap; +} +.topnav a:hover, .topnav a.active { + background: #3498db; color: #fff; +} +/* Main content */ +.main { max-width: 1400px; margin: 0 auto; padding: 20px; } +/* Sections */ +section { + background: #fff; border-radius: 12px; + box-shadow: 0 1px 3px rgba(0,0,0,0.08); + margin: 20px 0; padding: 25px; +} +.section-title { + font-size: 1.25em; color: #2c3e50; + border-bottom: 2px solid #3498db; + padding-bottom: 8px; margin-bottom: 18px; +} +.section-subtitle { font-size: 1em; color: #555; margin: 18px 0 10px 0; } +/* Cards grid */ +.cards { + display: grid; grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); + gap: 12px; margin: 15px 0; +} +.card { + background: #f8f9fa; border-radius: 8px; padding: 14px; text-align: center; +} +.card .value { font-size: 1.7em; font-weight: bold; color: #3498db; } +.card .label { font-size: 0.82em; color: #888; margin-top: 2px; } +/* Tables */ +.table-wrapper { + position: relative; 
overflow-x: auto; overflow-y: visible; + margin: 12px 0; border: 1px solid #e0e0e0; border-radius: 8px; + cursor: grab; -webkit-user-select: none; user-select: none; +} +.table-wrapper.dragging { cursor: grabbing; } +.table-wrapper .scroll-hint { + position: absolute; top: 0; right: 0; bottom: 0; width: 40px; + pointer-events: none; + background: linear-gradient(to right, transparent, rgba(0,0,0,0.06)); + border-radius: 0 8px 8px 0; transition: opacity 0.3s; +} +.table-wrapper .scroll-hint.hidden { opacity: 0; } +.table { + width: max-content; min-width: 100%; border-collapse: collapse; font-size: 0.85em; +} +.table th, .table td { + padding: 7px 11px; text-align: left; border-bottom: 1px solid #eee; white-space: nowrap; +} +.table th { + background: #f8f9fa; font-weight: 600; position: sticky; top: 0; z-index: 1; +} +.table th:first-child { position: sticky; left: 0; z-index: 2; background: #eef2f5; } +.table td:first-child { + position: sticky; left: 0; background: #fff; z-index: 1; + font-weight: 500; border-right: 2px solid #e0e0e0; +} +.table tr:hover td { background: #f1f3f5; } +.table tr:hover td:first-child { background: #e8ecf0; } +/* Charts */ +.charts-grid { + display: grid; grid-template-columns: repeat(auto-fit, minmax(420px, 1fr)); + gap: 15px; margin: 15px 0; +} +.chart-card { + background: #fafafa; border-radius: 8px; padding: 12px; text-align: center; +} +.chart-card img { max-width: 100%; border-radius: 6px; cursor: zoom-in; transition: opacity 0.15s; } +.chart-card img:hover { opacity: 0.85; } +.chart-card h4 { font-size: 0.9em; color: #555; margin-bottom: 8px; } +/* Single full-width chart */ +.chart-full { text-align: center; margin: 15px 0; } +.chart-full img { max-width: 100%; border-radius: 8px; box-shadow: 0 2px 8px rgba(0,0,0,0.08); cursor: zoom-in; transition: opacity 0.15s; } +.chart-full img:hover { opacity: 0.85; } +/* Image viewer modal */ +.f2a-img-overlay { + position: fixed; inset: 0; z-index: 10001; + background: rgba(0,0,0,0.82); 
backdrop-filter: blur(4px); + display: flex; align-items: center; justify-content: center; + opacity: 0; pointer-events: none; transition: opacity 0.2s; + cursor: grab; +} +.f2a-img-overlay.visible { opacity: 1; pointer-events: auto; } +.f2a-img-overlay.dragging { cursor: grabbing; } +.f2a-img-overlay .img-viewport { + position: relative; width: 100%; height: 100%; + overflow: hidden; +} +.f2a-img-overlay .img-viewport img { + position: absolute; top: 0; left: 0; transform-origin: 0 0; + max-width: none; max-height: none; user-select: none; -webkit-user-drag: none; + transition: none; +} +.f2a-img-overlay .img-close { + position: fixed; top: 18px; right: 24px; z-index: 10002; + background: rgba(255,255,255,0.15); border: none; color: #fff; font-size: 2em; + cursor: pointer; width: 48px; height: 48px; border-radius: 50%; + display: flex; align-items: center; justify-content: center; + transition: background 0.15s; line-height: 1; +} +.f2a-img-overlay .img-close:hover { background: rgba(255,255,255,0.3); } +.f2a-img-overlay .img-title { + position: fixed; bottom: 20px; left: 50%; transform: translateX(-50%); z-index: 10002; + color: #fff; font-size: 0.9em; background: rgba(0,0,0,0.5); padding: 6px 18px; + border-radius: 20px; white-space: nowrap; pointer-events: none; +} +.f2a-img-overlay .img-zoom-info { + position: fixed; top: 24px; left: 50%; transform: translateX(-50%); z-index: 10002; + color: #fff; font-size: 0.82em; background: rgba(0,0,0,0.45); padding: 4px 14px; + border-radius: 14px; pointer-events: none; opacity: 0; transition: opacity 0.25s; +} +.f2a-img-overlay .img-zoom-info.show { opacity: 1; } +/* Warnings */ +.warnings { + background: #fff3cd; border: 1px solid #ffc107; border-radius: 8px; + padding: 14px; margin: 15px 0; +} +.warnings li { margin: 4px 0 4px 20px; font-size: 0.92em; } +/* Preprocessing log */ +.log-list { list-style: none; padding: 0; } +.log-list li { padding: 4px 0; font-size: 0.9em; color: #555; } +.log-list li::before { content: 
"-> "; color: #3498db; font-weight: bold; } +/* Quality gauge */ +.quality-bars { display: flex; flex-wrap: wrap; gap: 20px; margin: 15px 0; } +.qbar { flex: 1; min-width: 120px; } +.qbar-label { font-size: 0.85em; color: #555; margin-bottom: 4px; } +.qbar-track { background: #eee; border-radius: 6px; height: 22px; position: relative; overflow: hidden; } +.qbar-fill { height: 100%; border-radius: 6px; transition: width 0.4s; display: flex; align-items: center; justify-content: flex-end; padding-right: 6px; font-size: 0.75em; color: #fff; font-weight: 600; } +.qbar-fill.good { background: #27ae60; } .qbar-fill.fair { background: #f39c12; } .qbar-fill.poor { background: #e74c3c; } +/* Tabs (multi-subset) */ +.tab-bar { + display: flex; flex-wrap: wrap; gap: 4px; + border-bottom: 2px solid #e0e0e0; margin: 20px 0 0 0; +} +.tab-btn { + padding: 10px 20px; border: 1px solid #ddd; border-bottom: none; + background: #f8f9fa; cursor: pointer; border-radius: 8px 8px 0 0; + font-size: 0.92em; transition: background 0.15s; +} +.tab-btn:hover { background: #e9ecef; } +.tab-btn.active { + background: #fff; border-bottom: 2px solid #fff; margin-bottom: -2px; + font-weight: 600; color: #3498db; +} +.tab-content { padding: 20px 0; } +.summary-bar { + background: #eaf3fb; border-radius: 8px; padding: 12px 20px; + margin: 10px 0 20px 0; font-size: 1.05em; +} +/* Footer */ +footer { text-align: center; margin-top: 40px; padding: 20px; color: #aaa; font-size: 0.85em; } +/* Sub-tabs (2nd depth: Basic / Advanced categories) */ +.sub-tab-bar { + display: flex; flex-wrap: wrap; gap: 3px; + border-bottom: 2px solid #d5dce4; margin: 18px 0 0 0; padding: 0; +} +.sub-tab-btn { + padding: 7px 16px; border: 1px solid transparent; border-bottom: none; + background: transparent; cursor: pointer; border-radius: 6px 6px 0 0; + font-size: 0.84em; color: #888; transition: all 0.15s; white-space: nowrap; +} +.sub-tab-btn:hover { background: #edf2f7; color: #555; } +.sub-tab-btn.active { + background: 
#fff; border-color: #d5dce4; border-bottom: 2px solid #fff; + margin-bottom: -2px; font-weight: 600; color: #2980b9; +} +.sub-tab-btn.adv { color: #8e44ad; } +.sub-tab-btn.adv.active { color: #8e44ad; border-bottom-color: #fff; } +.sub-tab-content { padding: 18px 0; display: none; } +.sub-tab-content.active { display: block; } +/* Advanced section badges */ +.adv-badge { + display: inline-block; background: #8e44ad; color: #fff; font-size: 0.7em; + padding: 1px 7px; border-radius: 10px; margin-left: 8px; vertical-align: middle; +} +/* Tooltip */ +.f2a-tooltip { + position: fixed; z-index: 9999; + max-width: 340px; padding: 10px 14px; + background: #2c3e50; color: #fff; font-size: 0.82em; line-height: 1.5; + border-radius: 8px; pointer-events: none; + box-shadow: 0 4px 16px rgba(0,0,0,0.25); + opacity: 0; transition: opacity 0.15s; +} +.f2a-tooltip.visible { opacity: 1; } +.f2a-tooltip .tip-header { + font-weight: 700; color: #5dade2; margin-bottom: 4px; + border-bottom: 1px solid rgba(255,255,255,0.15); padding-bottom: 3px; +} +.f2a-tooltip .tip-value { color: #f9e79f; font-weight: 600; } +[data-tip] { cursor: help; } +th[data-tip] { text-decoration: underline dotted rgba(0,0,0,0.25); text-underline-offset: 3px; } +/* Language selector */ +.lang-selector { + position: absolute; top: 24px; right: 30px; + display: flex; align-items: center; gap: 8px; +} +.lang-selector label { font-size: 0.85em; opacity: 0.85; color: #fff; } +.lang-selector select { + background: rgba(255,255,255,0.2); color: #fff; border: 1px solid rgba(255,255,255,0.4); + border-radius: 6px; padding: 4px 10px; font-size: 0.85em; cursor: pointer; + backdrop-filter: blur(4px); +} +.lang-selector select option { color: #333; background: #fff; } +/* Analysis timing */ +.analysis-meta { font-size: 0.88em; opacity: 0.8; margin-top: 4px; } +.header { position: relative; } +/* Method-info clickable headings */ +.section-subtitle[data-method-key] { + cursor: pointer; transition: color 0.15s; + 
border-bottom: 1px dashed rgba(0,0,0,0.2); display: inline-block; +} +.section-subtitle[data-method-key]:hover { color: #2980b9; } +/* Modal overlay + card */ +.f2a-modal-overlay { + position: fixed; inset: 0; z-index: 10000; + background: rgba(0,0,0,0.45); backdrop-filter: blur(3px); + display: flex; align-items: center; justify-content: center; + opacity: 0; pointer-events: none; transition: opacity 0.2s; +} +.f2a-modal-overlay.visible { opacity: 1; pointer-events: auto; } +.f2a-modal { + background: #fff; border-radius: 14px; max-width: 620px; width: 92%; + max-height: 80vh; overflow-y: auto; padding: 28px 32px; + box-shadow: 0 12px 48px rgba(0,0,0,0.25); + transform: translateY(20px); transition: transform 0.2s; + position: relative; +} +.f2a-modal-overlay.visible .f2a-modal { transform: translateY(0); } +.f2a-modal-close { + position: absolute; top: 14px; right: 16px; + background: none; border: none; font-size: 1.3em; cursor: pointer; + color: #999; line-height: 1; padding: 4px 8px; border-radius: 6px; + transition: background 0.15s; +} +.f2a-modal-close:hover { background: #f0f0f0; color: #333; } +.f2a-modal h3 { + font-size: 1.15em; color: #2c3e50; margin: 0 0 6px 0; + padding-right: 30px; +} +.f2a-modal .modal-tip { + color: #888; font-size: 0.88em; margin-bottom: 14px; + padding-bottom: 10px; border-bottom: 1px solid #eee; +} +.f2a-modal .modal-desc { font-size: 0.92em; color: #444; line-height: 1.7; } +.f2a-modal .modal-desc ul { margin: 6px 0 6px 20px; } +.f2a-modal .modal-desc li { margin: 3px 0; } +.f2a-modal .modal-desc b { color: #2c3e50; } +""" + +_DRAG_SCROLL_JS = """ +(function() { + document.querySelectorAll('.table-wrapper').forEach(function(wrapper) { + var isDown = false, startX, scrollLeft, velX = 0, momentumId; + function updateHint() { + var hint = wrapper.querySelector('.scroll-hint'); + if (!hint) return; + hint.classList.toggle('hidden', + wrapper.scrollLeft + wrapper.clientWidth >= wrapper.scrollWidth - 2); + } + 
wrapper.addEventListener('mousedown', function(e) { + isDown = true; wrapper.classList.add('dragging'); + startX = e.pageX - wrapper.offsetLeft; scrollLeft = wrapper.scrollLeft; + velX = 0; cancelAnimationFrame(momentumId); e.preventDefault(); + }); + wrapper.addEventListener('mouseleave', function() { + if (isDown) { isDown = false; wrapper.classList.remove('dragging'); startMomentum(); } + }); + wrapper.addEventListener('mouseup', function() { + if (isDown) { isDown = false; wrapper.classList.remove('dragging'); startMomentum(); } + }); + wrapper.addEventListener('mousemove', function(e) { + if (!isDown) return; + var x = e.pageX - wrapper.offsetLeft; + var walk = (x - startX) * 1.5; + velX = wrapper.scrollLeft; + wrapper.scrollLeft = scrollLeft - walk; + velX = velX - wrapper.scrollLeft; + updateHint(); + }); + wrapper.addEventListener('scroll', updateHint); + function startMomentum() { + cancelAnimationFrame(momentumId); + (function step() { + velX *= 0.92; + if (Math.abs(velX) > 0.5) { + wrapper.scrollLeft -= velX; updateHint(); + momentumId = requestAnimationFrame(step); + } + })(); + } + var touchStartX, touchScrollLeft; + wrapper.addEventListener('touchstart', function(e) { + touchStartX = e.touches[0].pageX; touchScrollLeft = wrapper.scrollLeft; + }, {passive: true}); + wrapper.addEventListener('touchmove', function(e) { + wrapper.scrollLeft = touchScrollLeft - (e.touches[0].pageX - touchStartX); + updateHint(); + }, {passive: true}); + updateHint(); + }); +})(); +""" + +_NAV_SCROLL_JS = """ +(function() { + var links = document.querySelectorAll('.topnav a[href^="#"]'); + var sections = []; + links.forEach(function(a) { + var id = a.getAttribute('href').slice(1); + var el = document.getElementById(id); + if (el) sections.push({el: el, link: a}); + }); + function highlight() { + var scrollY = window.scrollY + 120; + var active = null; + sections.forEach(function(s) { + if (s.el.offsetTop <= scrollY) active = s; + }); + links.forEach(function(a) { 
a.classList.remove('active'); }); + if (active) active.link.classList.add('active'); + } + window.addEventListener('scroll', highlight); + highlight(); +})(); +""" + +_TOOLTIP_JS = """ +(function() { + var tip = document.createElement('div'); + tip.className = 'f2a-tooltip'; + document.body.appendChild(tip); + var showTimer = null, hideTimer = null; + + function getColHeader(td) { + var ci = Array.prototype.indexOf.call(td.parentNode.children, td); + var thead = td.closest('table').querySelector('thead'); + if (!thead) return null; + var ths = thead.querySelectorAll('tr:first-child th'); + return ci < ths.length ? ths[ci] : null; + } + + function getRowLabel(td) { + var first = td.parentNode.children[0]; + return first ? first.textContent.trim() : ''; + } + + function show(el, ev) { + var desc = el.getAttribute('data-tip'); + if (!desc) return; + var tagName = el.tagName.toLowerCase(); + var html = ''; + if (tagName === 'th') { + html = '
' + el.textContent.trim() + '
' + desc; + } else { + var colTh = getColHeader(el); + var colName = colTh ? colTh.textContent.trim() : ''; + var rowLabel = getRowLabel(el); + var cellVal = el.textContent.trim(); + html = ''; + if (rowLabel) html += '
' + rowLabel + ' -> ' + colName + '
'; + else if (colName) html += '
' + colName + '
'; + html += desc; + if (cellVal && cellVal !== 'NaN') html += '
Value: ' + cellVal + ''; + } + tip.innerHTML = html; + tip.classList.add('visible'); + position(ev); + } + + function position(ev) { + var x = ev.clientX + 14, y = ev.clientY + 14; + var tw = tip.offsetWidth, th2 = tip.offsetHeight; + var vw = window.innerWidth, vh = window.innerHeight; + if (x + tw > vw - 10) x = ev.clientX - tw - 10; + if (y + th2 > vh - 10) y = ev.clientY - th2 - 10; + if (x < 4) x = 4; if (y < 4) y = 4; + tip.style.left = x + 'px'; tip.style.top = y + 'px'; + } + + function hide() { tip.classList.remove('visible'); } + + document.addEventListener('mouseover', function(e) { + var el = e.target.closest('[data-tip]'); + if (!el) return; + clearTimeout(hideTimer); + showTimer = setTimeout(function() { show(el, e); }, 250); + }); + document.addEventListener('mousemove', function(e) { + if (tip.classList.contains('visible')) position(e); + }); + document.addEventListener('mouseout', function(e) { + var el = e.target.closest('[data-tip]'); + if (!el) return; + clearTimeout(showTimer); + hideTimer = setTimeout(hide, 120); + }); +})(); +""" + +_SUB_TAB_JS = """ +function openSubTab(evt, subTabId, groupId) { + var group = document.getElementById(groupId); + if (!group) return; + group.querySelectorAll('.sub-tab-content').forEach(function(el) { el.classList.remove('active'); }); + group.querySelectorAll('.sub-tab-btn').forEach(function(el) { el.classList.remove('active'); }); + var target = document.getElementById(subTabId); + if (target) target.classList.add('active'); + evt.currentTarget.classList.add('active'); +} +/* When a topnav anchor is clicked, ensure the Basic sub-tab is active */ +document.addEventListener('DOMContentLoaded', function() { + document.querySelectorAll('.topnav a[href^="#"]').forEach(function(link) { + link.addEventListener('click', function() { + var subTabGroups = document.querySelectorAll('.sub-tab-group'); + subTabGroups.forEach(function(group) { + var basicBtn = group.querySelector('.sub-tab-btn'); + if (basicBtn && 
!basicBtn.classList.contains('active')) { + basicBtn.click(); + } + }); + }); + }); +}); +""" + +_IMG_MODAL_JS = """ +(function() { + /* Build overlay DOM */ + var ov = document.createElement('div'); + ov.className = 'f2a-img-overlay'; + ov.innerHTML = '
' + + '' + + '
' + + '
'; + document.body.appendChild(ov); + + var vp = ov.querySelector('.img-viewport'); + var img = vp.querySelector('img'); + var titleEl = ov.querySelector('.img-title'); + var zoomInfo = ov.querySelector('.img-zoom-info'); + var closeBtn = ov.querySelector('.img-close'); + + var scale = 1, panX = 0, panY = 0; + var dragging = false, dragStartX = 0, dragStartY = 0, panStartX = 0, panStartY = 0; + var zoomTimer = null; + var MIN_SCALE = 0.2, MAX_SCALE = 12; + + function applyTransform() { + img.style.transform = 'translate(' + panX + 'px,' + panY + 'px) scale(' + scale + ')'; + } + + function showZoom() { + zoomInfo.textContent = Math.round(scale * 100) + '%'; + zoomInfo.classList.add('show'); + clearTimeout(zoomTimer); + zoomTimer = setTimeout(function() { zoomInfo.classList.remove('show'); }, 900); + } + + function resetView() { + /* Fit image within viewport */ + var vw = vp.clientWidth, vh = vp.clientHeight; + var nw = img.naturalWidth, nh = img.naturalHeight; + if (!nw || !nh) { scale = 1; panX = 0; panY = 0; applyTransform(); return; } + scale = Math.min(vw * 0.92 / nw, vh * 0.88 / nh, 1); + panX = (vw - nw * scale) / 2; + panY = (vh - nh * scale) / 2; + applyTransform(); + } + + function openImg(src, alt) { + img.src = src; + titleEl.textContent = alt || ''; + ov.classList.add('visible'); + document.body.style.overflow = 'hidden'; + /* Wait one frame so the overlay is laid out before measuring dimensions */ + requestAnimationFrame(function() { + if (img.naturalWidth) { resetView(); } else { + img.onload = function() { resetView(); img.onload = null; }; + } + }); + } + + function closeImg() { + ov.classList.remove('visible'); + document.body.style.overflow = ''; + } + + closeBtn.addEventListener('click', closeImg); + document.addEventListener('keydown', function(e) { + if (e.key === 'Escape' && ov.classList.contains('visible')) closeImg(); + }); + + /* Click outside image to close */ + ov.addEventListener('click', function(e) { + if (e.target === ov || e.target 
=== vp) closeImg(); + }); + + /* Wheel zoom — zoom towards cursor */ + vp.addEventListener('wheel', function(e) { + e.preventDefault(); + var rect = vp.getBoundingClientRect(); + var mx = e.clientX - rect.left, my = e.clientY - rect.top; + var factor = e.deltaY < 0 ? 1.12 : 1 / 1.12; + var ns = Math.min(Math.max(scale * factor, MIN_SCALE), MAX_SCALE); + var ratio = ns / scale; + panX = mx - ratio * (mx - panX); + panY = my - ratio * (my - panY); + scale = ns; + applyTransform(); + showZoom(); + }, { passive: false }); + + /* Drag to pan */ + vp.addEventListener('mousedown', function(e) { + if (e.button !== 0) return; + e.preventDefault(); + dragging = true; + ov.classList.add('dragging'); + dragStartX = e.clientX; dragStartY = e.clientY; + panStartX = panX; panStartY = panY; + }); + window.addEventListener('mousemove', function(e) { + if (!dragging) return; + panX = panStartX + (e.clientX - dragStartX); + panY = panStartY + (e.clientY - dragStartY); + applyTransform(); + }); + window.addEventListener('mouseup', function() { + if (dragging) { dragging = false; ov.classList.remove('dragging'); } + }); + + /* Touch: pinch zoom + pan */ + var lastTouchDist = 0, lastTouchMid = null, touchPanStart = null; + vp.addEventListener('touchstart', function(e) { + if (e.touches.length === 2) { + e.preventDefault(); + var dx = e.touches[0].clientX - e.touches[1].clientX; + var dy = e.touches[0].clientY - e.touches[1].clientY; + lastTouchDist = Math.sqrt(dx*dx + dy*dy); + lastTouchMid = { x: (e.touches[0].clientX + e.touches[1].clientX)/2, + y: (e.touches[0].clientY + e.touches[1].clientY)/2 }; + } else if (e.touches.length === 1) { + touchPanStart = { x: e.touches[0].clientX, y: e.touches[0].clientY, px: panX, py: panY }; + } + }, { passive: false }); + vp.addEventListener('touchmove', function(e) { + if (e.touches.length === 2 && lastTouchDist) { + e.preventDefault(); + var dx = e.touches[0].clientX - e.touches[1].clientX; + var dy = e.touches[0].clientY - e.touches[1].clientY; 
+ var dist = Math.sqrt(dx*dx + dy*dy); + var factor = dist / lastTouchDist; + var rect = vp.getBoundingClientRect(); + var mx = (e.touches[0].clientX + e.touches[1].clientX)/2 - rect.left; + var my = (e.touches[0].clientY + e.touches[1].clientY)/2 - rect.top; + var ns = Math.min(Math.max(scale * factor, MIN_SCALE), MAX_SCALE); + var ratio = ns / scale; + panX = mx - ratio * (mx - panX); + panY = my - ratio * (my - panY); + scale = ns; + lastTouchDist = dist; + applyTransform(); showZoom(); + } else if (e.touches.length === 1 && touchPanStart) { + panX = touchPanStart.px + (e.touches[0].clientX - touchPanStart.x); + panY = touchPanStart.py + (e.touches[0].clientY - touchPanStart.y); + applyTransform(); + } + }, { passive: false }); + vp.addEventListener('touchend', function() { lastTouchDist = 0; touchPanStart = null; }); + + /* Double-click to reset / toggle 100% */ + vp.addEventListener('dblclick', function(e) { + e.preventDefault(); + var rect = vp.getBoundingClientRect(); + var mx = e.clientX - rect.left, my = e.clientY - rect.top; + if (Math.abs(scale - 1) < 0.01) { + resetView(); + } else { + var ratio = 1 / scale; + panX = mx - ratio * (mx - panX); + panY = my - ratio * (my - panY); + scale = 1; + } + applyTransform(); showZoom(); + }); + + /* Attach click listeners to chart images */ + document.addEventListener('click', function(e) { + var target = e.target; + if (target.tagName === 'IMG' && (target.closest('.chart-card') || target.closest('.chart-full'))) { + e.stopPropagation(); + openImg(target.src, target.alt || ''); + } + }); +})(); +"""; + +_METHOD_MODAL_JS = """ +(function() { + /* Create modal overlay once */ + var overlay = document.createElement('div'); + overlay.className = 'f2a-modal-overlay'; + overlay.innerHTML = '
' + + '' + + '' + + '' + + '' + + '
'; + document.body.appendChild(overlay); + + var modal = overlay.querySelector('.f2a-modal'); + var closeBtn = overlay.querySelector('.f2a-modal-close'); + var titleEl = overlay.querySelector('.modal-title'); + var tipEl = overlay.querySelector('.modal-tip'); + var descEl = overlay.querySelector('.modal-desc'); + + function showModal(title, tip, desc) { + titleEl.textContent = title; + tipEl.textContent = tip; + descEl.innerHTML = desc; + overlay.classList.add('visible'); + } + function hideModal() { overlay.classList.remove('visible'); } + + closeBtn.addEventListener('click', hideModal); + overlay.addEventListener('click', function(e) { + if (e.target === overlay) hideModal(); + }); + document.addEventListener('keydown', function(e) { + if (e.key === 'Escape') hideModal(); + }); + + /* On DOMContentLoaded, attach data-method-key + data-tip to h3[data-i18n] */ + document.addEventListener('DOMContentLoaded', function() { + if (typeof _F2A_METHOD_INFO === 'undefined') return; + var lang = (typeof _f2aLang !== 'undefined') ? _f2aLang : 'en'; + document.querySelectorAll('h3.section-subtitle[data-i18n]').forEach(function(el) { + var key = el.getAttribute('data-i18n'); + var info = (_F2A_METHOD_INFO[lang] && _F2A_METHOD_INFO[lang][key]) || + (_F2A_METHOD_INFO['en'] && _F2A_METHOD_INFO['en'][key]); + if (!info) return; + el.setAttribute('data-method-key', key); + el.setAttribute('data-tip', info.tip || ''); + }); + }); + + /* Click handler for method-info headings */ + document.addEventListener('click', function(e) { + var el = e.target.closest('[data-method-key]'); + if (!el || el.tagName.toLowerCase() !== 'h3') return; + var key = el.getAttribute('data-method-key'); + if (typeof _F2A_METHOD_INFO === 'undefined') return; + var lang = (typeof _f2aLang !== 'undefined') ? 
_f2aLang : 'en'; + var info = (_F2A_METHOD_INFO[lang] && _F2A_METHOD_INFO[lang][key]) || + (_F2A_METHOD_INFO['en'] && _F2A_METHOD_INFO['en'][key]); + if (!info) return; + var title = el.textContent.trim(); + showModal(title, info.tip || '', info.desc || ''); + }); +})(); +""" + + +def _build_i18n_js(translations_json: str) -> str: + """Build the i18n JavaScript that handles language switching.""" + return f""" +var _F2A_I18N = {translations_json}; +var _f2aLang = 'en'; +function f2aSetLang(lang) {{ + if (!_F2A_I18N[lang]) lang = 'en'; + _f2aLang = lang; + document.querySelectorAll('[data-i18n]').forEach(function(el) {{ + var key = el.getAttribute('data-i18n'); + var text = _F2A_I18N[lang][key] || _F2A_I18N['en'][key] || key; + /* Interpolate {{var}} placeholders from data-i18n-args */ + var argsAttr = el.getAttribute('data-i18n-args'); + if (argsAttr) {{ + try {{ + var params = JSON.parse(argsAttr); + for (var k in params) {{ + text = text.replace('{{' + k + '}}', params[k]); + }} + }} catch(e) {{}} + }} + if (el.hasAttribute('data-i18n-html')) {{ + el.innerHTML = text; + }} else {{ + el.textContent = text; + }} + }}); + document.querySelectorAll('[data-i18n-title]').forEach(function(el) {{ + var key = el.getAttribute('data-i18n-title'); + var text = _F2A_I18N[lang][key] || _F2A_I18N['en'][key] || key; + document.title = text; + }}); + var sel = document.getElementById('f2a-lang-select'); + if (sel) sel.value = lang; + /* Update method-info modal title translations */ + document.querySelectorAll('[data-method-key]').forEach(function(el) {{ + var mkey = el.getAttribute('data-method-key'); + if (_F2A_METHOD_INFO && _F2A_METHOD_INFO[lang] && _F2A_METHOD_INFO[lang][mkey]) {{ + el.setAttribute('data-tip', _F2A_METHOD_INFO[lang][mkey].tip || ''); + }} + }}); + /* Update metric tooltip translations */ + if (typeof _F2A_METRIC_TIPS !== 'undefined') {{ + var tips = _F2A_METRIC_TIPS[lang] || _F2A_METRIC_TIPS['en'] || {{}}; + 
document.querySelectorAll('[data-tip-key]').forEach(function(el) {{ + var tkey = el.getAttribute('data-tip-key'); + if (tips[tkey]) {{ + el.setAttribute('data-tip', tips[tkey]); + }} + }}); + }} +}} +document.addEventListener('DOMContentLoaded', function() {{ + var sel = document.getElementById('f2a-lang-select'); + if (sel) {{ + sel.addEventListener('change', function() {{ f2aSetLang(this.value); }}); + }} +}}); +""" + + +# ===================================================================== +# Section builders +# ===================================================================== + +def _build_quality_bars(scores: dict[str, Any]) -> str: + """Build quality gauge HTML from quality scores dict.""" + if not scores: + return "" + + dims = [ + ("Completeness", "completeness", scores.get("completeness", 0)), + ("Uniqueness", "uniqueness", scores.get("uniqueness", 0)), + ("Consistency", "consistency", scores.get("consistency", 0)), + ("Validity", "validity", scores.get("validity", 0)), + ("Overall", "overall", scores.get("overall", 0)), + ] + parts: list[str] = [] + for label, key, val in dims: + pct = val * 100 + cls = "good" if pct >= 90 else ("fair" if pct >= 70 else "poor") + tip = _METRIC_TIPS.get(key, "") + tip_attr = f' data-tip="{tip}"' if tip else "" + parts.append( + f'
' + f'
{label}
' + f'
' + f'
{pct:.1f}%
' + f'
' + ) + return '
' + "".join(parts) + "
" + + +def _wrap_table(html: str) -> str: + """Wrap table HTML in a scrollable container.""" + return ( + '
' + + html + + '
' + ) + + +def _figures_to_html(figures: dict[str, plt.Figure], grid: bool = True) -> str: + """Convert figure dict to chart HTML (grid or full-width).""" + parts: list[str] = [] + for name, fig in figures.items(): + b64 = _fig_to_base64(fig) + if grid: + parts.append( + f'

{name}

' + f'{name}
' + ) + else: + parts.append( + f'

{name}

' + f'{name}
' + ) + if grid and parts: + return '
' + "\n".join(parts) + "
" + return "\n".join(parts) + + +def _build_section( + section_id: str, + title: str, + body: str, + condition: bool = True, + i18n_key: str = "", +) -> str: + """Wrap body content in a
element.""" + if not condition or not body.strip(): + return "" + i18n_attr = f' data-i18n="{i18n_key}"' if i18n_key else "" + return ( + f'
' + f'

{title}

' + f'{body}
' + ) + + +# ===================================================================== +# Section content builders +# ===================================================================== + +def _section_overview(schema_summary: dict[str, Any]) -> str: + return ( + '
' + + _dict_to_cards({ + "rows": schema_summary.get("rows", 0), + "columns": schema_summary.get("columns", 0), + "numeric": schema_summary.get("numeric", 0), + "categorical": schema_summary.get("categorical", 0), + "text": schema_summary.get("text", 0), + "datetime": schema_summary.get("datetime", 0), + "memory_mb": schema_summary.get("memory_mb", 0), + }) + + "
" + ) + + +def _section_quality(stats: Any) -> str: + body = _build_quality_bars(stats.quality_scores) + if not stats.quality_by_column.empty: + body += '

Column Quality

' + body += _wrap_table(_df_to_html(stats.quality_by_column)) + return body + + +def _section_preprocessing(stats: Any) -> str: + pp = stats.preprocessing + if pp is None: + return "" + body = '
' + body += _dict_to_cards({ + "original_rows": pp.original_shape[0], + "cleaned_rows": pp.cleaned_shape[0], + "columns_removed": pp.original_shape[1] - pp.cleaned_shape[1], + "duplicates_removed": pp.duplicate_rows_count, + "completeness": pp.completeness, + }) + body += "
" + if pp.cleaning_log: + body += '

Cleaning Log

" + issues = pp.issues_table() + if not issues.empty: + body += '

Detected Issues

' + body += _wrap_table(_df_to_html(issues)) + return body + + +def _section_descriptive(stats: Any, figures: dict) -> str: + body = "" + if not stats.summary.empty: + body += _wrap_table(_df_to_html(stats.summary)) + + chart_parts: dict[str, plt.Figure] = {} + for key in ("Distribution Histograms", "Boxplots"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=False) + return body + + +def _section_distribution(stats: Any, figures: dict) -> str: + body = "" + if not stats.distribution_info.empty: + body += '

Normality Tests & Shape

' + body += _wrap_table(_df_to_html(stats.distribution_info)) + + chart_parts: dict[str, plt.Figure] = {} + for key in ("Violin Plots", "Q-Q Plots"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=False) + return body + + +def _section_correlation(stats: Any, figures: dict) -> str: + body = "" + chart_parts: dict[str, plt.Figure] = {} + for key in ("Correlation Heatmap (Pearson)", "Correlation Heatmap (Spearman)"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + + if not stats.vif_table.empty: + body += '

Variance Inflation Factor (VIF)

' + body += _wrap_table(_df_to_html(stats.vif_table)) + + return body + + +def _section_missing(stats: Any, figures: dict) -> str: + body = "" + if not stats.missing_info.empty: + body += _wrap_table(_df_to_html(stats.missing_info)) + chart_parts: dict[str, plt.Figure] = {} + for key in ("Missing Data", "Missing Data Matrix"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + return body + + +def _section_outlier(stats: Any, figures: dict) -> str: + body = "" + if not stats.outlier_summary.empty: + body += _wrap_table(_df_to_html(stats.outlier_summary)) + if "Outlier Detection" in figures: + body += _figures_to_html({"Outlier Detection": figures["Outlier Detection"]}, grid=False) + return body + + +def _section_categorical(stats: Any, figures: dict) -> str: + body = "" + if not stats.categorical_analysis.empty: + body += '

Summary

' + body += _wrap_table(_df_to_html(stats.categorical_analysis)) + chart_parts: dict[str, plt.Figure] = {} + for key in ("Categorical Frequency", "Chi-Square Heatmap"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=False) + return body + + +def _section_feature_importance(stats: Any, figures: dict) -> str: + body = "" + if not stats.feature_importance.empty: + body += _wrap_table(_df_to_html(stats.feature_importance)) + if "Feature Importance" in figures: + body += _figures_to_html({"Feature Importance": figures["Feature Importance"]}, grid=False) + return body + + +def _section_pca(stats: Any, figures: dict) -> str: + body = "" + if stats.pca_summary: + body += '
' + _dict_to_cards(stats.pca_summary) + "
" + if not stats.pca_variance.empty: + body += '

Variance Explained

' + body += _wrap_table(_df_to_html(stats.pca_variance)) + if not stats.pca_loadings.empty: + body += '

Loadings

' + body += _wrap_table(_df_to_html(stats.pca_loadings)) + chart_parts: dict[str, plt.Figure] = {} + for key in ("PCA Scree Plot", "PCA Loadings"): + if key in figures: + chart_parts[key] = figures[key] + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + return body + + +def _section_duplicates(stats: Any) -> str: + if not stats.duplicate_stats: + return "" + return '
' + _dict_to_cards(stats.duplicate_stats) + "
" + + +def _section_warnings(warnings: list[str]) -> str: + if not warnings: + return "" + items = "".join(f"
  • {html_mod.escape(w)}
  • " for w in warnings) + return f'
    ' + + +# ===================================================================== +# Advanced section content builders +# ===================================================================== + +def _section_adv_distribution(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("advanced_distribution", {}) + if not adv: + return "" + body = "" + bf = adv.get("best_fit") + if bf is not None and not bf.empty: + body += '

    Best-Fit Distribution

    ' + body += _wrap_table(_df_to_html(bf)) + jb = adv.get("jarque_bera") + if jb is not None and not jb.empty: + body += '

    Jarque-Bera Normality Test

    ' + body += _wrap_table(_df_to_html(jb)) + pt = adv.get("power_transform") + if pt is not None and not pt.empty: + body += '

    Power Transform Recommendation

    ' + body += _wrap_table(_df_to_html(pt)) + kde = adv.get("kde_bandwidth") + if kde is not None and not kde.empty: + body += '

    KDE Bandwidth Analysis

    ' + body += _wrap_table(_df_to_html(kde)) + chart_keys = [ + "Best-Fit Distribution Overlay", "ECDF Plot", + "Power Transform Comparison", "Jarque-Bera Normality Test", + ] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=False) + return body + + +def _section_adv_correlation(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("advanced_correlation", {}) + if not adv: + return "" + body = "" + pcorr = adv.get("partial_correlation") + if pcorr is not None and not pcorr.empty: + body += '

    Partial Correlation Matrix

    ' + body += _wrap_table(_df_to_html(pcorr)) + mi = adv.get("mutual_information") + if mi is not None and not mi.empty: + body += '

    Mutual Information Matrix

    ' + body += _wrap_table(_df_to_html(mi)) + bci = adv.get("bootstrap_ci") + if bci is not None and not bci.empty: + body += '

    Bootstrap Correlation 95% CI

    ' + body += _wrap_table(_df_to_html(bci)) + dc = adv.get("distance_correlation") + if dc is not None and not dc.empty: + body += '

    Distance Correlation Matrix

    ' + body += _wrap_table(_df_to_html(dc)) + chart_keys = [ + "Partial Correlation Heatmap", "Mutual Information Heatmap", + "Bootstrap Correlation CI", "Correlation Network", + "Distance Correlation Heatmap", + ] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + return body + + +def _section_clustering(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("clustering", {}) + if not adv: + return "" + body = "" + km = adv.get("kmeans") + if km: + body += '

    K-Means Summary

    ' + summary_cards = { + "optimal_k": km.get("optimal_k"), + "best_silhouette": km.get("best_silhouette"), + } + sizes = km.get("cluster_sizes", {}) + if sizes: + summary_cards["largest_cluster"] = max(sizes.values()) if sizes else 0 + body += '
    ' + _dict_to_cards(summary_cards) + "
    " + db = adv.get("dbscan") + if db: + body += '

    DBSCAN Summary

    ' + body += '
    ' + _dict_to_cards({ + "n_clusters_dbscan": db.get("n_clusters", 0), + "noise_ratio": db.get("noise_ratio", 0), + "eps": db.get("eps", 0), + }) + "
    " + hc = adv.get("hierarchical") + if hc: + body += '

    Hierarchical Clustering

    ' + body += '
    ' + _dict_to_cards({ + "optimal_k": hc.get("optimal_k"), + "best_silhouette": hc.get("silhouette_score"), + }) + "
    " + profiles = adv.get("profiles") + if profiles is not None and not profiles.empty: + body += '

    Cluster Profiles

    ' + body += _wrap_table(_df_to_html(profiles)) + chart_keys = ["Elbow & Silhouette", "Cluster Scatter", "Dendrogram", "Cluster Profiles"] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + return body + + +def _section_dimreduction(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("dimreduction", {}) + if not adv: + return "" + body = "" + tsne = adv.get("tsne") + if tsne: + body += '

    t-SNE Embedding

    ' + body += '
    ' + _dict_to_cards({ + "kl_divergence": tsne.get("kl_divergence", 0), + "n_points": tsne.get("n_samples", 0), + }) + "
    " + umap_res = adv.get("umap") + if umap_res: + body += '

    UMAP Embedding

    ' + body += '
    ' + _dict_to_cards({ + "n_points": umap_res.get("n_samples", 0), + }) + "
    " + fa = adv.get("factor_analysis") + if fa: + body += '

    Factor Analysis

    ' + body += '
    ' + _dict_to_cards({ + "n_factors": fa.get("n_factors", 0), + }) + "
    " + loadings = adv.get("factor_loadings") + if loadings is not None and not loadings.empty: + body += '

    Factor Loadings

    ' + body += _wrap_table(_df_to_html(loadings)) + fc = adv.get("feature_contribution") + if fc is not None and not fc.empty: + body += '

    PCA-Weighted Feature Contribution

    ' + body += _wrap_table(_df_to_html(fc)) + + # Dim-reduction charts + chart_keys = [ + "t-SNE Scatter", "PCA Biplot", "Explained Variance Curve", + "Factor Loadings Heatmap", "Feature Contribution per PC", + ] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + + return body + + +def _section_feature_insights(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("feature_insights", {}) + if not adv: + return "" + body = "" + interact = adv.get("interactions") + if interact is not None and not interact.empty: + body += '

    Interaction Detection

    ' + body += _wrap_table(_df_to_html(interact)) + mono = adv.get("monotonic") + if mono is not None and not mono.empty: + body += '

    Monotonic Relationship Analysis

    ' + body += _wrap_table(_df_to_html(mono)) + binning = adv.get("binning") + if binning is not None and not binning.empty: + body += '

    Binning Analysis

    ' + body += _wrap_table(_df_to_html(binning)) + card = adv.get("cardinality") + if card is not None and not card.empty: + body += '

    Cardinality & Encoding Recommendation

    ' + body += _wrap_table(_df_to_html(card)) + leak = adv.get("leakage") + if leak is not None and not leak.empty: + body += '

    Leakage Risk Assessment

    ' + body += _wrap_table(_df_to_html(leak)) + return body + + +def _section_adv_anomaly(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("advanced_anomaly", {}) + if not adv: + return "" + body = "" + iso = adv.get("isolation_forest") + if iso: + body += '

    Isolation Forest

    ' + body += '
    ' + _dict_to_cards({ + "anomaly_count": iso.get("anomaly_count", 0), + "anomaly_ratio": iso.get("anomaly_ratio", 0), + }) + "
    " + lof = adv.get("local_outlier_factor") + if lof: + body += '

    Local Outlier Factor

    ' + body += '
    ' + _dict_to_cards({ + "anomaly_count": lof.get("anomaly_count", 0), + "anomaly_ratio": lof.get("anomaly_ratio", 0), + }) + "
    " + maha = adv.get("mahalanobis") + if maha: + body += '

    Mahalanobis Distance

    ' + body += '
    ' + _dict_to_cards({ + "anomaly_count": maha.get("anomaly_count", 0), + "anomaly_ratio": maha.get("anomaly_ratio", 0), + }) + "
    " + cons = adv.get("consensus") + if cons: + body += '

    Consensus (>=2/3 agree)

    ' + body += '
    ' + _dict_to_cards({ + "consensus_count": cons.get("consensus_count", 0), + "consensus_ratio": cons.get("consensus_ratio", 0), + }) + "
    " + chart_keys = ["Anomaly Scatter", "Mahalanobis Distance", "Consensus Anomaly Comparison"] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + return body + + +def _section_stat_tests(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats.get("statistical_tests", {}) + if not adv: + return "" + body = "" + for key, title, i18n_key in [ + ("levene", "Levene's Test (Equality of Variances)", "test_levene"), + ("kruskal_wallis", "Kruskal-Wallis Test", "test_kruskal_wallis"), + ("mann_whitney", "Mann-Whitney U Test", "test_mann_whitney"), + ("chi_square_goodness", "Chi-Square Goodness of Fit", "test_chi_square"), + ("grubbs", "Grubbs Outlier Test", "test_grubbs"), + ("adf", "Augmented Dickey-Fuller (Stationarity)", "test_adf"), + ]: + data = adv.get(key) + if data is not None and isinstance(data, pd.DataFrame) and not data.empty: + body += f'

    {title}

    ' + body += _wrap_table(_df_to_html(data)) + elif data is not None and isinstance(data, dict) and data: + body += f'

    {title}

    ' + body += '
    ' + _dict_to_cards(data) + "
    " + return body + + +def _section_data_profiling(stats: Any, figures: dict[str, plt.Figure]) -> str: + adv = stats.advanced_stats + profiling = adv.get("data_profiling", {}) + body = "" + + # Basic profiling metrics + if profiling: + body += '

    Dataset Profile

    ' + body += '
    ' + _dict_to_cards(profiling) + "
    " + + # Column roles + roles = adv.get("column_roles", {}) + roles_df = roles.get("summary_df") + if roles_df is not None and isinstance(roles_df, pd.DataFrame) and not roles_df.empty: + body += '

    Column Roles

    ' + body += _wrap_table(_df_to_html(roles_df)) + + # ML Readiness + ml = adv.get("ml_readiness", {}) + if ml: + body += '

    ML Readiness

    ' + grade = ml.get("grade", "?") + overall = ml.get("overall", 0) + dims = ml.get("dimensions", {}) + body += ( + f'
    ' + f'
    Overall Score' + f'{overall:.0f}/100 ({grade})
    ' + ) + for dim_name, dim_score in dims.items(): + body += ( + f'
    {dim_name}' + f'{dim_score:.1f}
    ' + ) + body += "
    " + + blocking = ml.get("blocking_issues", []) + if blocking: + body += '

    Blocking Issues

    " + + suggestions = ml.get("suggestions", []) + if suggestions: + body += '

    Suggestions

    " + + return body + + +def _section_insights(stats: Any, figures: dict[str, plt.Figure]) -> str: + """Build Insights sub-tab content.""" + insights_data = stats.advanced_stats.get("insights", {}) + if not insights_data: + return "" + + body = "" + + # Executive summary + exec_summary = insights_data.get("executive_summary", "") + if exec_summary: + body += ( + '
    ' + f'

    Executive Summary

    ' + f'

    {html_mod.escape(exec_summary)}

    ' + '
    ' + ) + + # Summary stats + summary = insights_data.get("summary", {}) + if summary: + body += '
    ' + body += f'
    Total Insights{summary.get("total", 0)}
    ' + by_sev = summary.get("by_severity", {}) + for sev, count in by_sev.items(): + color = {"critical": "#e74c3c", "warning": "#f39c12", "info": "#3498db", "opportunity": "#2ecc71"}.get(sev, "#95a5a6") + body += ( + f'
    ' + f'{sev.title()}' + f'{count}
    ' + ) + body += "
    " + + # All insight items (top 20) + all_insights = insights_data.get("all_insights", []) + if all_insights: + sorted_insights = sorted(all_insights, key=lambda i: i.get("priority_score", 0), reverse=True) + body += '

    Insight Details

    ' + body += '
    ' + for ins in sorted_insights[:20]: + sev = ins.get("severity", "info") + color = {"critical": "#e74c3c", "warning": "#f39c12", "info": "#3498db", "opportunity": "#2ecc71"}.get(sev, "#95a5a6") + title = html_mod.escape(ins.get("title", "")) + desc = html_mod.escape(ins.get("description", "")) + category = html_mod.escape(ins.get("category", "")) + score = ins.get("priority_score", 0) + body += ( + f'
    ' + f'
    ' + f'{title}' + f'{sev.upper()} · {score:.1f}' + f'
    ' + f'
    {category}
    ' + f'

    {desc}

    ' + ) + actions = ins.get("action_items", []) + if actions: + body += '
      ' + for a in actions[:3]: + body += f'
    • {html_mod.escape(str(a))}
    • ' + body += "
    " + body += "
    " + body += "
    " + + # Insight charts + chart_keys = ["Insight Severity Distribution", "Insight Categories", "Top Insights", "Action Items Summary"] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + + return body + + +def _section_cross_analysis(stats: Any, figures: dict[str, plt.Figure]) -> str: + """Build Cross Analysis sub-tab content.""" + cross = stats.advanced_stats.get("cross_analysis", {}) + if not cross: + return "" + + body = "" + + # Outlier by cluster table + obc = cross.get("outlier_by_cluster", {}) + per_cluster = obc.get("per_cluster") + if per_cluster is not None: + if isinstance(per_cluster, pd.DataFrame) and not per_cluster.empty: + body += '

    Anomaly Distribution by Cluster

    ' + body += _wrap_table(_df_to_html(per_cluster)) + + # Distribution–outlier fitness + dof = cross.get("distribution_outlier_fitness", {}) + rec_df = dof.get("recommendations") if isinstance(dof, dict) else dof + if rec_df is not None and isinstance(rec_df, pd.DataFrame) and not rec_df.empty: + body += '

    Outlier Method Recommendation

    ' + body += _wrap_table(_df_to_html(rec_df)) + + # Importance vs. missing risk + ivm = cross.get("importance_vs_missing", {}) + risk_table = ivm.get("risk_table") if isinstance(ivm, dict) else ivm + if risk_table is not None and isinstance(risk_table, pd.DataFrame) and not risk_table.empty: + body += '

    Feature Importance vs. Missing Rate

    ' + body += _wrap_table(_df_to_html(risk_table)) + + # Simpson's paradox + sp = cross.get("simpson_paradox", {}) + sp_cases = sp.get("cases", []) if isinstance(sp, dict) else [] + if sp_cases: + body += '

    Simpson\'s Paradox Detection

    ' + body += '
    ' + for case in sp_cases[:5]: + body += ( + f'
    ' + f'{html_mod.escape(str(case.get("col_a", "?")))} vs ' + f'{html_mod.escape(str(case.get("col_b", "?")))}' + f'Overall r={case.get("overall_corr", 0):+.3f}
    ' + ) + body += "
    " + + # Cross-analysis charts + chart_keys = [ + "Anomaly by Cluster", "Missing Correlation (Cross)", + "Simpson's Paradox", "Importance vs Missing", "Unified 2D Embedding", + ] + chart_parts: dict[str, plt.Figure] = {k: figures[k] for k in chart_keys if k in figures} + if chart_parts: + body += _figures_to_html(chart_parts, grid=True) + + return body + + +# ===================================================================== +# Navigation links +# ===================================================================== + +_SECTION_ORDER = [ + ("overview", "Overview", "nav_overview"), + ("quality", "Quality", "nav_quality"), + ("preprocessing", "Preprocessing", "nav_preprocessing"), + ("descriptive", "Descriptive", "nav_descriptive"), + ("distribution", "Distribution", "nav_distribution"), + ("correlation", "Correlation", "nav_correlation"), + ("missing", "Missing Data", "nav_missing"), + ("outlier", "Outliers", "nav_outlier"), + ("categorical", "Categorical", "nav_categorical"), + ("importance", "Feature Importance", "nav_importance"), + ("pca", "PCA", "nav_pca"), + ("duplicates", "Duplicates", "nav_duplicates"), + ("warnings-section", "Warnings", "nav_warnings"), +] + +_ADV_SUB_TABS = [ + ("adv-dist", "Distribution+"), + ("adv-corr", "Correlation+"), + ("clustering", "Clustering"), + ("dimreduction", "Dim. Reduction"), + ("feat-insights", "Feature Insights"), + ("adv-anomaly", "Anomaly+"), + ("stat-tests", "Statistical Tests"), + ("data-profile", "Data Profile"), +] + + +def _build_sub_tabs( + prefix: str, + basic_html: str, + stats: Any, + figures: dict[str, plt.Figure], + config: AnalysisConfig, +) -> str: + """Build 2nd-depth sub-tab structure (Basic + Advanced categories). + + If advanced is disabled or there is no advanced data, return basic_html + directly (no sub-tab wrapper). 
+ """ + if not config.advanced: + return basic_html + + adv = getattr(stats, "advanced_stats", {}) + if not adv: + return basic_html + + # Build advanced tab contents + # (key, tab_label, section_title, tab_i18n_key, section_i18n_key, builder_fn) + adv_builders: list[tuple[str, str, str, str, str, Any]] = [ + ("insights", "Key Insights", "Auto-Generated Insights", + "tab_insights", "adv_insights", + lambda: _section_insights(stats, figures)), + ("adv-dist", "Distribution+", "Advanced Distribution Analysis", + "tab_adv_dist", "adv_distribution", + lambda: _section_adv_distribution(stats, figures)), + ("adv-corr", "Correlation+", "Advanced Correlation Analysis", + "tab_adv_corr", "adv_correlation", + lambda: _section_adv_correlation(stats, figures)), + ("clustering", "Clustering", "Clustering Analysis", + "tab_clustering", "adv_clustering", + lambda: _section_clustering(stats, figures)), + ("dimreduction", "Dim. Reduction", "Dimensionality Reduction", + "tab_dimreduction", "adv_dimreduction", + lambda: _section_dimreduction(stats, figures)), + ("feat-insights", "Feature Insights", "Feature Engineering Insights", + "tab_feat_insights", "adv_feat_insights", + lambda: _section_feature_insights(stats, figures)), + ("cross-analysis", "Cross Analysis", "Cross-Dimensional Analysis", + "tab_cross_analysis", "adv_cross_analysis", + lambda: _section_cross_analysis(stats, figures)), + ("adv-anomaly", "Anomaly+", "Advanced Anomaly Detection", + "tab_adv_anomaly", "adv_anomaly", + lambda: _section_adv_anomaly(stats, figures)), + ("stat-tests", "Stat Tests", "Statistical Tests", + "tab_stat_tests", "adv_stat_tests", + lambda: _section_stat_tests(stats, figures)), + ("data-profile", "Data Profile", "Data Profiling Summary", + "tab_data_profile", "adv_data_profile", + lambda: _section_data_profiling(stats, figures)), + ] + + group_id = f"stg-{prefix}" + basic_tab_id = f"{prefix}-basic" + + buttons: list[str] = [ + f'""" + ] + contents: list[str] = [ + f'
    {basic_html}
    ' + ] + + for key, label, section_title, tab_i18n, section_i18n, builder_fn in adv_builders: + tab_id = f"{prefix}-{key}" + try: + body = builder_fn() + except Exception: + body = "" + if not body.strip(): + continue + wrapped = ( + f'

    {section_title}' + f'ADV

    {body}
    ' + ) + buttons.append( + f'""" + ) + contents.append( + f'
    {wrapped}
    ' + ) + + if len(buttons) <= 1: + return basic_html + + return ( + f'
    ' + f'
    {"".join(buttons)}
    ' + f'{"".join(contents)}' + f'
    ' + ) + + +# ===================================================================== +# Report Generator +# ===================================================================== + +class ReportGenerator: + """Generate comprehensive HTML reports from analysis results.""" + + # -- Single partition ------------------------------------------------- + + def generate_html( + self, + dataset_name: str, + schema_summary: dict[str, Any], + stats: Any, + figures: dict[str, plt.Figure], + warnings: list[str] | None = None, + config: AnalysisConfig | None = None, + analysis_started_at: str = "", + analysis_duration_sec: float = 0.0, + ) -> str: + """Generate a full HTML report string.""" + warnings = warnings or [] + config = config or AnalysisConfig() + + # Build basic sections with i18n keys + basic_sections = "" + basic_sections += _build_section("overview", "Overview", _section_overview(schema_summary), i18n_key="section_overview") + basic_sections += _build_section("quality", "Data Quality", _section_quality(stats), config.quality_score, i18n_key="section_quality") + basic_sections += _build_section("preprocessing", "Preprocessing", _section_preprocessing(stats), config.preprocessing, i18n_key="section_preprocessing") + basic_sections += _build_section("descriptive", "Descriptive Statistics", _section_descriptive(stats, figures), config.descriptive, i18n_key="section_descriptive") + basic_sections += _build_section("distribution", "Distribution Analysis", _section_distribution(stats, figures), config.distribution, i18n_key="section_distribution") + basic_sections += _build_section("correlation", "Correlation Analysis", _section_correlation(stats, figures), config.correlation, i18n_key="section_correlation") + basic_sections += _build_section("missing", "Missing Data Analysis", _section_missing(stats, figures), i18n_key="section_missing") + basic_sections += _build_section("outlier", "Outlier Detection", _section_outlier(stats, figures), config.outlier, 
i18n_key="section_outlier") + basic_sections += _build_section("categorical", "Categorical Analysis", _section_categorical(stats, figures), config.categorical, i18n_key="section_categorical") + basic_sections += _build_section("importance", "Feature Importance", _section_feature_importance(stats, figures), config.feature_importance, i18n_key="section_importance") + basic_sections += _build_section("pca", "PCA Analysis", _section_pca(stats, figures), config.pca, i18n_key="section_pca") + basic_sections += _build_section("duplicates", "Duplicate Analysis", _section_duplicates(stats), config.duplicates, i18n_key="section_duplicates") + basic_sections += _build_section("warnings-section", "Warnings", _section_warnings(warnings), bool(warnings), i18n_key="section_warnings") + + # Wrap with 2-depth sub-tabs (Basic / Advanced categories) + sections_html = _build_sub_tabs("single", basic_sections, stats, figures, config) + + nav_links = "".join( + f'{label}' + for sid, label, i18n_key in _SECTION_ORDER + ) + rows = schema_summary.get("rows", 0) + cols = schema_summary.get("columns", 0) + + # Language selector + lang_options = "".join( + f'' + for l in SUPPORTED_LANGUAGES + ) + lang_selector = ( + '
    ' + f' ' + f'' + '
    ' + ) + + # Analysis meta (timing) + meta_html = "" + if analysis_started_at: + dur = f"{analysis_duration_sec:.1f}s" if analysis_duration_sec else "" + meta_html = ( + '
    ' + f'Analysis Time: {html_mod.escape(analysis_started_at)}' + + (f' — Duration: {dur}' if dur else "") + + '
    ' + ) + + # i18n JS + i18n_js = _build_i18n_js(json.dumps(TRANSLATIONS, ensure_ascii=False)) + method_info_json = get_method_info_json() + metric_tips_json = get_metric_tips_json() + + html = f""" + + + + +f2a Report - {html_mod.escape(dataset_name)} + + + +
    + {lang_selector} +

    f2a Analysis Report

    +

    {html_mod.escape(dataset_name)} — + {rows:,} rows x + {cols} columns

    + {meta_html} +
    + +
    +{sections_html} +
    + + + + + + + + + + + +""" + return html + + def save_html(self, output_path: str | Path, **kwargs: Any) -> Path: + """Save single-partition HTML report to file.""" + path = Path(output_path) + path.parent.mkdir(parents=True, exist_ok=True) + html = self.generate_html(**kwargs) + path.write_text(html, encoding="utf-8") + logger.info("Report saved: %s", path) + return path + + # -- Multi-subset ----------------------------------------------------- + + def generate_html_multi( + self, + dataset_name: str, + sections: list[dict[str, Any]], + config: AnalysisConfig | None = None, + analysis_started_at: str = "", + analysis_duration_sec: float = 0.0, + ) -> str: + """Generate a multi-subset tabbed HTML report.""" + config = config or AnalysisConfig() + + tab_buttons: list[str] = [] + tab_contents: list[str] = [] + + for idx, sec in enumerate(sections): + tab_id = f"tab-{idx}" + label = f"{sec['subset']} / {sec['split']}" + active = "active" if idx == 0 else "" + + tab_buttons.append( + f'""" + ) + + s = sec["stats"] + figures = sec.get("figures", {}) + schema = sec["schema_summary"] + sec_warnings = sec.get("warnings", []) + + # Build basic sections for this subset (with i18n keys) + basic_inner = "" + basic_inner += _build_section(f"{tab_id}-overview", "Overview", _section_overview(schema), i18n_key="section_overview") + basic_inner += _build_section(f"{tab_id}-quality", "Data Quality", _section_quality(s), config.quality_score, i18n_key="section_quality") + basic_inner += _build_section(f"{tab_id}-preprocessing", "Preprocessing", _section_preprocessing(s), config.preprocessing, i18n_key="section_preprocessing") + basic_inner += _build_section(f"{tab_id}-descriptive", "Descriptive Statistics", _section_descriptive(s, figures), config.descriptive, i18n_key="section_descriptive") + basic_inner += _build_section(f"{tab_id}-distribution", "Distribution Analysis", _section_distribution(s, figures), config.distribution, i18n_key="section_distribution") + basic_inner += 
_build_section(f"{tab_id}-correlation", "Correlation Analysis", _section_correlation(s, figures), config.correlation, i18n_key="section_correlation") + basic_inner += _build_section(f"{tab_id}-missing", "Missing Data", _section_missing(s, figures), i18n_key="section_missing") + basic_inner += _build_section(f"{tab_id}-outlier", "Outlier Detection", _section_outlier(s, figures), config.outlier, i18n_key="section_outlier") + basic_inner += _build_section(f"{tab_id}-categorical", "Categorical Analysis", _section_categorical(s, figures), config.categorical, i18n_key="section_categorical") + basic_inner += _build_section(f"{tab_id}-importance", "Feature Importance", _section_feature_importance(s, figures), config.feature_importance, i18n_key="section_importance") + basic_inner += _build_section(f"{tab_id}-pca", "PCA Analysis", _section_pca(s, figures), config.pca, i18n_key="section_pca") + basic_inner += _build_section(f"{tab_id}-duplicates", "Duplicates", _section_duplicates(s), config.duplicates, i18n_key="section_duplicates") + basic_inner += _build_section(f"{tab_id}-warnings", "Warnings", _section_warnings(sec_warnings), bool(sec_warnings), i18n_key="section_warnings") + + # Wrap with 2-depth sub-tabs + inner = _build_sub_tabs(tab_id, basic_inner, s, figures, config) + + display = "block" if idx == 0 else "none" + tab_contents.append( + f'
    ' + f"

    {label}

    {inner}
    " + ) + + total_rows = sum(s["schema_summary"].get("rows", 0) for s in sections) + tabs_html = "\n".join(tab_buttons) + content_html = "\n".join(tab_contents) + + # Language selector + lang_options = "".join( + f'' + for l in SUPPORTED_LANGUAGES + ) + lang_selector = ( + '
    ' + f' ' + f'' + '
    ' + ) + + # Analysis meta (timing) + meta_html = "" + if analysis_started_at: + dur = f"{analysis_duration_sec:.1f}s" if analysis_duration_sec else "" + meta_html = ( + '
    ' + f'Analysis Time: {html_mod.escape(analysis_started_at)}' + + (f' — Duration: {dur}' if dur else "") + + '
    ' + ) + + # i18n JS + i18n_js = _build_i18n_js(json.dumps(TRANSLATIONS, ensure_ascii=False)) + method_info_json = get_method_info_json() + metric_tips_json = get_metric_tips_json() + + # Pre-format summary values for i18n interpolation + _total_fmt = f"{total_rows:,}" + _count_fmt = str(len(sections)) + _i18n_args_summary = html_mod.escape(json.dumps({"total": _total_fmt, "count": _count_fmt}, ensure_ascii=False)) + + html = f""" + + + + +f2a Report - {html_mod.escape(dataset_name)} + + + +
    + {lang_selector} +

    f2a Analysis Report

    +

    {html_mod.escape(dataset_name)}

    + {meta_html} +
    +
    +
    + Total: {total_rows:,} rows across + {len(sections)} subsets / splits +
    +
    {tabs_html}
    + {content_html} +
    + + + + + + + + + + + +""" + return html + + def save_html_multi(self, output_path: str | Path, **kwargs: Any) -> Path: + """Save multi-subset HTML report to file.""" + path = Path(output_path) + path.parent.mkdir(parents=True, exist_ok=True) + html = self.generate_html_multi(**kwargs) + path.write_text(html, encoding="utf-8") + logger.info("Report saved: %s", path) + return path diff --git a/f2a/report/i18n.py b/f2a/report/i18n.py new file mode 100644 index 0000000..8cfa7e3 --- /dev/null +++ b/f2a/report/i18n.py @@ -0,0 +1,4836 @@ +"""Internationalization (i18n) module for HTML report generation. + +Provides translation dictionaries for 6 languages: +English (en), Korean (ko), Chinese (zh), Japanese (ja), German (de), French (fr). + +The keys are semantic identifiers used in generator.py; the values are the +translated user-visible strings. +""" + +from __future__ import annotations + +# -- Supported languages ---------------------------------------------- + +SUPPORTED_LANGUAGES: list[dict[str, str]] = [ + {"code": "en", "label": "English"}, + {"code": "ko", "label": "한국어"}, + {"code": "zh", "label": "中文"}, + {"code": "ja", "label": "日本語"}, + {"code": "de", "label": "Deutsch"}, + {"code": "fr", "label": "Français"}, +] + +DEFAULT_LANG = "en" + +# -- Translation dictionary ------------------------------------------- +# Flat dict: TRANSLATIONS[lang_code][key] = translated string + +TRANSLATIONS: dict[str, dict[str, str]] = {} + +# ----- English (en) ----- +TRANSLATIONS["en"] = { + # Page-level + "page_title": "f2a Analysis Report", + "report_header": "f2a Analysis Report", + "generated_by": "Generated by", + "analysis_time": "Analysis Time", + "duration": "Duration", + "rows": "rows", + "columns": "columns", + "language": "Language", + + # Navigation / section titles + "nav_overview": "Overview", + "nav_quality": "Quality", + "nav_preprocessing": "Preprocessing", + "nav_descriptive": "Descriptive", + "nav_distribution": "Distribution", + "nav_correlation": 
"Correlation", + "nav_missing": "Missing Data", + "nav_outlier": "Outliers", + "nav_categorical": "Categorical", + "nav_importance": "Feature Importance", + "nav_pca": "PCA", + "nav_duplicates": "Duplicates", + "nav_warnings": "Warnings", + + # Section titles (full) + "section_overview": "Overview", + "section_quality": "Data Quality", + "section_preprocessing": "Preprocessing", + "section_descriptive": "Descriptive Statistics", + "section_distribution": "Distribution Analysis", + "section_correlation": "Correlation Analysis", + "section_missing": "Missing Data Analysis", + "section_outlier": "Outlier Detection", + "section_categorical": "Categorical Analysis", + "section_importance": "Feature Importance", + "section_pca": "PCA Analysis", + "section_duplicates": "Duplicate Analysis", + "section_warnings": "Warnings", + + # Sub-tab labels + "tab_basic": "Basic", + "tab_adv_dist": "Distribution+", + "tab_adv_corr": "Correlation+", + "tab_clustering": "Clustering", + "tab_dimreduction": "Dim. 
Reduction", + "tab_feat_insights": "Feature Insights", + "tab_adv_anomaly": "Anomaly+", + "tab_stat_tests": "Statistical Tests", + "tab_data_profile": "Data Profile", + "tab_insights": "Key Insights", + "tab_cross_analysis": "Cross Analysis", + + # Advanced section titles + "adv_distribution": "Advanced Distribution Analysis", + "adv_correlation": "Advanced Correlation Analysis", + "adv_clustering": "Clustering Analysis", + "adv_dimreduction": "Dimensionality Reduction", + "adv_feat_insights": "Feature Engineering Insights", + "adv_anomaly": "Advanced Anomaly Detection", + "adv_stat_tests": "Statistical Tests", + "adv_data_profile": "Data Profiling Summary", + "adv_insights": "Auto-Generated Insights", + "adv_cross_analysis": "Cross-Dimensional Analysis", + + # Sub-section headings + "sub_best_fit": "Best-Fit Distribution", + "sub_jarque_bera": "Jarque-Bera Normality Test", + "sub_power_transform": "Power Transform Recommendation", + "sub_kde_bandwidth": "KDE Bandwidth Analysis", + "sub_partial_corr": "Partial Correlation Matrix", + "sub_mutual_info": "Mutual Information Matrix", + "sub_bootstrap_ci": "Bootstrap Correlation 95% CI", + "sub_distance_corr": "Distance Correlation Matrix", + "sub_kmeans": "K-Means Summary", + "sub_dbscan": "DBSCAN Summary", + "sub_hierarchical": "Hierarchical Clustering", + "sub_cluster_profiles": "Cluster Profiles", + "sub_tsne": "t-SNE Embedding", + "sub_umap": "UMAP Embedding", + "sub_factor_analysis": "Factor Analysis", + "sub_factor_loadings": "Factor Loadings", + "sub_feature_contrib": "PCA-Weighted Feature Contribution", + "sub_interaction": "Interaction Detection", + "sub_monotonic": "Monotonic Relationship Analysis", + "sub_binning": "Binning Analysis", + "sub_cardinality": "Cardinality & Encoding Recommendation", + "sub_leakage": "Leakage Risk Assessment", + "sub_iso_forest": "Isolation Forest", + "sub_lof": "Local Outlier Factor", + "sub_mahalanobis": "Mahalanobis Distance", + "sub_consensus": "Consensus (>=2/3 agree)", + 
"sub_normality_tests": "Normality Tests & Shape", + "sub_vif": "Variance Inflation Factor (VIF)", + "sub_cleaning_log": "Cleaning Log", + "sub_detected_issues": "Detected Issues", + "sub_variance_explained": "Variance Explained", + "sub_loadings": "Loadings", + "sub_column_quality": "Column Quality", + "sub_summary": "Summary", + + # Enhancement sub-sections + "sub_executive_summary": "Executive Summary", + "sub_insight_details": "Insight Details", + "sub_data_profile_summary": "Dataset Profile", + "sub_column_roles": "Column Roles", + "sub_ml_readiness": "ML Readiness", + "sub_outlier_cluster": "Anomaly Distribution by Cluster", + "sub_dist_outlier_fitness": "Outlier Method Recommendation", + "sub_importance_missing": "Feature Importance vs. Missing Rate", + "sub_simpson_paradox": "Simpson's Paradox Detection", + + # Stat test names + "test_levene": "Levene's Test (Equality of Variances)", + "test_kruskal_wallis": "Kruskal-Wallis Test", + "test_mann_whitney": "Mann-Whitney U Test", + "test_chi_square": "Chi-Square Goodness of Fit", + "test_grubbs": "Grubbs Outlier Test", + "test_adf": "Augmented Dickey-Fuller (Stationarity)", + + # Multi-subset + "total_rows_across": "Total: {total} rows across {count} subsets / splits", + "subsets": "subsets / splits", + + # Footer + "footer_text": "Generated by f2a (File to Analysis)", + + # Misc + "no_data": "No data available", + "value_label": "Value", +} + +# ----- Korean (ko) ----- +TRANSLATIONS["ko"] = { + "page_title": "f2a 분석 보고서", + "report_header": "f2a 분석 보고서", + "generated_by": "생성:", + "analysis_time": "분석 시간", + "duration": "소요 시간", + "rows": "행", + "columns": "열", + "language": "언어", + + "nav_overview": "개요", + "nav_quality": "품질", + "nav_preprocessing": "전처리", + "nav_descriptive": "기술통계", + "nav_distribution": "분포", + "nav_correlation": "상관관계", + "nav_missing": "결측치", + "nav_outlier": "이상치", + "nav_categorical": "범주형", + "nav_importance": "특성 중요도", + "nav_pca": "PCA", + "nav_duplicates": "중복", + "nav_warnings": 
"경고", + + "section_overview": "개요", + "section_quality": "데이터 품질", + "section_preprocessing": "전처리", + "section_descriptive": "기술 통계량", + "section_distribution": "분포 분석", + "section_correlation": "상관 분석", + "section_missing": "결측치 분석", + "section_outlier": "이상치 탐지", + "section_categorical": "범주형 분석", + "section_importance": "특성 중요도", + "section_pca": "PCA 분석", + "section_duplicates": "중복 분석", + "section_warnings": "경고", + + "tab_basic": "기본", + "tab_adv_dist": "분포+", + "tab_adv_corr": "상관+", + "tab_clustering": "클러스터링", + "tab_dimreduction": "차원 축소", + "tab_feat_insights": "특성 인사이트", + "tab_adv_anomaly": "이상치+", + "tab_stat_tests": "통계 검정", + "tab_data_profile": "데이터 프로필", + "tab_insights": "핵심 인사이트", + "tab_cross_analysis": "교차 분석", + + "adv_distribution": "고급 분포 분석", + "adv_correlation": "고급 상관 분석", + "adv_clustering": "클러스터링 분석", + "adv_dimreduction": "차원 축소", + "adv_feat_insights": "특성 엔지니어링 인사이트", + "adv_anomaly": "고급 이상치 탐지", + "adv_stat_tests": "통계 검정", + "adv_data_profile": "데이터 프로파일링 요약", + "adv_insights": "자동 생성 인사이트", + "adv_cross_analysis": "교차 차원 분석", + + "sub_best_fit": "최적 분포 적합", + "sub_jarque_bera": "자크-베라 정규성 검정", + "sub_power_transform": "거듭제곱 변환 권장", + "sub_kde_bandwidth": "KDE 대역폭 분석", + "sub_partial_corr": "편상관 행렬", + "sub_mutual_info": "상호 정보량 행렬", + "sub_bootstrap_ci": "부트스트랩 상관 95% CI", + "sub_distance_corr": "거리 상관 행렬", + "sub_kmeans": "K-평균 요약", + "sub_dbscan": "DBSCAN 요약", + "sub_hierarchical": "계층적 군집화", + "sub_cluster_profiles": "클러스터 프로필", + "sub_tsne": "t-SNE 임베딩", + "sub_umap": "UMAP 임베딩", + "sub_factor_analysis": "요인 분석", + "sub_factor_loadings": "요인 적재량", + "sub_feature_contrib": "PCA 가중 특성 기여도", + "sub_interaction": "상호작용 탐지", + "sub_monotonic": "단조 관계 분석", + "sub_binning": "구간화 분석", + "sub_cardinality": "카디널리티 및 인코딩 권장", + "sub_leakage": "누수 위험 평가", + "sub_iso_forest": "고립 포레스트", + "sub_lof": "로컬 이상치 팩터", + "sub_mahalanobis": "마할라노비스 거리", + "sub_consensus": "합의 (>=2/3 동의)", + "sub_normality_tests": "정규성 검정 및 형태", + "sub_vif": 
"분산 팽창 인자 (VIF)", + "sub_cleaning_log": "전처리 로그", + "sub_detected_issues": "탐지된 문제", + "sub_variance_explained": "설명된 분산", + "sub_loadings": "적재량", + "sub_column_quality": "컬럼별 품질", + "sub_summary": "요약", + + "test_levene": "레빈 검정 (분산 동질성)", + "test_kruskal_wallis": "크루스칼-왈리스 검정", + "test_mann_whitney": "만-휘트니 U 검정", + "test_chi_square": "카이제곱 적합도 검정", + "test_grubbs": "그럽스 이상치 검정", + "test_adf": "ADF 정상성 검정", + + "total_rows_across": "총 {total}행, {count}개 하위 데이터셋", + "subsets": "하위 데이터셋", + + "footer_text": "f2a (File to Analysis)로 생성됨", + + "no_data": "데이터 없음", + "value_label": "값", +} + +# ----- Chinese (zh) ----- +TRANSLATIONS["zh"] = { + "page_title": "f2a 分析报告", + "report_header": "f2a 分析报告", + "generated_by": "生成工具:", + "analysis_time": "分析时间", + "duration": "耗时", + "rows": "行", + "columns": "列", + "language": "语言", + + "nav_overview": "概览", + "nav_quality": "质量", + "nav_preprocessing": "预处理", + "nav_descriptive": "描述统计", + "nav_distribution": "分布", + "nav_correlation": "相关性", + "nav_missing": "缺失值", + "nav_outlier": "异常值", + "nav_categorical": "分类变量", + "nav_importance": "特征重要性", + "nav_pca": "PCA", + "nav_duplicates": "重复项", + "nav_warnings": "警告", + + "section_overview": "概览", + "section_quality": "数据质量", + "section_preprocessing": "预处理", + "section_descriptive": "描述性统计", + "section_distribution": "分布分析", + "section_correlation": "相关性分析", + "section_missing": "缺失数据分析", + "section_outlier": "异常值检测", + "section_categorical": "分类分析", + "section_importance": "特征重要性", + "section_pca": "PCA分析", + "section_duplicates": "重复项分析", + "section_warnings": "警告", + + "tab_basic": "基础", + "tab_adv_dist": "分布+", + "tab_adv_corr": "相关+", + "tab_clustering": "聚类", + "tab_dimreduction": "降维", + "tab_feat_insights": "特征洞察", + "tab_adv_anomaly": "异常+", + "tab_stat_tests": "统计检验", + "tab_data_profile": "数据概要", + "tab_insights": "关键洞察", + "tab_cross_analysis": "交叉分析", + + "adv_distribution": "高级分布分析", + "adv_correlation": "高级相关性分析", + "adv_clustering": "聚类分析", + 
"adv_dimreduction": "降维分析", + "adv_feat_insights": "特征工程洞察", + "adv_anomaly": "高级异常检测", + "adv_stat_tests": "统计检验", + "adv_data_profile": "数据概要统计", + "adv_insights": "自动生成的洞察", + "adv_cross_analysis": "跨维度分析", + + "sub_best_fit": "最佳拟合分布", + "sub_jarque_bera": "Jarque-Bera正态性检验", + "sub_power_transform": "幂变换推荐", + "sub_kde_bandwidth": "KDE带宽分析", + "sub_partial_corr": "偏相关矩阵", + "sub_mutual_info": "互信息矩阵", + "sub_bootstrap_ci": "自举法相关性95% CI", + "sub_distance_corr": "距离相关矩阵", + "sub_kmeans": "K-Means摘要", + "sub_dbscan": "DBSCAN摘要", + "sub_hierarchical": "层次聚类", + "sub_cluster_profiles": "聚类概况", + "sub_tsne": "t-SNE嵌入", + "sub_umap": "UMAP嵌入", + "sub_factor_analysis": "因子分析", + "sub_factor_loadings": "因子载荷", + "sub_feature_contrib": "PCA加权特征贡献", + "sub_interaction": "交互作用检测", + "sub_monotonic": "单调关系分析", + "sub_binning": "分箱分析", + "sub_cardinality": "基数与编码建议", + "sub_leakage": "数据泄漏风险评估", + "sub_iso_forest": "隔离森林", + "sub_lof": "局部离群因子", + "sub_mahalanobis": "马氏距离", + "sub_consensus": "共识 (>=2/3方法一致)", + "sub_normality_tests": "正态性检验与形态", + "sub_vif": "方差膨胀因子 (VIF)", + "sub_cleaning_log": "清洗日志", + "sub_detected_issues": "检测到的问题", + "sub_variance_explained": "解释方差", + "sub_loadings": "载荷", + "sub_column_quality": "列质量", + "sub_summary": "摘要", + + "test_levene": "Levene检验(方差齐性)", + "test_kruskal_wallis": "Kruskal-Wallis检验", + "test_mann_whitney": "Mann-Whitney U检验", + "test_chi_square": "卡方拟合优度检验", + "test_grubbs": "Grubbs异常值检验", + "test_adf": "ADF平稳性检验", + + "total_rows_across": "共 {total} 行,{count} 个子集/拆分", + "subsets": "子集/拆分", + + "footer_text": "由 f2a(File to Analysis)生成", + + "no_data": "暂无数据", + "value_label": "值", +} + +# ----- Japanese (ja) ----- +TRANSLATIONS["ja"] = { + "page_title": "f2a 分析レポート", + "report_header": "f2a 分析レポート", + "generated_by": "生成:", + "analysis_time": "分析時刻", + "duration": "所要時間", + "rows": "行", + "columns": "列", + "language": "言語", + + "nav_overview": "概要", + "nav_quality": "品質", + "nav_preprocessing": "前処理", + "nav_descriptive": 
"記述統計", + "nav_distribution": "分布", + "nav_correlation": "相関", + "nav_missing": "欠損値", + "nav_outlier": "外れ値", + "nav_categorical": "カテゴリ", + "nav_importance": "特徴量重要度", + "nav_pca": "PCA", + "nav_duplicates": "重複", + "nav_warnings": "警告", + + "section_overview": "概要", + "section_quality": "データ品質", + "section_preprocessing": "前処理", + "section_descriptive": "記述統計量", + "section_distribution": "分布分析", + "section_correlation": "相関分析", + "section_missing": "欠損データ分析", + "section_outlier": "外れ値検出", + "section_categorical": "カテゴリ分析", + "section_importance": "特徴量重要度", + "section_pca": "PCA分析", + "section_duplicates": "重複分析", + "section_warnings": "警告", + + "tab_basic": "基本", + "tab_adv_dist": "分布+", + "tab_adv_corr": "相関+", + "tab_clustering": "クラスタリング", + "tab_dimreduction": "次元削減", + "tab_feat_insights": "特徴量インサイト", + "tab_adv_anomaly": "異常+", + "tab_stat_tests": "統計検定", + "tab_data_profile": "データプロファイル", + "tab_insights": "主要インサイト", + "tab_cross_analysis": "クロス分析", + + "adv_distribution": "高度な分布分析", + "adv_correlation": "高度な相関分析", + "adv_clustering": "クラスタリング分析", + "adv_dimreduction": "次元削減分析", + "adv_feat_insights": "特徴量エンジニアリングインサイト", + "adv_anomaly": "高度な異常検出", + "adv_stat_tests": "統計検定", + "adv_data_profile": "データプロファイリング要約", + "adv_insights": "自動生成インサイト", + "adv_cross_analysis": "クロスディメンション分析", + + "sub_best_fit": "最適分布フィッティング", + "sub_jarque_bera": "Jarque-Bera正規性検定", + "sub_power_transform": "べき変換の推奨", + "sub_kde_bandwidth": "KDE帯域幅分析", + "sub_partial_corr": "偏相関行列", + "sub_mutual_info": "相互情報量行列", + "sub_bootstrap_ci": "ブートストラップ相関95% CI", + "sub_distance_corr": "距離相関行列", + "sub_kmeans": "K-Means要約", + "sub_dbscan": "DBSCAN要約", + "sub_hierarchical": "階層的クラスタリング", + "sub_cluster_profiles": "クラスタプロファイル", + "sub_tsne": "t-SNE埋め込み", + "sub_umap": "UMAP埋め込み", + "sub_factor_analysis": "因子分析", + "sub_factor_loadings": "因子負荷量", + "sub_feature_contrib": "PCA加重特徴量寄与", + "sub_interaction": "交互作用検出", + "sub_monotonic": "単調関係分析", + "sub_binning": "ビニング分析", + "sub_cardinality": 
"カーディナリティとエンコーディング推奨", + "sub_leakage": "リーク リスク評価", + "sub_iso_forest": "Isolation Forest", + "sub_lof": "局所外れ値因子", + "sub_mahalanobis": "マハラノビス距離", + "sub_consensus": "合意(>=2/3手法一致)", + "sub_normality_tests": "正規性検定と形状", + "sub_vif": "分散膨張係数(VIF)", + "sub_cleaning_log": "クリーニングログ", + "sub_detected_issues": "検出された問題", + "sub_variance_explained": "説明された分散", + "sub_loadings": "負荷量", + "sub_column_quality": "列品質", + "sub_summary": "要約", + + "test_levene": "Levene検定(等分散性)", + "test_kruskal_wallis": "Kruskal-Wallis検定", + "test_mann_whitney": "Mann-Whitney U検定", + "test_chi_square": "カイ二乗適合度検定", + "test_grubbs": "Grubbs外れ値検定", + "test_adf": "ADF定常性検定", + + "total_rows_across": "合計 {total} 行、{count} サブセット/分割", + "subsets": "サブセット/分割", + + "footer_text": "f2a(File to Analysis)で生成", + + "no_data": "データなし", + "value_label": "値", +} + +# ----- German (de) ----- +TRANSLATIONS["de"] = { + "page_title": "f2a Analysebericht", + "report_header": "f2a Analysebericht", + "generated_by": "Erstellt mit", + "analysis_time": "Analysezeit", + "duration": "Dauer", + "rows": "Zeilen", + "columns": "Spalten", + "language": "Sprache", + + "nav_overview": "Übersicht", + "nav_quality": "Qualität", + "nav_preprocessing": "Vorverarbeitung", + "nav_descriptive": "Deskriptiv", + "nav_distribution": "Verteilung", + "nav_correlation": "Korrelation", + "nav_missing": "Fehlende Daten", + "nav_outlier": "Ausreißer", + "nav_categorical": "Kategorial", + "nav_importance": "Merkmalswichtigkeit", + "nav_pca": "PCA", + "nav_duplicates": "Duplikate", + "nav_warnings": "Warnungen", + + "section_overview": "Übersicht", + "section_quality": "Datenqualität", + "section_preprocessing": "Vorverarbeitung", + "section_descriptive": "Deskriptive Statistik", + "section_distribution": "Verteilungsanalyse", + "section_correlation": "Korrelationsanalyse", + "section_missing": "Fehlende-Daten-Analyse", + "section_outlier": "Ausreißererkennung", + "section_categorical": "Kategoriale Analyse", + "section_importance": 
"Merkmalswichtigkeit", + "section_pca": "PCA-Analyse", + "section_duplicates": "Duplikatanalyse", + "section_warnings": "Warnungen", + + "tab_basic": "Basis", + "tab_adv_dist": "Verteilung+", + "tab_adv_corr": "Korrelation+", + "tab_clustering": "Clustering", + "tab_dimreduction": "Dim.-Reduktion", + "tab_feat_insights": "Merkmals-Insights", + "tab_adv_anomaly": "Anomalie+", + "tab_stat_tests": "Statistische Tests", + "tab_data_profile": "Datenprofil", + "tab_insights": "Schlüsselerkenntnisse", + "tab_cross_analysis": "Kreuzanalyse", + + "adv_distribution": "Erweiterte Verteilungsanalyse", + "adv_correlation": "Erweiterte Korrelationsanalyse", + "adv_clustering": "Clusteranalyse", + "adv_dimreduction": "Dimensionalitätsreduktion", + "adv_feat_insights": "Feature-Engineering-Insights", + "adv_anomaly": "Erweiterte Anomalieerkennung", + "adv_stat_tests": "Statistische Tests", + "adv_data_profile": "Datenprofilierungs-Zusammenfassung", + "adv_insights": "Automatisch generierte Erkenntnisse", + "adv_cross_analysis": "Dimensionsübergreifende Analyse", + + "sub_best_fit": "Best-Fit-Verteilung", + "sub_jarque_bera": "Jarque-Bera-Normalitätstest", + "sub_power_transform": "Potenztransformation-Empfehlung", + "sub_kde_bandwidth": "KDE-Bandbreitenanalyse", + "sub_partial_corr": "Partielle Korrelationsmatrix", + "sub_mutual_info": "Informationsmatrix", + "sub_bootstrap_ci": "Bootstrap-Korrelation 95%-KI", + "sub_distance_corr": "Distanzkorrelationsmatrix", + "sub_kmeans": "K-Means-Zusammenfassung", + "sub_dbscan": "DBSCAN-Zusammenfassung", + "sub_hierarchical": "Hierarchisches Clustering", + "sub_cluster_profiles": "Clusterprofile", + "sub_tsne": "t-SNE-Einbettung", + "sub_umap": "UMAP-Einbettung", + "sub_factor_analysis": "Faktorenanalyse", + "sub_factor_loadings": "Faktorladungen", + "sub_feature_contrib": "PCA-gewichteter Merkmalsbeitrag", + "sub_interaction": "Interaktionserkennung", + "sub_monotonic": "Monotone Beziehungsanalyse", + "sub_binning": 
"Diskretisierungsanalyse", + "sub_cardinality": "Kardinalität & Kodierungsempfehlung", + "sub_leakage": "Datenleck-Risikobewertung", + "sub_iso_forest": "Isolation Forest", + "sub_lof": "Lokaler Ausreißerfaktor", + "sub_mahalanobis": "Mahalanobis-Distanz", + "sub_consensus": "Konsens (>=2/3 Übereinstimmung)", + "sub_normality_tests": "Normalitätstests & Form", + "sub_vif": "Varianzinflationsfaktor (VIF)", + "sub_cleaning_log": "Bereinigungsprotokoll", + "sub_detected_issues": "Erkannte Probleme", + "sub_variance_explained": "Erklärte Varianz", + "sub_loadings": "Ladungen", + "sub_column_quality": "Spaltenqualität", + "sub_summary": "Zusammenfassung", + + "test_levene": "Levene-Test (Varianzhomogenität)", + "test_kruskal_wallis": "Kruskal-Wallis-Test", + "test_mann_whitney": "Mann-Whitney-U-Test", + "test_chi_square": "Chi-Quadrat-Anpassungstest", + "test_grubbs": "Grubbs-Ausreißertest", + "test_adf": "ADF-Stationaritätstest", + + "total_rows_across": "Gesamt: {total} Zeilen in {count} Teilmengen", + "subsets": "Teilmengen", + + "footer_text": "Erstellt mit f2a (File to Analysis)", + + "no_data": "Keine Daten verfügbar", + "value_label": "Wert", +} + +# ----- French (fr) ----- +TRANSLATIONS["fr"] = { + "page_title": "Rapport d'analyse f2a", + "report_header": "Rapport d'analyse f2a", + "generated_by": "Généré par", + "analysis_time": "Heure d'analyse", + "duration": "Durée", + "rows": "lignes", + "columns": "colonnes", + "language": "Langue", + + "nav_overview": "Aperçu", + "nav_quality": "Qualité", + "nav_preprocessing": "Prétraitement", + "nav_descriptive": "Descriptif", + "nav_distribution": "Distribution", + "nav_correlation": "Corrélation", + "nav_missing": "Données manquantes", + "nav_outlier": "Valeurs aberrantes", + "nav_categorical": "Catégoriel", + "nav_importance": "Importance des variables", + "nav_pca": "ACP", + "nav_duplicates": "Doublons", + "nav_warnings": "Avertissements", + + "section_overview": "Aperçu", + "section_quality": "Qualité des données", 
+ "section_preprocessing": "Prétraitement", + "section_descriptive": "Statistiques descriptives", + "section_distribution": "Analyse de distribution", + "section_correlation": "Analyse de corrélation", + "section_missing": "Analyse des données manquantes", + "section_outlier": "Détection des valeurs aberrantes", + "section_categorical": "Analyse catégorielle", + "section_importance": "Importance des variables", + "section_pca": "Analyse ACP", + "section_duplicates": "Analyse des doublons", + "section_warnings": "Avertissements", + + "tab_basic": "Base", + "tab_adv_dist": "Distribution+", + "tab_adv_corr": "Corrélation+", + "tab_clustering": "Clustering", + "tab_dimreduction": "Réd. de dim.", + "tab_feat_insights": "Insights variables", + "tab_adv_anomaly": "Anomalie+", + "tab_stat_tests": "Tests statistiques", + "tab_data_profile": "Profil de données", + "tab_insights": "Informations clés", + "tab_cross_analysis": "Analyse croisée", + + "adv_distribution": "Analyse avancée de distribution", + "adv_correlation": "Analyse avancée de corrélation", + "adv_clustering": "Analyse de clustering", + "adv_dimreduction": "Réduction de dimensionnalité", + "adv_feat_insights": "Insights d'ingénierie des variables", + "adv_anomaly": "Détection avancée d'anomalies", + "adv_stat_tests": "Tests statistiques", + "adv_data_profile": "Résumé du profilage de données", + "adv_insights": "Informations générées automatiquement", + "adv_cross_analysis": "Analyse interdimensionnelle", + + "sub_best_fit": "Meilleur ajustement de distribution", + "sub_jarque_bera": "Test de normalité Jarque-Bera", + "sub_power_transform": "Recommandation de transformation", + "sub_kde_bandwidth": "Analyse de bande passante KDE", + "sub_partial_corr": "Matrice de corrélation partielle", + "sub_mutual_info": "Matrice d'information mutuelle", + "sub_bootstrap_ci": "IC 95% de corrélation bootstrap", + "sub_distance_corr": "Matrice de corrélation de distance", + "sub_kmeans": "Résumé K-Means", + "sub_dbscan": 
"Résumé DBSCAN", + "sub_hierarchical": "Clustering hiérarchique", + "sub_cluster_profiles": "Profils de clusters", + "sub_tsne": "Projection t-SNE", + "sub_umap": "Projection UMAP", + "sub_factor_analysis": "Analyse factorielle", + "sub_factor_loadings": "Charges factorielles", + "sub_feature_contrib": "Contribution pondérée ACP", + "sub_interaction": "Détection d'interactions", + "sub_monotonic": "Analyse des relations monotones", + "sub_binning": "Analyse de discrétisation", + "sub_cardinality": "Cardinalité et recommandation d'encodage", + "sub_leakage": "Évaluation du risque de fuite", + "sub_iso_forest": "Isolation Forest", + "sub_lof": "Facteur local d'aberration", + "sub_mahalanobis": "Distance de Mahalanobis", + "sub_consensus": "Consensus (>=2/3 d'accord)", + "sub_normality_tests": "Tests de normalité et forme", + "sub_vif": "Facteur d'inflation de la variance (VIF)", + "sub_cleaning_log": "Journal de nettoyage", + "sub_detected_issues": "Problèmes détectés", + "sub_variance_explained": "Variance expliquée", + "sub_loadings": "Charges", + "sub_column_quality": "Qualité par colonne", + "sub_summary": "Résumé", + + "test_levene": "Test de Levene (homogénéité des variances)", + "test_kruskal_wallis": "Test de Kruskal-Wallis", + "test_mann_whitney": "Test de Mann-Whitney U", + "test_chi_square": "Test d'ajustement du chi-deux", + "test_grubbs": "Test de Grubbs", + "test_adf": "Test ADF (stationnarité)", + + "total_rows_across": "Total : {total} lignes dans {count} sous-ensembles", + "subsets": "sous-ensembles", + + "footer_text": "Généré par f2a (File to Analysis)", + + "no_data": "Aucune donnée disponible", + "value_label": "Valeur", +} + + +def t(key: str, lang: str = DEFAULT_LANG) -> str: + """Look up a translation. 
Falls back to English if key is missing.""" + return TRANSLATIONS.get(lang, TRANSLATIONS["en"]).get( + key, TRANSLATIONS["en"].get(key, key) + ) + + +def get_all_translations_json() -> str: + """Return the full translation dict as a JSON string for embedding in JS.""" + import json + return json.dumps(TRANSLATIONS, ensure_ascii=False) + + +# -- Method information (tooltip + modal descriptions) ----------------- +# METHOD_INFO[lang_code][method_key] = {"tip": "short hover text", "desc": "detailed HTML"} +# Keys match the data-i18n keys used on h3.section-subtitle elements. + +METHOD_INFO: dict[str, dict[str, dict[str, str]]] = {} + +# -- English ----------------------------------------------------------- +METHOD_INFO["en"] = { + # ===== Section-level ===== + "section_overview": { + "tip": "High-level summary of data shape, types, and memory footprint.", + "desc": ( + "

    Overview gives you a bird's-eye view of the entire dataset before you dive into " + "deeper analysis.

    " + "

    What you will find here:

    " + "" + "

    Why it matters: Verifying row counts and types first catches loading errors " + "(truncated files, wrong delimiters, encoding issues) before they silently corrupt downstream results.

    " + "

    Beginner tip: If the row count is much smaller than expected, your file may have " + "been loaded with the wrong separator. If a numeric column shows up as 'text', it probably " + "contains non-numeric characters that need cleaning.

    " + ), + }, + "section_quality": { + "tip": "Scores the dataset across completeness, uniqueness, consistency, and validity (0-100%).", + "desc": ( + "

    Data Quality Assessment evaluates your dataset along four independent dimensions, " + "each scored from 0 to 100%. Think of it as a health check-up for your data.

    " + "

    The four quality dimensions:

    " + "" + "

    How to read the scores:

    " + "" + "

    Overall score formula: 0.35 × Completeness + 0.25 × Uniqueness + " + "0.20 × Consistency + 0.20 × Validity. Completeness is weighted highest because missing " + "data affects nearly every analysis method.

    " + ), + }, + "section_preprocessing": { + "tip": "Documents all automatic cleaning and transformation steps applied before analysis.", + "desc": ( + "

    Preprocessing Log records every automatic cleaning action the system performed on " + "your raw data before running any analysis.

    " + "

    Why this matters: Reproducibility is the cornerstone of trustworthy analysis. " + "If you cannot explain exactly what transformations were applied, your results cannot be verified.

    " + "

    Common preprocessing steps recorded:

    " + "" + "

    Beginner tip: Always review this log. If you see an important column was dropped, " + "it may mean the original data had formatting issues that need manual fixing.

    " + ), + }, + "section_descriptive": { + "tip": "Central tendency, dispersion, and shape statistics for every column.", + "desc": ( + "

    Descriptive Statistics is the foundation of Exploratory Data Analysis (EDA). " + "It summarises each column with a set of numbers that describe its centre, spread, and shape.

    " + "

    For numeric columns, you will see:

    " + "" + "

    For categorical columns: count, unique, top (most frequent), freq (frequency of top).

    " + "

    Beginner tip: Look for columns where mean and median are very different -- " + "this signals outliers or skewed data that may need special treatment.

    " + ), + }, + "section_distribution": { + "tip": "Histograms and Q-Q plots revealing the shape and spread of each numeric column.", + "desc": ( + "

    Distribution Analysis visualises how values are spread out in each numeric column. " + "While descriptive statistics give you numbers, distribution plots let you see the shape.

    " + "

    What the charts show:

    " + "" + "

    Common distribution shapes:

    " + "" + "

    Why it matters: Many machine learning algorithms assume normally distributed inputs. " + "Knowing the actual distribution shape helps you choose the right model or apply transformations.

    " + ), + }, + "section_correlation": { + "tip": "Measures pairwise linear and rank-based relationships between numeric columns.", + "desc": ( + "

    Correlation Analysis measures how strongly pairs of numeric columns are related.

    " + "

    Two types of correlation computed:

    " + "" + "

    How to read the heatmap: Darker colours = stronger correlation. Red = positive, " + "Blue = negative. The diagonal is always 1.0 (each variable is perfectly correlated with itself).

    " + "

    Warning thresholds:

    " + "" + "

    Beginner tip: High correlation between two features means they carry similar " + "information. Including both in a linear model can cause instability (multicollinearity).

    " + ), + }, + "section_missing": { + "tip": "Analyses patterns, proportions, and potential mechanisms of missing data.", + "desc": ( + "

    Missing Data Analysis investigates where, how much, and " + "why data is missing.

    " + "

    Key metrics:

    " + "" + "

    Three mechanisms of missingness:

    " + "" + "

    Practical guidelines:

    " + "" + ), + }, + "section_outlier": { + "tip": "Detects anomalous data points using IQR fences and z-scores.", + "desc": ( + "

    Outlier Detection identifies data points that are unusually far from the rest. " + "Outliers can be genuine extreme values or data entry errors.

    " + "

    Detection method (IQR):

    " + "" + "

    Important: Not all outliers are errors! In many domains (fraud detection, " + "rare diseases, extreme weather), outliers are the most interesting data points. " + "Always investigate before removing.

    " + "

    Beginner tip: Use box plots (shown in this section) to visually assess outliers. " + "Points shown as dots beyond the whiskers are potential outliers worth examining.

    " + ), + }, + "section_categorical": { + "tip": "Frequency distributions, bar charts, and entropy analysis for categorical columns.", + "desc": ( + "

    Categorical Analysis examines non-numeric columns -- text labels, categories, " + "boolean flags, and any column with a limited set of distinct values.

    " + "

    Key metrics for each categorical column:

    " + "" + "

    Why it matters: Categories with very low frequency (rare classes) can cause " + "problems in machine learning. A column where one category appears 99% of the time " + "carries almost no information.

    " + "

    Beginner tip: Look at the bar charts. If one bar is overwhelmingly taller than " + "the rest, the column is 'imbalanced' -- you may need special techniques like oversampling.

    " + ), + }, + "section_importance": { + "tip": "Ranks features by informational value using variance and mutual information.", + "desc": ( + "

    Feature Importance helps you answer: Which columns carry the most useful " + "information?

    " + "

    Methods used:

    " + "" + "

    Practical use: Features with near-zero importance are candidates for removal. " + "Reducing dimensionality can speed up training, reduce overfitting, and improve interpretability.

    " + "

    Beginner tip: Do NOT blindly remove all low-importance features. Sometimes a " + "feature is unimportant alone but becomes powerful when combined with another (interaction effects).

    " + ), + }, + "section_pca": { + "tip": "Principal Component Analysis reveals the intrinsic dimensionality of the data.", + "desc": ( + "

    PCA (Principal Component Analysis) is a technique that transforms correlated features " + "into a smaller set of uncorrelated components ordered by how much variance they explain.

    " + "

    Key outputs:

    " + "" + "

    Why it matters: If 95% of variance is explained by 3 components out of 50 features, " + "your data's intrinsic dimensionality is very low -- many features are redundant.

    " + "

    Beginner tip: PCA works best when features are on similar scales. The system " + "automatically standardises (z-score) before applying PCA.

    " + ), + }, + "section_duplicates": { + "tip": "Identifies exact duplicate rows that may inflate statistics or bias models.", + "desc": ( + "

    Duplicate Analysis scans for rows that are exactly identical across all columns.

    " + "

    Why duplicates matter:

    " + "" + "

    Metrics shown: total rows, duplicate count, unique count, and duplicate ratio.

    " + "

    Beginner tip: A small number of duplicates (< 1%) is often harmless, especially " + "if your data legitimately contains identical records. But always investigate unexpected high ratios.

    " + ), + }, + "section_warnings": { + "tip": "Aggregated warnings and potential issues detected across all analyses.", + "desc": ( + "

    Warnings & Issues collects all anomalies and concerns found during the " + "entire analysis into one place for easy review.

    " + "

    Common warnings include:

    " + "" + "

    Beginner tip: Treat this section as a priority to-do list. Address the highest-" + "severity warnings first, then re-run your analysis to see if the quality score improves.

    " + ), + }, + # ===== Advanced: Distribution+ ===== + "sub_best_fit": { + "tip": "Finds the theoretical distribution (Normal, Gamma, Weibull, etc.) that best matches each column.", + "desc": ( + "

    Best-Fit Distribution evaluates each numeric column against a library of theoretical " + "distributions to find the one that most closely matches the observed data.

    " + "

    Distributions tested include: Normal, Lognormal, Exponential, Gamma, Beta, Weibull, " + "Uniform, and more.

    " + "

    How the best fit is selected:

    " + "" + "

    Why it matters: Knowing which distribution generated your data enables better " + "simulation, parametric modelling, confidence interval construction, and anomaly detection.

    " + "

    Beginner tip: If the best-fit distribution is 'norm' (normal), many standard " + "statistical tests apply directly. If it is something else (e.g. lognorm), consider a " + "log-transform before applying methods that assume normality.

    " + ), + }, + "sub_jarque_bera": { + "tip": "Tests whether each column's skewness and kurtosis match a normal distribution.", + "desc": ( + "

    Jarque-Bera Normality Test specifically checks if the shape of your " + "data matches a normal (bell-curve) distribution.

    " + "

    How it works:

    " + "" + "

    How to interpret:

    " + "" + "

    Beginner tip: Non-normal data is extremely common in real-world datasets. " + "A failed normality test does not mean your data is 'bad' -- it means you should use " + "non-parametric methods or apply a transformation (like log or Box-Cox).

    " + ), + }, + "sub_power_transform": { + "tip": "Recommends Box-Cox or Yeo-Johnson transformations to make skewed distributions more Gaussian.", + "desc": ( + "

    Power Transform Recommendation suggests mathematical transformations that can " + "reshape your skewed data into a more bell-shaped (Gaussian) form.

    " + "

    Two methods evaluated:

    " + "" + "

    Key outputs:

    " + "" + "

    Beginner tip: Power transforms are essential preprocessing steps for algorithms " + "like linear regression and neural networks that assume roughly normal input distributions.

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "Determines the optimal smoothing parameter for Kernel Density Estimation plots.", + "desc": ( + "

    KDE Bandwidth Analysis finds the best 'smoothing level' for density curve " + "estimation.

    " + "

    What is a KDE? Kernel Density Estimation creates a smooth curve from your data " + "points by placing a small bell-curve (kernel) on each point and adding them up. " + "The bandwidth controls how wide each kernel is.

    " + "

    Trade-off:

    " + "" + "

    Two automatic rules compared:

    " + "" + "

    Beginner tip: If the two rules give very different bandwidths, your data likely " + "has outliers or multiple modes (peaks). Check the histogram to confirm.

    " + ), + }, + # ===== Advanced: Correlation+ ===== + "sub_partial_corr": { + "tip": "Reveals direct relationships between variables after removing confounding effects.", + "desc": ( + "

    Partial Correlation answers: Do these two variables have a direct relationship, " + "or is their correlation caused by a third variable?

    " + "

    Example: Ice cream sales and drowning deaths are correlated -- but the partial " + "correlation controlling for temperature will be near zero. Temperature is the real driver.

    " + "

    How it is computed: Using the inverse of the covariance matrix (precision matrix). " + "The negative off-diagonal elements, when normalised, give the partial correlation.

    " + "

    How to interpret:

    " + "" + "

    Why it matters: Identifying true direct relationships (vs. spurious ones) is critical " + "for causal inference and building parsimonious models.

    " + ), + }, + "sub_mutual_info": { + "tip": "Information-theoretic measure that captures both linear and non-linear dependencies.", + "desc": ( + "

    Mutual Information (MI) measures how much knowing one variable tells you about " + "another -- capturing any type of relationship, not just linear ones.

    " + "

    Formula: MI(X,Y) = H(X) + H(Y) - H(X,Y), where H is Shannon entropy.

    " + "

    Key properties:

    " + "" + "

    Compare with Pearson correlation: Pearson r might be zero for X and sin(X), but " + "MI will correctly detect the dependency.

    " + "

    Beginner tip: If you see a high MI but low Pearson correlation between two variables, " + "there is a non-linear relationship worth investigating with a scatter plot.

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "Resampling-based 95% confidence interval for each pairwise correlation.", + "desc": ( + "

    Bootstrap Correlation Confidence Intervals tell you how reliable each correlation " + "estimate actually is.

    " + "

    How it works:

    " + "
      " + "
    1. Draw 1,000 random samples (with replacement) from your data.
    2. " + "
    3. Compute the correlation for each sample.
    4. " + "
    5. The 2.5th and 97.5th percentiles form the 95% confidence interval.
    6. " + "
    " + "

    How to interpret the CI:

    " + "" + "

    Beginner tip: A correlation of r = 0.5 with a CI of [0.45, 0.55] is much more " + "trustworthy than the same r = 0.5 with a CI of [-0.1, 0.9]. Always check the CI.

    " + ), + }, + "sub_distance_corr": { + "tip": "Szekely distance correlation that detects non-linear dependencies missed by Pearson.", + "desc": ( + "

    Distance Correlation is a modern statistical measure that equals zero " + "if and only if variables are truly independent. This is a stronger guarantee " + "than Pearson correlation, which can be zero even when strong non-linear patterns exist.

    " + "

    Range: 0 (perfect independence) to 1 (strong dependence).

    " + "

    Key comparison with Pearson:

    " + "" + "

    Beginner tip: Distance correlation is computationally more expensive than Pearson " + "but catches hidden patterns that Pearson completely misses.

    " + ), + }, + # ===== Clustering ===== + "sub_kmeans": { + "tip": "K-Means partitioning with automatically optimised cluster count via silhouette analysis.", + "desc": ( + "

    K-Means Clustering automatically groups your data points into k clusters " + "where each point belongs to the cluster with the nearest mean (centroid).

    " + "

    How it works:

    " + "
      " + "
    1. Data is standardised (z-score) so all features have equal weight.
    2. " + "
    3. K-Means is run for k = 2, 3, ..., 10.
    4. " + "
    5. The best k is chosen by the highest silhouette score (measures how similar " + "a point is to its own cluster vs. other clusters).
    6. " + "
    " + "

    Key metrics:

    " + "" + "

    Beginner tip: K-Means assumes roughly spherical clusters of similar size. " + "If your data has irregularly shaped or very differently sized clusters, check the DBSCAN " + "results instead.

    " + ), + }, + "sub_dbscan": { + "tip": "Density-based clustering that discovers clusters of arbitrary shape and identifies noise.", + "desc": ( + "

    DBSCAN (Density-Based Spatial Clustering) finds clusters by looking for areas " + "where data points are densely packed together.

    " + "

    Key advantages over K-Means:

    " + "" + "

    Key parameters:

    " + "" + "

    Metrics shown:

    " + "" + "

    Beginner tip: If DBSCAN finds only 1 cluster with many noise points, the data may " + "not have clear density-based structure, or the eps parameter needs tuning.

    " + ), + }, + "sub_hierarchical": { + "tip": "Agglomerative clustering dendrogram showing how clusters merge at each level.", + "desc": ( + "

    Hierarchical Clustering builds a tree-like structure (dendrogram) showing how " + "data points merge into progressively larger clusters.

    " + "

    How it works:

    " + "
      " + "
    1. Start with each point as its own cluster.
    2. " + "
    3. Repeatedly merge the two most similar clusters (using Ward's method, which minimises " + "within-cluster variance).
    4. " + "
    5. Continue until all points are in one cluster.
    6. " + "
    " + "

    Reading the dendrogram: The y-axis shows the 'distance' (dissimilarity) at which " + "clusters merge. You can draw a horizontal line at any height to get a different number of " + "clusters. Large vertical gaps suggest natural cluster boundaries.

    " + "

    Beginner tip: Look for long vertical lines in the dendrogram -- these represent " + "large jumps in dissimilarity and suggest natural groupings in your data.

    " + ), + }, + "sub_cluster_profiles": { + "tip": "Statistical summary (mean, std) of each K-Means cluster across all features.", + "desc": ( + "

    Cluster Profiles describes what makes each cluster unique by showing the average " + "value and standard deviation of every feature within each cluster.

    " + "

    How to use:

    " + "" + "

    Example interpretation: If Cluster 0 has high income + high age, and Cluster 1 " + "has low income + low age, the main clustering dimension is a socioeconomic one.

    " + "

    Beginner tip: This table is excellent for giving meaningful names to clusters " + "(e.g. 'High-value customers', 'Budget shoppers') based on their most distinctive features.

    " + ), + }, + # ===== Dimensionality Reduction ===== + "sub_tsne": { + "tip": "Non-linear 2D projection that preserves local neighbourhood structure for visualisation.", + "desc": ( + "

    t-SNE (t-Distributed Stochastic Neighbour Embedding) is a visualisation technique " + "that compresses high-dimensional data into a 2D scatter plot while preserving which data " + "points are similar to each other.

    " + "

    Key parameter:

    " + "" + "

    How to read the plot: Points close together in the 2D plot were similar in the " + "original high-dimensional space. Distinct clusters in the plot suggest real groups in the data.

    " + "

    ⚠️ Important caveats:

    " + "" + ), + }, + "sub_umap": { + "tip": "Fast non-linear 2D visualisation preserving both local and global data structure.", + "desc": ( + "

    UMAP (Uniform Manifold Approximation and Projection) is a modern alternative " + "to t-SNE that is generally faster and better at preserving the global layout of the data.

    " + "

    Advantages over t-SNE:

    " + "" + "

    How to read the plot: Similar interpretation to t-SNE. Points close together are " + "similar; distinct groupings suggest real clusters. But unlike t-SNE, the relative positions " + "of clusters carry some meaning too.

    " + "

    Beginner tip: If t-SNE and UMAP show similar cluster structure, you can be more " + "confident that the clusters are real. If they disagree, investigate further.

    " + ), + }, + "sub_factor_analysis": { + "tip": "Discovers latent (hidden) factors that explain the correlations among observed variables.", + "desc": ( + "

    Factor Analysis seeks to explain why certain variables are correlated by " + "hypothesising the existence of hidden (latent) factors.

    " + "

    Analogy: Imagine you measure students' scores in 10 subjects. Factor Analysis " + "might discover 3 latent factors: 'Verbal ability', 'Mathematical ability', and " + "'Artistic ability', each influencing several subjects.

    " + "

    How it works:

    " + "
      " + "
    1. Each observed variable = weighted sum of latent factors + noise.
    2. " + "
    3. Number of factors is selected using the Kaiser criterion (retain factors with " + "eigenvalue > 1).
    4. " + "
    " + "

    Output: Number of retained factors and the noise variance (uniqueness) for " + "each variable -- how much of its variation is NOT explained by the factors.

    " + "

    Beginner tip: High noise variance for a variable means the common factors do not " + "explain it well -- it may be measuring something unique.

    " + ), + }, + "sub_factor_loadings": { + "tip": "Shows how strongly each observed variable relates to each latent factor.", + "desc": ( + "

    Factor Loadings quantify the relationship between each original variable and " + "each latent factor discovered by Factor Analysis.

    " + "

    How to interpret the values:

    " + "" + "

    Cross-loadings: If a variable loads highly on multiple factors, it is measuring " + "a mix of constructs and may not be well-suited for the factor model.

    " + "

    Beginner tip: Factor loadings are similar to PCA loadings but have a different " + "interpretation. In Factor Analysis, the latent factors are hypothesised causes; in PCA, " + "components are just mathematical summaries of variance.

    " + ), + }, + "sub_feature_contrib": { + "tip": "Ranks features by their contribution to total variance using PCA loadings.", + "desc": ( + "

    PCA-Weighted Feature Contribution ranks original features by how much of the " + "total variance they contribute to, weighted across all principal components.

    " + "

    How it is calculated: For each feature, sum the squared loadings across all " + "components, weighted by each component's eigenvalue proportion.

    " + "

    Use cases:

    " + "" + "

    Beginner tip: Features near the bottom of the ranking contribute very little " + "to overall variance and are good candidates for removal in a preprocessing pipeline.

    " + ), + }, + # ===== Feature Insights ===== + "sub_interaction": { + "tip": "Detects synergistic product-interaction effects between feature pairs.", + "desc": ( + "

    Interaction Detection checks whether the product of two features " + "contains information not present in either feature alone.

    " + "

    Why it matters: In many real-world scenarios, the effect of one variable depends " + "on the value of another. For example, the effect of 'education' on 'salary' might depend " + "on 'years of experience'.

    " + "

    How it works: For each pair of features, a product-interaction term (X₁ × X₂) " + "is created and its correlation with other features is measured.

    " + "

    Beginner tip: Strong interaction effects are excellent candidates for feature " + "engineering -- adding the product as a new feature can significantly improve model performance.

    " + ), + }, + "sub_monotonic": { + "tip": "Compares Pearson vs Spearman correlation to identify non-linear monotonic patterns.", + "desc": ( + "

    Monotonic Relationship Analysis detects variables that consistently increase (or decrease) " + "together, but not necessarily in a straight line.

    " + "

    The key insight:

    " + "" + "

    Practical implication: If you find a pair with high Spearman but low Pearson, " + "applying a monotonic transform (log, sqrt, etc.) before linear modelling will improve fit.

    " + ), + }, + "sub_binning": { + "tip": "Evaluates equal-width and equal-frequency binning with entropy analysis.", + "desc": ( + "

    Binning Analysis evaluates different strategies for converting continuous variables " + "into discrete categories (bins).

    " + "

    Two strategies compared:

    " + "" + "

    Shannon entropy measures how evenly data is distributed across bins. " + "Lower entropy = more concentrated (uneven); higher entropy = more uniform.

    " + "

    Beginner tip: Binning is useful when you need to convert a numeric feature into " + "categories (e.g. age groups) or when tree-based models need to handle extreme outliers.

    " + ), + }, + "sub_cardinality": { + "tip": "Analyses unique-value counts and recommends encoding methods for categorical features.", + "desc": ( + "

    Cardinality & Encoding Recommendation analyses each categorical column's " + "number of unique values and suggests the best encoding strategy for machine learning.

    " + "

    Encoding recommendations by cardinality:

    " + "" + "

    Why it matters: Most ML algorithms cannot handle text labels directly -- they need " + "numeric representation. Choosing the wrong encoding can waste memory, cause overfitting, or " + "lose important information.

    " + ), + }, + "sub_leakage": { + "tip": "Flags features that might unintentionally leak target information.", + "desc": ( + "

    Leakage Risk Assessment checks for features that might be improperly providing " + "direct or indirect access to the target variable.

    " + "

    Common leakage signals:

    " + "" + "

    Why it is dangerous: Data leakage causes models to show unrealistically high " + "accuracy during training/validation but fail catastrophically in production.

    " + "

    Beginner tip: If your model seems 'too good to be true' (e.g. 99% accuracy), " + "leakage is the most likely culprit. Check this section carefully.

    " + ), + }, + # ===== Advanced Anomaly ===== + "sub_iso_forest": { + "tip": "Tree-based anomaly detection that isolates outliers via random feature splits.", + "desc": ( + "

    Isolation Forest detects anomalies based on a simple idea: outliers are " + "easier to isolate than normal points.

    " + "

    How it works:

    " + "
      " + "
    1. Build random decision trees by selecting random features and random split points.
    2. " + "
    3. Measure the average number of splits (path length) needed to isolate each point.
    4. " + "
    5. Points that are isolated in fewer splits are more anomalous.
    6. " + "
    " + "

    Anomaly score: More negative = more anomalous. The contamination rate (default 5%) " + "determines the threshold.

    " + "

    Advantages: Works well in high dimensions, does not assume any specific distribution, " + "and is very fast.

    " + "

    Beginner tip: Isolation Forest is often the best first choice for anomaly detection " + "because it requires minimal parameter tuning and handles mixed-type data well.

    " + ), + }, + "sub_lof": { + "tip": "Density-based anomaly detection comparing each point's local density to its neighbours.", + "desc": ( + "

    Local Outlier Factor (LOF) identifies anomalies by comparing the local density " + "of each data point to the density of its k nearest neighbours.

    " + "

    Intuition: A point in a sparse region surrounded by dense regions is anomalous. " + "A point in a uniformly sparse region is just an edge case, not an anomaly.

    " + "

    LOF score interpretation:

    " + "" + "

    Advantage over Isolation Forest: LOF is better at detecting anomalies in datasets " + "with varying density -- e.g. a point that is normal in one region of the data but anomalous " + "in another.

    " + "

    Beginner tip: LOF works best when clusters have different densities. If all clusters " + "are equally dense, Isolation Forest may suffice.

    " + ), + }, + "sub_mahalanobis": { + "tip": "Multivariate distance from the data centre, accounting for feature correlations.", + "desc": ( + "

    Mahalanobis Distance measures how far each observation is from the centre of the " + "data distribution, taking into account the correlations between features.

    " + "

    Comparison with Euclidean distance:

    " + "" + "

    Statistical foundation: Under multivariate normality, D² follows a chi-squared " + "distribution. Points exceeding the 97.5th percentile chi-squared critical value are flagged.

    " + "

    Beginner tip: Mahalanobis is ideal when features are correlated. If two features " + "always move together, a point where one is high and the other is low is genuinely unusual -- " + "Mahalanobis will catch this, but Euclidean distance might not.

    " + ), + }, + "sub_consensus": { + "tip": "Combines 3 anomaly methods -- flags points agreed upon by at least 2 out of 3.", + "desc": ( + "

    Consensus Anomaly Detection combines the results of Isolation Forest, LOF, and " + "Mahalanobis distance to produce a more reliable anomaly assessment.

    " + "

    The voting rule: A point is flagged as anomalous only if at least 2 out of 3 " + "methods agree. This reduces false positives dramatically.

    " + "

    Why consensus is better than any single method:

    " + "" + "

    Beginner tip: Start your anomaly investigation with the consensus flags. These are " + "the most reliable candidates for genuine anomalies and are worth investigating first.

    " + ), + }, + # ===== Statistical Tests ===== + "test_levene": { + "tip": "Tests whether groups have equal variances (homoscedasticity assumption).", + "desc": ( + "

    Levene's Test checks whether different groups in your data have approximately " + "equal variances -- a key assumption for many statistical tests and ANOVA.

    " + "

    Why it matters: Many tests (like t-tests and ANOVA) assume equal variances. " + "Violating this assumption can lead to incorrect conclusions.

    " + "

    How to interpret:

    " + "" + "

    Advantage: Levene's test is more robust to non-normality than Bartlett's test, " + "making it the preferred choice in practice.

    " + ), + }, + "test_kruskal_wallis": { + "tip": "Non-parametric ANOVA: tests whether multiple groups have the same distribution.", + "desc": ( + "

    Kruskal-Wallis Test is the non-parametric equivalent of one-way ANOVA. " + "It tests whether multiple groups come from the same distribution, without assuming normality.

    " + "

    How it works: All values from all groups are ranked together. The test checks " + "whether the average ranks differ significantly across groups.

    " + "

    How to interpret:

    " + "" + "

    Beginner tip: Use Kruskal-Wallis when your data is ordinal, non-normal, or has " + "outliers that would make ANOVA unreliable.

    " + ), + }, + "test_mann_whitney": { + "tip": "Non-parametric test comparing the distributions of two independent groups.", + "desc": ( + "

    Mann-Whitney U Test (also called Wilcoxon rank-sum) compares two independent " + "groups to determine whether they come from the same distribution.

    " + "

    How it works: All values from both groups are ranked together. The test measures " + "whether one group tends to have systematically higher ranks than the other.

    " + "

    How to interpret:

    " + "" + "

    When to use: Ideal for ordinal data, non-normal distributions, small samples, " + "or when outliers make the t-test unreliable.

    " + "

    Beginner tip: Mann-Whitney tests for differences in distribution shape, not just " + "the mean. Two groups with the same mean but different spreads can still yield a significant result.

    " + ), + }, + "test_chi_square": { + "tip": "Tests whether observed category frequencies differ from expected frequencies.", + "desc": ( + "

    Chi-Square Goodness of Fit tests whether the observed frequency distribution " + "of categories matches what you would expect (by default, a uniform distribution).

    " + "

    Formula: χ² = Σ (Observed - Expected)² / Expected

    " + "

    How to interpret:

    " + "" + "

    Requirement: Each expected frequency should be ≥ 5 for the test to be valid. " + "With very small expected counts, consider Fisher's exact test instead.

    " + "

    Beginner tip: This test is commonly used to check whether a categorical variable " + "has a balanced distribution or whether some categories dominate.

    " + ), + }, + "test_grubbs": { + "tip": "Tests whether the single most extreme value in a dataset is a statistically significant outlier.", + "desc": ( + "

    Grubbs' Test evaluates whether the most extreme value in a dataset is a " + "statistically significant outlier, as opposed to a natural extreme of the distribution.

    " + "

    Formula: G = max|Xᵢ - X̄| / s, where X̄ is the mean and s is the standard deviation.

    " + "

    How to interpret:

    " + "" + "

    Assumption: The test assumes the data (excluding the potential outlier) is " + "approximately normally distributed.

    " + "

    Beginner tip: Grubbs' test only checks one extreme value at a time. " + "For datasets with multiple outliers, use the IQR method or Isolation Forest instead.

    " + ), + }, + "test_adf": { + "tip": "Tests whether a time series is stationary (constant statistical properties over time).", + "desc": ( + "

    Augmented Dickey-Fuller (ADF) Test determines whether a time series has a " + "unit root -- meaning it is non-stationary (its mean, variance, or autocorrelation " + "change over time).

    " + "

    Why stationarity matters: Most time-series models (ARIMA, etc.) require stationary " + "input. Non-stationary data can produce spurious correlations and unreliable forecasts.

    " + "

    How to interpret:

    " + "" + "

    Beginner tip: If your numeric column represents sequential measurements over time, " + "check ADF before running any regression. A non-stationary predictor can make regression " + "results meaningless.

    " + ), + }, + # ===== Basic sub-sections ===== + "sub_column_quality": { + "tip": "Per-column quality scores for completeness, uniqueness, and validity.", + "desc": ( + "

    Column Quality breaks down the overall quality score into individual columns, " + "letting you identify exactly which columns have problems.

    " + "

    Each column is scored on completeness (non-missing ratio), uniqueness (distinct value ratio), " + "and validity (values within expected ranges). This helps you prioritise which columns need " + "the most attention during data cleaning.

    " + "

    Beginner tip: Columns with very low quality scores are the first targets for cleaning " + "or removal.

    " + ), + }, + "sub_cleaning_log": { + "tip": "Step-by-step record of all automated data cleaning actions performed.", + "desc": ( + "

    Cleaning Log documents every transformation the system applied during preprocessing " + "for full transparency and reproducibility.

    " + "

    This includes columns dropped, type conversions, encoding fixes, and any rows removed. " + "Review it carefully to ensure no important data was unexpectedly modified.

    " + ), + }, + "sub_detected_issues": { + "tip": "List of data quality issues found during preprocessing.", + "desc": ( + "

    Detected Issues enumerates specific problems found in the raw data: mixed types " + "within a column, suspicious patterns (e.g. '999' or '-1' used as missing-value markers), " + "encoding problems, and more.

    " + "

    Each issue includes the affected column and a description. Address these before running " + "production models.

    " + ), + }, + "sub_normality_tests": { + "tip": "Multiple normality tests (Shapiro-Wilk, Anderson-Darling, Jarque-Bera) for each numeric column.", + "desc": ( + "

    Normality Tests & Shape Analysis applies three complementary tests to assess " + "whether each numeric column follows a normal distribution:

    " + "" + "

    If all three agree (p < 0.05), the column is very likely non-normal. If they disagree, " + "examine the histogram to understand why.

    " + ), + }, + "sub_vif": { + "tip": "Variance Inflation Factor detects multicollinearity between features.", + "desc": ( + "

    VIF (Variance Inflation Factor) measures how much the variance of a regression " + "coefficient is inflated by correlation with other features.

    " + "

    Formula: VIF = 1 / (1 - R²ᵢ), where R²ᵢ is the R-squared from regressing " + "feature i on all other features.

    " + "

    Interpretation:

    " + "" + "

    Beginner tip: High VIF causes unstable regression coefficients. Even small changes " + "in data can flip coefficient signs.

    " + ), + }, + "sub_summary": { + "tip": "Compact summary of distribution shape, normality test results, and outlier counts.", + "desc": ( + "

    Summary provides a quick-reference view combining skewness classification, " + "kurtosis type, normality indicators, and outlier counts in a single table.

    " + "

    Use this as a rapid screening tool before diving into detailed per-column analysis.

    " + ), + }, + "sub_variance_explained": { + "tip": "Shows how much variance each principal component captures (scree plot data).", + "desc": ( + "

    Variance Explained shows each principal component's individual and cumulative " + "contribution to the total variance.

    " + "

    How to use the scree plot: Look for an 'elbow' -- the point where the curve " + "bends sharply and additional components add very little variance. Components before the elbow " + "contain most of the signal; those after contain mostly noise.

    " + "

    Rule of thumb: Typically, retaining enough components to explain 80-95% of " + "cumulative variance is a good balance between dimensionality reduction and information loss.

    " + ), + }, + "sub_loadings": { + "tip": "Shows each original feature's contribution to each principal component.", + "desc": ( + "

    PCA Loadings matrix shows the weight (contribution) of each original feature to " + "each principal component.

    " + "

    Features with high absolute loadings on a component are the main contributors to that " + "component. Use loadings to interpret what each component represents in domain terms.

    " + "

    Example: If PC1 has high loadings for 'height', 'weight', and 'BMI', you might " + "interpret PC1 as a 'body size' component.

    " + ), + }, +} + +# -- Korean ------------------------------------------------------------ +METHOD_INFO["ko"] = { + "section_overview": { + "tip": "행/열 개수, 데이터 타입 분포, 메모리 사용량 등 데이터셋 전체 요약.", + "desc": ( + "

    개요(Overview)는 본격적인 분석에 앞서 데이터셋의 전체 구조를 한눈에 보여줍니다.

    " + "

    확인할 수 있는 내용:

    " + "" + "

    왜 중요한가: 행 수와 타입을 먼저 확인하면 파일 로딩 오류(잘린 파일, 잘못된 구분자, " + "인코딩 문제)를 분석 전에 잡아낼 수 있습니다.

    " + "

    초보자 팁: 행 수가 예상보다 훨씬 적다면 구분자(separator) 설정이 잘못되었을 수 있고, " + "수치 열이 '텍스트'로 표시되면 숫자가 아닌 문자가 섞여 있을 가능성이 높습니다.

    " + ), + }, + "section_quality": { + "tip": "완전성·유일성·일관성·유효성 4개 차원으로 데이터 품질을 0-100% 평가.", + "desc": ( + "

    데이터 품질 평가는 데이터셋의 건강 상태를 네 가지 독립적 차원에서 각각 0~100%로 점수화합니다.

    " + "

    4가지 품질 차원:

    " + "" + "

    점수 읽는 법: 90-100%: 우수 | 70-89%: 양호, 플래그 확인 | 70% 미만: 모델링 전 반드시 해결

    " + "

    종합 점수: 0.35×완전성 + 0.25×유일성 + 0.20×일관성 + 0.20×유효성

    " + ), + }, + "section_preprocessing": { + "tip": "분석 전 자동으로 수행된 모든 정제·변환 단계를 기록.", + "desc": ( + "

    전처리 로그는 시스템이 원본 데이터에 수행한 모든 자동 정제 작업을 순서대로 기록합니다.

    " + "

    기록되는 전처리 예시:

    " + "" + "

    왜 중요한가: 재현성(reproducibility)은 신뢰할 수 있는 분석의 토대입니다. " + "어떤 변환이 적용되었는지 정확히 알아야 결과를 검증할 수 있습니다.

    " + "

    초보자 팁: 중요한 열이 삭제되었다면, 원본 데이터에 형식 문제가 있어 수동 수정이 필요할 수 있습니다.

    " + ), + }, + "section_descriptive": { + "tip": "각 열의 중심경향, 산포도, 분포 형태를 요약하는 기술통계량.", + "desc": ( + "

    기술통계량은 탐색적 데이터 분석(EDA)의 기초로, 각 열을 중심·산포·형태 수치로 요약합니다.

    " + "

    수치 열에서 확인할 수 있는 항목:

    " + "" + "

    초보자 팁: 평균과 중앙값이 크게 다른 열이 있다면, 이상치나 심한 비대칭이 있다는 신호입니다.

    " + ), + }, + "section_distribution": { + "tip": "히스토그램과 Q-Q 플롯으로 각 수치 열의 분포 형태를 시각화.", + "desc": ( + "

    분포 분석은 각 수치 열의 값이 어떻게 퍼져 있는지 시각적으로 보여줍니다.

    " + "

    차트 유형:

    " + "" + "

    대표적 분포 형태: 종형(정규)·오른쪽 치우침(소득, 가격)·왼쪽 치우침(만점 근처 시험 점수)·" + "이봉(두 집단 혼합)·균일(모든 값 동일 확률)

    " + "

    왜 중요한가: 많은 머신러닝 알고리즘은 정규분포 입력을 가정합니다. " + "실제 분포 형태를 알면 올바른 모델을 선택하거나 변환을 적용할 수 있습니다.

    " + ), + }, + "section_correlation": { + "tip": "수치 열 간 피어슨(선형) 및 스피어만(순위) 상관관계를 측정.", + "desc": ( + "

    상관 분석은 수치 열 쌍 사이의 관계 강도를 측정합니다.

    " + "

    두 가지 상관계수:

    " + "" + "

    히트맵 읽는 법: 진한 색 = 강한 상관. 빨강 = 양의 상관, 파랑 = 음의 상관.

    " + "

    경고 기준: |r| > 0.90: 심각한 다중공선성, 하나 제거 고려 | " + "|r| > 0.70: 강한 상관, 모니터링 | |r| < 0.30: 약한 상관

    " + "

    초보자 팁: 두 특성 간 높은 상관은 비슷한 정보를 가지고 있다는 뜻입니다. " + "둘 다 선형 모델에 포함하면 불안정(다중공선성)을 유발할 수 있습니다.

    " + ), + }, + "section_missing": { + "tip": "결측 데이터의 패턴, 비율, 발생 메커니즘을 분석.", + "desc": ( + "

    결측치 분석은 데이터가 어디서, 얼마나, 빠져 있는지 조사합니다.

    " + "

    결측 메커니즘 3가지:

    " + "" + "

    실무 가이드: 5% 미만: 평균/중앙값 대체 | 5-30%: 고급 대체(KNN, MICE) | " + "50% 초과: 열 삭제 고려

    " + ), + }, + "section_outlier": { + "tip": "IQR 펜스와 Z-점수를 사용하여 이상 데이터 포인트를 탐지.", + "desc": ( + "

    이상치 탐지는 나머지 데이터에서 비정상적으로 멀리 떨어진 값을 식별합니다.

    " + "

    IQR 탐지 방법:

    " + "" + "

    중요: 모든 이상치가 오류는 아닙니다! 이상 탐지, 희귀 질환, 극한 날씨 등에서는 " + "이상치가 가장 흥미로운 데이터일 수 있습니다. 항상 조사 후 제거하세요.

    " + ), + }, + "section_categorical": { + "tip": "범주형·불리언 열의 빈도분포, 막대차트, 엔트로피 분석.", + "desc": ( + "

    범주형 분석은 텍스트 라벨, 범주, 불리언 등 비수치 열을 검사합니다.

    " + "

    주요 지표:

    " + "" + "

    초보자 팁: 막대차트에서 하나의 막대가 압도적으로 크면 '불균형'이 있어 " + "오버샘플링 등 특수 기법이 필요할 수 있습니다.

    " + ), + }, + "section_importance": { + "tip": "분산·상호정보량을 사용하여 특성의 정보 가치를 순위화.", + "desc": ( + "

    특성 중요도어떤 열이 가장 유용한 정보를 담고 있는가?라는 질문에 답합니다.

    " + "

    사용 방법:

    " + "" + "

    초보자 팁: 중요도가 낮은 특성을 무조건 삭제하지 마세요. " + "혼자서는 약해도 다른 특성과 결합하면 강력한 상호작용 효과가 있을 수 있습니다.

    " + ), + }, + "section_pca": { + "tip": "주성분 분석(PCA)으로 데이터의 내재적 차원과 분산 구조를 파악.", + "desc": ( + "

    PCA(주성분 분석)은 상관된 특성을 분산 설명량 순서로 정렬된 비상관 성분으로 변환합니다.

    " + "

    주요 출력:

    " + "" + "

    초보자 팁: PCA는 특성 스케일이 비슷할 때 가장 잘 작동합니다. " + "시스템이 자동으로 표준화(z-score)한 후 PCA를 적용합니다.

    " + ), + }, + "section_duplicates": { + "tip": "통계를 부풀리거나 모델을 편향시킬 수 있는 완전 중복 행을 식별.", + "desc": ( + "

    중복 분석은 모든 열이 정확히 동일한 행을 스캔합니다.

    " + "

    중복이 문제인 이유:

    " + "" + "

    초보자 팁: 소량(1% 미만)의 중복은 대개 무해합니다만, " + "예상 밖의 높은 비율은 항상 조사해야 합니다.

    " + ), + }, + "section_warnings": { + "tip": "전체 분석에서 발견된 경고와 잠재적 이슈를 한 곳에 모아 표시.", + "desc": ( + "

    경고 & 이슈는 모든 분석에서 발견된 이상 징후를 한 곳에 모아 보여줍니다.

    " + "

    대표적 경고: 높은 결측률(>30%), 상수 열, 다중공선성(|r|>0.90), 극단 이상치 수, " + "데이터 타입 불일치 등

    " + "

    초보자 팁: 이 섹션을 우선순위 할 일 목록처럼 활용하세요. " + "심각도가 높은 경고부터 해결 후 분석을 재실행하면 품질 점수가 개선됩니다.

    " + ), + }, + "sub_best_fit": { + "tip": "각 수치 열을 이론적 분포(정규, 감마, 와이블 등)와 비교해 최적 분포를 선택.", + "desc": ( + "

    최적 분포 적합은 각 수치 열을 정규, 로그정규, 지수, 감마, 베타, 와이블 등과 비교하여 " + "가장 잘 맞는 이론적 분포를 찾습니다.

    " + "

    선택 기준:

    " + "" + "

    초보자 팁: 최적 분포가 'norm'(정규)이면 대부분의 표준 통계 검정이 바로 적용됩니다. " + "그렇지 않으면 로그 변환 등을 고려하세요.

    " + ), + }, + "sub_jarque_bera": { + "tip": "왜도와 첨도가 정규분포와 일치하는지 검정.", + "desc": ( + "

    자크-베라(JB) 검정은 데이터의 형태가 정규분포(종형 곡선)과 일치하는지 검사합니다.

    " + "

    원리: 정규분포는 왜도=0, 초과첨도=0입니다. JB 통계량은 이 이상값으로부터의 편차를 측정합니다.

    " + "

    해석: p ≥ 0.05: 정규성 기각 불가 | p < 0.05: 유의하게 비정규

    " + "

    초보자 팁: 비정규 데이터는 현실에서 매우 흔합니다. 정규성 검정 실패가 데이터가 " + "'나쁘다'는 의미가 아니라, 비모수 방법을 쓰거나 변환이 필요하다는 의미입니다.

    " + ), + }, + "sub_power_transform": { + "tip": "Box-Cox 또는 Yeo-Johnson 변환으로 치우친 분포를 정규에 가깝게 변환.", + "desc": ( + "

    거듭제곱 변환 권장은 치우친 데이터를 더 종형(가우시안)으로 변환하는 방법을 제안합니다.

    " + "

    두 가지 방법:

    " + "" + "

    초보자 팁: 거듭제곱 변환은 선형회귀나 신경망처럼 대략적인 정규 입력을 가정하는 " + "알고리즘의 필수 전처리 단계입니다.

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "커널 밀도 추정(KDE)에 최적인 평활 매개변수를 Scott/Silverman 규칙으로 분석.", + "desc": ( + "

    KDE 대역폭 분석은 밀도 곡선 추정의 최적 '평활 수준'을 찾습니다.

    " + "

    트레이드오프: 작은 대역폭 = 세부 포착, 노이즈 과적합 | " + "큰 대역폭 = 부드러운 곡선, 중요 특성 놓침

    " + "

    두 규칙: Scott 규칙(단봉 데이터에 적합) vs Silverman 규칙(이상치에 더 강건)

    " + "

    초보자 팁: 두 규칙의 대역폭이 크게 다르면 이상치나 다봉 분포일 가능성이 높으므로 " + "히스토그램을 확인하세요.

    " + ), + }, + "sub_partial_corr": { + "tip": "다른 모든 변수의 효과를 제거한 후 두 변수 간 직접 관계를 측정.", + "desc": ( + "

    편상관두 변수의 관계가 직접적인 것인지, 아니면 제3의 변수 때문인지?를 답합니다.

    " + "

    예시: 아이스크림 판매량과 익사 사고는 상관이 있지만, 기온을 통제하면 편상관은 거의 0입니다.

    " + "

    해석: 높은 편상관 = 진짜 직접 관계 | 거의 0 = 다른 변수를 매개로 한 허위 관계

    " + ), + }, + "sub_mutual_info": { + "tip": "선형·비선형 의존성을 모두 포착하는 정보이론적 측정치.", + "desc": ( + "

    상호정보량(MI)은 한 변수를 알면 다른 변수에 대해 얼마나 알 수 있는지를 측정합니다.

    " + "

    핵심 특성: MI=0이면 통계적 독립, MI>0이면 어떤 형태든 의존성이 있습니다. " + "피어슨이 0인 X와 sin(X)도 MI는 정확히 감지합니다.

    " + "

    초보자 팁: MI는 높은데 피어슨은 낮다면 비선형 관계가 있으므로 산점도로 패턴을 확인하세요.

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "리샘플링 기반 각 쌍별 상관계수의 95% 신뢰구간.", + "desc": ( + "

    부트스트랩 상관 신뢰구간은 각 상관 추정치가 실제로 얼마나 신뢰할 수 있는지 알려줍니다.

    " + "

    작동 방식: 복원 추출 1,000회 → 각각 상관계수 계산 → 2.5~97.5 백분위가 95% CI

    " + "

    해석: 좁은 CI = 안정적 추정 | 넓은 CI = 높은 불확실성 | CI가 0을 포함 = 유의하지 않을 수 있음

    " + ), + }, + "sub_distance_corr": { + "tip": "피어슨이 놓치는 비선형 의존성을 탐지하는 세켈리 거리상관.", + "desc": ( + "

    거리상관은 변수가 진정 독립일 때 그리고 오직 그때만 0이 됩니다. " + "피어슨보다 강력한 보장입니다.

    " + "

    비교: 낮은 피어슨 + 높은 거리상관 → 비선형 관계 존재! 산점도로 패턴을 확인하세요.

    " + ), + }, + "sub_kmeans": { + "tip": "실루엣 분석으로 최적 클러스터 수를 자동 결정하는 K-Means.", + "desc": ( + "

    K-Means 클러스터링은 데이터 포인트를 k개 그룹으로 자동 분할합니다.

    " + "

    작동 방식: 표준화(z-score) → k=2~10 시도 → 최고 실루엣 점수의 k 선택

    " + "

    지표: 실루엣 > 0.5: 좋은 군집화, > 0.7: 강한 구조 | 관성(WCSS): 낮을수록 밀집

    " + "

    초보자 팁: K-Means는 비슷한 크기의 구형 클러스터를 가정합니다. " + "불규칙한 모양이면 DBSCAN 결과를 확인하세요.

    " + ), + }, + "sub_dbscan": { + "tip": "임의 형태의 클러스터를 자동 발견하고 노이즈를 식별하는 밀도 기반 방법.", + "desc": ( + "

    DBSCAN은 데이터 포인트가 밀집된 영역을 찾아 클러스터를 형성합니다.

    " + "

    K-Means 대비 장점: 클러스터 수 사전 지정 불필요, 임의 형태 클러스터 발견, " + "노이즈 포인트 자동 식별

    " + "

    초보자 팁: 클러스터가 1개뿐이고 노이즈가 많다면, 데이터에 뚜렷한 밀도 구조가 " + "없거나 eps 파라미터 조정이 필요합니다.

    " + ), + }, + "sub_hierarchical": { + "tip": "클러스터가 각 수준에서 어떻게 병합되는지 보여주는 덴드로그램.", + "desc": ( + "

    계층적 클러스터링은 트리 구조(덴드로그램)를 구축하여 데이터 포인트가 점차 큰 " + "클러스터로 합쳐지는 과정을 보여줍니다.

    " + "

    덴드로그램 읽기: y축은 병합 '거리'(비유사도). 어느 높이에서든 수평선을 그으면 다른 k를 얻습니다. " + "긴 수직선은 자연스러운 클러스터 경계를 나타냅니다.

    " + ), + }, + "sub_cluster_profiles": { + "tip": "각 K-Means 클러스터의 모든 특성에 대한 통계 요약(평균, 표준편차).", + "desc": ( + "

    클러스터 프로필은 각 클러스터의 평균/표준편차를 보여줘 무엇이 각 클러스터를 고유하게 만드는지 설명합니다.

    " + "

    활용법: 클러스터 간 평균이 크게 다른 특성 = 클러스터를 정의하는 핵심 차별화 특성.

    " + "

    초보자 팁: 이 테이블로 클러스터에 의미 있는 이름을 부여할 수 있습니다 " + "(예: '고가치 고객', '절약형 쇼퍼').

    " + ), + }, + "sub_tsne": { + "tip": "지역 이웃 구조를 보존하는 비선형 2D 시각화 투영.", + "desc": ( + "

    t-SNE는 고차원 데이터를 2D 산점도로 압축하면서 어떤 포인트가 서로 유사한지를 보존합니다.

    " + "

    플롯 읽기: 2D에서 가까운 점 = 원래 고차원 공간에서도 유사. 뚜렷한 군집 = 실제 그룹 가능.

    " + "

    ⚠️ 주의: 클러스터 간 거리는 의미 없음 | 클러스터 크기도 실제 크기를 반영하지 않음 | " + "매번 다른 모양의 플롯이 나올 수 있음(확률적 알고리즘)

    " + ), + }, + "sub_umap": { + "tip": "지역+전역 데이터 구조를 모두 보존하는 빠른 비선형 2D 시각화.", + "desc": ( + "

    UMAP은 t-SNE의 현대적 대안으로, 일반적으로 더 빠르고 전역 레이아웃을 더 잘 보존합니다.

    " + "

    t-SNE 대비 장점: 훨씬 빠름, 전역 구조 보존 우수, 클러스터 간 상대 위치에도 의미 있음

    " + "

    초보자 팁: t-SNE와 UMAP이 비슷한 군집 구조를 보이면 그 군집이 실제일 가능성이 높습니다.

    " + ), + }, + "sub_factor_analysis": { + "tip": "관측 변수 간 상관을 설명하는 숨겨진(잠재) 요인을 발견.", + "desc": ( + "

    요인 분석은 왜 특정 변수들이 서로 상관이 있는지를 숨겨진 잠재 요인으로 설명합니다.

    " + "

    비유: 학생의 10개 과목 점수에서 '언어 능력', '수리 능력', '예술 능력' 같은 " + "3개 잠재 요인을 발견하는 것입니다.

    " + "

    초보자 팁: 노이즈 분산이 높은 변수는 공통 요인으로 설명되지 않으며, " + "고유한 무언가를 측정하고 있을 수 있습니다.

    " + ), + }, + "sub_factor_loadings": { + "tip": "각 관측 변수가 각 잠재 요인과 얼마나 강하게 관련되는지 표시.", + "desc": ( + "

    요인 적재량은 원래 변수와 잠재 요인 사이의 관계 강도를 수치화합니다.

    " + "

    해석: |적재량| > 0.7: 강한 관계 | 0.4-0.7: 중간 | < 0.4: 약한 관계

    " + "

    교차 적재: 한 변수가 여러 요인에 높게 적재되면, 요인 모델에 잘 맞지 않는 혼합 변수입니다.

    " + ), + }, + "sub_feature_contrib": { + "tip": "PCA 적재량을 사용하여 각 특성의 총 분산 기여도를 순위화.", + "desc": ( + "

    PCA 가중 특성 기여도는 원래 특성들이 전체 분산에 얼마나 기여하는지 순위를 매겨 " + "비지도 특성 선택에 활용합니다.

    " + "

    초보자 팁: 순위 하위의 특성은 전체 분산에 거의 기여하지 않으며 제거 후보입니다.

    " + ), + }, + "sub_interaction": { + "tip": "특성 쌍 간 시너지적 곱-상호작용 효과를 탐지.", + "desc": ( + "

    상호작용 탐지는 두 특성의 곱이 개별 특성에는 없는 새로운 정보를 가지고 있는지 확인합니다.

    " + "

    초보자 팁: 강한 상호작용 효과가 발견되면 해당 곱을 새로운 특성으로 추가하여 " + "모델 성능을 크게 향상시킬 수 있습니다.

    " + ), + }, + "sub_monotonic": { + "tip": "피어슨(선형) vs 스피어만(순위) 상관 비교로 비선형 단조 패턴을 탐색.", + "desc": ( + "

    단조 관계 분석은 함께 일관되게 증가/감소하지만 직선은 아닌 변수 쌍을 탐지합니다.

    " + "

    핵심: |스피어만| - |피어슨|이 크면 지수, 로그, 시그모이드 같은 비선형 단조 패턴입니다. " + "단조 변환(log, sqrt 등)을 적용하면 선형 모델 성능이 향상됩니다.

    " + ), + }, + "sub_binning": { + "tip": "등폭·등빈도 구간화를 엔트로피 분석으로 비교 평가.", + "desc": ( + "

    구간화 분석은 연속 변수를 이산 범주(구간)로 변환하는 전략을 평가합니다.

    " + "

    두 전략: 등폭(동일 구간 폭, 이상치에 민감) vs 등빈도(동일 데이터 수, 비대칭에 적합)

    " + "

    초보자 팁: 구간화는 연령 그룹처럼 수치를 범주로 바꿀 때나 극단 이상치를 " + "다뤄야 하는 트리 모델에 유용합니다.

    " + ), + }, + "sub_cardinality": { + "tip": "고유값 수를 분석하고 범주형 열에 적합한 인코딩 방법을 권장.", + "desc": ( + "

    카디널리티 & 인코딩 권장은 고유값 수에 따라 최적 인코딩을 제안합니다:

    " + "" + ), + }, + "sub_leakage": { + "tip": "타겟 정보를 의도치 않게 누설할 수 있는 특성을 식별.", + "desc": ( + "

    누수 위험 평가는 타겟 변수에 직접/간접적으로 접근하는 특성을 검사합니다.

    " + "

    위험 신호: 타겟과 거의 완벽한 상관, 카디널리티=행 수(ID 열), 미래 정보 포함 특성

    " + "

    초보자 팁: 모델이 '너무 좋아 보이면'(예: 99% 정확도) 데이터 누수가 가장 의심됩니다.

    " + ), + }, + "sub_iso_forest": { + "tip": "랜덤 특성 분할로 이상치를 격리하는 트리 기반 이상 탐지.", + "desc": ( + "

    고립 포레스트(Isolation Forest)는 '이상치는 격리하기 쉽다'는 아이디어에 기반합니다.

    " + "

    작동 방식: 랜덤 분할 트리 구축 → 각 포인트 격리에 필요한 평균 분할 수 측정 → " + "적은 분할로 격리 = 더 이상적

    " + "

    초보자 팁: 최소한의 파라미터 조정으로 고차원 데이터에서도 잘 작동하므로 " + "이상 탐지의 첫 번째 선택으로 추천됩니다.

    " + ), + }, + "sub_lof": { + "tip": "각 포인트의 지역 밀도를 이웃과 비교하는 밀도 기반 이상 탐지.", + "desc": ( + "

    LOF(Local Outlier Factor)는 각 데이터 포인트의 지역 밀도를 k개 가장 가까운 이웃의 밀도와 비교합니다.

    " + "

    LOF ≈ 1: 정상 | LOF >> 1: 이웃보다 훨씬 희소한 영역(이상적)

    " + "

    초보자 팁: 클러스터마다 밀도가 다를 때 LOF가 Isolation Forest보다 효과적입니다.

    " + ), + }, + "sub_mahalanobis": { + "tip": "특성 간 상관을 고려하여 데이터 중심으로부터의 다변량 거리를 측정.", + "desc": ( + "

    마할라노비스 거리는 공분산 구조를 고려하여 각 관측치가 데이터 중심에서 얼마나 먼지 측정합니다.

    " + "

    유클리드 vs 마할라노비스: 유클리드는 모든 방향을 동등하게 취급하지만, " + "마할라노비스는 해당 방향에서 얼마나 비일상적인지로 측정합니다.

    " + "

    초보자 팁: 특성이 상관된 경우 이상적입니다. 항상 함께 움직이는 두 특성에서 " + "하나만 높고 다른 하나는 낮으면 진짜 비정상 -- 마할라노비스가 잡아냅니다.

    " + ), + }, + "sub_consensus": { + "tip": "3가지 이상 탐지 방법 중 2개 이상이 동의한 포인트를 플래그.", + "desc": ( + "

    합의 이상 탐지는 Isolation Forest, LOF, 마할라노비스의 결과를 결합합니다.

    " + "

    투표 규칙: 3가지 방법 중 2개 이상 동의해야 이상으로 판정. " + "오탐(false positive)이 크게 줄어듭니다.

    " + "

    초보자 팁: 합의 플래그부터 조사하세요 -- 가장 신뢰할 수 있는 이상치 후보입니다.

    " + ), + }, + "test_levene": { + "tip": "그룹 간의 등분산성(homoscedasticity) 가정을 검정.", + "desc": ( + "

    레빈 검정은 서로 다른 그룹의 분산이 대략 같은지 확인합니다.

    " + "

    해석: p > 0.05: 등분산 가정 성립 | p ≤ 0.05: 분산이 유의하게 다름, " + "웰치 t-검정이나 비모수 대안 사용 권장.

    " + ), + }, + "test_kruskal_wallis": { + "tip": "비모수 ANOVA: 여러 그룹이 같은 분포에서 왔는지 검정.", + "desc": ( + "

    크루스칼-왈리스 검정은 일원 ANOVA의 비모수 버전입니다. " + "정규성 가정 없이 여러 그룹이 같은 분포에서 왔는지 검정합니다.

    " + "

    해석: p < 0.05: 최소 하나의 그룹이 유의하게 다름 → 쌍별 만-휘트니 후속 검정 | " + "p ≥ 0.05: 유의한 차이 발견 안 됨

    " + ), + }, + "test_mann_whitney": { + "tip": "비모수 이표본 검정: 두 독립 그룹의 분포를 비교.", + "desc": ( + "

    만-휘트니 U 검정(윌콕슨 순위합)은 두 독립 그룹이 같은 분포에서 왔는지 판단합니다.

    " + "

    해석: p < 0.05: 두 그룹이 유의하게 다름 | p ≥ 0.05: 유의한 차이 없음

    " + "

    초보자 팁: 분포의 '형태' 차이도 검출하므로, 평균이 같아도 산포가 다르면 유의할 수 있습니다.

    " + ), + }, + "test_chi_square": { + "tip": "관측된 범주 빈도가 기대 빈도와 유의하게 다른지 검정.", + "desc": ( + "

    카이제곱 적합도 검정은 관측된 범주 분포가 기대 분포(기본: 균일)와 일치하는지 확인합니다.

    " + "

    해석: p < 0.05: 관측 빈도가 기대와 유의하게 다름 | p ≥ 0.05: 기대 분포와 일치

    " + "

    조건: 각 기대 빈도가 5 이상이어야 검정이 유효합니다.

    " + ), + }, + "test_grubbs": { + "tip": "데이터의 최극단값이 통계적으로 유의한 이상치인지 검정.", + "desc": ( + "

    그럽스 검정은 데이터의 가장 극단적인 값이 분포의 자연스러운 극단인지, " + "아니면 유의미한 이상치인지 평가합니다.

    " + "

    해석: p < 0.05: 유의한 이상치 | p ≥ 0.05: 예상 범위 내

    " + "

    초보자 팁: 그럽스는 한 번에 하나의 극단값만 검정합니다. " + "여러 이상치가 있으면 IQR 방법이나 Isolation Forest를 사용하세요.

    " + ), + }, + "test_adf": { + "tip": "시계열이 정상(stationary)인지, 즉 통계적 속성이 시간에 따라 일정한지 검정.", + "desc": ( + "

    ADF(Augmented Dickey-Fuller) 검정은 시계열에 단위근이 있는지, " + "즉 비정상(평균·분산이 시간에 따라 변함)인지 판단합니다.

    " + "

    해석: p < 0.05: 정상 시계열 ✓ | p ≥ 0.05: 비정상, 차분(differencing)이나 추세 제거 고려

    " + "

    초보자 팁: 순차적 측정값 열이라면 회귀 전에 ADF를 확인하세요. " + "비정상 예측 변수는 회귀 결과를 무의미하게 만들 수 있습니다.

    " + ), + }, + "sub_column_quality": { + "tip": "열별 완전성·유일성·유효성 품질 점수.", + "desc": ( + "

    컬럼 품질은 전체 점수를 개별 열 단위로 분해하여 어떤 열에 문제가 있는지 정확히 파악합니다.

    " + "

    초보자 팁: 품질 점수가 매우 낮은 열이 정제나 제거의 최우선 대상입니다.

    " + ), + }, + "sub_cleaning_log": { + "tip": "모든 자동 정제 작업을 단계별로 기록한 로그.", + "desc": "

    전처리 로그는 완전한 투명성과 재현성을 위해 시스템이 적용한 모든 변환을 기록합니다.

    ", + }, + "sub_detected_issues": { + "tip": "전처리 과정에서 발견된 데이터 품질 이슈 목록.", + "desc": "

    탐지된 문제는 혼합 타입, 의심스러운 패턴('999' 결측값 마커 등), 인코딩 오류 등을 열거합니다.

    ", + }, + "sub_normality_tests": { + "tip": "각 수치 열에 대한 Shapiro-Wilk, Anderson-Darling, Jarque-Bera 정규성 검정.", + "desc": ( + "

    정규성 검정 & 형태 분석은 세 가지 보완적 검정으로 각 열이 정규분포를 따르는지 평가합니다.

    " + "

    세 검정 모두 p < 0.05이면 비정규일 가능성이 높습니다. 불일치하면 히스토그램을 확인하세요.

    " + ), + }, + "sub_vif": { + "tip": "분산팽창계수(VIF)로 특성 간 다중공선성을 탐지.", + "desc": ( + "

    VIF는 각 특성의 회귀 계수 분산이 다른 특성과의 상관에 의해 얼마나 팽창하는지 측정합니다.

    " + "

    해석: VIF=1: 상관 없음 | 1-5: 낮음 | 5-10: 보통 | >10: 심각, 특성 제거나 결합 고려

    " + "

    초보자 팁: 높은 VIF는 불안정한 회귀 계수를 유발합니다. " + "데이터가 조금만 변해도 계수 부호가 뒤바뀔 수 있습니다.

    " + ), + }, + "sub_summary": { + "tip": "분포 형태, 정규성 결과, 이상치 수의 빠른 요약.", + "desc": "

    요약은 왜도 분류, 첨도 유형, 정규성 지표, 이상치 수를 한 테이블에 제공합니다.

    ", + }, + "sub_variance_explained": { + "tip": "각 주성분이 포착하는 분산 비율(스크리 플롯 데이터).", + "desc": ( + "

    설명된 분산은 각 주성분의 개별 및 누적 분산 기여율을 보여줍니다.

    " + "

    스크리 플롯: '엘보' 지점(곡선이 급격히 꺾이는 곳)이 추가 성분이 큰 가치를 더하지 못하는 시점입니다.

    " + ), + }, + "sub_loadings": { + "tip": "각 원래 특성이 각 주성분에 기여하는 정도.", + "desc": ( + "

    PCA 적재량은 각 원래 특성의 주성분별 가중치(기여도)를 보여줍니다.

    " + "

    예시: PC1이 '키', '몸무게', 'BMI'에 높은 적재량을 가지면, " + "PC1을 '체격' 성분으로 해석할 수 있습니다.

    " + ), + }, +} + +# -- Chinese ----------------------------------------------------------- +METHOD_INFO["zh"] = { + "section_overview": { + "tip": "数据集概要:行/列数、数据类型分布、内存占用。", + "desc": ( + "

    概览在深入分析之前,展示数据集的整体结构。

    " + "

    包含信息:

    " + "" + "

    重要性:先检查行数和类型可以在分析前捕获加载错误(截断文件、分隔符错误、编码问题)。

    " + "

    初学者提示:如果行数远少于预期,可能是分隔符设置有误;如果数值列显示为“文本”,说明含有非数字字符。

    " + ), + }, + "section_quality": { + "tip": "从完整性·唯一性·一致性·有效性四个维度评估数据质量(0-100%)。", + "desc": ( + "

    数据质量评估从四个独立维度对数据进行“健康检查”,每项0~100%。

    " + "

    四个维度:

    " + "" + "

    分数标准:90-100%优秀 | 70-89%可接受,需关注标记 | <70%需在建模前解决

    " + "

    综合公式:0.35×完整性 + 0.25×唯一性 + 0.20×一致性 + 0.20×有效性

    " + ), + }, + "section_preprocessing": { + "tip": "记录分析前自动执行的所有清洗和转换步骤。", + "desc": ( + "

    预处理日志按顺序记录系统对原始数据执行的每一步自动清洗操作。

    " + "

    常见步骤:删除空/常量列、字符串转数值、编码修复、无法解析的行移除。

    " + "

    重要性:可重复性是可信分析的基础。必须清楚知道做了哪些变换才能验证结果。

    " + "

    初学者提示:如果重要列被删除了,说明原始数据可能有格式问题,需要手动修复。

    " + ), + }, + "section_descriptive": { + "tip": "每列的集中趋势、离散度和分布形态统计汇总。", + "desc": ( + "

    描述性统计是探索性数据分析(EDA)的基础,用数字概括每列的中心、散布和形状。

    " + "

    数值列指标:

    " + "" + "

    初学者提示:均值与中位数差异大的列可能有异常值或严重偏斜。

    " + ), + }, + "section_distribution": { + "tip": "通过直方图和Q-Q图可视化每个数值列的分布形态。", + "desc": ( + "

    分布分析让你直观“看到”每列值的分布形状。

    " + "

    图表类型:

    " + "" + "

    常见形状:钟形(正态)·右偏(收入、价格)·左偏(接近满分的考试成绩)·双峰(两个子群混合)·均匀

    " + "

    重要性:许多ML算法假设正态输入。了解实际分布有助于选择正确模型或应用变换。

    " + ), + }, + "section_correlation": { + "tip": "衡量数值列间的Pearson(线性)和Spearman(秩)相关。", + "desc": ( + "

    相关分析测量数值列对之间的关系强度。

    " + "

    两种相关系数:

    " + "" + "

    热力图:颜色越深=相关越强。红色=正相关, 蓝色=负相关。

    " + "

    警戒线:|r|>0.90严重多重共线性 | |r|>0.70强相关 | |r|<0.30弱相关

    " + "

    初学者提示:两个高度相关的特征携带相似信息,同时放入线性模型会导致不稳定。

    " + ), + }, + "section_missing": { + "tip": "分析缺失数据的模式、比例和产生机制。", + "desc": ( + "

    缺失数据分析调查数据在哪里多少为什么缺失。

    " + "

    三种缺失机制:

    " + "" + "

    实操指南:<5%可删或均值填充 | 5-30%用KNN/MICE | >50%考虑删除该列

    " + ), + }, + "section_outlier": { + "tip": "使用IQR围栏和Z分数检测异常数据点。", + "desc": ( + "

    异常值检测识别远离其他数据的异常点。

    " + "

    IQR方法:IQR=Q3-Q1 | 一般异常: Q3+1.5×IQR | " + "极端异常: Q3+3×IQR

    " + "

    重要:并非所有异常值都是错误!在欺诈检测、罕见疾病等领域,异常值可能是最有价值的数据。

    " + "

    初学者提示:箱线图中超出须(whisker)的点是需要查看的潜在异常值。

    " + ), + }, + "section_categorical": { + "tip": "类别/布尔列的频率分布、柱状图和熵分析。", + "desc": ( + "

    类别分析检查非数值列——文本标签、类别、布尔值。

    " + "

    关键指标:

    " + "" + "

    初学者提示:柱状图中某个柱远高于其他,说明该列“不平衡”,可能需要过采样等技术。

    " + ), + }, + "section_importance": { + "tip": "用方差和互信息量按信息价值排列特征。", + "desc": ( + "

    特征重要性回答:哪些列携带最有用的信息?

    " + "

    方法:方差(≈0的常量列无信息)·平均相关·互信息(捕获线性+非线性)

    " + "

    初学者提示:不要盲目删除低重要性特征——它们单独弱,但与其他特征组合后可能很强(交互效应)。

    " + ), + }, + "section_pca": { + "tip": "主成分分析揭示数据的内在维度和方差结构。", + "desc": ( + "

    PCA(主成分分析)将相关特征转换为按方差排序的不相关成分。

    " + "

    关键输出:碎石图(各成分方差)·累积方差(保留90%需多少成分)·载荷矩阵(特征权重)

    " + "

    初学者提示:PCA在特征尺度相近时效果最好。系统已自动标准化(z-score)后再执行PCA。

    " + ), + }, + "section_duplicates": { + "tip": "识别可能膨胀统计量或偏移模型的完全重复行。", + "desc": ( + "

    重复项分析扫描所有列完全相同的行。

    " + "

    危害:膨胀样本量使置信区间偏窄·训练测试集泄露·接近100%的重复率通常是加载错误。

    " + "

    初学者提示:少量重复(<1%)通常无害,但意外的高比率必须调查。

    " + ), + }, + "section_warnings": { + "tip": "汇聚所有分析中发现的警告和潜在问题。", + "desc": ( + "

    警告和问题将全部分析中发现的异常集中展示。

    " + "

    常见警告:高缺失率(>30%)·常量列·多重共线性·极端异常值·类型不匹配

    " + "

    初学者提示:把这里当作优先待办清单,先解决高严重度警告。

    " + ), + }, + "sub_best_fit": { + "tip": "将每列与理论分布(正态、Gamma、Weibull等)比较,选择最佳拟合。", + "desc": ( + "

    最佳拟合分布将每个数值列与正态、对数正态、指数、Gamma等分布进行比较。

    " + "

    选择标准:AIC(越低越好)·KS统计量(越小越好)·p>0.05表示可接受

    " + "

    初学者提示:最佳分布为“norm”(正态)则大部分标准检验可直接使用;否则考虑对数变换。

    " + ), + }, + "sub_jarque_bera": { + "tip": "检验偏度和峰度是否符合正态分布。", + "desc": ( + "

    Jarque-Bera检验专门检查数据的形状是否符合正态。

    " + "

    解读:p≥0.05=不能拒绝正态 | p<0.05=显著非正态

    " + "

    初学者提示:非正态数据非常常见,这不意味着数据“坏”,而是需要使用非参数方法或变换。

    " + ), + }, + "sub_power_transform": { + "tip": "推荐Box-Cox/Yeo-Johnson变换使偏斜分布更接近正态。", + "desc": ( + "

    幂变换推荐建议可将偏斜数据转为更加钟形的数学变换。

    " + "

    两种方法:Box-Cox(仅正数) | Yeo-Johnson(任意数据)

    " + "

    初学者提示:幂变换是线性回归和神经网络的必要预处理步骤。

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "用Scott/Silverman规则确定核密度估计的最优平滑参数。", + "desc": ( + "

    KDE带宽分析寻找密度曲线的最佳“平滑度”。

    " + "

    权衡:小带宽=捕捉细节但过拟合噪声 | 大带宽=平滑但可能遗漏特征

    " + "

    初学者提示:两种规则给出的带宽差异大时,数据可能有异常值或多峰。

    " + ), + }, + "sub_partial_corr": { + "tip": "控制其他所有变量后测量两变量间的直接关系。", + "desc": ( + "

    偏相关回答:两变量的关系是直接的,还是由第三变量引起的?

    " + "

    例子:冰淇淋销量与溺水事故相关,但控制温度后偏相关接近零。温度才是真正驱动因素。

    " + "

    解读:高偏相关=直接关系 | 接近零=虚假关系(被其他变量中介)

    " + ), + }, + "sub_mutual_info": { + "tip": "捕获线性与非线性依赖的信息论度量。", + "desc": ( + "

    互信息(MI)衡量知道一个变量后能获得多少关于另一个变量的信息。

    " + "

    核心:MI=0=统计独立 | MI>0=存在依赖。可捕获任何关系类型,包括Pearson为零的非线性关系。

    " + "

    初学者提示:MI高但Pearson低说明存在非线性关系,建议查看散点图。

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "每对相关系数的重采样95%置信区间。", + "desc": ( + "

    Bootstrap相关置信区间告诉你每个相关估计实际有多可靠。

    " + "

    原理:有放回抽样1000次→计算每次相关→2.5~97.5百分位=95% CI

    " + "

    解读:窄CI=稳定 | 宽CI=不确定性高 | CI跨零=可能不显著

    " + ), + }, + "sub_distance_corr": { + "tip": "Szekely距离相关——检测Pearson遗漏的非线性依赖。", + "desc": ( + "

    距离相关当且仅当变量真正独立时为零——比Pearson更强的保证。

    " + "

    比较:低Pearson+高距离相关→存在非线性关系!用散点图发现模式。

    " + ), + }, + "sub_kmeans": { + "tip": "通过轮廓分析自动优化聚类数的K-Means。", + "desc": ( + "

    K-Means聚类自动将数据分为k组。

    " + "

    流程:标准化→k=2~10逐一尝试→选择最高轮廓分数的k

    " + "

    指标:轮廓分>0.5=好 | >0.7=强结构 | 惯性(WCSS):越低越紧凑

    " + "

    初学者提示:K-Means假设大致球形、大小相近的簇。不规则形状请看DBSCAN。

    " + ), + }, + "sub_dbscan": { + "tip": "自动发现任意形状簇并识别噪声的密度聚类。", + "desc": ( + "

    DBSCAN通过寻找数据密集区域形成簇。

    " + "

    优势:无需指定k·可发现任意形状·自动识别噪声点

    " + "

    初学者提示:如果只找到1个簇且噪声很多,可能数据没有明显密度结构或eps需要调。

    " + ), + }, + "sub_hierarchical": { + "tip": "展示簇在各层级如何合并的树状图。", + "desc": ( + "

    层次聚类构建树形结构(树状图)展示数据逐步合并的过程。

    " + "

    读图:y轴=合并“距离”。在任意高度画水平线可得不同k。长竖线=自然簇边界。

    " + ), + }, + "sub_cluster_profiles": { + "tip": "每个K-Means簇在所有特征上的统计摘要。", + "desc": ( + "

    聚类画像展示每个簇的均值/标准差,说明每个簇的独特之处。

    " + "

    用途:簇间均值差异大的特征=定义簇的关键区分特征。

    " + "

    初学者提示:用这张表为簇命名(如“高价值客户”“节约型买家”)。

    " + ), + }, + "sub_tsne": { + "tip": "保留局部邻域结构的非线性2D可视化投影。", + "desc": ( + "

    t-SNE将高维数据压缩到2D散点图,同时保留哪些点彼此相似。

    " + "

    读图:2D中靠近=原始空间中相似。明显分组=可能是真实簇。

    " + "

    ⚠️ 注意:簇间距离无意义·簇大小不反映实际·每次运行结果可能不同(随机算法)

    " + ), + }, + "sub_umap": { + "tip": "同时保留局部和全局结构的快速非线性2D可视化。", + "desc": ( + "

    UMAP是t-SNE的现代替代,通常更快且更好保留全局布局。

    " + "

    优势:速度快·全局结构保留好·簇间相对位置有一定意义

    " + "

    初学者提示:如果t-SNE和UMAP显示相似簇结构,这些簇很可能是真实的。

    " + ), + }, + "sub_factor_analysis": { + "tip": "发现解释观测变量间相关性的隐藏(潜在)因子。", + "desc": ( + "

    因子分析解释为什么某些变量彼此相关——假设存在隐藏的潜在因子。

    " + "

    类比:10科成绩中可能隐含“语言能力”“数学能力”“艺术能力”三个潜在因子。

    " + "

    初学者提示:噪声方差高的变量说明它不被公共因子解释,可能在测量独特的东西。

    " + ), + }, + "sub_factor_loadings": { + "tip": "显示每个观测变量与每个潜在因子的关联强度。", + "desc": ( + "

    因子载荷量化原始变量与潜在因子之间的关系。

    " + "

    解读:|载荷|>0.7=强 | 0.4-0.7=中等 | <0.4=弱

    " + "

    交叉载荷:一个变量在多个因子上高载荷——不适合因子模型的混合变量。

    " + ), + }, + "sub_feature_contrib": { + "tip": "用PCA载荷加权各特征的方差贡献,排名特征重要性。", + "desc": ( + "

    PCA加权特征贡献按对总方差的贡献排名原始特征,用于无监督特征选择。

    " + "

    初学者提示:排名末尾的特征对整体方差贡献极小,是可以考虑移除的候选。

    " + ), + }, + "sub_interaction": { + "tip": "检测特征对之间的协同乘积交互效应。", + "desc": ( + "

    交互检测检验两个特征的乘积是否包含单个特征中没有的信息。

    " + "

    初学者提示:找到强交互后,将乘积作为新特征加入可以显著提升模型性能。

    " + ), + }, + "sub_monotonic": { + "tip": "比较Pearson与Spearman识别非线性单调模式。", + "desc": ( + "

    单调关系分析检测一致增减但非直线的变量对。

    " + "

    关键:|Spearman|-|Pearson|差距大=指数、对数等非线性单调模式。应用单调变换可改善线性模型。

    " + ), + }, + "sub_binning": { + "tip": "用熵分析评估等宽和等频分箱策略。", + "desc": ( + "

    分箱分析评估将连续变量转为离散类别的策略。

    " + "

    两种策略:等宽(相同区间宽度,对异常值敏感) vs 等频(相同数据量,适合偏斜数据)

    " + ), + }, + "sub_cardinality": { + "tip": "分析唯一值数量并推荐类别编码方法。", + "desc": ( + "

    基数与编码推荐按唯一值数建议最佳编码策略:

    " + "" + ), + }, + "sub_leakage": { + "tip": "标记可能无意泄露目标信息的特征。", + "desc": ( + "

    泄漏风险检查可能直接/间接获取目标变量的特征。

    " + "

    初学者提示:如果模型精度“好得不真实”(如99%),数据泄漏是首要嫌疑。

    " + ), + }, + "sub_iso_forest": { + "tip": "通过随机分割隔离异常值的树模型。", + "desc": ( + "

    隔离森林基于“异常值更容易被隔离”的思想。

    " + "

    原理:构建随机分割树→测量每个点的平均隔离路径长度→路径短=更异常

    " + "

    初学者提示:参数少、高维数据表现好,是异常检测的首选方法。

    " + ), + }, + "sub_lof": { + "tip": "将每个点的局部密度与邻居比较的密度型检测。", + "desc": ( + "

    LOF比较每个点的局部密度与k近邻的密度。

    " + "

    LOF≈1=正常 | LOF>>1=比邻居稀疏得多(异常)

    " + "

    初学者提示:当各簇密度不同时,LOF比隔离森林更有效。

    " + ), + }, + "sub_mahalanobis": { + "tip": "考虑特征相关性的数据中心多变量距离。", + "desc": ( + "

    马氏距离考虑协方差结构测量每个观测值到数据中心的距离。

    " + "

    vs 欧氏距离:欧氏对所有方向一视同仁;马氏考虑异常程度——两个通常同涨同跌的特征," + "一高一低才是真正异常。

    " + ), + }, + "sub_consensus": { + "tip": "3种方法中≥2种同意时标记为异常。", + "desc": ( + "

    共识异常检测结合隔离森林、LOF和马氏距离。

    " + "

    规则:≥2/3方法同意→标记为异常,大大减少误报。

    " + "

    初学者提示:从共识标记开始调查——这些是最可靠的异常候选。

    " + ), + }, + "test_levene": { + "tip": "检验各组方差是否相等(齐方差假设)。", + "desc": ( + "

    Levene检验确认不同组的方差是否大致相等。

    " + "

    解读:p>0.05=齐方差成立 | p≤0.05=方差显著不同,建议用Welch t检验或非参数方法。

    " + ), + }, + "test_kruskal_wallis": { + "tip": "非参数ANOVA:检验多组是否来自相同分布。", + "desc": ( + "

    Kruskal-Wallis检验是单因素ANOVA的非参数版本,无需正态假设。

    " + "

    解读:p<0.05=至少一组显著不同→做两两Mann-Whitney | p≥0.05=无显著差异

    " + ), + }, + "test_mann_whitney": { + "tip": "非参数两样本检验:比较两独立组的分布。", + "desc": ( + "

    Mann-Whitney U检验判断两组是否来自相同分布。

    " + "

    解读:p<0.05=两组显著不同 | p≥0.05=无显著差异

    " + "

    初学者提示:即使均值相同,若离散度不同也可能显著。

    " + ), + }, + "test_chi_square": { + "tip": "检验观测类别频率是否偏离期望频率。", + "desc": ( + "

    卡方适合度检验检查观测分布是否符合期望(默认均匀)。

    " + "

    解读:p<0.05=显著偏离 | p≥0.05=一致

    " + "

    条件:每个期望频率须≥5。

    " + ), + }, + "test_grubbs": { + "tip": "检验最极端值是否为统计显著异常值。", + "desc": ( + "

    Grubbs检验评估最极端值是分布自然极端还是显著异常。

    " + "

    解读:p<0.05=显著异常值 | p≥0.05=在预期范围内

    " + "

    初学者提示:Grubbs一次只检验一个极端值。多个异常值请用IQR或隔离森林。

    " + ), + }, + "test_adf": { + "tip": "检验时间序列是否平稳(统计特性随时间不变)。", + "desc": ( + "

    ADF检验判断时间序列是否有单位根(非平稳)。

    " + "

    解读:p<0.05=平稳✓ | p≥0.05=非平稳,考虑差分或去趋势

    " + "

    初学者提示:顺序测量列在回归前必须检查ADF。非平稳预测变量会使回归结果无意义。

    " + ), + }, + "sub_column_quality": { + "tip": "每列的完整性·唯一性·有效性质量评分。", + "desc": ( + "

    列质量将总分拆解到每列,精确定位问题列。

    " + "

    初学者提示:质量分极低的列是清洗或删除的首要目标。

    " + ), + }, + "sub_cleaning_log": { + "tip": "所有自动清洗操作的逐步记录。", + "desc": "

    清洗日志记录系统执行的每一步变换,保证完全透明和可重复。

    ", + }, + "sub_detected_issues": { + "tip": "预处理中发现的数据质量问题清单。", + "desc": "

    检测到的问题列举混合类型、可疑模式(如'999'缺失标记)、编码错误等。

    ", + }, + "sub_normality_tests": { + "tip": "每列的Shapiro-Wilk、Anderson-Darling、Jarque-Bera正态性检验。", + "desc": ( + "

    正态性检验用三种互补检验评估每列是否服从正态分布。

    " + "

    三者都p<0.05则很可能非正态。如果结果不一致,查看直方图确认原因。

    " + ), + }, + "sub_vif": { + "tip": "通过方差膨胀因子检测多重共线性。", + "desc": ( + "

    VIF衡量回归系数方差因特征间相关而膨胀多少。

    " + "

    解读:VIF=1无相关 | 1-5低 | 5-10中等 | >10严重,考虑移除或合并

    " + "

    初学者提示:高VIF导致系数不稳定,数据微小变化就可能翻转系数符号。

    " + ), + }, + "sub_summary": { + "tip": "分布形态、正态性和异常值数的快速汇总。", + "desc": "

    摘要在一张表中提供偏度分类、峰度类型、正态性指标和异常值计数。

    ", + }, + "sub_variance_explained": { + "tip": "每个主成分所捕获的方差比例(碎石图数据)。", + "desc": ( + "

    解释方差展示每个主成分的个体和累积方差贡献率。

    " + "

    碎石图:“肘部”(曲线急弯处)是追加成分价值变小的转折点。

    " + ), + }, + "sub_loadings": { + "tip": "每个原始特征对每个主成分的贡献权重。", + "desc": ( + "

    PCA载荷展示每个原始特征在各主成分上的权重。

    " + "

    示例:PC1在“身高”“体重”“BMI”上载荷高,可解读为“体型”成分。

    " + ), + }, +} + +# -- Japanese ---------------------------------------------------------- +METHOD_INFO["ja"] = { + "section_overview": { + "tip": "データセット概要:行/列数、データ型分布、メモリ使用量。", + "desc": ( + "

    概要は、詳細分析の前にデータセット全体の構造を把握するためのセクションです。

    " + "

    含まれる情報:

    " + "" + "

    重要性:行数と型を最初に確認することで、読み込みエラー(ファイル切断、区切り文字ミス、文字化け)を早期発見できます。

    " + "

    初心者向けヒント:行数が予想より少なければ区切り文字の問題、数値列がテキストと表示されていれば非数値文字が混入している可能性があります。

    " + ), + }, + "section_quality": { + "tip": "完全性・一意性・一貫性・妥当性の4次元で品質を0-100%で評価。", + "desc": ( + "

    データ品質評価は、4つの独立した軸でデータの「健康診断」を行います。各軸0〜100%。

    " + "

    4つの軸:

    " + "" + "

    スコア基準:90-100%=優秀 | 70-89%=注意すべきフラグあり | <70%=モデリング前に対処必須

    " + "

    総合式:0.35×完全性 + 0.25×一意性 + 0.20×一貫性 + 0.20×妥当性

    " + ), + }, + "section_preprocessing": { + "tip": "分析前に自動実行した全クリーニング/変換ステップの記録。", + "desc": ( + "

    前処理ログは、生データに対して実行された全自動クリーニング操作を時系列で記録します。

    " + "

    一般的な操作:空/定数列の削除、文字列→数値変換、エンコーディング修正、解析不能行の除去。

    " + "

    重要性:再現性は信頼できる分析の基盤です。結果を検証するには、どの変換が行われたか正確に知る必要があります。

    " + "

    初心者向けヒント:重要な列が削除された場合、元データにフォーマットの問題がある可能性があります。手動修正を検討してください。

    " + ), + }, + "section_descriptive": { + "tip": "各列の中心傾向・ばらつき・分布形状の統計要約。", + "desc": ( + "

    記述統計は探索的データ分析(EDA)の基礎で、各列の中心・広がり・形状を数値で要約します。

    " + "

    数値列の指標:

    " + "" + "

    初心者向けヒント:平均と中央値の差が大きい列は、外れ値や強い偏りがある可能性があります。

    " + ), + }, + "section_distribution": { + "tip": "ヒストグラムとQ-Qプロットで各数値列の分布形状を可視化。", + "desc": ( + "

    分布分析により、各列の値の分布を「目で見て」理解できます。

    " + "

    グラフの種類:

    " + "" + "

    よくある形状:釣り鐘型(正規)・右歪み(収入、価格)・左歪み(満点に近い成績)・二峰性(2グループの混合)・一様

    " + "

    重要性:多くのML手法は正規的な入力を仮定します。分布を知ることでモデル選択や変換の判断ができます。

    " + ), + }, + "section_correlation": { + "tip": "数値列間のPearson(線形)とSpearman(順位)相関を測定。", + "desc": ( + "

    相関分析は変数ペア間の関係の強さを測定します。

    " + "

    2種類の相関:

    " + "" + "

    ヒートマップ:色が濃い=相関が強い。赤=正相関、青=負相関。

    " + "

    警戒ライン:|r|>0.90 重度の多重共線性 | |r|>0.70 強い相関 | |r|<0.30 弱い相関

    " + "

    初心者向けヒント:高相関の特徴量は類似情報を持ちます。線形モデルに両方入れると不安定になります。

    " + ), + }, + "section_missing": { + "tip": "欠損データのパターン・割合・メカニズムを分析。", + "desc": ( + "

    欠損分析は、データがどこでどれだけなぜ欠損しているかを調査します。

    " + "

    3つのメカニズム:

    " + "" + "

    対処の目安:<5%=削除・平均補完 | 5-30%=KNN/MICE | >50%=列の削除を検討

    " + ), + }, + "section_outlier": { + "tip": "IQRフェンスとZスコアで異常データポイントを検出。", + "desc": ( + "

    外れ値検出は、他のデータから極端に離れた値を特定します。

    " + "

    IQR法:IQR=Q3-Q1 | 一般外れ値: Q3+1.5×IQR | " + "極端外れ値: Q3+3×IQR

    " + "

    重要:すべての外れ値がエラーではありません!不正検知や希少疾患では、外れ値こそ最も価値あるデータです。

    " + "

    初心者向けヒント:箱ひげ図のひげ(whisker)を超えた点が調査すべき外れ値候補です。

    " + ), + }, + "section_categorical": { + "tip": "カテゴリ/ブール列の頻度分布・棒グラフ・エントロピー分析。", + "desc": ( + "

    カテゴリ分析は非数値列—テキストラベル、カテゴリ、ブール値を検査します。

    " + "

    主要指標:

    " + "" + "

    初心者向けヒント:棒グラフで1本の棒が異常に高ければ「不均衡」です。オーバーサンプリング等の対策が必要な場合があります。

    " + ), + }, + "section_importance": { + "tip": "分散と相互情報量で特徴量を情報価値順にランク付け。", + "desc": ( + "

    特徴量重要度は、どの列が最も有用な情報を持っているか?に答えます。

    " + "

    手法:分散(≈0の定数列は無情報)・平均相関・相互情報量(線形+非線形を捕捉)

    " + "

    初心者向けヒント:低重要度の特徴を盲目的に削除しないでください。単独では弱くても他の特徴と組み合わせると強力になれます(交互作用効果)。

    " + ), + }, + "section_pca": { + "tip": "主成分分析でデータの内在次元と分散構造を解明。", + "desc": ( + "

    PCA(主成分分析)は相関のある特徴を分散順に並んだ無相関の成分に変換します。

    " + "

    主要出力:スクリープロット(各成分の分散)・累積分散(90%保持に必要な成分数)・負荷量行列(特徴の重み)

    " + "

    初心者向けヒント:PCAは特徴のスケールが揃っている時に最も有効です。システムはz-scoreで自動標準化してからPCAを実行しています。

    " + ), + }, + "section_duplicates": { + "tip": "統計量の膨張やモデルの偏りを招く完全重複行を特定。", + "desc": ( + "

    重複分析は、全列が完全に同一の行をスキャンします。

    " + "

    影響:サンプルサイズの膨張で信頼区間が狭くなる・訓練/テスト間のデータ漏洩・100%近い重複率はロードエラーの可能性大。

    " + "

    初心者向けヒント:少量の重複(<1%)は通常無害ですが、予想外に高い場合は必ず調査してください。

    " + ), + }, + "section_warnings": { + "tip": "全分析から抽出された警告と潜在的問題を集約。", + "desc": ( + "

    警告と問題は、全分析で検出された異常を一か所に集めて表示します。

    " + "

    よくある警告:高欠損率(>30%)・定数列・多重共線性・極端外れ値・型不整合

    " + "

    初心者向けヒント:ここを優先TODOリストとして使い、重大度の高い警告から対処しましょう。

    " + ), + }, + "sub_best_fit": { + "tip": "各列を理論分布(正規、ガンマ、ワイブル等)と比較し最適を選択。", + "desc": ( + "

    最適分布フィッティングは、各数値列を正規・対数正規・指数・ガンマ等と比較します。

    " + "

    選択基準:AIC(低い方がよい)・KS統計量(小さい方がよい)・p>0.05で許容範囲

    " + "

    初心者向けヒント:最適が“norm”(正規)なら標準検定がそのまま使えます。そうでなければ対数変換等を検討。

    " + ), + }, + "sub_jarque_bera": { + "tip": "歪度・尖度が正規分布と一致するか検定。", + "desc": ( + "

    Jarque-Bera検定はデータの形状が正規に合致するかを専門的に検定します。

    " + "

    解釈:p≥0.05=正規性を棄却できない | p<0.05=有意に非正規

    " + "

    初心者向けヒント:非正規は非常に一般的です。データが「悪い」のではなく、ノンパラメトリック手法や変換が必要なだけです。

    " + ), + }, + "sub_power_transform": { + "tip": "Box-Cox/Yeo-Johnson変換で歪んだ分布を正規に近づける推奨。", + "desc": ( + "

    べき変換推奨は、歪んだデータをより釣り鐘型に変換する数学変換を提案します。

    " + "

    2つの手法:Box-Cox(正の値のみ) | Yeo-Johnson(任意の値)

    " + "

    初心者向けヒント:べき変換は線形回帰やニューラルネットワークの必須前処理ステップです。

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "Scott/Silvermanルールでカーネル密度推定の最適平滑パラメータを決定。", + "desc": ( + "

    KDE帯域幅分析は密度曲線の最適な「滑らかさ」を探します。

    " + "

    トレードオフ:帯域幅小=細部を捉えるがノイズに過剰適合 | 帯域幅大=滑らかだが特徴を見逃す可能性

    " + "

    初心者向けヒント:2つのルールの帯域幅が大きく異なる場合、外れ値や多峰性がある可能性があります。

    " + ), + }, + "sub_partial_corr": { + "tip": "他の全変数をコントロールした後の2変数間の直接関係を測定。", + "desc": ( + "

    偏相関は、2変数の関係は直接的か、第三の変数によるものか?に答えます。

    " + "

    例:アイスクリーム売上と溺水事故は相関しますが、気温をコントロールすると偏相関はゼロに。気温が真の駆動因子です。

    " + "

    解釈:高偏相関=直接関係 | ≈0=疑似相関(他の変数を介した関係)

    " + ), + }, + "sub_mutual_info": { + "tip": "線形・非線形双方の依存関係を捕捉する情報理論的尺度。", + "desc": ( + "

    相互情報量(MI)は、一方の変数を知ることで他方についてどれだけ情報が得られるかを測定します。

    " + "

    特徴:MI=0=統計的独立 | MI>0=依存あり。Pearsonがゼロの非線形関係も含め、あらゆる依存を捕捉。

    " + "

    初心者向けヒント:MIが高くPearsonが低ければ非線形関係が存在します。散布図で確認しましょう。

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "各相関係数のリサンプリング95%信頼区間。", + "desc": ( + "

    ブートストラップ信頼区間は、各相関推定値の信頼性を示します。

    " + "

    手法:復元抽出1000回→各回の相関を計算→2.5〜97.5パーセンタイル=95%CI

    " + "

    解釈:CIが狭い=安定 | CIが広い=不確実性が高い | CIがゼロを跨ぐ=有意でない可能性

    " + ), + }, + "sub_distance_corr": { + "tip": "Szekely距離相関——Pearsonが見逃す非線形依存を検出。", + "desc": ( + "

    距離相関は、変数が真に独立の場合にのみゼロになる唯一の相関尺度です。

    " + "

    比較:Pearsonが低く距離相関が高い→非線形関係の存在!散布図でパターンを確認。

    " + ), + }, + "sub_kmeans": { + "tip": "シルエット分析で最適クラスタ数を自動選択するK-Means。", + "desc": ( + "

    K-Meansクラスタリングはデータを自動的にk個のグループに分割します。

    " + "

    手順:標準化→k=2〜10を順に試行→シルエットスコア最大のkを選択

    " + "

    指標:シルエット>0.5=良好 | >0.7=強い構造 | 慣性(WCSS):低いほどコンパクト

    " + "

    初心者向けヒント:K-Meansは大まかに球形・似た大きさのクラスタを仮定します。不規則な形状にはDBSCANが適しています。

    " + ), + }, + "sub_dbscan": { + "tip": "任意形状のクラスタを自動発見しノイズ点を識別する密度ベース法。", + "desc": ( + "

    DBSCANはデータの密集領域を探してクラスタを形成します。

    " + "

    利点:k指定不要・任意形状を発見可能・ノイズ点を自動識別

    " + "

    初心者向けヒント:クラスタが1つでノイズが多い場合、明確な密度構造がないかeps調整が必要です。

    " + ), + }, + "sub_hierarchical": { + "tip": "データ統合の階層構造を示すデンドログラム。", + "desc": ( + "

    階層的クラスタリングはデータの段階的マージ過程を樹形図(デンドログラム)で表示します。

    " + "

    読み方:y軸=マージ「距離」。任意の高さで水平線を引くと異なるkが得られます。長い縦線=自然なクラスタ境界。

    " + ), + }, + "sub_cluster_profiles": { + "tip": "各K-Meansクラスタの全特徴量にわたる統計プロファイル。", + "desc": ( + "

    クラスタプロファイルは各クラスタの平均/標準偏差を表示し、各クラスタの特徴を明らかにします。

    " + "

    用途:クラスタ間で平均差が最も大きい特徴=クラスタを定義する重要な区別特徴。

    " + "

    初心者向けヒント:この表を使ってクラスタに名前を付けましょう(例:「高価値顧客」「節約志向の購買者」)。

    " + ), + }, + "sub_tsne": { + "tip": "局所近傍構造を保持する非線形2D可視化。", + "desc": ( + "

    t-SNEは高次元データを2D散布図に圧縮し、どの点が似ているかを保持します。

    " + "

    読み方:2Dで近い=元の空間で類似。はっきりした塊=本当のクラスタの可能性。

    " + "

    ⚠️ 注意:クラスタ間距離は無意味・サイズは実際を反映しない・毎回結果が変化(確率的アルゴリズム)

    " + ), + }, + "sub_umap": { + "tip": "局所+大域構造を同時保持する高速非線形2D可視化。", + "desc": ( + "

    UMAPはt-SNEの現代的代替で、通常より高速で大域レイアウトもよく保持します。

    " + "

    利点:高速・大域構造保持が良好・クラスタ間相対位置にある程度の意味。

    " + "

    初心者向けヒント:t-SNEとUMAPの両方で似たクラスタ構造が出れば、それは本当のクラスタである可能性が高いです。

    " + ), + }, + "sub_factor_analysis": { + "tip": "観測変数間の相関を説明する隠れた(潜在)因子を発見。", + "desc": ( + "

    因子分析は、なぜ特定の変数が相関し合うのかを説明します――背後に潜在因子があると仮定します。

    " + "

    例え:10科目の成績の背後に「言語能力」「数学能力」「芸術能力」という3つの潜在因子が隠れている可能性。

    " + "

    初心者向けヒント:ノイズ分散が高い変数は共通因子で説明されず、何か独自のものを測定しています。

    " + ), + }, + "sub_factor_loadings": { + "tip": "各観測変数と各潜在因子間の関連強度。", + "desc": ( + "

    因子負荷量は元の変数と潜在因子の間の結びつきの強さを定量化します。

    " + "

    解釈:|負荷量|>0.7=強 | 0.4-0.7=中程度 | <0.4=弱

    " + "

    交差負荷:複数因子に高い負荷を持つ変数——因子モデルに適さない混合変数。

    " + ), + }, + "sub_feature_contrib": { + "tip": "PCA負荷量加重で各特徴の分散寄与をランキング。", + "desc": ( + "

    PCA加重特徴寄与は総分散への寄与で元の特徴をランク付けし、教師なし特徴選択に使います。

    " + "

    初心者向けヒント:ランキング下位の特徴は全体分散への貢献が極めて小さく、除外候補です。

    " + ), + }, + "sub_interaction": { + "tip": "特徴ペア間の相乗的な積交互作用効果を検出。", + "desc": ( + "

    交互作用検出は、2つの特徴の積が個別にはない情報を含むかを検定します。

    " + "

    初心者向けヒント:強い交互作用を発見したら、その積を新たな特徴として追加するとモデル性能が大幅に向上する場合があります。

    " + ), + }, + "sub_monotonic": { + "tip": "Pearson vs Spearman比較で非線形単調パターンを識別。", + "desc": ( + "

    単調関係分析は、直線ではないが一緒に増減する変数ペアを検出します。

    " + "

    ポイント:|Spearman|-|Pearson|差が大きい=指数・対数等の非線形単調パターン。単調変換で線形モデルを改善できます。

    " + ), + }, + "sub_binning": { + "tip": "エントロピー分析で等幅・等頻度ビニング戦略を評価。", + "desc": ( + "

    ビニング分析は連続変数を離散カテゴリに変換する戦略を評価します。

    " + "

    2つの戦略:等幅(同じ区間幅、外れ値に敏感) vs 等頻度(同じデータ量、歪んだデータに適切)

    " + ), + }, + "sub_cardinality": { + "tip": "ユニーク値数を分析しカテゴリエンコーディング手法を推奨。", + "desc": ( + "

    カーディナリティとエンコーディング推奨:ユニーク値数に基づき最適戦略を提案:

    " + "" + ), + }, + "sub_leakage": { + "tip": "ターゲット情報を意図せず漏洩するリスクのある特徴を識別。", + "desc": ( + "

    リークリスクはターゲット変数に直接/間接的にアクセスできる特徴を検査します。

    " + "

    初心者向けヒント:モデルの精度が「出来すぎ」(例:99%)の場合、データリークが最有力犯です。

    " + ), + }, + "sub_iso_forest": { + "tip": "ランダム分割で異常値を隔離するツリーベースの手法。", + "desc": ( + "

    Isolation Forestは「異常は隔離しやすい」という考えに基づきます。

    " + "

    原理:ランダム分割木を構築→各点の平均隔離パス長を測定→パスが短い=より異常

    " + "

    初心者向けヒント:パラメータが少なく高次元データに強い、異常検出の第一選択アルゴリズムです。

    " + ), + }, + "sub_lof": { + "tip": "各点の局所密度を近傍と比較する密度ベースの異常検出。", + "desc": ( + "

    LOF(局所外れ値因子)は各点の局所密度をk近傍の密度と比較します。

    " + "

    LOF≈1=正常 | LOF>>1=近傍よりはるかに疎(異常)

    " + "

    初心者向けヒント:各クラスタの密度が異なる場合、LOFはIsolation Forestより有効です。

    " + ), + }, + "sub_mahalanobis": { + "tip": "特徴間の相関を考慮したデータ中心からの多変量距離。", + "desc": ( + "

    マハラノビス距離は共分散構造を考慮して各観測点からデータ中心までの距離を測定します。

    " + "

    vs ユークリッド:ユークリッドは全方向を等しく扱いますが、マハラノビスは特徴間の相関を考慮します。" + "通常は連動する2つの特徴が一方高・他方低の場合、それが真の異常です。

    " + ), + }, + "sub_consensus": { + "tip": "3手法中≥2が同意した場合にのみ異常値としてフラグ。", + "desc": ( + "

    合意型異常検出はIsolation Forest・LOF・マハラノビスを統合します。

    " + "

    ルール:≥2/3の手法が同意→異常としてフラグ。誤検出を大幅に削減します。

    " + "

    初心者向けヒント:まず合意フラグから調査しましょう——最も信頼性の高い異常候補です。

    " + ), + }, + "test_levene": { + "tip": "グループ間の分散が等しいか検定(等分散の仮定)。", + "desc": ( + "

    Levene検定は異なるグループの分散がほぼ等しいかを確認します。

    " + "

    解釈:p>0.05=等分散成立 | p≤0.05=分散が有意に異なる→Welch t検定やノンパラ法を推奨

    " + ), + }, + "test_kruskal_wallis": { + "tip": "ノンパラ版ANOVA:複数グループが同じ分布からのものか検定。", + "desc": ( + "

    Kruskal-Wallis検定は一元配置ANOVAのノンパラメトリック版で、正規性仮定が不要です。

    " + "

    解釈:p<0.05=少なくとも1グループが有意に異なる→ペアワイズMann-Whitney | p≥0.05=有意差なし

    " + ), + }, + "test_mann_whitney": { + "tip": "ノンパラ二標本検定:2独立グループの分布を比較。", + "desc": ( + "

    Mann-Whitney U検定は2つのグループが同じ分布から来ているか判定します。

    " + "

    解釈:p<0.05=2群は有意に異なる | p≥0.05=有意差なし

    " + "

    初心者向けヒント:平均が同じでも散らばりが異なれば有意になることがあります。

    " + ), + }, + "test_chi_square": { + "tip": "観測カテゴリ頻度が期待頻度から逸脱しているか検定。", + "desc": ( + "

    カイ二乗適合度検定は観測分布が期待(デフォルトで均一)に合致するかを検定します。

    " + "

    解釈:p<0.05=有意に逸脱 | p≥0.05=一致

    " + "

    条件:各期待頻度が5以上であること。

    " + ), + }, + "test_grubbs": { + "tip": "最極端値が統計的に有意な外れ値かを検定。", + "desc": ( + "

    Grubbs検定は最極端値が分布の自然な極端か有意な異常かを評価します。

    " + "

    解釈:p<0.05=有意な外れ値 | p≥0.05=予想範囲内

    " + "

    初心者向けヒント:Grubbs検定は一度に1つの極端値のみ検定します。複数の外れ値にはIQRやIsolation Forestを使いましょう。

    " + ), + }, + "test_adf": { + "tip": "時系列が定常か(統計的性質が時間変化しないか)検定。", + "desc": ( + "

    ADF検定は時系列に単位根(非定常)があるか判定します。

    " + "

    解釈:p<0.05=定常✓ | p≥0.05=非定常、差分やトレンド除去を検討

    " + "

    初心者向けヒント:時系列的な列は回帰前にADFチェックが必須です。非定常な予測変数は回帰結果を無意味にします。

    " + ), + }, + "sub_column_quality": { + "tip": "各列の完全性・一意性・妥当性の品質スコア。", + "desc": ( + "

    列品質は全体スコアを各列に分解し、問題列をピンポイントで特定します。

    " + "

    初心者向けヒント:品質スコアが極端に低い列がクリーニングまたは削除の最優先ターゲットです。

    " + ), + }, + "sub_cleaning_log": { + "tip": "全自動クリーニング操作のステップバイステップ記録。", + "desc": "

    クリーニングログはシステムが実行した各変換を記録し、完全な透明性と再現性を保証します。

    ", + }, + "sub_detected_issues": { + "tip": "前処理で検出されたデータ品質問題のリスト。", + "desc": "

    検出された問題は混合型、疑わしいパターン(「999」欠損マーカーなど)、エンコーディングエラー等を列挙します。

    ", + }, + "sub_normality_tests": { + "tip": "各列のShapiro-Wilk、Anderson-Darling、Jarque-Bera正規性検定。", + "desc": ( + "

    正規性検定は3つの相補的な検定で各列が正規分布に従うか評価します。

    " + "

    3つ全てp<0.05なら非正規の可能性が高いです。不一致ならヒストグラムで確認しましょう。

    " + ), + }, + "sub_vif": { + "tip": "分散膨張因子(VIF)で多重共線性を検出。", + "desc": ( + "

    VIFは特徴間の相関により回帰係数の分散がどれだけ膨張するかを測定します。

    " + "

    解釈:VIF=1=相関なし | 1-5=低 | 5-10=中程度 | >10=深刻、除去や結合を検討

    " + "

    初心者向けヒント:VIFが高いと係数が不安定になり、データの微小変化で符号が反転することがあります。

    " + ), + }, + "sub_summary": { + "tip": "分布形状、正規性、外れ値数のクイックサマリー。", + "desc": "

    要約は歪度分類、尖度タイプ、正規性指標、外れ値カウントを1つの表にまとめます。

    ", + }, + "sub_variance_explained": { + "tip": "各主成分が捕捉する分散の割合(スクリープロットデータ)。", + "desc": ( + "

    説明分散は各主成分の個別および累積分散寄与率を表示します。

    " + "

    スクリープロット:「肘」(曲線が急に曲がる点)が追加成分の価値が低下する転換点です。

    " + ), + }, + "sub_loadings": { + "tip": "各元の特徴の各主成分への寄与の重み。", + "desc": ( + "

    PCA負荷量は各元の特徴が各主成分にどの程度の重みを持つかを表示します。

    " + "

    例:PC1が「身長」「体重」「BMI」に高い負荷→「体格」成分と解釈可能。

    " + ), + }, +} + +# -- German ------------------------------------------------------------ +METHOD_INFO["de"] = { + "section_overview": { + "tip": "Datensatz-Überblick: Zeilen/Spalten, Datentypen, Speicherverbrauch.", + "desc": ( + "

    Überblick zeigt die Gesamtstruktur des Datensatzes vor der Detailanalyse.

    " + "

    Enthaltene Informationen:

    " + "" + "

    Warum wichtig: Eine Überprüfung der Zeilen und Typen deckt Ladefehler (abgeschnittene Dateien, Trennzeichenfehler, Kodierungsprobleme) frühzeitig auf.

    " + "

    Anfänger-Tipp: Weniger Zeilen als erwartet? Möglicherweise ein Trennzeichenproblem. Numerische Spalten als «Text» angezeigt? Es könnten nicht-numerische Zeichen enthalten sein.

    " + ), + }, + "section_quality": { + "tip": "Datenqualität in 4 Dimensionen: Vollständigkeit, Eindeutigkeit, Konsistenz, Gültigkeit (0-100%).", + "desc": ( + "

    Qualitätsbewertung – ein «Gesundheitscheck» über 4 unabhängige Achsen, je 0–100%.

    " + "

    Die vier Achsen:

    " + "" + "

    Bewertung: 90-100% = ausgezeichnet | 70-89% = akzeptabel, Hinweise beachten | <70% = vor Modellierung beheben

    " + "

    Formel: 0,35×Vollst. + 0,25×Eindeut. + 0,20×Konsist. + 0,20×Gültigkeit

    " + ), + }, + "section_preprocessing": { + "tip": "Protokoll aller automatischen Bereinigungsschritte vor der Analyse.", + "desc": ( + "

    Vorverarbeitungsprotokoll dokumentiert chronologisch alle automatischen Bereinigungsoperationen.

    " + "

    Typische Schritte: Entfernung leerer/konstanter Spalten, String-zu-Zahl-Konvertierung, Kodierungskorrekturen, nicht parsbare Zeilen entfernen.

    " + "

    Warum wichtig: Reproduzierbarkeit ist die Grundlage vertrauenswürdiger Analysen. Nur wer weiß, welche Transformationen angewandt wurden, kann Ergebnisse validieren.

    " + "

    Anfänger-Tipp: Wurde eine wichtige Spalte entfernt? Dann hat die Originaldatei möglicherweise Formatprobleme.

    " + ), + }, + "section_descriptive": { + "tip": "Zentrale Tendenz, Streuung und Verteilungsform jeder Spalte.", + "desc": ( + "

    Deskriptive Statistik ist das Fundament der explorativen Datenanalyse (EDA) und fasst Zentrum, Streuung und Form jeder Spalte zusammen.

    " + "

    Numerische Kennzahlen:

    " + "" + "

    Anfänger-Tipp: Spalten mit großer Abweichung zwischen Mittelwert und Median könnten Ausreißer oder starke Schiefe aufweisen.

    " + ), + }, + "section_distribution": { + "tip": "Histogramme und Q-Q-Plots zur Visualisierung der Verteilungsform.", + "desc": ( + "

    Verteilungsanalyse macht die Werteverteilung jeder Spalte sichtbar.

    " + "

    Diagrammtypen:

    " + "" + "

    Häufige Formen: Glocke (normal), rechtschief (Einkommen), linksschief (Prüfungsnoten nahe Maximum), bimodal (Mischung), gleichverteilt.

    " + "

    Warum wichtig: Viele ML-Algorithmen setzen normalverteilte Eingaben voraus. Kenntnis der tatsächlichen Verteilung hilft bei Modellwahl und Transformationen.

    " + ), + }, + "section_correlation": { + "tip": "Pearson- (linear) und Spearman- (Rang-)Korrelation zwischen numerischen Spalten.", + "desc": ( + "

    Korrelationsanalyse misst die Stärke der Beziehung zwischen Variablenpaaren.

    " + "

    Zwei Korrelationsmaße:

    " + "" + "

    Heatmap: Dunklere Farbe = stärkere Korrelation. Rot = positiv, Blau = negativ.

    " + "

    Schwellenwerte: |r|>0,90 schwere Multikollinearität | |r|>0,70 stark | |r|<0,30 schwach

    " + "

    Anfänger-Tipp: Hoch korrelierte Features tragen ähnliche Informationen. Beide in einem linearen Modell → Instabilität.

    " + ), + }, + "section_missing": { + "tip": "Analyse fehlender Daten: Muster, Anteil und Mechanismus.", + "desc": ( + "

    Analyse fehlender Daten untersucht, wo, wie viel und warum Daten fehlen.

    " + "

    Drei Mechanismen:

    " + "" + "

    Praxis-Leitfaden: <5% Löschen/Mittelwert | 5-30% KNN/MICE | >50% Spalte entfernen erwägen

    " + ), + }, + "section_outlier": { + "tip": "Erkennung anomaler Datenpunkte mittels IQR-Zäunen und Z-Scores.", + "desc": ( + "

    Ausreißererkennung identifiziert Werte, die weit vom Rest entfernt liegen.

    " + "

    IQR-Methode: IQR = Q3−Q1 | Mild: Q3+1,5×IQR | " + "Extrem: Q3+3×IQR

    " + "

    Wichtig: Nicht alle Ausreißer sind Fehler! Bei Betrugserkennung oder seltenen Krankheiten können Ausreißer die wertvollsten Daten sein.

    " + "

    Anfänger-Tipp: Punkte jenseits der Whisker im Boxplot sind potenzielle Ausreißer, die untersucht werden sollten.

    " + ), + }, + "section_categorical": { + "tip": "Häufigkeitsverteilung, Balkendiagramme und Entropie kategorischer Spalten.", + "desc": ( + "

    Kategorische Analyse untersucht nicht-numerische Spalten – Textlabels, Kategorien, boolesche Werte.

    " + "

    Kernkennzahlen:

    " + "" + "

    Anfänger-Tipp: Ein extrem hoher Balken im Diagramm zeigt «Ungleichgewicht» – ggf. Oversampling nötig.

    " + ), + }, + "section_importance": { + "tip": "Feature-Ranking nach Varianz und gegenseitiger Information.", + "desc": ( + "

    Feature-Wichtigkeit beantwortet: Welche Spalten tragen die nützlichsten Informationen?

    " + "

    Methoden: Varianz (≈0 = konstant, keine Info) · mittlere Korrelation · Mutual Information (linear + nichtlinear).

    " + "

    Anfänger-Tipp: Löschen Sie unwichtige Features nicht blind – einzeln schwach, aber in Kombinationen möglicherweise stark (Interaktionseffekte).

    " + ), + }, + "section_pca": { + "tip": "Hauptkomponentenanalyse: intrinsische Dimensionen und Varianzstruktur.", + "desc": ( + "

    PCA transformiert korrelierte Features in nach Varianz geordnete, unkorrelierte Komponenten.

    " + "

    Ergebnisse: Scree-Plot · kumulative Varianz · Loadings-Matrix

    " + "

    Anfänger-Tipp: PCA funktioniert am besten bei ähnlichen Skalen. Das System hat automatisch z-standardisiert.

    " + ), + }, + "section_duplicates": { + "tip": "Erkennung vollständig identischer Zeilen, die Statistiken aufblähen.", + "desc": ( + "

    Duplikat-Analyse scannt nach Zeilen, die in allen Spalten identisch sind.

    " + "

    Risiken: Aufgeblähte Stichprobe → zu enge Konfidenzintervalle · Train/Test-Leakage · ≈100% Duplikate = wahrscheinlich ein Ladefehler.

    " + "

    Anfänger-Tipp: Wenige Duplikate (<1%) sind meist harmlos, aber unerwartet hohe Raten erfordern Untersuchung.

    " + ), + }, + "section_warnings": { + "tip": "Zusammenfassung aller erkannten Warnungen und potenzieller Probleme.", + "desc": ( + "

    Warnungen und Probleme bündelt alle Anomalien aus allen Analysen an einem Ort.

    " + "

    Häufige Warnungen: Hoher Fehlanteil (>30%) · Konstante Spalten · Multikollinearität · Extreme Ausreißer · Typ-Inkompatibilitäten

    " + "

    Anfänger-Tipp: Behandeln Sie dies als priorisierte Aufgabenliste – beginnen Sie mit den schwerwiegendsten.

    " + ), + }, + "sub_best_fit": { + "tip": "Vergleich jeder Spalte mit theoretischen Verteilungen (Normal, Gamma, Weibull, …).", + "desc": ( + "

    Best-Fit-Verteilung vergleicht numerische Spalten mit Normal-, Log-Normal-, Exponential-, Gamma-Verteilungen usw.

    " + "

    Kriterien: AIC (niedriger = besser) · KS-Statistik (kleiner = besser) · p>0,05 = akzeptabel

    " + "

    Anfänger-Tipp: Best-Fit «norm» → Standard-Tests direkt anwendbar; sonst Log-Transformation erwägen.

    " + ), + }, + "sub_jarque_bera": { + "tip": "Prüft, ob Schiefe und Kurtosis einer Normalverteilung entsprechen.", + "desc": ( + "

    Jarque-Bera-Test prüft speziell die Form der Verteilung.

    " + "

    Interpretation: p≥0,05 = Normalität nicht abgelehnt | p<0,05 = signifikant nicht-normal

    " + "

    Anfänger-Tipp: Nicht-Normalität ist sehr häufig und bedeutet nicht «schlechte» Daten, sondern dass nicht-parametrische Methoden oder Transformationen nötig sind.

    " + ), + }, + "sub_power_transform": { + "tip": "Empfehlung für Box-Cox/Yeo-Johnson zur Annäherung an Normalverteilung.", + "desc": ( + "

    Potenztransformation schlägt mathematische Umformungen vor, die schiefe Daten glockenförmiger machen.

    " + "

    Zwei Verfahren: Box-Cox (nur positive Werte) | Yeo-Johnson (beliebige Werte)

    " + "

    Anfänger-Tipp: Eine wichtige Vorverarbeitungs-Stufe für lineare Regression und neuronale Netze.

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "Optimale Glättung der Kerndichteschätzung nach Scott/Silverman.", + "desc": ( + "

    KDE-Bandbreite bestimmt die optimale «Glätte» der Dichtekurve.

    " + "

    Abwägung: Klein = Detailreich, aber Rauschen-Überanpassung | Groß = Glatt, aber Merkmale werden möglicherweise übersehen

    " + "

    Anfänger-Tipp: Große Differenz beider Regeln → möglicherweise Ausreißer oder Multimodalität.

    " + ), + }, + "sub_partial_corr": { + "tip": "Direkte Beziehung zweier Variablen nach Kontrolle aller anderen.", + "desc": ( + "

    Partielle Korrelation beantwortet: Ist die Beziehung direkt oder durch eine dritte Variable verursacht?

    " + "

    Beispiel: Eisverkauf ↔ Ertrinkungsfälle korrelieren, aber nach Kontrolle der Temperatur ≈0. Temperatur ist der wahre Treiber.

    " + "

    Interpretation: Hohe partielle Korr. = direkt | ≈0 = Scheinkorrelation (durch andere Variablen vermittelt)

    " + ), + }, + "sub_mutual_info": { + "tip": "Informationstheoretische Maßzahl für lineare und nichtlineare Abhängigkeit.", + "desc": ( + "

    Gegenseitige Information (MI) misst, wie viel man über eine Variable erfährt, wenn man eine andere kennt.

    " + "

    Kern: MI=0 = statistisch unabhängig | MI>0 = Abhängigkeit vorhanden. Erfasst auch nichtlineare Beziehungen, bei denen Pearson null ist.

    " + "

    Anfänger-Tipp: Hohe MI bei niedrigem Pearson → nichtlineare Beziehung. Streudiagramm prüfen!

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "Resampling-basierte 95%-Konfidenzintervalle für jede Korrelation.", + "desc": ( + "

    Bootstrap-Konfidenzintervall zeigt, wie zuverlässig jede Korrelationsschätzung ist.

    " + "

    Methode: 1000× mit Zurücklegen ziehen → Korrelation berechnen → 2,5–97,5 Perzentil = 95% KI

    " + "

    Interpretation: Eng = stabil | Breit = hohe Unsicherheit | KI überquert Null = möglicherweise nicht signifikant

    " + ), + }, + "sub_distance_corr": { + "tip": "Szekely-Distanzkorrelation – erkennt nichtlineare Abhängigkeiten.", + "desc": ( + "

    Distanzkorrelation ist genau dann null, wenn Variablen wirklich unabhängig sind – stärkere Garantie als Pearson.

    " + "

    Vergleich: Niedriger Pearson + hohe Distanzkorrelation → nichtlineare Beziehung! Streudiagramm anschauen.

    " + ), + }, + "sub_kmeans": { + "tip": "K-Means mit automatischer Silhouetten-Optimierung der Clusteranzahl.", + "desc": ( + "

    K-Means-Clustering teilt Daten automatisch in k Gruppen.

    " + "

    Ablauf: Standardisierung → k=2–10 durchprobieren → k mit höchstem Silhouetten-Score wählen

    " + "

    Metriken: Silhouette >0,5 = gut | >0,7 = starke Struktur | Trägheit (WCSS): niedriger = kompakter

    " + "

    Anfänger-Tipp: K-Means setzt kugelförmige, ähnlich große Cluster voraus. Für unregelmäßige Formen: DBSCAN.

    " + ), + }, + "sub_dbscan": { + "tip": "Dichtebasiertes Clustering: findet beliebige Clusterformen automatisch.", + "desc": ( + "

    DBSCAN bildet Cluster durch Auffinden dichter Datenbereiche.

    " + "

    Vorteile: Kein k nötig · Beliebige Formen · Automatische Rauscherkennung

    " + "

    Anfänger-Tipp: Nur 1 Cluster + viel Rauschen? Keine klare Dichtestruktur oder eps-Anpassung nötig.

    " + ), + }, + "sub_hierarchical": { + "tip": "Dendrogramm: zeigt schrittweise Zusammenführung der Cluster.", + "desc": ( + "

    Hierarchisches Clustering zeigt den schrittweisen Zusammenführungsprozess als Baumdiagramm (Dendrogramm).

    " + "

    Lesen: y-Achse = Fusionsdistanz. Horizontale Linie auf beliebiger Höhe → verschiedene k. Lange vertikale Linien = natürliche Clustergrenzen.

    " + ), + }, + "sub_cluster_profiles": { + "tip": "Statistische Zusammenfassung jedes K-Means-Clusters über alle Features.", + "desc": ( + "

    Cluster-Profile zeigen pro Cluster Mittelwert/Std.-Abweichung und machen die Besonderheiten sichtbar.

    " + "

    Verwendung: Features mit den größten Mittelwertunterschieden = die definierenden Merkmale.

    " + "

    Anfänger-Tipp: Nutzen Sie diese Tabelle, um Clustern Namen zu geben (z. B. «Premium-Kunden», «Sparfüchse»).

    " + ), + }, + "sub_tsne": { + "tip": "Nichtlineare 2D-Projektion mit Erhaltung lokaler Nachbarschaften.", + "desc": ( + "

    t-SNE komprimiert hochdimensionale Daten auf ein 2D-Streudiagramm und bewahrt Ähnlichkeiten.

    " + "

    Lesen: Nah in 2D = ähnlich im Originalraum. Klar abgegrenzte Gruppen = mögliche echte Cluster.

    " + "

    ⚠️ Achtung: Inter-Cluster-Abstände sind bedeutungslos · Größen spiegeln nicht die Realität · Ergebnisse variieren (stochastisch)

    " + ), + }, + "sub_umap": { + "tip": "Schnelle nichtlineare 2D-Projektion mit Erhalt lokaler + globaler Struktur.", + "desc": ( + "

    UMAP ist die moderne Alternative zu t-SNE – meist schneller und besser im Erhalt globaler Layouts.

    " + "

    Vorteile: Schnell · Besserer Erhalt globaler Struktur · Relative Positionen der Cluster teilweise aussagekräftig

    " + "

    Anfänger-Tipp: Zeigen t-SNE und UMAP ähnliche Clusterstrukturen, sind diese wahrscheinlich real.

    " + ), + }, + "sub_factor_analysis": { + "tip": "Entdeckung verborgener (latenter) Faktoren hinter beobachteten Korrelationen.", + "desc": ( + "

    Faktorenanalyse erklärt, warum bestimmte Variablen korrelieren – sie nimmt latente Faktoren an.

    " + "

    Analogie: 10 Prüfungsnoten könnten 3 latente Faktoren widerspiegeln: «Sprachfähigkeit», «Mathefähigkeit», «Kreativität».

    " + "

    Anfänger-Tipp: Variablen mit hoher Rauschvarianz werden nicht durch gemeinsame Faktoren erklärt und messen möglicherweise etwas Einzigartiges.

    " + ), + }, + "sub_factor_loadings": { + "tip": "Stärke der Zuordnung jeder Variable zu jedem latenten Faktor.", + "desc": ( + "

    Faktorladungen quantifizieren die Beziehung zwischen Originalvariablen und latenten Faktoren.

    " + "

    Interpretation: |Ladung|>0,7 = stark | 0,4-0,7 = mäßig | <0,4 = schwach

    " + "

    Querladungen: Variable lädt hoch auf mehrere Faktoren – passt nicht gut ins Faktormodell.

    " + ), + }, + "sub_feature_contrib": { + "tip": "PCA-gewichteter Varianzbeitrag jedes Features zum Gesamtdatensatz.", + "desc": ( + "

    PCA-gewichteter Feature-Beitrag rankt Originalfeatures nach ihrem Anteil an der Gesamtvarianz für unüberwachte Feature-Auswahl.

    " + "

    Anfänger-Tipp: Features am unteren Ende tragen minimal zur Gesamtvarianz bei – Kandidaten für Entfernung.

    " + ), + }, + "sub_interaction": { + "tip": "Erkennung synergistischer Produkt-Interaktionen zwischen Feature-Paaren.", + "desc": ( + "

    Interaktionserkennung prüft, ob das Produkt zweier Features Informationen enthält, die einzeln fehlen.

    " + "

    Anfänger-Tipp: Starke Interaktion gefunden? Produkt als neues Feature hinzufügen kann Modellleistung deutlich steigern.

    " + ), + }, + "sub_monotonic": { + "tip": "Pearson vs. Spearman zum Aufdecken nichtlinearer monotoner Muster.", + "desc": ( + "

    Monotone Beziehungsanalyse erkennt Variablenpaare, die zusammen steigen/fallen, aber nicht linear.

    " + "

    Schlüssel: Große |Spearman|−|Pearson|-Differenz = exponentielle/logarithmische Muster. Monotone Transformation kann lineare Modelle verbessern.

    " + ), + }, + "sub_binning": { + "tip": "Bewertung von Gleich-Breite- und Gleich-Frequenz-Binning per Entropie.", + "desc": ( + "

    Binning-Analyse evaluiert Strategien zur Diskretisierung kontinuierlicher Variablen.

    " + "

    Zwei Strategien: Gleiche Breite (ausreißerempfindlich) vs. Gleiche Frequenz (geeignet für schiefe Daten)

    " + ), + }, + "sub_cardinality": { + "tip": "Analyse der Unique-Wert-Anzahl und empfohlene Kodierungsmethode.", + "desc": ( + "

    Kardinalität und Kodierung: Empfehlung basierend auf Unique-Wert-Anzahl:

    " + "" + ), + }, + "sub_leakage": { + "tip": "Erkennung von Features, die unbeabsichtigt Zielinformationen enthalten.", + "desc": ( + "

    Leakage-Risiko prüft Features, die direkt/indirekt auf die Zielvariable zugreifen.

    " + "

    Anfänger-Tipp: «Zu gute» Genauigkeit (z. B. 99%) → Data Leakage ist der Hauptverdächtige.

    " + ), + }, + "sub_iso_forest": { + "tip": "Baumbasierte Anomalieerkennung durch zufällige Isolation.", + "desc": ( + "

    Isolation Forest basiert auf der Idee: «Anomalien sind leichter zu isolieren.»

    " + "

    Prinzip: Zufällige Teilungsbäume → mittlere Isolation-Pfadlänge → kurzer Pfad = anomaler

    " + "

    Anfänger-Tipp: Wenige Parameter, gut bei hochdimensionalen Daten – erste Wahl für Anomalieerkennung.

    " + ), + }, + "sub_lof": { + "tip": "Dichtebasierte Erkennung: Vergleich lokaler Dichte mit Nachbarn.", + "desc": ( + "

    LOF vergleicht die lokale Dichte eines Punktes mit der seiner k nächsten Nachbarn.

    " + "

    LOF≈1 = normal | LOF>>1 = deutlich dünner als Nachbarn (anomal)

    " + "

    Anfänger-Tipp: Bei Clustern unterschiedlicher Dichte ist LOF effektiver als Isolation Forest.

    " + ), + }, + "sub_mahalanobis": { + "tip": "Multivariater Abstand zum Datenzentrum unter Berücksichtigung von Korrelationen.", + "desc": ( + "

    Mahalanobis-Distanz misst den Abstand jeder Beobachtung zum Datenzentrum unter Berücksichtigung der Kovarianzstruktur.

    " + "

    vs. Euklid: Euklid behandelt alle Richtungen gleich; Mahalanobis berücksichtigt Korrelationen – " + "wenn zwei normalerweise gleichzeitig steigende Features gegeneinander laufen, ist das wirklich anomal.

    " + ), + }, + "sub_consensus": { + "tip": "Nur als anomal markiert, wenn ≥2 von 3 Methoden zustimmen.", + "desc": ( + "

    Konsens-Anomalieerkennung kombiniert Isolation Forest, LOF und Mahalanobis.

    " + "

    Regel: ≥2/3 Methoden stimmen zu → als anomal markiert. Reduziert Fehlalarme erheblich.

    " + "

    Anfänger-Tipp: Beginnen Sie die Untersuchung mit Konsens-Flags – das sind die zuverlässigsten Anomalie-Kandidaten.

    " + ), + }, + "test_levene": { + "tip": "Prüft, ob die Varianzen verschiedener Gruppen gleich sind.", + "desc": ( + "

    Levene-Test bestätigt, ob Gruppenvarianzen annähernd gleich sind.

    " + "

    Interpretation: p>0,05 = Varianzhomogenität bestätigt | p≤0,05 = signifikant unterschiedlich → Welch-Test oder nicht-parametrisch

    " + ), + }, + "test_kruskal_wallis": { + "tip": "Nichtparametrische ANOVA: Prüft, ob Gruppen aus derselben Verteilung stammen.", + "desc": ( + "

    Kruskal-Wallis-Test ist das nicht-parametrische Pendant zur Einweg-ANOVA. Keine Normalitätsannahme nötig.

    " + "

    Interpretation: p<0,05 = mindestens eine Gruppe unterscheidet sich → paarweise Mann-Whitney | p≥0,05 = kein Unterschied

    " + ), + }, + "test_mann_whitney": { + "tip": "Nichtparametrischer Zweistichprobentest: Vergleich zweier Gruppen.", + "desc": ( + "

    Mann-Whitney U-Test prüft, ob zwei Gruppen aus derselben Verteilung stammen.

    " + "

    Interpretation: p<0,05 = signifikanter Unterschied | p≥0,05 = kein Unterschied

    " + "

    Anfänger-Tipp: Gleicher Mittelwert, aber unterschiedliche Streuung kann ebenfalls signifikant sein.

    " + ), + }, + "test_chi_square": { + "tip": "Prüft, ob beobachtete Kategorie-Häufigkeiten von Erwartungen abweichen.", + "desc": ( + "

    Chi-Quadrat-Anpassungstest prüft, ob die beobachtete Verteilung der erwarteten (Standard: gleichmäßig) entspricht.

    " + "

    Interpretation: p<0,05 = signifikante Abweichung | p≥0,05 = Übereinstimmung

    " + "

    Voraussetzung: Jede erwartete Häufigkeit muss ≥5 sein.

    " + ), + }, + "test_grubbs": { + "tip": "Prüft, ob der extremste Wert ein statistisch signifikanter Ausreißer ist.", + "desc": ( + "

    Grubbs-Test bewertet, ob der extremste Wert ein natürliches Extrem oder eine signifikante Anomalie ist.

    " + "

    Interpretation: p<0,05 = signifikanter Ausreißer | p≥0,05 = im erwarteten Bereich

    " + "

    Anfänger-Tipp: Grubbs testet nur einen Extremwert. Für mehrere Ausreißer: IQR oder Isolation Forest.

    " + ), + }, + "test_adf": { + "tip": "Prüft, ob eine Zeitreihe stationär ist (zeitinvariante Statistiken).", + "desc": ( + "

    ADF-Test prüft auf Einheitswurzel (Nicht-Stationarität) in Zeitreihen.

    " + "

    Interpretation: p<0,05 = stationär ✓ | p≥0,05 = nicht-stationär → Differenzierung erwägen

    " + "

    Anfänger-Tipp: Zeitreihen-Spalten müssen vor der Regression mit ADF geprüft werden. Nicht-stationäre Prädiktoren machen Regressionen sinnlos.

    " + ), + }, + "sub_column_quality": { + "tip": "Qualitätsscore pro Spalte: Vollständigkeit, Eindeutigkeit, Gültigkeit.", + "desc": ( + "

    Spaltenqualität zerlegt den Gesamtscore auf jede Spalte, um Problemspalten zu identifizieren.

    " + "

    Anfänger-Tipp: Spalten mit extrem niedrigem Score sind vorrangige Ziele für Bereinigung oder Entfernung.

    " + ), + }, + "sub_cleaning_log": { + "tip": "Schritt-für-Schritt-Protokoll aller automatischen Bereinigungen.", + "desc": "

    Bereinigungsprotokoll dokumentiert jede Transformation für volle Transparenz und Reproduzierbarkeit.

    ", + }, + "sub_detected_issues": { + "tip": "Liste der bei der Vorverarbeitung erkannten Datenqualitätsprobleme.", + "desc": "

    Erkannte Probleme listet gemischte Typen, verdächtige Muster (z. B. «999» als Fehlwert), Kodierungsfehler etc.

    ", + }, + "sub_normality_tests": { + "tip": "Shapiro-Wilk, Anderson-Darling und Jarque-Bera pro Spalte.", + "desc": ( + "

    Normalitätstests bewerten mit drei komplementären Tests, ob jede Spalte normalverteilt ist.

    " + "

    Alle drei p<0,05 → wahrscheinlich nicht normal. Bei Widersprüchen Histogramm prüfen.

    " + ), + }, + "sub_vif": { + "tip": "Varianzinflationsfaktor zur Erkennung von Multikollinearität.", + "desc": ( + "

    VIF misst, wie stark die Varianz eines Regressionskoeffizienten durch Feature-Korrelation aufgebläht wird.

    " + "

    Interpretation: VIF=1 keine Korrelation | 1-5 gering | 5-10 mäßig | >10 ernst – Entfernung/Zusammenlegung erwägen

    " + "

    Anfänger-Tipp: Hoher VIF → instabile Koeffizienten; kleine Datenänderungen können das Vorzeichen umkehren.

    " + ), + }, + "sub_summary": { + "tip": "Kurzübersicht: Verteilungsform, Normalität und Ausreißeranzahl.", + "desc": "

    Zusammenfassung zeigt Schiefekategorie, Kurtosis-Typ, Normalitätsindikator und Ausreißeranzahl in einer Tabelle.

    ", + }, + "sub_variance_explained": { + "tip": "Von jeder Hauptkomponente erfasster Varianzanteil (Scree-Plot-Daten).", + "desc": ( + "

    Erklärte Varianz zeigt den individuellen und kumulativen Varianzanteil jeder Komponente.

    " + "

    Scree-Plot: Der «Ellbogen» (Stelle des stärksten Knicks) ist der Punkt, ab dem zusätzliche Komponenten wenig beitragen.

    " + ), + }, + "sub_loadings": { + "tip": "Gewicht jedes Originalfeatures in jeder Hauptkomponente.", + "desc": ( + "

    PCA-Ladungen zeigen, wie stark jedes Originalfeature zu jeder Hauptkomponente beiträgt.

    " + "

    Beispiel: PC1 lädt hoch auf «Größe», «Gewicht», «BMI» → interpretierbar als «Körperbau»-Komponente.

    " + ), + }, +} +# -- French ------------------------------------------------------------ +METHOD_INFO["fr"] = { + "section_overview": { + "tip": "Aperçu du jeu de données : lignes/colonnes, types, mémoire.", + "desc": ( + "

    Vue d'ensemble – structure globale du jeu de données avant l'analyse détaillée.

    " + "

    Informations incluses :

    " + "" + "

    Pourquoi c'est important : Vérifier les lignes et les types révèle les erreurs de chargement (fichier tronqué, mauvais séparateur, problème d'encodage).

    " + "

    Conseil débutant : Moins de lignes que prévu ? Peut-être un problème de séparateur. Colonnes numériques affichées comme « texte » ? Elles contiennent probablement des caractères non numériques.

    " + ), + }, + "section_quality": { + "tip": "Qualité en 4 dimensions : complétude, unicité, cohérence, validité (0-100%).", + "desc": ( + "

    Évaluation de la qualité – un « bilan de santé » sur 4 axes indépendants, chacun de 0 à 100%.

    " + "

    Les quatre axes :

    " + "" + "

    Notation : 90-100% = excellent | 70-89% = acceptable, attention aux alertes | <70% = à corriger avant modélisation

    " + "

    Formule : 0,35×Complétude + 0,25×Unicité + 0,20×Cohérence + 0,20×Validité

    " + ), + }, + "section_preprocessing": { + "tip": "Journal de toutes les étapes de nettoyage automatique avant l'analyse.", + "desc": ( + "

    Journal de prétraitement documente chronologiquement toutes les opérations de nettoyage automatique.

    " + "

    Étapes typiques : Suppression de colonnes vides/constantes, conversion texte→nombre, correction d'encodage, suppression de lignes non analysables.

    " + "

    Pourquoi c'est important : La reproductibilité est le fondement d'analyses fiables. Connaître les transformations permet de valider les résultats.

    " + "

    Conseil débutant : Si une colonne importante a été supprimée, le fichier source a probablement un problème de format.

    " + ), + }, + "section_descriptive": { + "tip": "Tendance centrale, dispersion et forme de la distribution pour chaque colonne.", + "desc": ( + "

    Statistiques descriptives – fondement de l'analyse exploratoire (EDA), résumant le centre, la dispersion et la forme de chaque colonne.

    " + "

    Indicateurs numériques :

    " + "" + "

    Conseil débutant : Les colonnes avec une grande différence moyenne/médiane peuvent contenir des valeurs aberrantes ou une forte asymétrie.

    " + ), + }, + "section_distribution": { + "tip": "Histogrammes et Q-Q plots pour visualiser la forme de chaque colonne numérique.", + "desc": ( + "

    Analyse des distributions – visualisation de la répartition des valeurs.

    " + "

    Types de graphiques :

    " + "" + "

    Formes courantes : Cloche (normale), asymétrie droite (revenus), asymétrie gauche (notes en haut d'échelle), bimodale (mélange), uniforme.

    " + "

    Pourquoi c'est important : De nombreux algorithmes ML supposent des données normales. Connaître la distribution réelle guide le choix du modèle et des transformations.

    " + ), + }, + "section_correlation": { + "tip": "Corrélation de Pearson (linéaire) et Spearman (rang) entre colonnes numériques.", + "desc": ( + "

    Analyse de corrélation mesure la force des liens entre paires de variables.

    " + "

    Deux mesures :

    " + "" + "

    Heatmap : Couleur foncée = corrélation forte. Rouge = positive, bleu = négative.

    " + "

    Seuils : |r|>0,90 multicolinéarité sévère | |r|>0,70 forte | |r|<0,30 faible

    " + "

    Conseil débutant : Les features fortement corrélées portent des informations similaires. Les deux dans un modèle linéaire → instabilité.

    " + ), + }, + "section_missing": { + "tip": "Analyse des données manquantes : motif, proportion et mécanisme.", + "desc": ( + "

    Analyse des données manquantes, combien et pourquoi les données sont absentes.

    " + "

    Trois mécanismes :

    " + "" + "

    Guide pratique : <5% suppression/moyenne | 5–30% KNN/MICE | >50% suppression de colonne

    " + ), + }, + "section_outlier": { + "tip": "Détection des anomalies via IQR et Z-score.", + "desc": ( + "

    Détection des valeurs aberrantes – points extrêmement éloignés des autres données.

    " + "

    Méthode IQR : IQR = Q3−Q1 | Modéré : Q3+1,5×IQR | " + "Extrême : Q3+3×IQR

    " + "

    Important : Toutes les valeurs aberrantes ne sont pas des erreurs ! En détection de fraude ou maladies rares, elles sont parfois les plus précieuses.

    " + "

    Conseil débutant : Dans le boxplot, les points au-delà des moustaches (whiskers) sont des aberrants potentiels à examiner.

    " + ), + }, + "section_categorical": { + "tip": "Fréquences, diagrammes en barres et entropie des colonnes catégorielles.", + "desc": ( + "

    Analyse catégorielle examine les colonnes non numériques – labels texte, catégories, booléens.

    " + "

    Indicateurs clés :

    " + "" + "

    Conseil débutant : Une barre très haute dans le graphique signale un « déséquilibre » – sur-échantillonnage peut être nécessaire.

    " + ), + }, + "section_importance": { + "tip": "Classement des features par variance et information mutuelle.", + "desc": ( + "

    Importance des features : quelles colonnes portent l'information la plus utile ?

    " + "

    Méthodes : Variance (≈0 = constante, sans info) · corrélation moyenne · information mutuelle (linéaire + non linéaire).

    " + "

    Conseil débutant : Ne supprimez pas aveuglément les features peu importantes – faibles seules, elles peuvent être fortes en combinaison (effets d'interaction).

    " + ), + }, + "section_pca": { + "tip": "ACP : dimensions intrinsèques et structure de la variance.", + "desc": ( + "

    ACP transforme les features corrélées en composantes non corrélées, ordonnées par variance.

    " + "

    Résultats : Scree plot · variance cumulée · matrice des loadings

    " + "

    Conseil débutant : L'ACP fonctionne mieux avec des échelles similaires. Le système a automatiquement standardisé (z-score).

    " + ), + }, + "section_duplicates": { + "tip": "Détection des lignes en double exact pouvant gonfler les statistiques.", + "desc": ( + "

    Analyse des doublons – recherche de lignes identiques sur toutes les colonnes.

    " + "

    Risques : Effectif gonflé → intervalles de confiance trop étroits · fuite train/test · ≈100% de doublons = probable erreur de chargement.

    " + "

    Conseil débutant : Quelques doublons (<1%) sont généralement inoffensifs, mais un taux élevé inattendu doit être investigué.

    " + ), + }, + "section_warnings": { + "tip": "Synthèse de toutes les alertes et problèmes détectés.", + "desc": ( + "

    Avertissements et problèmes regroupe toutes les anomalies détectées en un seul endroit.

    " + "

    Avertissements courants : Fort taux de manquants (>30%) · Colonnes constantes · Multicolinéarité · Aberrants extrêmes · Incohérences de type

    " + "

    Conseil débutant : Utilisez cette section comme liste de tâches prioritaire – commencez par les plus sévères.

    " + ), + }, + "sub_best_fit": { + "tip": "Comparaison de chaque colonne avec des distributions théoriques (normale, gamma, Weibull…).", + "desc": ( + "

    Meilleur ajustement compare les colonnes numériques à la normale, log-normale, exponentielle, gamma, etc.

    " + "

    Critères : AIC (plus bas = mieux) · stat KS (plus petit = mieux) · p>0,05 = acceptable

    " + "

    Conseil débutant : Résultat « norm » → tests standards applicables ; sinon, envisager une transformation log.

    " + ), + }, + "sub_jarque_bera": { + "tip": "Teste si asymétrie et kurtosis sont compatibles avec la loi normale.", + "desc": ( + "

    Test de Jarque-Bera vérifie la forme de la distribution.

    " + "

    Interprétation : p≥0,05 = normalité non rejetée | p<0,05 = significativement non normale

    " + "

    Conseil débutant : La non-normalité est très fréquente. Cela ne signifie pas des données « mauvaises », mais qu'il faut utiliser des méthodes non paramétriques ou des transformations.

    " + ), + }, + "sub_power_transform": { + "tip": "Recommandation Box-Cox/Yeo-Johnson pour rapprocher la distribution de la normale.", + "desc": ( + "

    Transformation de puissance suggère des transformations mathématiques pour rendre les données asymétriques plus symétriques.

    " + "

    Deux méthodes : Box-Cox (valeurs positives uniquement) | Yeo-Johnson (toutes valeurs)

    " + "

    Conseil débutant : Étape de prétraitement essentielle pour la régression linéaire et les réseaux de neurones.

    " + ), + }, + "sub_kde_bandwidth": { + "tip": "Largeur de bande KDE optimale selon les règles de Scott/Silverman.", + "desc": ( + "

    Largeur de bande KDE – le degré de « lissage » optimal de la courbe de densité.

    " + "

    Compromis : Faible = détaillé mais surapprentissage du bruit | Élevé = lisse mais peut manquer des caractéristiques

    " + "

    Conseil débutant : Grande différence entre les deux règles → possibilité d'aberrants ou de multimodalité.

    " + ), + }, + "sub_partial_corr": { + "tip": "Relation directe entre deux variables après contrôle de toutes les autres.", + "desc": ( + "

    Corrélation partielle : la relation est-elle directe ou causée par une tierce variable ?

    " + "

    Exemple : Ventes de glaces ↔ noyades corrélées ; après contrôle de la température → corrél. partielle ≈ 0. La température est le vrai moteur.

    " + "

    Interprétation : Corrélation partielle élevée = relation directe | ≈0 = corrélation fallacieuse

    " + ), + }, + "sub_mutual_info": { + "tip": "Mesure informationnelle capturant les dépendances linéaires et non linéaires.", + "desc": ( + "

    Information mutuelle (MI) – quantité d'information obtenue sur une variable en connaissant l'autre.

    " + "

    Essentiel : MI=0 = indépendance statistique | MI>0 = dépendance. Capte toute relation, y compris non linéaire quand Pearson est nul.

    " + "

    Conseil débutant : MI élevée + Pearson faible → relation non linéaire. Vérifiez le nuage de points.

    " + ), + }, + "sub_bootstrap_ci": { + "tip": "Intervalle de confiance à 95% par rééchantillonnage pour chaque corrélation.", + "desc": ( + "

    IC Bootstrap montre la fiabilité de chaque estimation de corrélation.

    " + "

    Méthode : 1000 tirages avec remise → calcul corrélation → percentiles 2,5–97,5 = IC 95%

    " + "

    Interprétation : IC étroit = stable | IC large = incertitude élevée | IC franchissant zéro = peut-être non significatif

    " + ), + }, + "sub_distance_corr": { + "tip": "Corrélation de distance de Szekely – détecte les dépendances non linéaires.", + "desc": ( + "

    Corrélation de distance : exactement nulle seulement si les variables sont vraiment indépendantes.

    " + "

    Comparaison : Faible Pearson + haute corrélation de distance → relation non linéaire ! Consulter le nuage de points.

    " + ), + }, + "sub_kmeans": { + "tip": "K-Means avec optimisation automatique du nombre de clusters par silhouette.", + "desc": ( + "

    Clustering K-Means divise automatiquement les données en k groupes.

    " + "

    Processus : Standardisation → k=2–10 essayés → k avec le meilleur score silhouette choisi

    " + "

    Métriques : Silhouette >0,5 = bon | >0,7 = structure forte | Inertie (WCSS) : plus bas = plus compact

    " + "

    Conseil débutant : K-Means suppose des clusters globalement sphériques. Pour des formes irrégulières : DBSCAN.

    " + ), + }, + "sub_dbscan": { + "tip": "Clustering par densité : formes arbitraires sans spécifier k.", + "desc": ( + "

    DBSCAN forme des clusters en trouvant les zones denses des données.

    " + "

    Avantages : Pas de k · Formes quelconques · Détection automatique du bruit

    " + "

    Conseil débutant : Un seul cluster + beaucoup de bruit ? Pas de structure de densité claire ou ajustement d'eps nécessaire.

    " + ), + }, + "sub_hierarchical": { + "tip": "Dendrogramme : fusion progressive des clusters.", + "desc": ( + "

    Clustering hiérarchique – dendrogramme montrant le processus de fusion étape par étape.

    " + "

    Lecture : Axe y = distance de fusion. Ligne horizontale à n'importe quelle hauteur → différents k. Longue ligne verticale = frontière naturelle.

    " + ), + }, + "sub_cluster_profiles": { + "tip": "Profil statistique de chaque cluster K-Means sur toutes les features.", + "desc": ( + "

    Profils de clusters montrent moyenne/écart-type par cluster, révélant les particularités.

    " + "

    Utilisation : Les features avec les plus grandes différences de moyenne = caractéristiques définissantes.

    " + "

    Conseil débutant : Utilisez ce tableau pour nommer les clusters (ex. : « Clients premium », « Économes »).

    " + ), + }, + "sub_tsne": { + "tip": "Projection 2D non linéaire préservant les voisinages locaux.", + "desc": ( + "

    t-SNE comprime les données haute dimension en 2D tout en préservant les similarités.

    " + "

    Lecture : Proches en 2D = similaires dans l'espace original. Groupes nets = clusters potentiellement réels.

    " + "

    ⚠️ Attention : Distances inter-clusters sans signification · Tailles non représentatives · Résultats variables (stochastique)

    " + ), + }, + "sub_umap": { + "tip": "Projection 2D rapide préservant structures locales et globales.", + "desc": ( + "

    UMAP – alternative moderne au t-SNE, plus rapide et meilleur pour la structure globale.

    " + "

    Avantages : Rapide · Meilleure préservation globale · Positions relatives des clusters partiellement significatives

    " + "

    Conseil débutant : Si t-SNE et UMAP montrent des clusters similaires, ils sont probablement réels.

    " + ), + }, + "sub_factor_analysis": { + "tip": "Découverte des facteurs latents cachés derrière les corrélations observées.", + "desc": ( + "

    Analyse factorielle explique pourquoi certaines variables sont corrélées – en supposant des facteurs latents.

    " + "

    Analogie : 10 notes d'examen pourraient refléter 3 facteurs latents : « aptitude verbale », « aptitude mathématique », « créativité ».

    " + "

    Conseil débutant : Les variables avec forte variance de bruit ne sont pas expliquées par les facteurs communs et mesurent peut-être quelque chose d'unique.

    " + ), + }, + "sub_factor_loadings": { + "tip": "Force de l'association entre chaque variable et chaque facteur latent.", + "desc": ( + "

    Loadings factoriels quantifient le lien entre variables originales et facteurs latents.

    " + "

    Interprétation : |loading|>0,7 = fort | 0,4-0,7 = modéré | <0,4 = faible

    " + "

    Loadings croisés : Variable sur plusieurs facteurs – ne convient pas bien au modèle factoriel.

    " + ), + }, + "sub_feature_contrib": { + "tip": "Contribution en variance de chaque feature pondérée par l'ACP.", + "desc": ( + "

    Contribution ACP-pondérée classe les features par contribution à la variance totale pour la sélection non supervisée.

    " + "

    Conseil débutant : Les features en bas du classement contribuent très peu – candidates à la suppression.

    " + ), + }, + "sub_interaction": { + "tip": "Détection d'interactions produit synergiques entre paires de features.", + "desc": ( + "

    Détection d'interactions : le produit de deux features contient-il une information absente individuellement ?

    " + "

    Conseil débutant : Interaction forte trouvée ? Ajouter le produit comme nouvelle feature peut améliorer significativement le modèle.

    " + ), + }, + "sub_monotonic": { + "tip": "Pearson vs Spearman pour identifier des relations monotones non linéaires.", + "desc": ( + "

    Analyse monotone détecte les paires de variables qui croissent/décroissent ensemble sans être linéaires.

    " + "

    Clé : Grande différence |Spearman|−|Pearson| = motif exponentiel/logarithmique. Une transformation monotone peut améliorer le modèle linéaire.

    " + ), + }, + "sub_binning": { + "tip": "Évaluation du binning à largeur égale et fréquence égale par entropie.", + "desc": ( + "

    Analyse de binning – stratégies de discrétisation des variables continues.

    " + "

    Deux stratégies : Largeur égale (sensible aux aberrants) vs fréquence égale (adapté aux données asymétriques)

    " + ), + }, + "sub_cardinality": { + "tip": "Analyse des valeurs uniques et recommandation d'encodage.", + "desc": ( + "

    Cardinalité et encodage : recommandation selon le nombre de valeurs uniques :

    " + "" + ), + }, + "sub_leakage": { + "tip": "Identification de features susceptibles de divulguer des informations cible.", + "desc": ( + "

    Risque de fuite examine les features accédant directement/indirectement à la variable cible.

    " + "

    Conseil débutant : Précision « trop belle pour être vraie » (ex. 99%) → la fuite de données est le suspect nᵒ1.

    " + ), + }, + "sub_iso_forest": { + "tip": "Détection d'anomalies par isolation aléatoire (forêt d'arbres).", + "desc": ( + "

    Isolation Forest : « les anomalies sont plus faciles à isoler ».

    " + "

    Principe : Arbres de partition aléatoire → longueur moyenne du chemin d'isolation → chemin court = plus anomal

    " + "

    Conseil débutant : Peu de paramètres, bon en haute dimension – premier choix pour la détection d'anomalies.

    " + ), + }, + "sub_lof": { + "tip": "Détection basée sur la densité locale comparée aux voisins.", + "desc": ( + "

    LOF compare la densité locale d'un point à celle de ses k plus proches voisins.

    " + "

    LOF≈1 = normal | LOF>>1 = bien plus clairsemé que les voisins (anomal)

    " + "

    Conseil débutant : En présence de clusters de densités différentes, LOF est plus efficace qu'Isolation Forest.

    " + ), + }, + "sub_mahalanobis": { + "tip": "Distance multivariée au centre prenant en compte les corrélations.", + "desc": ( + "

    Distance de Mahalanobis mesure la distance au centre des données en tenant compte de la structure de covariance.

    " + "

    vs Euclidien : L'euclidien traite toutes les directions de la même façon ; Mahalanobis intègre les corrélations – " + "deux features habituellement conjointes évoluant en sens opposé = vrai anomal.

    " + ), + }, + "sub_consensus": { + "tip": "Anomalie signalée uniquement si ≥2 méthodes sur 3 sont d'accord.", + "desc": ( + "

    Détection par consensus combine Isolation Forest, LOF et Mahalanobis.

    " + "

    Règle : ≥2/3 méthodes concordantes → signalé comme anomalie. Réduit fortement les fausses alertes.

    " + "

    Conseil débutant : Commencez l'investigation par les signalements par consensus – les candidats les plus fiables.

    " + ), + }, + "test_levene": { + "tip": "Vérifie l'égalité des variances entre groupes.", + "desc": ( + "

    Test de Levene confirme si les variances des groupes sont approximativement égales.

    " + "

    Interprétation : p>0,05 = homogénéité confirmée | p≤0,05 = variances différentes → utiliser Welch ou non-paramétrique

    " + ), + }, + "test_kruskal_wallis": { + "tip": "ANOVA non paramétrique : les groupes viennent-ils de la même distribution ?", + "desc": ( + "

    Test de Kruskal-Wallis – équivalent non paramétrique de l'ANOVA à un facteur.

    " + "

    Interprétation : p<0,05 = au moins un groupe diffère → Mann-Whitney par paires | p≥0,05 = pas de différence

    " + ), + }, + "test_mann_whitney": { + "tip": "Test non paramétrique à deux échantillons : comparaison de deux groupes indépendants.", + "desc": ( + "

    Test de Mann-Whitney U : les deux groupes proviennent-ils de la même distribution ?

    " + "

    Interprétation : p<0,05 = différence significative | p≥0,05 = pas de différence

    " + "

    Conseil débutant : Même moyenne, dispersion différente → peut quand même être significatif.

    " + ), + }, + "test_chi_square": { + "tip": "Les fréquences observées s'écartent-elles des fréquences attendues ?", + "desc": ( + "

    Test du Chi-deux d'ajustement : la distribution observée correspond-elle à l'attendue (par défaut : uniforme) ?

    " + "

    Interprétation : p<0,05 = écart significatif | p≥0,05 = concordance

    " + "

    Condition : Chaque fréquence attendue doit être ≥5.

    " + ), + }, + "test_grubbs": { + "tip": "La valeur la plus extrême est-elle un aberrant statistiquement significatif ?", + "desc": ( + "

    Test de Grubbs : la valeur extrême est-elle un phénomène naturel ou une vraie anomalie ?

    " + "

    Interprétation : p<0,05 = aberrant significatif | p≥0,05 = dans les limites attendues

    " + "

    Conseil débutant : Grubbs ne teste qu'un seul extrême. Pour plusieurs aberrants : IQR ou Isolation Forest.

    " + ), + }, + "test_adf": { + "tip": "La série temporelle est-elle stationnaire ?", + "desc": ( + "

    Test ADF : recherche de racine unitaire (non-stationnarité).

    " + "

    Interprétation : p<0,05 = stationnaire ✓ | p≥0,05 = non stationnaire → différenciation nécessaire

    " + "

    Conseil débutant : Les colonnes temporelles doivent passer le test ADF avant régression. Des prédicteurs non stationnaires invalident la régression.

    " + ), + }, + "sub_column_quality": { + "tip": "Score qualité par colonne : complétude, unicité, validité.", + "desc": ( + "

    Qualité par colonne décompose le score global pour identifier les colonnes problématiques.

    " + "

    Conseil débutant : Les colonnes au score très bas sont les premières à nettoyer ou supprimer.

    " + ), + }, + "sub_cleaning_log": { + "tip": "Journal étape par étape de toutes les opérations de nettoyage automatique.", + "desc": "

    Journal de nettoyage documente chaque transformation pour une transparence et reproductibilité totales.

    ", + }, + "sub_detected_issues": { + "tip": "Liste des problèmes de qualité détectés lors du prétraitement.", + "desc": "

    Problèmes détectés : types mixtes, motifs suspects (ex. « 999 » comme marqueur manquant), erreurs d'encodage, etc.

    ", + }, + "sub_normality_tests": { + "tip": "Tests de Shapiro-Wilk, Anderson-Darling et Jarque-Bera par colonne.", + "desc": ( + "

    Tests de normalité évaluent la normalité de chaque colonne avec trois tests complémentaires.

    " + "

    Les trois à p<0,05 → probablement non normale. En cas de désaccord, vérifier l'histogramme.

    " + ), + }, + "sub_vif": { + "tip": "Facteur d'inflation de la variance pour détecter la multicolinéarité.", + "desc": ( + "

    VIF mesure à quel point la variance d'un coefficient de régression est gonflée par la corrélation entre features.

    " + "

    Interprétation : VIF=1 aucune corrélation | 1-5 faible | 5-10 modéré | >10 sévère – suppression/fusion à envisager

    " + "

    Conseil débutant : VIF élevé → coefficients instables ; un petit changement de données peut inverser le signe.

    " + ), + }, + "sub_summary": { + "tip": "Aperçu rapide : forme de distribution, normalité et nombre d'aberrants.", + "desc": "

    Résumé rassemble catégorie d'asymétrie, type de kurtosis, indicateur de normalité et nombre d'aberrants en un tableau.

    ", + }, + "sub_variance_explained": { + "tip": "Proportion de variance captée par chaque composante principale.", + "desc": ( + "

    Variance expliquée montre la contribution individuelle et cumulée de chaque composante.

    " + "

    Scree plot : Le « coude » est le point où les composantes supplémentaires apportent de moins en moins.

    " + ), + }, + "sub_loadings": { + "tip": "Poids de chaque feature originale dans chaque composante principale.", + "desc": ( + "

    Loadings ACP montrent le poids de chaque feature dans chaque composante.

    " + "

    Exemple : PC1 charge fortement sur « Taille », « Poids », « IMC » → interprétable comme composante « corpulence ».

    " + ), + }, +} + + +def get_method_info_json() -> str: + "“”Return METHOD_INFO dict as a JSON string for embedding in JS.“”" + import json + return json.dumps(METHOD_INFO, ensure_ascii=False) + + +# ===================================================================== +# Metric tooltip translations (column header / cell hover tips) +# ===================================================================== +# METRIC_TIPS_I18N[lang_code][metric_key] = translated tooltip string +# English tips are canonical; other languages mirror them. + +METRIC_TIPS_I18N: dict[str, dict[str, str]] = {} + +# ----- English (en) -------------------------------------------------- +METRIC_TIPS_I18N["en"] = { + "type": "Inferred data type of the column (numeric, categorical, text, datetime, boolean).", + "count": "Number of non-null values in the column.", + "missing": "Number of missing (null / NaN) values.", + "missing_%": "Percentage of missing values = (missing / total rows) x 100.", + "unique": "Number of distinct values in the column.", + "mean": "Arithmetic mean = sum of values / count.", + "median": "Middle value when data is sorted (50th percentile).", + "std": "Standard deviation -- measures spread around the mean. Larger = more dispersed.", + "se": "Standard error of the mean = std / sqrt(n). Indicates precision of the sample mean.", + "cv": "Coefficient of variation = std / |mean|. Unitless relative measure of variability.", + "mad": "Median Absolute Deviation = median(|xi - median|). Robust measure of spread.", + "min": "Minimum value in the column.", + "max": "Maximum value in the column.", + "range": "Range = max - min. 
Total spread of the data.", + "p5": "5th percentile -- 5% of data falls below this value.", + "q1": "1st quartile (25th percentile) -- 25% of data falls below this value.", + "q3": "3rd quartile (75th percentile) -- 75% of data falls below this value.", + "p95": "95th percentile -- 95% of data falls below this value.", + "iqr": "Interquartile Range = Q3 - Q1. Middle 50% spread, used for outlier detection.", + "skewness": "Skewness measures distribution asymmetry. 0 = symmetric, >0 = right-skewed, <0 = left-skewed.", + "kurtosis": "Excess kurtosis measures tail heaviness. 0 = normal, >0 = heavy tails, <0 = light tails.", + "top": "Most frequently occurring value in the column.", + "freq": "Frequency count of the most common value.", + "n": "Number of non-null observations used for the distribution test.", + "skew_type": "Interpretation of skewness: symmetric (|s|<0.5), moderate skew (0.5-1), high skew (>1).", + "kurt_type": "Interpretation of kurtosis: mesokurtic (~0), leptokurtic (>1, heavy tails), platykurtic (<-1, light tails).", + "normality_test": "Primary normality test used (Shapiro-Wilk for n<=5000, D'Agostino-Pearson for larger).", + "normality_p": "p-value of the primary normality test. p<0.05 -> likely non-normal.", + "is_normal_0.05": "True if p-value >= 0.05, meaning the null hypothesis of normality is not rejected at alpha=0.05.", + "shapiro_p": "p-value from Shapiro-Wilk test. Best for small-medium samples (n<=5000).", + "dagostino_p": "p-value from D'Agostino-Pearson test. Uses skewness + kurtosis, good for n>=20.", + "ks_p": "p-value from Kolmogorov-Smirnov test vs. normal distribution.", + "anderson_stat": "Anderson-Darling test statistic. Higher = stronger evidence against normality.", + "anderson_5pct_cv": "Anderson-Darling 5% critical value. 
If stat > cv -> reject normality at 5%.", + "missing_count": "Number of missing (null) values in this column.", + "missing_ratio": "Fraction of missing values = missing_count / total_rows (0 to 1).", + "dtype": "Pandas dtype of the column.", + "lower_bound": "IQR lower fence = Q1 - k x IQR. Values below this are outliers (default k=1.5).", + "upper_bound": "IQR upper fence = Q3 + k x IQR. Values above this are outliers (default k=1.5).", + "outlier_count": "Number of values falling outside the outlier bounds.", + "outlier_%": "Percentage of outlier values = (outlier_count / total) x 100.", + "min_outlier": "Smallest outlier value detected.", + "max_outlier": "Largest outlier value detected.", + "threshold": "Z-score threshold used. Values with |z| > threshold are outliers.", + "max_zscore": "Maximum absolute z-score found in the column.", + "top_value": "The most frequently occurring category value.", + "top_frequency": "Count of the most frequent category.", + "top_%": "Percentage of the most frequent category = (top_freq / total) x 100.", + "entropy": "Shannon entropy (bits). Higher = more uniform distribution among categories.", + "norm_entropy": "Normalized entropy = entropy / log2(unique). 1.0 = perfectly uniform.", + "max_entropy": "Maximum possible entropy = log2(unique). Achieved when all categories are equally frequent.", + "normalized_entropy": "Same as norm_entropy: entropy / max_entropy. 1.0 = uniform.", + "unique_values": "Number of distinct category values.", + "variance": "Variance of the column = mean of squared deviations from mean.", + "mean_abs_corr": "Mean absolute Pearson correlation with all other numeric columns.", + "avg_mutual_info": "Average mutual information with all other columns (uses sklearn).", + "VIF": "Variance Inflation Factor. 
VIF=1 -> no multicollinearity, >5 -> moderate, >10 -> severe.", + "multicollinearity": "Interpretation of VIF: low (<5), moderate (5-10), or high (>=10).", + "variance_ratio": "Proportion of total variance explained by this principal component.", + "cumulative_ratio": "Cumulative proportion of variance explained up to this component.", + "eigenvalue": "Eigenvalue of the covariance matrix for this component. Higher = more variance.", + "n_components": "Total number of principal components computed.", + "total_variance_explained": "Total variance captured by all computed components.", + "components_for_90pct": "Minimum number of components needed to explain >= 90% of variance.", + "top_component_variance": "Variance ratio of the first (most important) principal component.", + "total_rows": "Total number of rows in the dataset.", + "duplicate_rows": "Number of exact duplicate rows found.", + "unique_rows": "Number of unique (non-duplicate) rows.", + "duplicate_ratio": "Fraction of duplicate rows = duplicate_rows / total_rows.", + "uniqueness_ratio": "Ratio of unique values = unique / total_non_null. 1.0 = all unique.", + "total_non_null": "Number of non-null values used for uniqueness calculation.", + "is_unique_key": "True if every non-null value is unique -- potential primary key.", + "completeness": "Fraction of non-missing values = 1 - (missing / total). 1.0 = no missing data.", + "uniqueness": "Ratio of unique values to total non-null values. Higher = more diverse.", + "consistency": "Measures type consistency. 1.0 = all values match the expected data type.", + "validity": "Fraction of values within expected ranges/formats. 
1.0 = all valid.", + "overall": "Weighted quality score = 0.35*completeness + 0.25*uniqueness + 0.20*consistency + 0.20*validity.", + "quality_score": "Per-column quality score combining completeness and uniqueness.", + "column": "Column name in the dataset.", + "component": "Principal component identifier (PC1, PC2, ...).", + "value": "Category or discrete value.", + "percentage": "Percentage share of this value = (count / total) x 100.", + "best_distribution": "Scipy distribution that best fits the data according to AIC.", + "aic": "Akaike Information Criterion -- lower is better. Penalises complexity.", + "bic": "Bayesian Information Criterion -- lower is better. More conservative than AIC.", + "ks_statistic": "Kolmogorov-Smirnov statistic measuring max CDF deviation from the fitted distribution.", + "jarque_bera_stat": "Jarque-Bera test statistic. Large values indicate non-normality.", + "jb_p_value": "p-value of the Jarque-Bera test. p < 0.05 -> reject normality.", + "recommended_transform": "Power transform recommended to make the column more normal (Box-Cox or Yeo-Johnson).", + "original_skew": "Skewness of the original (untransformed) column.", + "transformed_skew": "Skewness after applying the recommended power transform.", + "bandwidth_silverman": "Kernel bandwidth via Silverman's rule for KDE estimation.", + "bandwidth_scott": "Kernel bandwidth via Scott's rule for KDE estimation.", + "partial_corr": "Partial correlation -- Pearson correlation after removing confounding effects.", + "mutual_information": "Mutual information (bits) -- measures non-linear dependency between two variables.", + "ci_lower": "Lower bound of the 95% bootstrap confidence interval for the correlation.", + "ci_upper": "Upper bound of the 95% bootstrap confidence interval for the correlation.", + "distance_corr": "Szekely distance correlation -- captures non-linear dependencies (0=independent, 1=dependent).", + "optimal_k": "Best number of clusters determined by silhouette score 
analysis.", + "best_silhouette": "Highest mean silhouette score across evaluated k values (-1 to 1, higher=better).", + "inertia": "Within-cluster sum of squares (WCSS). Lower = tighter clusters.", + "n_clusters_dbscan": "Number of clusters found by DBSCAN (excludes noise).", + "noise_ratio": "Fraction of points labelled as noise by DBSCAN.", + "eps": "DBSCAN epsilon -- neighbourhood radius auto-estimated from k-distance plot.", + "kl_divergence": "Kullback-Leibler divergence of the t-SNE embedding. Lower = better fit.", + "tsne_perplexity": "Perplexity parameter for t-SNE (balances local vs. global structure).", + "n_factors": "Number of latent factors retained via Kaiser criterion (eigenvalue > 1).", + "factor_loading": "Correlation between an observed variable and a latent factor.", + "noise_variance": "Estimated noise (uniqueness) for each variable in Factor Analysis.", + "interaction_strength": "Pearson correlation between a product-interaction term and the top feature.", + "monotonic_gap": "Gap between Pearson and Spearman correlations -- large gap -> non-linear monotonic.", + "entropy_equal_width": "Shannon entropy of equal-width binning. Lower = more concentrated distribution.", + "entropy_equal_freq": "Shannon entropy of equal-frequency binning. Lower = more concentrated.", + "cardinality": "Number of unique values in a categorical column.", + "encoding_rec": "Recommended encoding strategy based on cardinality analysis.", + "leakage_risk": "Risk level (low/medium/high) that a feature may leak target information.", + "anomaly_score_if": "Isolation Forest anomaly score. More negative = more anomalous.", + "lof_score": "Local Outlier Factor minus-score. More negative = more anomalous.", + "mahalanobis_dist": "Mahalanobis distance from the data centroid. 
Larger = more unusual.", + "consensus_flag": "True if >= 2 out of 3 anomaly methods agree the point is anomalous.", + "levene_stat": "Levene test statistic for equality of variances.", + "levene_p": "p-value of Levene's test. p < 0.05 -> variances are significantly different.", + "kw_stat": "Kruskal-Wallis H statistic -- non-parametric one-way ANOVA.", + "kw_p": "p-value of Kruskal-Wallis test. p < 0.05 -> at least one group differs.", + "mw_stat": "Mann-Whitney U statistic -- non-parametric two-sample rank test.", + "mw_p": "p-value of Mann-Whitney U test.", + "chi2_stat": "Chi-square goodness-of-fit statistic vs. uniform distribution.", + "chi2_p": "p-value of chi-square goodness-of-fit test.", + "grubbs_stat": "Grubbs test statistic for detecting a single outlier.", + "grubbs_p": "p-value of Grubbs test.", + "adf_stat": "Augmented Dickey-Fuller test statistic for stationarity.", + "adf_p": "p-value of the ADF test. p < 0.05 -> series is stationary.", + "numeric_ratio": "Fraction of columns that are numeric.", + "categorical_ratio": "Fraction of columns that are categorical.", + "duplicate_row_ratio": "Fraction of rows that are exact duplicates.", +} + +# ----- Korean (ko) --------------------------------------------------- +METRIC_TIPS_I18N["ko"] = { + "type": "Column(Feature)의 추론된 데이터 타입 (numeric, categorical, text, datetime, boolean).", + "count": "Column(Feature)의 null이 아닌 값 개수.", + "missing": "결측(null / NaN) 값 개수.", + "missing_%": "결측 비율 = (결측 수 / 전체 행) x 100.", + "unique": "Column(Feature)의 고유(distinct) 값 개수.", + "mean": "산술 평균 = 값 합계 / 개수.", + "median": "데이터를 정렬했을 때 중앙값 (50번째 백분위).", + "std": "표준편차 -- 평균 주위의 분산 정도를 측정. 클수록 분산이 큼.", + "se": "평균의 표준오차 = std / sqrt(n). 표본 평균의 정밀도를 나타냄.", + "cv": "변동계수 = std / |mean|. 단위 없는 상대적 변동성 측정치.", + "mad": "중앙값 절대 편차 = median(|xi - median|). 강건한 산포 측정치.", + "min": "Column(Feature)의 최솟값.", + "max": "Column(Feature)의 최댓값.", + "range": "범위 = max - min. 
데이터의 전체 분산 폭.", + "p5": "5번째 백분위 -- 데이터의 5%가 이 값보다 낮음.", + "q1": "1사분위수 (25번째 백분위) -- 데이터의 25%가 이 값보다 낮음.", + "q3": "3사분위수 (75번째 백분위) -- 데이터의 75%가 이 값보다 낮음.", + "p95": "95번째 백분위 -- 데이터의 95%가 이 값보다 낮음.", + "iqr": "사분위 범위 = Q3 - Q1. 중간 50% 분포 폭, 이상치 탐지에 사용.", + "skewness": "왜도: 분포의 비대칭성. 0 = 대칭, >0 = 오른쪽 치우침, <0 = 왼쪽 치우침.", + "kurtosis": "초과 첨도: 꼬리 두께. 0 = 정규, >0 = 두꺼운 꼬리, <0 = 얇은 꼬리.", + "top": "Column(Feature)에서 가장 자주 나타나는 값.", + "freq": "최빈값의 빈도 수.", + "n": "분포 검정에 사용된 null이 아닌 관측치 수.", + "skew_type": "왜도 해석: 대칭(|s|<0.5), 중간 왜도(0.5-1), 높은 왜도(>1).", + "kurt_type": "첨도 해석: 중간첨도(~0), 급첨(>1, 두꺼운 꼬리), 완첨(<-1, 얇은 꼬리).", + "normality_test": "사용된 주 정규성 검정 (Shapiro-Wilk n<=5000, D'Agostino-Pearson 큰 표본).", + "normality_p": "주 정규성 검정의 p값. p<0.05 -> 비정규 가능성 높음.", + "is_normal_0.05": "p값 >= 0.05이면 True, 즉 alpha=0.05에서 정규성 귀무가설이 기각되지 않음.", + "shapiro_p": "Shapiro-Wilk 검정 p값. 소-중 표본에 적합 (n<=5000).", + "dagostino_p": "D'Agostino-Pearson 검정 p값. 왜도 + 첨도 사용, n>=20에 적합.", + "ks_p": "Kolmogorov-Smirnov 검정 p값 (정규분포와 비교).", + "anderson_stat": "Anderson-Darling 검정 통계량. 높을수록 정규성 반증 강함.", + "anderson_5pct_cv": "Anderson-Darling 5% 임계값. stat > cv -> 5%에서 정규성 기각.", + "missing_count": "이 Column(Feature)의 결측(null) 값 개수.", + "missing_ratio": "결측 비율 = missing_count / total_rows (0~1).", + "dtype": "Column(Feature)의 Pandas dtype.", + "lower_bound": "IQR 하한 = Q1 - k x IQR. 이보다 낮으면 이상치 (기본 k=1.5).", + "upper_bound": "IQR 상한 = Q3 + k x IQR. 이보다 높으면 이상치 (기본 k=1.5).", + "outlier_count": "이상치 범위 밖의 값 개수.", + "outlier_%": "이상치 비율 = (outlier_count / 전체) x 100.", + "min_outlier": "탐지된 가장 작은 이상치 값.", + "max_outlier": "탐지된 가장 큰 이상치 값.", + "threshold": "사용된 Z-점수 임계값. |z| > threshold이면 이상치.", + "max_zscore": "Column(Feature)에서 발견된 최대 절대 Z-점수.", + "top_value": "가장 빈번한 범주 값.", + "top_frequency": "가장 빈번한 범주의 빈도 수.", + "top_%": "가장 빈번한 범주의 비율 = (top_freq / 전체) x 100.", + "entropy": "Shannon 엔트로피 (비트). 높을수록 범주 간 분포가 더 균일.", + "norm_entropy": "정규화 엔트로피 = entropy / log2(unique). 
1.0 = 완전히 균일.", + "max_entropy": "최대 가능 엔트로피 = log2(unique). 모든 범주가 동일 빈도일 때 달성.", + "normalized_entropy": "norm_entropy와 동일: entropy / max_entropy. 1.0 = 균일.", + "unique_values": "고유 범주 값 개수.", + "variance": "Column(Feature)의 분산 = 평균으로부터 편차 제곱의 평균.", + "mean_abs_corr": "다른 모든 수치형 Column(Feature)과의 평균 절대 Pearson 상관계수.", + "avg_mutual_info": "다른 모든 Column(Feature)과의 평균 상호정보량.", + "VIF": "분산팽창계수. VIF=1 -> 다중공선성 없음, >5 -> 보통, >10 -> 심각.", + "multicollinearity": "VIF 해석: 낮음(<5), 보통(5-10), 높음(>=10).", + "variance_ratio": "이 주성분이 설명하는 총 분산의 비율.", + "cumulative_ratio": "이 성분까지 누적 설명 분산 비율.", + "eigenvalue": "이 성분의 공분산 행렬 고유값. 높을수록 더 많은 분산.", + "n_components": "계산된 주성분 총 개수.", + "total_variance_explained": "모든 성분이 설명하는 총 분산.", + "components_for_90pct": "분산 90% 이상 설명에 필요한 최소 성분 수.", + "top_component_variance": "첫 번째 (가장 중요한) 주성분의 분산 비율.", + "total_rows": "데이터셋의 총 행 수.", + "duplicate_rows": "정확히 중복된 행 수.", + "unique_rows": "고유한(비중복) 행 수.", + "duplicate_ratio": "중복 행 비율 = duplicate_rows / total_rows.", + "uniqueness_ratio": "고유값 비율 = unique / total_non_null. 1.0 = 모두 고유.", + "total_non_null": "고유성 계산에 사용된 null이 아닌 값 수.", + "is_unique_key": "모든 null이 아닌 값이 고유하면 True -- 잠재적 기본 키.", + "completeness": "비결측 값 비율 = 1 - (missing / total). 1.0 = 결측 없음.", + "uniqueness": "고유 값 대 총 non-null 값 비율. 높을수록 다양함.", + "consistency": "타입 일관성 측정. 1.0 = 모든 값이 예상 데이터 타입과 일치.", + "validity": "예상 범위/형식 내 값 비율. 1.0 = 모두 유효.", + "overall": "가중 품질 점수 = 0.35*completeness + 0.25*uniqueness + 0.20*consistency + 0.20*validity.", + "quality_score": "completeness와 uniqueness를 결합한 Column(Feature)별 품질 점수.", + "column": "데이터셋의 Column(Feature) 이름.", + "component": "주성분 식별자 (PC1, PC2, ...).", + "value": "범주 또는 이산 값.", + "percentage": "이 값의 비율 = (count / total) x 100.", + "best_distribution": "AIC 기준 데이터에 가장 적합한 scipy 분포.", + "aic": "아카이케 정보 기준 -- 낮을수록 좋음. 복잡도를 벌점화.", + "bic": "베이지안 정보 기준 -- 낮을수록 좋음. AIC보다 보수적.", + "ks_statistic": "적합된 분포와의 최대 CDF 편차를 측정하는 K-S 통계량.", + "jarque_bera_stat": "Jarque-Bera 검정 통계량. 
큰 값은 비정규성을 나타냄.", + "jb_p_value": "Jarque-Bera 검정 p값. p < 0.05 -> 정규성 기각.", + "recommended_transform": "Column(Feature)을 더 정규적으로 만드는 권장 변환 (Box-Cox 또는 Yeo-Johnson).", + "original_skew": "원래(변환 전) Column(Feature)의 왜도.", + "transformed_skew": "권장 변환 적용 후 왜도.", + "bandwidth_silverman": "KDE 추정을 위한 Silverman 규칙 커널 대역폭.", + "bandwidth_scott": "KDE 추정을 위한 Scott 규칙 커널 대역폭.", + "partial_corr": "편상관 -- 다른 변수의 혼동 효과를 제거한 후의 Pearson 상관.", + "mutual_information": "상호정보량 (비트) -- 두 변수 간 비선형 의존성 측정.", + "ci_lower": "상관계수에 대한 95% 부트스트랩 신뢰구간의 하한.", + "ci_upper": "상관계수에 대한 95% 부트스트랩 신뢰구간의 상한.", + "distance_corr": "Szekely 거리 상관 -- 비선형 의존성 포착 (0=독립, 1=의존).", + "optimal_k": "실루엣 점수 분석으로 결정된 최적 클러스터 수.", + "best_silhouette": "평가된 k 값 중 최고 평균 실루엣 점수 (-1~1, 높을수록 좋음).", + "inertia": "클러스터 내 제곱합(WCSS). 낮을수록 더 밀집된 클러스터.", + "n_clusters_dbscan": "DBSCAN이 찾은 클러스터 수 (노이즈 제외).", + "noise_ratio": "DBSCAN이 노이즈로 분류한 점의 비율.", + "eps": "DBSCAN epsilon -- k-distance 플롯에서 추정된 이웃 반경.", + "kl_divergence": "t-SNE 임베딩의 KL 발산. 낮을수록 좋은 적합도.", + "tsne_perplexity": "t-SNE perplexity(혼란도) (지역 vs 전역 구조 균형).", + "n_factors": "Kaiser 기준(eigenvalue > 1)으로 유지된 잠재 요인 수.", + "factor_loading": "관측 변수와 잠재 요인 간 상관.", + "noise_variance": "요인 분석에서 각 변수의 추정 노이즈 (고유분산).", + "interaction_strength": "곱 상호작용 항과 상위 특성 간의 Pearson 상관.", + "monotonic_gap": "Pearson과 Spearman 상관 간 차이 -- 큰 차이 -> 비선형 단조 관계.", + "entropy_equal_width": "등폭 구간 Shannon 엔트로피. 낮을수록 분포가 집중적.", + "entropy_equal_freq": "등빈도 구간 Shannon 엔트로피. 낮을수록 집중적.", + "cardinality": "범주형 Column(Feature)의 고유 값 수.", + "encoding_rec": "카디널리티 분석 기반 권장 인코딩 전략.", + "leakage_risk": "특성이 타겟 정보를 누출할 수 있는 위험 수준 (low/medium/high).", + "anomaly_score_if": "Isolation Forest 이상 점수. 더 음수일수록 더 비정상적.", + "lof_score": "Local Outlier Factor 마이너스 점수. 더 음수일수록 더 비정상적.", + "mahalanobis_dist": "데이터 중심으로부터의 Mahalanobis 거리. 클수록 비정상적.", + "consensus_flag": "3가지 이상치 방법 중 2개 이상 동의하면 True.", + "levene_stat": "등분산성에 대한 Levene 검정 통계량.", + "levene_p": "Levene 검정 p값. 
p < 0.05 -> 분산이 유의하게 다름.", + "kw_stat": "Kruskal-Wallis H 통계량 -- 비모수 일원 ANOVA.", + "kw_p": "Kruskal-Wallis 검정 p값. p < 0.05 -> 적어도 하나의 집단이 다름.", + "mw_stat": "Mann-Whitney U 통계량 -- 비모수 두 표본 순위 검정.", + "mw_p": "Mann-Whitney U 검정 p값.", + "chi2_stat": "균일 분포 대비 카이제곱 적합도 검정 통계량.", + "chi2_p": "카이제곱 적합도 검정 p값.", + "grubbs_stat": "단일 이상치 탐지를 위한 Grubbs 검정 통계량.", + "grubbs_p": "Grubbs 검정 p값.", + "adf_stat": "정상성에 대한 ADF(Augmented Dickey-Fuller) 검정 통계량.", + "adf_p": "ADF 검정 p값. p < 0.05 -> 시계열이 정상적.", + "numeric_ratio": "수치형 Column(Feature) 비율.", + "categorical_ratio": "범주형 Column(Feature) 비율.", + "duplicate_row_ratio": "정확히 중복된 행의 비율.", +} + +# ----- Chinese (zh) -------------------------------------------------- +METRIC_TIPS_I18N["zh"] = { + "type": "推断的数据类型 (numeric, categorical, text, datetime, boolean).", + "count": "该列中非空值的数量.", + "missing": "缺失(null / NaN)值的数量.", + "missing_%": "缺失率 = (缺失数 / 总行数) x 100.", + "unique": "该列中不同(唯一)值的数量.", + "mean": "算术平均 = 值总和 / 个数.", + "median": "按大小排序后的中间值(第50百分位).", + "std": "标准差 -- 衡量围绕均值的离散程度. 越大越分散.", + "se": "均值标准误 = std / sqrt(n). 表示样本均值的精度.", + "cv": "变异系数 = std / |mean|. 无单位的相对变异度量.", + "mad": "中位绝对偏差 = median(|xi - median|). 稳健的离散度量.", + "min": "该列的最小值.", + "max": "该列的最大值.", + "range": "极差 = max - min. 数据的总分布幅度.", + "p5": "第5百分位 -- 5%的数据低于此值.", + "q1": "第1四分位(第25百分位) -- 25%的数据低于此值.", + "q3": "第3四分位(第75百分位) -- 75%的数据低于此值.", + "p95": "第95百分位 -- 95%的数据低于此值.", + "iqr": "四分位距 = Q3 - Q1. 中间50%的分布幅度, 用于异常值检测.", + "skewness": "偏度衡量分布不对称性. 0=对称, >0=右偏, <0=左偏.", + "kurtosis": "超额峰度衡量尾部厚度. 0=正态, >0=厚尾, <0=薄尾.", + "top": "该列中出现频率最高的值.", + "freq": "最频繁值的出现次数.", + "n": "用于分布检验的非空观测数.", + "skew_type": "偏度解释: 对称(|s|<0.5), 中等偏(0.5-1), 高偏(>1).", + "kurt_type": "峰度解释: 中等峰(~0), 尖峰(>1,厚尾), 扁峰(<-1,薄尾).", + "normality_test": "使用的主要正态性检验(n<=5000用Shapiro-Wilk, 大样本用D'Agostino-Pearson).", + "normality_p": "主要正态性检验的p值. p<0.05 -> 可能非正态.", + "is_normal_0.05": "若p值>=0.05则为True, 即在alpha=0.05下正态性零假设未被拒绝.", + "shapiro_p": "Shapiro-Wilk检验p值. 
适合中小样本(n<=5000).", + "dagostino_p": "D'Agostino-Pearson检验p值. 使用偏度+峰度, 适合n>=20.", + "ks_p": "Kolmogorov-Smirnov检验p值(与正态分布比较).", + "anderson_stat": "Anderson-Darling检验统计量. 越高则反正态证据越强.", + "anderson_5pct_cv": "Anderson-Darling 5%临界值. stat > cv -> 在5%水平拒绝正态性.", + "missing_count": "该列中缺失(null)值的数量.", + "missing_ratio": "缺失比例 = missing_count / total_rows (0至1).", + "dtype": "该列的Pandas数据类型.", + "lower_bound": "IQR下限 = Q1 - k x IQR. 低于此值为异常值(默认k=1.5).", + "upper_bound": "IQR上限 = Q3 + k x IQR. 高于此值为异常值(默认k=1.5).", + "outlier_count": "落在异常值范围外的值数量.", + "outlier_%": "异常值百分比 = (outlier_count / 总数) x 100.", + "min_outlier": "检测到的最小异常值.", + "max_outlier": "检测到的最大异常值.", + "threshold": "使用的Z分数阈值. |z| > threshold则为异常值.", + "max_zscore": "该列中找到的最大绝对Z分数.", + "top_value": "出现频率最高的类别值.", + "top_frequency": "最频繁类别的计数.", + "top_%": "最频繁类别的百分比 = (top_freq / 总数) x 100.", + "entropy": "Shannon熵(比特). 越高意味着类别间分布越均匀.", + "norm_entropy": "归一化熵 = entropy / log2(unique). 1.0=完全均匀.", + "max_entropy": "最大可能熵 = log2(unique). 所有类别频率相同时达到.", + "normalized_entropy": "与norm_entropy相同: entropy / max_entropy. 1.0=均匀.", + "unique_values": "不同类别值的数量.", + "variance": "该列方差 = 与均值偏差平方的均值.", + "mean_abs_corr": "与所有其他数值列的平均绝对Pearson相关系数.", + "avg_mutual_info": "与所有其他列的平均互信息量.", + "VIF": "方差膨胀因子. VIF=1->无多重共线性, >5->中等, >10->严重.", + "multicollinearity": "VIF解释: 低(<5), 中等(5-10), 高(>=10).", + "variance_ratio": "该主成分解释的总方差比例.", + "cumulative_ratio": "截至该成分的累计方差解释比例.", + "eigenvalue": "该成分的协方差矩阵特征值. 越高解释越多方差.", + "n_components": "计算的主成分总数.", + "total_variance_explained": "所有成分解释的总方差.", + "components_for_90pct": "解释>=90%方差所需的最小成分数.", + "top_component_variance": "第一个(最重要)主成分的方差比例.", + "total_rows": "数据集的总行数.", + "duplicate_rows": "找到的完全重复行数.", + "unique_rows": "唯一(非重复)行数.", + "duplicate_ratio": "重复行比例 = duplicate_rows / total_rows.", + "uniqueness_ratio": "唯一值比例 = unique / total_non_null. 
1.0=全部唯一.", + "total_non_null": "用于唯一性计算的非空值数.", + "is_unique_key": "若每个非空值都唯一则为True -- 可能是主键.", + "completeness": "非缺失值比例 = 1 - (missing / total). 1.0=无缺失.", + "uniqueness": "唯一值与总非空值的比率. 越高越多样.", + "consistency": "类型一致性度量. 1.0=所有值匹配预期数据类型.", + "validity": "在预期范围/格式内的值比例. 1.0=全部有效.", + "overall": "加权质量分数 = 0.35*completeness + 0.25*uniqueness + 0.20*consistency + 0.20*validity.", + "quality_score": "结合完整性和唯一性的逐列质量分数.", + "column": "数据集中的列名.", + "component": "主成分标识符(PC1, PC2, ...).", + "value": "类别或离散值.", + "percentage": "该值的占比 = (count / total) x 100.", + "best_distribution": "根据AIC拟合最好的scipy分布.", + "aic": "赤池信息准则 -- 越低越好, 惩罚复杂度.", + "bic": "贝叶斯信息准则 -- 越低越好, 比AIC更保守.", + "ks_statistic": "衡量拟合分布最大CDF偏差的K-S统计量.", + "jarque_bera_stat": "Jarque-Bera检验统计量. 大值表示非正态.", + "jb_p_value": "Jarque-Bera检验p值. p < 0.05 -> 拒绝正态性.", + "recommended_transform": "使列更正态的推荐变换(Box-Cox或Yeo-Johnson).", + "original_skew": "原始(未变换)列的偏度.", + "transformed_skew": "应用推荐变换后的偏度.", + "bandwidth_silverman": "用于KDE估计的Silverman规则核带宽.", + "bandwidth_scott": "用于KDE估计的Scott规则核带宽.", + "partial_corr": "偏相关 -- 去除其他变量混淆效应后的Pearson相关.", + "mutual_information": "互信息(比特) -- 衡量两变量间的非线性依赖.", + "ci_lower": "相关系数95%自助法置信区间下限.", + "ci_upper": "相关系数95%自助法置信区间上限.", + "distance_corr": "Szekely距离相关 -- 捕获非线性依赖(0=独立, 1=依赖).", + "optimal_k": "基于轮廓分数分析确定的最佳聚类数.", + "best_silhouette": "所评估k值中最高平均轮廓分数(-1至1, 越高越好).", + "inertia": "簇内平方和(WCSS). 越低=越紧凑.", + "n_clusters_dbscan": "DBSCAN找到的聚类数(不含噪声).", + "noise_ratio": "DBSCAN标记为噪声的点的比例.", + "eps": "DBSCAN epsilon -- 从k距离图估计的邻域半径.", + "kl_divergence": "t-SNE嵌入的KL散度. 越低拟合越好.", + "tsne_perplexity": "t-SNE困惑度参数(平衡局部与全局结构).", + "n_factors": "通过Kaiser准则(特征值>1)保留的潜因子数.", + "factor_loading": "观测变量与潜因子间的相关.", + "noise_variance": "因子分析中每个变量的估计噪声(独特方差).", + "interaction_strength": "乘积交互项与头部特征间的Pearson相关.", + "monotonic_gap": "Pearson与Spearman相关之差 -- 差距大->非线性单调关系.", + "entropy_equal_width": "等宽分箱Shannon熵. 越低分布越集中.", + "entropy_equal_freq": "等频分箱Shannon熵. 
越低越集中.", + "cardinality": "类别列的唯一值数量.", + "encoding_rec": "基于基数分析的推荐编码策略.", + "leakage_risk": "特征可能泄露目标信息的风险等级(low/medium/high).", + "anomaly_score_if": "Isolation Forest异常分数. 越负越异常.", + "lof_score": "Local Outlier Factor负分数. 越负越异常.", + "mahalanobis_dist": "距数据中心的Mahalanobis距离. 越大越异常.", + "consensus_flag": "若3种异常方法中2种以上同意则为True.", + "levene_stat": "等方差性Levene检验统计量.", + "levene_p": "Levene检验p值. p < 0.05 -> 方差有显著差异.", + "kw_stat": "Kruskal-Wallis H统计量 -- 非参数单因素ANOVA.", + "kw_p": "Kruskal-Wallis检验p值. p < 0.05 -> 至少一组不同.", + "mw_stat": "Mann-Whitney U统计量 -- 非参数两样本秩检验.", + "mw_p": "Mann-Whitney U检验p值.", + "chi2_stat": "对均匀分布的卡方拟合优度统计量.", + "chi2_p": "卡方拟合优度检验p值.", + "grubbs_stat": "检测单个异常值的Grubbs检验统计量.", + "grubbs_p": "Grubbs检验p值.", + "adf_stat": "平稳性ADF(Augmented Dickey-Fuller)检验统计量.", + "adf_p": "ADF检验p值. p < 0.05 -> 序列平稳.", + "numeric_ratio": "数值列的比例.", + "categorical_ratio": "类别列的比例.", + "duplicate_row_ratio": "完全重复行的比例.", +} + +# ----- Japanese (ja) ------------------------------------------------- +METRIC_TIPS_I18N["ja"] = { + "type": "推論されたデータ型 (numeric, categorical, text, datetime, boolean).", + "count": "列の非null値の数.", + "missing": "欠損(null / NaN)値の数.", + "missing_%": "欠損率 = (欠損数 / 総行数) x 100.", + "unique": "列のユニーク(固有)値の数.", + "mean": "算術平均 = 値の合計 / 個数.", + "median": "データを並べた時の中央値(第50百分位).", + "std": "標準偏差 -- 平均周りの散らばりを測定. 大きいほど分散が大きい.", + "se": "平均の標準誤差 = std / sqrt(n). 標本平均の精度を示す.", + "cv": "変動係数 = std / |mean|. 無次元の相対的変動性の尺度.", + "mad": "中央絶対偏差 = median(|xi - median|). ロバストな散布度の尺度.", + "min": "列の最小値.", + "max": "列の最大値.", + "range": "範囲 = max - min. データの全体的な広がり.", + "p5": "第5百分位 -- データの5%がこの値未満.", + "q1": "第1四分位(第25百分位) -- データの25%がこの値未満.", + "q3": "第3四分位(第75百分位) -- データの75%がこの値未満.", + "p95": "第95百分位 -- データの95%がこの値未満.", + "iqr": "四分位範囲 = Q3 - Q1. 中央50%の広がり, 外れ値検出に使用.", + "skewness": "歪度: 分布の非対称性. 0=対称, >0=右に歪, <0=左に歪.", + "kurtosis": "超過尖度: 裾の厚さ. 
0=正規, >0=厚い裾, <0=薄い裾.", + "top": "列で最も頻繁な値.", + "freq": "最頻値の出現回数.", + "n": "分布検定に使用された非null観測数.", + "skew_type": "歪度の解釈: 対称(|s|<0.5), 中程度の歪み(0.5-1), 高い歪み(>1).", + "kurt_type": "尖度の解釈: 中程度(~0), 尖峰(>1,厚い裾), 扁平(<-1,薄い裾).", + "normality_test": "使用された正規性検定(n<=5000ならShapiro-Wilk, 大標本はD'Agostino-Pearson).", + "normality_p": "主正規性検定のp値. p<0.05 -> 非正規の可能性.", + "is_normal_0.05": "p値>=0.05ならTrue. alpha=0.05で正規性の帰無仮説が棄却されない.", + "shapiro_p": "Shapiro-Wilk検定p値. 小中標本向け(n<=5000).", + "dagostino_p": "D'Agostino-Pearson検定p値. 歪度+尖度を使用, n>=20向け.", + "ks_p": "Kolmogorov-Smirnov検定p値(正規分布と比較).", + "anderson_stat": "Anderson-Darling検定統計量. 高いほど反正規性の証拠が強い.", + "anderson_5pct_cv": "Anderson-Darling 5%臨界値. stat > cv -> 5%で正規性棄却.", + "missing_count": "この列の欠損(null)値の数.", + "missing_ratio": "欠損割合 = missing_count / total_rows (0~1).", + "dtype": "列のPandasデータ型.", + "lower_bound": "IQR下限 = Q1 - k x IQR. これより低い値は外れ値(デフォルトk=1.5).", + "upper_bound": "IQR上限 = Q3 + k x IQR. これより高い値は外れ値(デフォルトk=1.5).", + "outlier_count": "外れ値範囲外の値の数.", + "outlier_%": "外れ値率 = (outlier_count / 総数) x 100.", + "min_outlier": "検出された最小の外れ値.", + "max_outlier": "検出された最大の外れ値.", + "threshold": "使用されたZスコア閾値. |z| > thresholdなら外れ値.", + "max_zscore": "列で見つかった最大絶対Zスコア.", + "top_value": "最も頻繁なカテゴリ値.", + "top_frequency": "最も頻繁なカテゴリのカウント.", + "top_%": "最も頻繁なカテゴリの割合 = (top_freq / 総数) x 100.", + "entropy": "Shannonエントロピー(ビット). 高いほどカテゴリ間の分布が均一.", + "norm_entropy": "正規化エントロピー = entropy / log2(unique). 1.0=完全均一.", + "max_entropy": "最大可能エントロピー = log2(unique). 全カテゴリが同頻度で達成.", + "normalized_entropy": "norm_entropyと同じ: entropy / max_entropy. 1.0=均一.", + "unique_values": "固有カテゴリ値の数.", + "variance": "列の分散 = 平均からの偏差の二乗の平均.", + "mean_abs_corr": "他の全数値列との平均絶対Pearson相関係数.", + "avg_mutual_info": "他の全列との平均相互情報量.", + "VIF": "分散膨張係数. VIF=1->多重共線性なし, >5->中程度, >10->深刻.", + "multicollinearity": "VIF解釈: 低(<5), 中程度(5-10), 高(>=10).", + "variance_ratio": "この主成分が説明する総分散の割合.", + "cumulative_ratio": "この成分までの累積分散説明割合.", + "eigenvalue": "この成分の共分散行列の固有値. 
高いほどより多くの分散を説明.", + "n_components": "計算された主成分の総数.", + "total_variance_explained": "全成分で説明される総分散.", + "components_for_90pct": "分散90%以上の説明に必要な最小成分数.", + "top_component_variance": "第1(最重要)主成分の分散比率.", + "total_rows": "データセットの総行数.", + "duplicate_rows": "完全重複行の数.", + "unique_rows": "ユニーク(非重複)行の数.", + "duplicate_ratio": "重複行の割合 = duplicate_rows / total_rows.", + "uniqueness_ratio": "ユニーク値の割合 = unique / total_non_null. 1.0=全て固有.", + "total_non_null": "ユニーク性計算に使用された非null値の数.", + "is_unique_key": "全ての非null値がユニークならTrue -- 主キーの候補.", + "completeness": "非欠損値の割合 = 1 - (missing / total). 1.0=欠損なし.", + "uniqueness": "ユニーク値と総非null値の比率. 高いほど多様.", + "consistency": "型一致性の測定. 1.0=全値が期待データ型と一致.", + "validity": "期待範囲/形式内の値の割合. 1.0=全て有効.", + "overall": "加重品質スコア = 0.35*completeness + 0.25*uniqueness + 0.20*consistency + 0.20*validity.", + "quality_score": "完全性と一意性を組み合わせた列別品質スコア.", + "column": "データセットの列名.", + "component": "主成分識別子(PC1, PC2, ...).", + "value": "カテゴリまたは離散値.", + "percentage": "この値の割合 = (count / total) x 100.", + "best_distribution": "AIC基準でデータに最も適合するscipy分布.", + "aic": "赤池情報量基準 -- 低いほど良い. 複雑さにペナルティ.", + "bic": "ベイズ情報量基準 -- 低いほど良い. AICより保守的.", + "ks_statistic": "適合分布との最大CDF偏差を測るK-S統計量.", + "jarque_bera_stat": "Jarque-Bera検定統計量. 大きい値は非正規性を示す.", + "jb_p_value": "Jarque-Bera検定p値. p < 0.05 -> 正規性棄却.", + "recommended_transform": "列をより正規にする推奨変換(Box-CoxまたはYeo-Johnson).", + "original_skew": "元の(未変換)列の歪度.", + "transformed_skew": "推奨変換適用後の歪度.", + "bandwidth_silverman": "KDE推定用Silvermanルールのカーネル帯域幅.", + "bandwidth_scott": "KDE推定用Scottルールのカーネル帯域幅.", + "partial_corr": "偏相関 -- 他変数の交絡効果を除去後のPearson相関.", + "mutual_information": "相互情報量(ビット) -- 2変数間の非線形依存性を測定.", + "ci_lower": "相関係数の95%ブートストラップ信頼区間の下限.", + "ci_upper": "相関係数の95%ブートストラップ信頼区間の上限.", + "distance_corr": "Szekely距離相関 -- 非線形依存性を捕捉(0=独立, 1=依存).", + "optimal_k": "シルエットスコア分析で決定された最適クラスタ数.", + "best_silhouette": "評価したk値の中で最高の平均シルエットスコア(-1~1, 高いほど良い).", + "inertia": "クラスタ内二乗和(WCSS). 
低いほど密集したクラスタ.", + "n_clusters_dbscan": "DBSCANが発見したクラスタ数(ノイズ除く).", + "noise_ratio": "DBSCANがノイズとした点の割合.", + "eps": "DBSCAN epsilon -- k距離プロットから推定された近傍半径.", + "kl_divergence": "t-SNE埋め込みのKLダイバージェンス. 低いほど良い適合.", + "tsne_perplexity": "t-SNEパープレキシティ(局所と大域構造のバランス).", + "n_factors": "Kaiser基準(固有値>1)で保持された潜在因子数.", + "factor_loading": "観測変数と潜在因子間の相関.", + "noise_variance": "因子分析における各変数の推定ノイズ(固有分散).", + "interaction_strength": "積交互作用項と上位特徴間のPearson相関.", + "monotonic_gap": "PearsonとSpearman相関の差 -- 大きい差->非線形単調関係.", + "entropy_equal_width": "等幅ビンのShannonエントロピー. 低いほど集中した分布.", + "entropy_equal_freq": "等頻度ビンのShannonエントロピー. 低いほど集中.", + "cardinality": "カテゴリ列のユニーク値の数.", + "encoding_rec": "カーディナリティ分析に基づく推奨エンコーディング戦略.", + "leakage_risk": "特徴がターゲット情報を漏洩するリスクレベル(low/medium/high).", + "anomaly_score_if": "Isolation Forest異常スコア. より負=より異常.", + "lof_score": "Local Outlier Factor負スコア. より負=より異常.", + "mahalanobis_dist": "データ重心からのMahalanobis距離. 大きい=より異常.", + "consensus_flag": "3つの異常値手法のうち2つ以上が同意すればTrue.", + "levene_stat": "等分散性のLevene検定統計量.", + "levene_p": "Levene検定p値. p < 0.05 -> 分散に有意差あり.", + "kw_stat": "Kruskal-Wallis H統計量 -- ノンパラメトリック一元配置ANOVA.", + "kw_p": "Kruskal-Wallis検定p値. p < 0.05 -> 少なくとも1つのグループが異なる.", + "mw_stat": "Mann-Whitney U統計量 -- ノンパラメトリック二標本順位検定.", + "mw_p": "Mann-Whitney U検定p値.", + "chi2_stat": "均一分布に対するカイ二乗適合度統計量.", + "chi2_p": "カイ二乗適合度検定p値.", + "grubbs_stat": "単一外れ値検出のためのGrubbs検定統計量.", + "grubbs_p": "Grubbs検定p値.", + "adf_stat": "定常性のADF(Augmented Dickey-Fuller)検定統計量.", + "adf_p": "ADF検定p値. 
p < 0.05 -> 時系列は定常.", + "numeric_ratio": "数値列の割合.", + "categorical_ratio": "カテゴリ列の割合.", + "duplicate_row_ratio": "完全重複行の割合.", +} + +# ----- German (de) ---------------------------------------------------- +METRIC_TIPS_I18N["de"] = { + "type": "Abgeleiteter Datentyp der Spalte (numerisch, kategorisch, Text, Datum, boolesch).", + "count": "Anzahl nicht-leerer Werte in der Spalte.", + "missing": "Anzahl fehlender (null / NaN) Werte.", + "missing_%": "Prozent fehlender Werte = (fehlend / Gesamtzeilen) × 100.", + "unique": "Anzahl eindeutiger Werte in der Spalte.", + "mean": "Arithmetisches Mittel = Summe der Werte / Anzahl.", + "median": "Mittlerer Wert der sortierten Daten (50. Perzentil).", + "std": "Standardabweichung – misst die Streuung um den Mittelwert. Größer = stärker gestreut.", + "se": "Standardfehler des Mittelwerts = std / √n. Zeigt die Präzision des Stichprobenmittelwerts.", + "cv": "Variationskoeffizient = std / |Mittelwert|. Dimensionslose relative Variabilität.", + "mad": "Mediane Absolute Abweichung = Median(|xi − Median|). Robuste Streuungsmaß.", + "min": "Minimalwert in der Spalte.", + "max": "Maximalwert in der Spalte.", + "range": "Spannweite = Max − Min. Gesamtspanne der Daten.", + "p5": "5. Perzentil – 5% der Daten liegen unterhalb dieses Werts.", + "q1": "1. Quartil (25. Perzentil) – 25% der Daten liegen darunter.", + "q3": "3. Quartil (75. Perzentil) – 75% der Daten liegen darunter.", + "p95": "95. Perzentil – 95% der Daten liegen unterhalb dieses Werts.", + "iqr": "Interquartilsabstand = Q3 − Q1. Mittlere 50%, verwendet für Ausreißererkennung.", + "skewness": "Schiefe misst Verteilungsasymmetrie. 0=symmetrisch, >0=rechtschief, <0=linksschief.", + "kurtosis": "Exzess-Kurtosis misst Schwere der Ränder. 
0=normal, >0=schwere Ränder, <0=leichte Ränder.", + "top": "Häufigster Wert in der Spalte.", + "freq": "Häufigkeit des häufigsten Werts.", + "n": "Anzahl nicht-leerer Beobachtungen für den Verteilungstest.", + "skew_type": "Interpretation der Schiefe: symmetrisch (|s|<0,5), mäßig schief (0,5–1), stark schief (>1).", + "kurt_type": "Interpretation der Kurtosis: mesokurtisch (~0), leptokurtisch (>1, schwere Ränder), platykurtisch (<−1, leichte Ränder).", + "normality_test": "Verwendeter primärer Normalitätstest (Shapiro-Wilk für n≤5000, D'Agostino-Pearson für größere).", + "normality_p": "p-Wert des primären Normalitätstests. p<0,05 → wahrscheinlich nicht normal.", + "is_normal_0.05": "Wahr, wenn p≥0,05 – Normalitätshypothese wird bei α=0,05 nicht abgelehnt.", + "shapiro_p": "p-Wert des Shapiro-Wilk-Tests. Am besten für kleine bis mittlere Stichproben (n≤5000).", + "dagostino_p": "p-Wert des D'Agostino-Pearson-Tests. Nutzt Schiefe + Kurtosis, gut für n≥20.", + "ks_p": "p-Wert des Kolmogorow-Smirnow-Tests gegen Normalverteilung.", + "anderson_stat": "Anderson-Darling-Teststatistik. Höher = stärkerer Hinweis gegen Normalität.", + "anderson_5pct_cv": "Anderson-Darling 5%-Kritischer Wert. Stat > krit. Wert → Normalität bei 5% ablehnen.", + "missing_count": "Anzahl fehlender (null) Werte in dieser Spalte.", + "missing_ratio": "Anteil fehlender Werte = Fehlend / Gesamtzeilen (0 bis 1).", + "dtype": "Pandas-Datentyp der Spalte.", + "lower_bound": "IQR-Untergrenze = Q1 − k×IQR. Werte darunter sind Ausreißer (Standard k=1,5).", + "upper_bound": "IQR-Obergrenze = Q3 + k×IQR. Werte darüber sind Ausreißer (Standard k=1,5).", + "outlier_count": "Anzahl der Werte außerhalb der Ausreißergrenzen.", + "outlier_%": "Prozent der Ausreißer = (Ausreißeranzahl / Gesamt) × 100.", + "min_outlier": "Kleinster erkannter Ausreißerwert.", + "max_outlier": "Größter erkannter Ausreißerwert.", + "threshold": "Verwendeter Z-Score-Schwellenwert. 
Werte mit |z| > Schwelle sind Ausreißer.", + "max_zscore": "Maximaler absoluter Z-Score in der Spalte.", + "top_value": "Häufigste Kategorie.", + "top_frequency": "Anzahl der häufigsten Kategorie.", + "top_%": "Prozent der häufigsten Kategorie = (Häufigkeit / Gesamt) × 100.", + "entropy": "Shannon-Entropie (Bit). Höher = gleichmäßigere Verteilung.", + "norm_entropy": "Normalisierte Entropie = Entropie / log₂(Unique). 1,0 = perfekt gleichmäßig.", + "max_entropy": "Maximal mögliche Entropie = log₂(Unique). Erreicht bei Gleichverteilung.", + "normalized_entropy": "Wie norm_entropy: Entropie / max. Entropie. 1,0 = gleichverteilt.", + "unique_values": "Anzahl verschiedener Kategoriewerte.", + "variance": "Varianz der Spalte = mittlere quadratische Abweichung vom Mittelwert.", + "mean_abs_corr": "Mittlere absolute Pearson-Korrelation mit allen anderen numerischen Spalten.", + "avg_mutual_info": "Mittlere gegenseitige Information mit allen anderen Spalten.", + "VIF": "Varianzinflationsfaktor. VIF=1 → keine Multikollinearität, >5 → mäßig, >10 → schwer.", + "multicollinearity": "Interpretation des VIF: niedrig (<5), mäßig (5–10) oder hoch (≥10).", + "variance_ratio": "Anteil der Gesamtvarianz, der von dieser Hauptkomponente erklärt wird.", + "cumulative_ratio": "Kumulativer Varianzanteil bis einschließlich dieser Komponente.", + "eigenvalue": "Eigenwert der Kovarianzmatrix für diese Komponente. 
Höher = mehr Varianz.", + "n_components": "Gesamtanzahl berechneter Hauptkomponenten.", + "total_variance_explained": "Von allen berechneten Komponenten erfasste Gesamtvarianz.", + "components_for_90pct": "Minimale Komponentenanzahl für ≥90% erklärte Varianz.", + "top_component_variance": "Varianzanteil der ersten (wichtigsten) Hauptkomponente.", + "total_rows": "Gesamtzahl der Zeilen im Datensatz.", + "duplicate_rows": "Anzahl exakter Duplikatzeilen.", + "unique_rows": "Anzahl einzigartiger (nicht-duplizierter) Zeilen.", + "duplicate_ratio": "Anteil duplizierter Zeilen = Duplikate / Gesamtzeilen.", + "uniqueness_ratio": "Quotient einzigartiger Werte = Unique / Gesamt (nicht-null). 1,0 = alle einzigartig.", + "total_non_null": "Nicht-leere Werte für die Eindeutigkeitsberechnung.", + "is_unique_key": "Wahr, wenn jeder nicht-leere Wert einzigartig ist – potenzieller Primärschlüssel.", + "completeness": "Anteil nicht-fehlender Werte = 1 − (Fehlend / Gesamt). 1,0 = keine Fehlwerte.", + "uniqueness": "Quotient eindeutiger Werte zu nicht-leeren Werten. Höher = vielfältiger.", + "consistency": "Typ-Konsistenz. 1,0 = alle Werte entsprechen dem erwarteten Datentyp.", + "validity": "Anteil gültiger Werte (in erwarteten Bereichen/Formaten). 1,0 = alle gültig.", + "overall": "Gewichteter Qualitätsscore = 0,35×Vollst. + 0,25×Eindeut. + 0,20×Konsist. + 0,20×Gültigk.", + "quality_score": "Spaltenqualität aus Vollständigkeit und Eindeutigkeit.", + "column": "Spaltenname im Datensatz.", + "component": "Hauptkomponenten-Kennung (PC1, PC2, …).", + "value": "Kategorie- oder diskreter Wert.", + "percentage": "Prozentualer Anteil dieses Werts = (Anzahl / Gesamt) × 100.", + "best_distribution": "Scipy-Verteilung mit dem besten AIC-Fit.", + "aic": "Akaike-Informationskriterium – niedriger = besser. Bestraft Komplexität.", + "bic": "Bayessches Informationskriterium – niedriger = besser. 
Konservativer als AIC.", + "ks_statistic": "KS-Statistik: maximale CDF-Abweichung von der angepassten Verteilung.", + "jarque_bera_stat": "Jarque-Bera-Teststatistik. Große Werte → Nicht-Normalität.", + "jb_p_value": "p-Wert des Jarque-Bera-Tests. p<0,05 → Normalität ablehnen.", + "recommended_transform": "Empfohlene Potenztransformation für bessere Normalität (Box-Cox oder Yeo-Johnson).", + "original_skew": "Schiefe der Originalspalte (untransformiert).", + "transformed_skew": "Schiefe nach Anwendung der empfohlenen Transformation.", + "bandwidth_silverman": "Kernel-Bandbreite nach Silvermans Regel für KDE.", + "bandwidth_scott": "Kernel-Bandbreite nach Scotts Regel für KDE.", + "partial_corr": "Partielle Korrelation – Pearson nach Entfernung von Konfounder-Effekten.", + "mutual_information": "Gegenseitige Information (Bit) – misst nicht-lineare Abhängigkeit.", + "ci_lower": "Untere Grenze des 95%-Bootstrap-Konfidenzintervalls der Korrelation.", + "ci_upper": "Obere Grenze des 95%-Bootstrap-Konfidenzintervalls der Korrelation.", + "distance_corr": "Szekely-Distanzkorrelation – erfasst nicht-lineare Abhängigkeiten (0=unabhängig, 1=abhängig).", + "optimal_k": "Beste Clusteranzahl laut Silhouetten-Analyse.", + "best_silhouette": "Höchster mittlerer Silhouetten-Score (-1 bis 1, höher = besser).", + "inertia": "Within-Cluster Summe der Quadrate (WCSS). Niedriger = kompaktere Cluster.", + "n_clusters_dbscan": "Anzahl der von DBSCAN gefundenen Cluster (ohne Rauschen).", + "noise_ratio": "Anteil der als Rauschen eingestuften Punkte.", + "eps": "DBSCAN-Epsilon – Nachbarschaftsradius, automatisch aus k-Distanz-Plot geschätzt.", + "kl_divergence": "Kullback-Leibler-Divergenz der t-SNE-Einbettung. Niedriger = bessere Anpassung.", + "tsne_perplexity": "Perplexitäts-Parameter für t-SNE (balanciert lokal vs. 
global).", + "n_factors": "Anzahl behaltener latenter Faktoren nach Kaiser-Kriterium (Eigenwert > 1).", + "factor_loading": "Korrelation zwischen beobachteter Variable und latentem Faktor.", + "noise_variance": "Geschätzte Rauschvarianz (Uniqueness) jeder Variable in der Faktorenanalyse.", + "interaction_strength": "Pearson-Korrelation zwischen Produkt-Interaktionsterm und Top-Feature.", + "monotonic_gap": "Lücke zwischen Pearson- und Spearman-Korrelation. Groß → nicht-linearer monotoner Zusammenhang.", + "entropy_equal_width": "Shannon-Entropie des Gleich-Breite-Binnings. Niedriger = konzentriertere Verteilung.", + "entropy_equal_freq": "Shannon-Entropie des Gleich-Frequenz-Binnings. Niedriger = konzentrierter.", + "cardinality": "Anzahl eindeutiger Werte einer kategorischen Spalte.", + "encoding_rec": "Empfohlene Kodierungsstrategie basierend auf der Kardinalitätsanalyse.", + "leakage_risk": "Risikostufe (niedrig/mittel/hoch), dass ein Feature Zielinformationen leakt.", + "anomaly_score_if": "Isolation-Forest-Anomalie-Score. Negativer = anomaler.", + "lof_score": "Local Outlier Factor (minus-Score). Negativer = anomaler.", + "mahalanobis_dist": "Mahalanobis-Distanz zum Datenzentrum. Größer = ungewöhnlicher.", + "consensus_flag": "Wahr, wenn ≥2 von 3 Anomalie-Methoden den Punkt als anomal einstufen.", + "levene_stat": "Levene-Teststatistik für Varianzgleichheit.", + "levene_p": "p-Wert des Levene-Tests. p<0,05 → Varianzen signifikant verschieden.", + "kw_stat": "Kruskal-Wallis-H-Statistik – nicht-parametrische Einweg-ANOVA.", + "kw_p": "p-Wert des Kruskal-Wallis-Tests. p<0,05 → mindestens eine Gruppe unterscheidet sich.", + "mw_stat": "Mann-Whitney-U-Statistik – nicht-parametrischer Zweistichproben-Rangtest.", + "mw_p": "p-Wert des Mann-Whitney-U-Tests.", + "chi2_stat": "Chi-Quadrat-Anpassungsstatistik vs. 
Gleichverteilung.", + "chi2_p": "p-Wert des Chi-Quadrat-Anpassungstests.", + "grubbs_stat": "Grubbs-Teststatistik zur Erkennung eines einzelnen Ausreißers.", + "grubbs_p": "p-Wert des Grubbs-Tests.", + "adf_stat": "ADF-Teststatistik (Augmented Dickey-Fuller) für Stationarität.", + "adf_p": "p-Wert des ADF-Tests. p<0,05 → Reihe ist stationär.", + "numeric_ratio": "Anteil numerischer Spalten.", + "categorical_ratio": "Anteil kategorischer Spalten.", + "duplicate_row_ratio": "Anteil exakter Duplikatzeilen.", +} + +# ----- French (fr) ---------------------------------------------------- +METRIC_TIPS_I18N["fr"] = { + "type": "Type de données inféré de la colonne (numérique, catégoriel, texte, date, booléen).", + "count": "Nombre de valeurs non nulles dans la colonne.", + "missing": "Nombre de valeurs manquantes (null / NaN).", + "missing_%": "Pourcentage de valeurs manquantes = (manquants / total lignes) × 100.", + "unique": "Nombre de valeurs distinctes dans la colonne.", + "mean": "Moyenne arithmétique = somme des valeurs / nombre.", + "median": "Valeur centrale des données triées (50e percentile).", + "std": "Écart-type – mesure la dispersion autour de la moyenne. Plus grand = plus dispersé.", + "se": "Erreur standard de la moyenne = std / √n. Indique la précision de la moyenne.", + "cv": "Coefficient de variation = std / |moyenne|. Mesure relative sans unité de la variabilité.", + "mad": "Déviation Absolue Médiane = médiane(|xi − médiane|). Mesure robuste de dispersion.", + "min": "Valeur minimale dans la colonne.", + "max": "Valeur maximale dans la colonne.", + "range": "Étendue = max − min. Dispersion totale des données.", + "p5": "5e percentile – 5% des données sont inférieures à cette valeur.", + "q1": "1er quartile (25e percentile) – 25% des données sont inférieures.", + "q3": "3e quartile (75e percentile) – 75% des données sont inférieures.", + "p95": "95e percentile – 95% des données sont inférieures à cette valeur.", + "iqr": "Écart interquartile = Q3 − Q1. 
50% central, utilisé pour la détection d'aberrants.", + "skewness": "L'asymétrie mesure la déformation de la distribution. 0=symétrique, >0=droite, <0=gauche.", + "kurtosis": "Kurtosis excédentaire – poids des queues. 0=normal, >0=queues lourdes, <0=queues légères.", + "top": "Valeur la plus fréquente dans la colonne.", + "freq": "Fréquence de la valeur la plus courante.", + "n": "Nombre d'observations non nulles utilisées pour le test de distribution.", + "skew_type": "Interprétation de l'asymétrie : symétrique (|s|<0,5), modérée (0,5–1), forte (>1).", + "kurt_type": "Interprétation de la kurtosis : mésokurtique (~0), leptokurtique (>1), platykurtique (<−1).", + "normality_test": "Test de normalité principal (Shapiro-Wilk pour n≤5000, D'Agostino-Pearson pour plus grand).", + "normality_p": "p-value du test de normalité. p<0,05 → probablement non normal.", + "is_normal_0.05": "Vrai si p≥0,05 – la normalité n'est pas rejetée à α=0,05.", + "shapiro_p": "p-value du test de Shapiro-Wilk. Optimal pour échantillons petits à moyens (n≤5000).", + "dagostino_p": "p-value du test de D'Agostino-Pearson. Utilise asymétrie + kurtosis, adapté pour n≥20.", + "ks_p": "p-value du test de Kolmogorov-Smirnov vs distribution normale.", + "anderson_stat": "Statistique du test d'Anderson-Darling. Plus élevée = preuve plus forte contre la normalité.", + "anderson_5pct_cv": "Valeur critique à 5% d'Anderson-Darling. Stat > VC → rejeter la normalité à 5%.", + "missing_count": "Nombre de valeurs manquantes (null) dans cette colonne.", + "missing_ratio": "Fraction de valeurs manquantes = manquants / total (0 à 1).", + "dtype": "Type pandas de la colonne.", + "lower_bound": "Borne inférieure IQR = Q1 − k×IQR. Valeurs en dessous = aberrants (k=1,5 par défaut).", + "upper_bound": "Borne supérieure IQR = Q3 + k×IQR. 
Valeurs au-dessus = aberrants (k=1,5 par défaut).", + "outlier_count": "Nombre de valeurs en dehors des bornes.", + "outlier_%": "Pourcentage d'aberrants = (nombre / total) × 100.", + "min_outlier": "Plus petit aberrant détecté.", + "max_outlier": "Plus grand aberrant détecté.", + "threshold": "Seuil de Z-score utilisé. |z| > seuil = aberrant.", + "max_zscore": "Z-score absolu maximal trouvé dans la colonne.", + "top_value": "Catégorie la plus fréquente.", + "top_frequency": "Nombre d'occurrences de la catégorie la plus fréquente.", + "top_%": "Pourcentage de la catégorie la plus fréquente = (fréq / total) × 100.", + "entropy": "Entropie de Shannon (bits). Plus élevée = distribution plus uniforme.", + "norm_entropy": "Entropie normalisée = entropie / log₂(unique). 1,0 = parfaitement uniforme.", + "max_entropy": "Entropie maximale possible = log₂(unique). Atteinte quand toutes les catégories sont équiprobables.", + "normalized_entropy": "Idem norm_entropy : entropie / entropie max. 1,0 = uniforme.", + "unique_values": "Nombre de valeurs catégorielles distinctes.", + "variance": "Variance de la colonne = moyenne des écarts quadratiques à la moyenne.", + "mean_abs_corr": "Corrélation Pearson absolue moyenne avec toutes les autres colonnes numériques.", + "avg_mutual_info": "Information mutuelle moyenne avec toutes les autres colonnes.", + "VIF": "Facteur d'inflation de la variance. VIF=1 → pas de multicolinéarité, >5 → modérée, >10 → sévère.", + "multicollinearity": "Interprétation du VIF : faible (<5), modérée (5–10) ou élevée (≥10).", + "variance_ratio": "Part de la variance totale expliquée par cette composante principale.", + "cumulative_ratio": "Part cumulative de la variance expliquée jusqu'à cette composante.", + "eigenvalue": "Valeur propre de la matrice de covariance. 
Plus élevée = plus de variance.", + "n_components": "Nombre total de composantes principales calculées.", + "total_variance_explained": "Variance totale captée par toutes les composantes calculées.", + "components_for_90pct": "Nombre minimum de composantes pour ≥90% de variance expliquée.", + "top_component_variance": "Part de variance de la première (plus importante) composante.", + "total_rows": "Nombre total de lignes dans le jeu de données.", + "duplicate_rows": "Nombre de lignes exactement dupliquées.", + "unique_rows": "Nombre de lignes uniques (non dupliquées).", + "duplicate_ratio": "Part de lignes dupliquées = doublons / total.", + "uniqueness_ratio": "Ratio de valeurs uniques = unique / total non-null. 1,0 = toutes uniques.", + "total_non_null": "Valeurs non nulles utilisées pour le calcul d'unicité.", + "is_unique_key": "Vrai si chaque valeur non nulle est unique – clé primaire potentielle.", + "completeness": "Fraction de valeurs non manquantes = 1 − (manquants / total). 1,0 = aucune donnée manquante.", + "uniqueness": "Ratio de valeurs uniques / non-null. Plus élevé = plus diversifié.", + "consistency": "Cohérence des types. 1,0 = toutes les valeurs correspondent au type attendu.", + "validity": "Fraction de valeurs dans les plages/formats attendus. 1,0 = toutes valides.", + "overall": "Score qualité pondéré = 0,35×Complét. + 0,25×Unicit. + 0,20×Cohér. + 0,20×Valid.", + "quality_score": "Score qualité par colonne combinant complétude et unicité.", + "column": "Nom de la colonne dans le jeu de données.", + "component": "Identifiant de la composante principale (PC1, PC2, …).", + "value": "Valeur catégorielle ou discrète.", + "percentage": "Part en pourcentage = (nombre / total) × 100.", + "best_distribution": "Distribution scipy avec le meilleur ajustement AIC.", + "aic": "Critère d'information d'Akaike – plus bas = meilleur. Pénalise la complexité.", + "bic": "Critère d'information bayésien – plus bas = meilleur. 
Plus conservateur que l'AIC.", + "ks_statistic": "Statistique KS : écart CDF maximal par rapport à la distribution ajustée.", + "jarque_bera_stat": "Statistique du test de Jarque-Bera. Grande valeur → non-normalité.", + "jb_p_value": "p-value du test de Jarque-Bera. p<0,05 → rejeter la normalité.", + "recommended_transform": "Transformation de puissance recommandée (Box-Cox ou Yeo-Johnson).", + "original_skew": "Asymétrie de la colonne originale (non transformée).", + "transformed_skew": "Asymétrie après application de la transformation recommandée.", + "bandwidth_silverman": "Largeur de bande du noyau selon la règle de Silverman pour KDE.", + "bandwidth_scott": "Largeur de bande du noyau selon la règle de Scott pour KDE.", + "partial_corr": "Corrélation partielle – Pearson après suppression des effets confondants.", + "mutual_information": "Information mutuelle (bits) – mesure la dépendance non linéaire.", + "ci_lower": "Borne inférieure de l'IC bootstrap à 95% de la corrélation.", + "ci_upper": "Borne supérieure de l'IC bootstrap à 95% de la corrélation.", + "distance_corr": "Corrélation de distance de Szekely – capte les dépendances non linéaires (0=indépendant, 1=dépendant).", + "optimal_k": "Meilleur nombre de clusters selon l'analyse silhouette.", + "best_silhouette": "Score silhouette moyen le plus élevé (-1 à 1, plus haut = meilleur).", + "inertia": "Somme des carrés intra-cluster (WCSS). Plus bas = clusters plus compacts.", + "n_clusters_dbscan": "Nombre de clusters trouvés par DBSCAN (hors bruit).", + "noise_ratio": "Part des points étiquetés comme bruit par DBSCAN.", + "eps": "Epsilon DBSCAN – rayon de voisinage auto-estimé depuis le graphe k-distance.", + "kl_divergence": "Divergence de Kullback-Leibler de l'embedding t-SNE. 
Plus bas = meilleur.", + "tsne_perplexity": "Paramètre de perplexité pour t-SNE (équilibre local vs global).", + "n_factors": "Nombre de facteurs latents retenus par le critère de Kaiser (valeur propre > 1).", + "factor_loading": "Corrélation entre une variable observée et un facteur latent.", + "noise_variance": "Variance de bruit estimée (unicité) de chaque variable en analyse factorielle.", + "interaction_strength": "Corrélation Pearson entre un terme d'interaction (produit) et le meilleur feature.", + "monotonic_gap": "Écart entre corrélations Pearson et Spearman. Grand → relation monotone non linéaire.", + "entropy_equal_width": "Entropie de Shannon du binning à largeur égale. Plus bas = plus concentré.", + "entropy_equal_freq": "Entropie de Shannon du binning à fréquence égale. Plus bas = plus concentré.", + "cardinality": "Nombre de valeurs uniques d'une colonne catégorielle.", + "encoding_rec": "Stratégie d'encodage recommandée selon l'analyse de cardinalité.", + "leakage_risk": "Niveau de risque (faible/moyen/élevé) de fuite d'information cible.", + "anomaly_score_if": "Score d'anomalie Isolation Forest. Plus négatif = plus anomal.", + "lof_score": "Score LOF (minus). Plus négatif = plus anomal.", + "mahalanobis_dist": "Distance de Mahalanobis au centroïde. Plus grande = plus inhabituel.", + "consensus_flag": "Vrai si ≥2 des 3 méthodes considèrent le point comme anomal.", + "levene_stat": "Statistique du test de Levene pour l'égalité des variances.", + "levene_p": "p-value du test de Levene. p<0,05 → variances significativement différentes.", + "kw_stat": "Statistique H de Kruskal-Wallis – ANOVA non paramétrique à un facteur.", + "kw_p": "p-value du test de Kruskal-Wallis. 
p<0,05 → au moins un groupe diffère.", + "mw_stat": "Statistique U de Mann-Whitney – test de rang non paramétrique à deux échantillons.", + "mw_p": "p-value du test de Mann-Whitney U.", + "chi2_stat": "Statistique du test d'ajustement chi-deux vs distribution uniforme.", + "chi2_p": "p-value du test d'ajustement chi-deux.", + "grubbs_stat": "Statistique du test de Grubbs pour la détection d'un aberrant unique.", + "grubbs_p": "p-value du test de Grubbs.", + "adf_stat": "Statistique du test ADF (Augmented Dickey-Fuller) de stationnarité.", + "adf_p": "p-value du test ADF. p<0,05 → la série est stationnaire.", + "numeric_ratio": "Part des colonnes numériques.", + "categorical_ratio": "Part des colonnes catégorielles.", + "duplicate_row_ratio": "Part des lignes exactement dupliquées.", +} + + +def get_metric_tips_json() -> str: + """Return METRIC_TIPS_I18N dict as a JSON string for embedding in JS.""" + import json + return json.dumps(METRIC_TIPS_I18N, ensure_ascii=False) diff --git a/f2a/stats/__init__.py b/f2a/stats/__init__.py new file mode 100644 index 0000000..341c810 --- /dev/null +++ b/f2a/stats/__init__.py @@ -0,0 +1,53 @@ +"""Stats module — statistical analysis engine.""" + +from f2a.stats.categorical import CategoricalStats +from f2a.stats.correlation import CorrelationStats +from f2a.stats.descriptive import DescriptiveStats +from f2a.stats.distribution import DistributionStats +from f2a.stats.duplicates import DuplicateStats +from f2a.stats.feature_importance import FeatureImportanceStats +from f2a.stats.missing import MissingStats +from f2a.stats.outlier import OutlierStats +from f2a.stats.pca_analysis import PCAStats +from f2a.stats.quality import QualityStats + +# Advanced stats modules +from f2a.stats.advanced_anomaly import AdvancedAnomalyStats +from f2a.stats.advanced_correlation import AdvancedCorrelationStats +from f2a.stats.advanced_dimreduction import AdvancedDimReductionStats +from f2a.stats.advanced_distribution import AdvancedDistributionStats
+from f2a.stats.clustering import ClusteringStats +from f2a.stats.feature_insights import FeatureInsightsStats +from f2a.stats.statistical_tests import StatisticalTests + +# Enhancement modules (v2) +from f2a.stats.column_role import ColumnRoleClassifier +from f2a.stats.cross_analysis import CrossAnalysis +from f2a.stats.insight_engine import InsightEngine +from f2a.stats.ml_readiness import MLReadinessEvaluator + +__all__ = [ + "CategoricalStats", + "CorrelationStats", + "DescriptiveStats", + "DistributionStats", + "DuplicateStats", + "FeatureImportanceStats", + "MissingStats", + "OutlierStats", + "PCAStats", + "QualityStats", + # Advanced + "AdvancedAnomalyStats", + "AdvancedCorrelationStats", + "AdvancedDimReductionStats", + "AdvancedDistributionStats", + "ClusteringStats", + "FeatureInsightsStats", + "StatisticalTests", + # Enhancement + "ColumnRoleClassifier", + "CrossAnalysis", + "InsightEngine", + "MLReadinessEvaluator", +] diff --git a/f2a/stats/advanced_anomaly.py b/f2a/stats/advanced_anomaly.py new file mode 100644 index 0000000..e7baf14 --- /dev/null +++ b/f2a/stats/advanced_anomaly.py @@ -0,0 +1,379 @@ +"""Advanced anomaly detection module. + +Provides Isolation Forest, Local Outlier Factor, Mahalanobis distance, +and consensus anomaly scoring. + +References: + - Liu et al. (2008) — Isolation Forest + - Breunig et al. (2000) — Local Outlier Factor + - Mahalanobis (1936) — Mahalanobis distance +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class AdvancedAnomalyStats: + """Multi-method anomaly detection for numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + max_sample: Max rows to sample for expensive operations. + contamination: Expected proportion of anomalies. 
+ """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + max_sample: int = 5000, + contamination: float = 0.05, + ) -> None: + self._df = df + self._schema = schema + self._max_sample = max_sample + self._contamination = contamination + + def _prepare_data(self) -> tuple[np.ndarray, pd.DataFrame, list[str]] | None: + """Prepare and scale numeric data.""" + cols = self._schema.numeric_columns + if len(cols) < 2: + return None + + try: + from sklearn.preprocessing import StandardScaler + except ImportError: + return None + + df_clean = self._df[cols].dropna() + if len(df_clean) < 20: + return None + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + return X, df_clean, cols + + # ── Isolation Forest ────────────────────────────────── + + def isolation_forest(self) -> dict[str, Any]: + """Detect anomalies using Isolation Forest. + + Isolation Forest isolates observations by randomly selecting a + feature and then randomly selecting a split value. Anomalies + require fewer splits (shorter path length). + + Returns: + Dictionary with anomaly_count, anomaly_ratio, scores_summary. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, df_clean, cols = prepared + + try: + from sklearn.ensemble import IsolationForest + except ImportError: + return {} + + try: + iso = IsolationForest( + contamination=self._contamination, + random_state=42, + max_samples=min(256, len(X)), + n_estimators=100, + ) + labels = iso.fit_predict(X) # -1 = anomaly, 1 = normal + scores = iso.decision_function(X) + + n_anomaly = int((labels == -1).sum()) + + return { + "method": "Isolation Forest", + "anomaly_count": n_anomaly, + "anomaly_ratio": round(n_anomaly / len(X), 4), + "n_samples": len(X), + "score_mean": round(float(scores.mean()), 4), + "score_std": round(float(scores.std()), 4), + "score_min": round(float(scores.min()), 4), + "score_threshold": round(float(np.percentile(scores, self._contamination * 100)), 4), + "labels": labels, + "scores": scores, + } + except Exception as exc: + logger.debug("Isolation Forest failed: %s", exc) + return {} + + # ── Local Outlier Factor ────────────────────────────── + + def local_outlier_factor(self) -> dict[str, Any]: + """Detect anomalies using Local Outlier Factor (LOF). + + LOF measures the local deviation of density for each sample + compared to its neighbors. + + Returns: + Dictionary with anomaly_count, anomaly_ratio, scores summary. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, df_clean, cols = prepared + + try: + from sklearn.neighbors import LocalOutlierFactor + except ImportError: + return {} + + try: + n_neighbors = min(20, len(X) - 1) + lof = LocalOutlierFactor( + n_neighbors=n_neighbors, + contamination=self._contamination, + ) + labels = lof.fit_predict(X) # -1 = anomaly + scores = lof.negative_outlier_factor_ + + n_anomaly = int((labels == -1).sum()) + + return { + "method": "Local Outlier Factor", + "anomaly_count": n_anomaly, + "anomaly_ratio": round(n_anomaly / len(X), 4), + "n_samples": len(X), + "n_neighbors": n_neighbors, + "lof_mean": round(float(scores.mean()), 4), + "lof_std": round(float(scores.std()), 4), + "lof_min": round(float(scores.min()), 4), + "labels": labels, + "scores": scores, + } + except Exception as exc: + logger.debug("LOF failed: %s", exc) + return {} + + # ── Mahalanobis distance ────────────────────────────── + + def mahalanobis_distance(self) -> dict[str, Any]: + """Detect anomalies using Mahalanobis distance. + + Points with high Mahalanobis distance from the centroid + are potential multivariate outliers. + + Returns: + Dictionary with threshold, anomaly count, distances summary. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return {} + + df_clean = self._df[cols].dropna() + if len(df_clean) < len(cols) + 5: + return {} + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + cols = cols[:30] # limit columns to avoid ill-conditioned matrices + df_clean = df_clean[cols] + data = df_clean.values + try: + mean = np.mean(data, axis=0) + cov = np.cov(data.T) + # Regularise to handle singular/near-singular covariance + cov += np.eye(cov.shape[0]) * 1e-6 + if np.linalg.cond(cov) > 1e10: + logger.debug("Covariance matrix ill-conditioned; skipping Mahalanobis.") + return {} + cov_inv = np.linalg.inv(cov) + + diff = data - mean + left = diff @ cov_inv + maha_sq = np.sum(left * diff, axis=1) + maha = np.sqrt(np.maximum(maha_sq, 0)) + + # Chi-squared threshold at 97.5% with p degrees of freedom + from scipy.stats import chi2 + + p = len(cols) + threshold = float(np.sqrt(chi2.ppf(0.975, p))) + + anomaly_mask = maha > threshold + n_anomaly = int(anomaly_mask.sum()) + + return { + "method": "Mahalanobis Distance", + "anomaly_count": n_anomaly, + "anomaly_ratio": round(n_anomaly / len(data), 4), + "threshold": round(threshold, 4), + "n_features": p, + "n_samples": len(data), + "distance_mean": round(float(maha.mean()), 4), + "distance_std": round(float(maha.std()), 4), + "distance_max": round(float(maha.max()), 4), + "distances": maha, + "labels": np.where(anomaly_mask, -1, 1), + } + except (np.linalg.LinAlgError, Exception) as exc: + logger.debug("Mahalanobis distance failed: %s", exc) + return {} + + # ── Consensus anomaly ───────────────────────────────── + + def consensus_anomaly(self) -> dict[str, Any]: + """Consensus anomaly detection combining multiple methods. + + An observation is flagged as anomalous if flagged by at least + 2 out of 3 methods (IF, LOF, Mahalanobis). + + Returns: + Dictionary with per-method counts, consensus count, and + agreement statistics. 
+ """ + iso_result = self.isolation_forest() + lof_result = self.local_outlier_factor() + maha_result = self.mahalanobis_distance() + + methods = [] + if "labels" in iso_result: + methods.append(("isolation_forest", iso_result["labels"])) + if "labels" in lof_result: + methods.append(("local_outlier_factor", lof_result["labels"])) + if "labels" in maha_result: + methods.append(("mahalanobis", maha_result["labels"])) + + if len(methods) < 2: + return {} + + # Align lengths (should be same, but just in case) + min_len = min(len(labels) for _, labels in methods) + vote_matrix = np.zeros((min_len, len(methods))) + + for i, (_, labels) in enumerate(methods): + vote_matrix[:, i] = (labels[:min_len] == -1).astype(int) + + votes = vote_matrix.sum(axis=1) + # Consensus: flagged by >= 2 methods + consensus_mask = votes >= 2 + + per_method = {} + for name, labels in methods: + per_method[name] = int((labels[:min_len] == -1).sum()) + + return { + "methods_used": [name for name, _ in methods], + "per_method_counts": per_method, + "consensus_count": int(consensus_mask.sum()), + "consensus_ratio": round(float(consensus_mask.sum()) / min_len, 4), + "n_samples": min_len, + "consensus_threshold": 2, + "agreement_matrix": { + "all_agree_anomaly": int((votes == len(methods)).sum()), + "majority_anomaly": int(consensus_mask.sum()), + "any_anomaly": int((votes >= 1).sum()), + "no_anomaly": int((votes == 0).sum()), + }, + } + + # ── Summary ─────────────────────────────────────────── + + def summary_full(self) -> tuple[dict[str, Any], dict[str, Any]]: + """Return combined advanced anomaly results (stripped + full). + + Returns a tuple of (stripped_summary, full_results) so that + each method is only called once. 
+ """ + result: dict[str, Any] = {} + full: dict[str, Any] = {} + + try: + iso = self.isolation_forest() + if iso: + result["isolation_forest"] = { + k: v for k, v in iso.items() if k not in ("labels", "scores") + } + full["isolation_forest"] = iso + except Exception as exc: + logger.debug("Isolation Forest skipped: %s", exc) + + try: + lof = self.local_outlier_factor() + if lof: + result["local_outlier_factor"] = { + k: v for k, v in lof.items() if k not in ("labels", "scores") + } + full["local_outlier_factor"] = lof + except Exception as exc: + logger.debug("LOF skipped: %s", exc) + + try: + maha = self.mahalanobis_distance() + if maha: + result["mahalanobis"] = { + k: v for k, v in maha.items() if k not in ("distances", "labels") + } + full["mahalanobis"] = maha + except Exception as exc: + logger.debug("Mahalanobis skipped: %s", exc) + + # Build consensus from already-computed results instead of re-running + try: + methods = [] + if "isolation_forest" in full and "labels" in full["isolation_forest"]: + methods.append(("isolation_forest", full["isolation_forest"]["labels"])) + if "local_outlier_factor" in full and "labels" in full["local_outlier_factor"]: + methods.append(("local_outlier_factor", full["local_outlier_factor"]["labels"])) + if "mahalanobis" in full and "labels" in full["mahalanobis"]: + methods.append(("mahalanobis", full["mahalanobis"]["labels"])) + + if len(methods) >= 2: + min_len = min(len(labels) for _, labels in methods) + vote_matrix = np.zeros((min_len, len(methods))) + for i, (_, labels) in enumerate(methods): + vote_matrix[:, i] = (labels[:min_len] == -1).astype(int) + votes = vote_matrix.sum(axis=1) + consensus_mask = votes >= 2 + + per_method = {} + for name, labels in methods: + per_method[name] = int((labels[:min_len] == -1).sum()) + + cons = { + "methods_used": [name for name, _ in methods], + "per_method_counts": per_method, + "consensus_count": int(consensus_mask.sum()), + "consensus_ratio": round(float(consensus_mask.sum()) / 
min_len, 4), + "n_samples": min_len, + "consensus_threshold": 2, + "agreement_matrix": { + "all_agree_anomaly": int((votes == len(methods)).sum()), + "majority_anomaly": int(consensus_mask.sum()), + "any_anomaly": int((votes >= 1).sum()), + "no_anomaly": int((votes == 0).sum()), + }, + } + result["consensus"] = cons + full["consensus"] = cons + except Exception as exc: + logger.debug("Consensus anomaly skipped: %s", exc) + + return result, full + + def summary(self) -> dict[str, Any]: + """Return combined advanced anomaly detection results (stripped).""" + result, _ = self.summary_full() + return result diff --git a/f2a/stats/advanced_correlation.py b/f2a/stats/advanced_correlation.py new file mode 100644 index 0000000..e43ce4e --- /dev/null +++ b/f2a/stats/advanced_correlation.py @@ -0,0 +1,369 @@ +"""Advanced correlation analysis module. + +Provides partial correlation matrix, mutual information matrix, +bootstrap correlation confidence intervals, and correlation network data. + +References: + - Székely et al. (2007) — distance correlation + - Reshef et al. (2011) — mutual information concepts +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class AdvancedCorrelationStats: + """Advanced correlation analysis for numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + bootstrap_iterations: Number of bootstrap resamples for CI. + max_sample: Max rows to sample for expensive operations. 
+ """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + bootstrap_iterations: int = 1000, + max_sample: int = 5000, + ) -> None: + self._df = df + self._schema = schema + self._bootstrap_n = bootstrap_iterations + self._max_sample = max_sample + + # ── Partial correlation ─────────────────────────────── + + def partial_correlation_matrix(self) -> pd.DataFrame: + """Compute the partial correlation matrix. + + Partial correlation measures the linear relationship between two + variables after removing the effect of all other variables. + Computed via the inverse of the correlation matrix. + + Returns: + Square DataFrame of partial correlations. + """ + cols = self._schema.numeric_columns + if len(cols) < 3: + return pd.DataFrame() + + cols = cols[:30] # limit + df_clean = self._df[cols].dropna() + if len(df_clean) < len(cols) + 2: + return pd.DataFrame() + + corr = df_clean.corr() + if corr.isna().any().any(): + logger.warning("NaN in correlation matrix (zero-variance columns); skipping partial correlation.") + return pd.DataFrame() + try: + precision = np.linalg.inv(corr.values) + except np.linalg.LinAlgError: + logger.warning("Singular correlation matrix; partial correlation unavailable.") + return pd.DataFrame() + + # Partial corr: -P_ij / sqrt(P_ii * P_jj) + d = np.sqrt(np.abs(np.diag(precision))) # abs to handle numerical noise + d[d == 0] = 1e-15 # avoid division by zero + partial = -precision / np.outer(d, d) + np.fill_diagonal(partial, 1.0) + + return pd.DataFrame( + np.round(partial, 4), + index=cols, + columns=cols, + ) + + # ── Mutual information matrix ───────────────────────── + + def mutual_information_matrix(self) -> pd.DataFrame: + """Compute pairwise mutual information between numeric columns. + + Uses sklearn's ``mutual_info_regression`` to estimate MI for + each pair of columns. + + Returns: + Square DataFrame of MI values. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + try: + from sklearn.feature_selection import mutual_info_regression + except ImportError: + logger.info("scikit-learn not available for MI computation.") + return pd.DataFrame() + + cols = cols[:30] # limit + df_clean = self._df[cols].dropna() + if len(df_clean) < 30: + return pd.DataFrame() + + # Sample for speed + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + n = len(cols) + mi_matrix = np.zeros((n, n)) + + for i, col in enumerate(cols): + X = df_clean.drop(columns=[col]).values + y = df_clean[col].values + try: + mi = mutual_info_regression(X, y, random_state=42, n_neighbors=5) + other_cols = [c for c in cols if c != col] + for j, other in enumerate(other_cols): + idx = cols.index(other) + mi_matrix[i, idx] = float(mi[j]) + except Exception: + continue + + # Symmetrize + mi_matrix = (mi_matrix + mi_matrix.T) / 2 + np.fill_diagonal(mi_matrix, 0.0) + + return pd.DataFrame( + np.round(mi_matrix, 4), + index=cols, + columns=cols, + ) + + # ── Bootstrap correlation CI ────────────────────────── + + def bootstrap_correlation_ci( + self, + alpha: float = 0.05, + ) -> pd.DataFrame: + """Compute bootstrap confidence intervals for Pearson correlations. + + For each column pair, resamples ``bootstrap_iterations`` times + and reports the ``alpha/2`` and ``1 - alpha/2`` percentile bounds. + + Args: + alpha: Significance level (default 0.05 → 95% CI). + + Returns: + DataFrame with col_a, col_b, r, ci_lower, ci_upper, ci_width. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[:15] # limit pairs + df_clean = self._df[cols].dropna() + n = len(df_clean) + if n < 20: + return pd.DataFrame() + + # Sample for speed + if n > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + n = len(df_clean) + + rng = np.random.default_rng(42) + rows: list[dict] = [] + + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + x = df_clean[cols[i]].values + y = df_clean[cols[j]].values + + # Point estimate + r_point = float(np.corrcoef(x, y)[0, 1]) + + # Bootstrap + boot_corrs = np.empty(self._bootstrap_n) + for b in range(self._bootstrap_n): + idx = rng.integers(0, n, size=n) + bx, by = x[idx], y[idx] + std_x, std_y = bx.std(), by.std() + if std_x == 0 or std_y == 0: + boot_corrs[b] = 0.0 + else: + boot_corrs[b] = float(np.corrcoef(bx, by)[0, 1]) + + lower = float(np.percentile(boot_corrs, 100 * alpha / 2)) + upper = float(np.percentile(boot_corrs, 100 * (1 - alpha / 2))) + + rows.append({ + "col_a": cols[i], + "col_b": cols[j], + "pearson_r": round(r_point, 4), + "ci_lower": round(lower, 4), + "ci_upper": round(upper, 4), + "ci_width": round(upper - lower, 4), + "significant": not (lower <= 0 <= upper), + }) + + return pd.DataFrame(rows) if rows else pd.DataFrame() + + # ── Correlation network data ────────────────────────── + + def correlation_network(self, threshold: float = 0.5) -> dict[str, Any]: + """Build correlation network data for visualization. + + Nodes are columns; edges exist where |r| >= threshold. + + Args: + threshold: Minimum absolute correlation for an edge. + + Returns: + Dictionary with ``nodes`` (list of names) and ``edges`` + (list of {source, target, weight} dicts). 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return {"nodes": [], "edges": []} + + cols = cols[:30] + corr = self._df[cols].dropna().corr() + + edges: list[dict[str, Any]] = [] + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + r = float(corr.iloc[i, j]) + if abs(r) >= threshold: + edges.append({ + "source": cols[i], + "target": cols[j], + "weight": round(r, 4), + "abs_weight": round(abs(r), 4), + }) + + # Only include nodes that have at least one edge + connected = set() + for e in edges: + connected.add(e["source"]) + connected.add(e["target"]) + + return { + "nodes": sorted(connected), + "edges": edges, + "threshold": threshold, + "n_edges": len(edges), + } + + # ── Distance correlation ────────────────────────────── + + def distance_correlation_matrix(self) -> pd.DataFrame: + """Compute pairwise distance correlations (Székely et al., 2007). + + Distance correlation can detect non-linear dependencies + that Pearson correlation misses. + + Returns: + Square DataFrame of distance correlations. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[:15] # expensive O(n^2) per pair + df_clean = self._df[cols].dropna() + if len(df_clean) < 10: + return pd.DataFrame() + + # Sample for speed + if len(df_clean) > min(self._max_sample, 2000): + df_clean = df_clean.sample(min(self._max_sample, 2000), random_state=42) + + n = len(cols) + matrix = np.eye(n) + + for i in range(n): + for j in range(i + 1, n): + dc = self._dcor(df_clean[cols[i]].values, df_clean[cols[j]].values) + matrix[i, j] = dc + matrix[j, i] = dc + + return pd.DataFrame( + np.round(matrix, 4), + index=cols, + columns=cols, + ) + + @staticmethod + def _dcor(x: np.ndarray, y: np.ndarray) -> float: + """Compute distance correlation between two 1-D arrays.""" + n = len(x) + if n < 4: + return 0.0 + + a = np.abs(x[:, None] - x[None, :]) + b = np.abs(y[:, None] - y[None, :]) + + # Double centering + a_row = a.mean(axis=1, keepdims=True) + a_col = a.mean(axis=0, keepdims=True) + a_grand = a.mean() + A = a - a_row - a_col + a_grand + + b_row = b.mean(axis=1, keepdims=True) + b_col = b.mean(axis=0, keepdims=True) + b_grand = b.mean() + B = b - b_row - b_col + b_grand + + dcov2 = (A * B).mean() + dvar_x = (A * A).mean() + dvar_y = (B * B).mean() + + if dvar_x <= 0 or dvar_y <= 0: + return 0.0 + + return float(np.sqrt(max(dcov2, 0) / np.sqrt(dvar_x * dvar_y))) + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return combined advanced correlation results.""" + result: dict[str, Any] = {} + + try: + pcm = self.partial_correlation_matrix() + if not pcm.empty: + result["partial_correlation"] = pcm + except Exception as exc: + logger.debug("Partial correlation skipped: %s", exc) + + try: + mi = self.mutual_information_matrix() + if not mi.empty: + result["mutual_information"] = mi + except Exception as exc: + logger.debug("MI matrix skipped: %s", exc) + + try: + bci = self.bootstrap_correlation_ci() + if 
not bci.empty: + result["bootstrap_ci"] = bci + except Exception as exc: + logger.debug("Bootstrap CI skipped: %s", exc) + + try: + net = self.correlation_network() + if net.get("edges"): + result["network"] = net + except Exception as exc: + logger.debug("Correlation network skipped: %s", exc) + + try: + dc = self.distance_correlation_matrix() + if not dc.empty: + result["distance_correlation"] = dc + except Exception as exc: + logger.debug("Distance correlation skipped: %s", exc) + + return result diff --git a/f2a/stats/advanced_dimreduction.py b/f2a/stats/advanced_dimreduction.py new file mode 100644 index 0000000..f4ccf5a --- /dev/null +++ b/f2a/stats/advanced_dimreduction.py @@ -0,0 +1,337 @@ +"""Advanced dimensionality reduction module. + +Provides t-SNE, UMAP (optional), and Factor Analysis for +non-linear dimensionality reduction and latent factor discovery. + +References: + - van der Maaten & Hinton (2008) — t-SNE + - McInnes et al. (2018) — UMAP + - Spearman (1904) — Factor Analysis +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class AdvancedDimReductionStats: + """Advanced dimensionality reduction analysis. + + Args: + df: Target DataFrame. + schema: Data schema. + tsne_perplexity: t-SNE perplexity parameter. + max_sample: Max rows to sample. 
+ """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + tsne_perplexity: float = 30.0, + max_sample: int = 5000, + ) -> None: + self._df = df + self._schema = schema + self._tsne_perplexity = tsne_perplexity + self._max_sample = max_sample + + def _prepare_data(self) -> tuple[np.ndarray, pd.DataFrame, list[str]] | None: + """Scale and sample numeric data.""" + cols = self._schema.numeric_columns + if len(cols) < 3: + return None + + try: + from sklearn.preprocessing import StandardScaler + except ImportError: + return None + + df_clean = self._df[cols].dropna() + if len(df_clean) < 20: + return None + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + return X, df_clean, cols + + # ── t-SNE ───────────────────────────────────────────── + + def tsne_2d(self) -> dict[str, Any]: + """Compute t-SNE 2D embedding. + + t-SNE (t-distributed Stochastic Neighbor Embedding) is excellent + for visualizing high-dimensional data in 2D. + + Returns: + Dictionary with embedding coordinates, parameters. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, df_clean, cols = prepared + + try: + from sklearn.manifold import TSNE + except ImportError: + return {} + + perplexity = min(self._tsne_perplexity, max(5, len(X) / 4)) + + try: + tsne = TSNE( + n_components=2, + perplexity=perplexity, + random_state=42, + max_iter=1000, + learning_rate="auto", + init="pca", + ) + embedding = tsne.fit_transform(X) + + return { + "method": "t-SNE", + "embedding": pd.DataFrame( + embedding, + columns=["tsne_1", "tsne_2"], + ), + "perplexity": perplexity, + "kl_divergence": round(float(tsne.kl_divergence_), 4), + "n_samples": len(X), + "n_features": X.shape[1], + } + except Exception as exc: + logger.debug("t-SNE failed: %s", exc) + return {} + + # ── UMAP ────────────────────────────────────────────── + + def umap_2d(self) -> dict[str, Any]: + """Compute UMAP 2D embedding (if umap-learn is installed). + + UMAP (Uniform Manifold Approximation and Projection) preserves + both local and global structure better than t-SNE. + + Returns: + Dictionary with embedding coordinates, parameters. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, df_clean, cols = prepared + + try: + from umap import UMAP + except ImportError: + logger.info("umap-learn not installed; UMAP analysis skipped.") + return {} + + try: + n_neighbors = min(15, max(2, len(X) // 10)) + reducer = UMAP( + n_components=2, + n_neighbors=n_neighbors, + min_dist=0.1, + random_state=42, + ) + embedding = reducer.fit_transform(X) + + return { + "method": "UMAP", + "embedding": pd.DataFrame( + embedding, + columns=["umap_1", "umap_2"], + ), + "n_neighbors": n_neighbors, + "min_dist": 0.1, + "n_samples": len(X), + "n_features": X.shape[1], + } + except Exception as exc: + logger.debug("UMAP failed: %s", exc) + return {} + + # ── Factor Analysis ─────────────────────────────────── + + def factor_analysis(self, n_factors: int | None = None) -> dict[str, Any]: + """Perform Factor Analysis to discover latent factors. + + Factor Analysis models observed variables as linear combinations + of unobserved latent factors plus error terms. + + Args: + n_factors: Number of factors. Auto-detected if None. + + Returns: + Dictionary with loadings, variance explained, factor scores. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, df_clean, cols = prepared + + try: + from sklearn.decomposition import FactorAnalysis + except ImportError: + return {} + + # Auto-detect n_factors using eigenvalue > 1 rule (Kaiser criterion) + if n_factors is None: + cov = np.cov(X.T) + eigenvalues = np.linalg.eigvalsh(cov)[::-1] + n_factors = max(1, int((eigenvalues > 1).sum())) + n_factors = min(n_factors, len(cols) - 1, 10) + + if n_factors < 1: + return {} + + try: + fa = FactorAnalysis(n_components=n_factors, random_state=42) + fa.fit(X) + + loadings = pd.DataFrame( + fa.components_.T, + index=cols, + columns=[f"factor_{i + 1}" for i in range(n_factors)], + ).round(4) + + # Variance explained by each factor (approximate) + factor_var = np.sum(fa.components_ ** 2, axis=1) + total_var = np.sum(np.var(X, axis=0)) + var_explained = factor_var / total_var + + noise_variance = pd.DataFrame({ + "column": cols, + "noise_variance": np.round(fa.noise_variance_, 4), + "communality": np.round( + 1 - fa.noise_variance_ / np.maximum(np.var(X, axis=0), 1e-15), 4 + ), + }).set_index("column") + + return { + "method": "Factor Analysis", + "n_factors": n_factors, + "loadings": loadings, + "variance_explained": [round(float(v), 4) for v in var_explained], + "total_variance_explained": round(float(var_explained.sum()), 4), + "noise_variance": noise_variance, + "n_samples": len(X), + "n_features": len(cols), + } + except Exception as exc: + logger.debug("Factor Analysis failed: %s", exc) + return {} + + # ── Feature contributions ───────────────────────────── + + def feature_contribution(self) -> pd.DataFrame: + """Analyze feature contributions across dimensionality reduction. + + Computes how much each feature contributes to the variance + captured by PCA components. + + Returns: + DataFrame with feature importance across components. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 3: + return pd.DataFrame() + + try: + from sklearn.decomposition import PCA + from sklearn.preprocessing import StandardScaler + except ImportError: + return pd.DataFrame() + + df_clean = self._df[cols].dropna() + if len(df_clean) < len(cols) + 1: + return pd.DataFrame() + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + + n_comp = min(5, len(cols), len(df_clean) - 1) + pca = PCA(n_components=n_comp) + pca.fit(X) + + # Weighted contribution: |loading| * variance_explained + contributions = np.zeros(len(cols)) + for i in range(n_comp): + contributions += np.abs(pca.components_[i]) * pca.explained_variance_ratio_[i] + + result = pd.DataFrame({ + "column": cols, + "contribution_score": np.round(contributions, 4), + "rank": np.argsort(-contributions) + 1, + }).sort_values("contribution_score", ascending=False).set_index("column") + + return result + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return combined dimensionality reduction results.""" + result: dict[str, Any] = {} + + try: + tsne = self.tsne_2d() + if tsne: + # Store summary without large embedding + result["tsne"] = { + k: v for k, v in tsne.items() if k != "embedding" + } + if "embedding" in tsne: + result["tsne_embedding"] = tsne["embedding"] + except Exception as exc: + logger.debug("t-SNE skipped: %s", exc) + + try: + umap_res = self.umap_2d() + if umap_res: + result["umap"] = { + k: v for k, v in umap_res.items() if k != "embedding" + } + if "embedding" in umap_res: + result["umap_embedding"] = umap_res["embedding"] + except Exception as exc: + logger.debug("UMAP skipped: %s", exc) + + try: + fa = self.factor_analysis() + if fa: + result["factor_analysis"] = { + k: v for k, v in fa.items() + if k not in ("loadings", "noise_variance") + } + if "loadings" in fa: + 
result["factor_loadings"] = fa["loadings"] + if "noise_variance" in fa: + result["factor_noise"] = fa["noise_variance"] + except Exception as exc: + logger.debug("Factor Analysis skipped: %s", exc) + + try: + fc = self.feature_contribution() + if not fc.empty: + result["feature_contribution"] = fc + except Exception as exc: + logger.debug("Feature contribution skipped: %s", exc) + + return result diff --git a/f2a/stats/advanced_distribution.py b/f2a/stats/advanced_distribution.py new file mode 100644 index 0000000..fb989e2 --- /dev/null +++ b/f2a/stats/advanced_distribution.py @@ -0,0 +1,348 @@ +"""Advanced distribution analysis module. + +Provides best-fit distribution testing, power-transform recommendation, +Jarque-Bera normality test, ECDF computation, and KDE bandwidth analysis. + +References: + - Box & Cox (1964) — power transform + - Jarque & Bera (1987) — normality test + - Silverman (1986) — KDE bandwidth selection +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd +from scipy import stats as sp_stats + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + +# Candidate distributions for best-fit analysis +_CANDIDATE_DISTRIBUTIONS = [ + ("norm", sp_stats.norm), + ("lognorm", sp_stats.lognorm), + ("expon", sp_stats.expon), + ("gamma", sp_stats.gamma), + ("beta", sp_stats.beta), + ("weibull_min", sp_stats.weibull_min), + ("uniform", sp_stats.uniform), +] + + +class AdvancedDistributionStats: + """Advanced distribution analysis for numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + n_fits: Number of candidate distributions to fit. + max_sample: Max rows to sample for expensive operations. 
+ """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + n_fits: int = 7, + max_sample: int = 5000, + ) -> None: + self._df = df + self._schema = schema + self._n_fits = min(n_fits, len(_CANDIDATE_DISTRIBUTIONS)) + self._max_sample = max_sample + + # ── Best-fit distribution ───────────────────────────── + + def best_fit(self) -> pd.DataFrame: + """Fit candidate distributions and rank by AIC/BIC. + + For each numeric column, fits up to ``n_fits`` scipy distributions, + computes AIC and BIC, and returns the best match. + + Returns: + DataFrame with columns: column, best_dist, aic, bic, ks_stat, ks_p, + params (per-column best). + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + candidates = _CANDIDATE_DISTRIBUTIONS[: self._n_fits] + + for col in cols: + series = self._df[col].dropna() + if len(series) < 20: + continue + + sample = ( + series.sample(self._max_sample, random_state=42) + if len(series) > self._max_sample + else series + ) + data = sample.values + + best: dict[str, Any] | None = None + + for name, dist in candidates: + try: + params = dist.fit(data) + # Log-likelihood + ll = np.sum(dist.logpdf(data, *params)) + if not np.isfinite(ll): + continue + k = len(params) + n = len(data) + aic = 2 * k - 2 * ll + bic = k * np.log(n) - 2 * ll + + ks_stat, ks_p = sp_stats.kstest(data, name, args=params) + + entry = { + "dist_name": name, + "aic": float(aic), + "bic": float(bic), + "ks_stat": float(ks_stat), + "ks_p": float(ks_p), + "params": params, + } + if best is None or aic < best["aic"]: + best = entry + except Exception: + continue + + if best is not None: + rows.append({ + "column": col, + "best_distribution": best["dist_name"], + "aic": round(best["aic"], 2), + "bic": round(best["bic"], 2), + "ks_statistic": round(best["ks_stat"], 4), + "ks_p_value": round(best["ks_p"], 6), + "fit_quality": ( + "good" if best["ks_p"] > 0.05 + else "moderate" if best["ks_p"] > 0.01 + else "poor" + 
), + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Jarque-Bera normality test ──────────────────────── + + def jarque_bera(self) -> pd.DataFrame: + """Perform Jarque-Bera test for normality on each numeric column. + + The JB test jointly tests whether skewness and kurtosis + match a normal distribution. H0: data is normally distributed. + + Returns: + DataFrame with jb_stat, p_value, is_normal columns. + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 8: + continue + try: + jb_stat, p_val = sp_stats.jarque_bera(series) + rows.append({ + "column": col, + "jb_statistic": round(float(jb_stat), 4), + "p_value": round(float(p_val), 6), + "is_normal_0.05": float(p_val) > 0.05, + "skewness": round(float(series.skew()), 4), + "kurtosis": round(float(series.kurtosis()), 4), + }) + except Exception: + continue + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Power transform recommendation ──────────────────── + + def power_transform_recommendation(self) -> pd.DataFrame: + """Recommend power transformations (Box-Cox / Yeo-Johnson). + + Box-Cox requires strictly positive data; Yeo-Johnson works for any data. + Reports the optimal lambda and post-transform skewness. + + Returns: + DataFrame with method, lambda, original_skew, transformed_skew. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 10: + continue + + original_skew = float(series.skew()) + data = series.values + + # Try Box-Cox (positive data only) + bc_lambda = None + bc_skew = None + if (data > 0).all(): + try: + transformed, lmbda = sp_stats.boxcox(data) + bc_lambda = round(float(lmbda), 4) + bc_skew = round(float(pd.Series(transformed).skew()), 4) + except Exception: + pass + + # Yeo-Johnson (any data) + yj_lambda = None + yj_skew = None + try: + transformed, lmbda = sp_stats.yeojohnson(data) + yj_lambda = round(float(lmbda), 4) + yj_skew = round(float(pd.Series(transformed).skew()), 4) + except Exception: + pass + + # Recommendation + if bc_skew is not None and abs(bc_skew) < (abs(yj_skew) if yj_skew is not None else float("inf")): + recommended = "box-cox" + rec_lambda = bc_lambda + rec_skew = bc_skew + elif yj_skew is not None: + recommended = "yeo-johnson" + rec_lambda = yj_lambda + rec_skew = yj_skew + else: + recommended = "none" + rec_lambda = None + rec_skew = None + + needs_transform = abs(original_skew) > 0.5 + + rows.append({ + "column": col, + "original_skewness": round(original_skew, 4), + "recommended_method": recommended, + "optimal_lambda": rec_lambda, + "transformed_skewness": rec_skew, + "needs_transform": needs_transform, + "improvement": ( + round(abs(original_skew) - abs(rec_skew), 4) + if rec_skew is not None + else None + ), + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── ECDF data ───────────────────────────────────────── + + def ecdf(self) -> dict[str, pd.DataFrame]: + """Compute Empirical Cumulative Distribution Function for each column. + + Returns: + Dictionary mapping column name to DataFrame with x, ecdf columns. 
+ """ + cols = self._schema.numeric_columns + result: dict[str, pd.DataFrame] = {} + for col in cols: + series = self._df[col].dropna().sort_values() + if len(series) < 2: + continue + n = len(series) + # Subsample for very large data + if n > self._max_sample: + indices = np.linspace(0, n - 1, self._max_sample, dtype=int) + series = series.iloc[indices] + n = len(series) + result[col] = pd.DataFrame({ + "x": series.values, + "ecdf": np.arange(1, n + 1) / n, + }) + return result + + # ── KDE bandwidth analysis ──────────────────────────── + + def kde_analysis(self) -> pd.DataFrame: + """Compute optimal KDE bandwidth using Silverman's rule of thumb. + + Returns: + DataFrame with column, silverman_bw, scotts_bw, n. + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + n = len(series) + if n < 5: + continue + + std = float(series.std()) + iqr = float(series.quantile(0.75) - series.quantile(0.25)) + + # Silverman's rule: h = 0.9 * min(std, IQR/1.34) * n^(-1/5) + spread = min(std, iqr / 1.34) if iqr > 0 else std + silverman = 0.9 * spread * (n ** (-0.2)) if spread > 0 else None + + # Scott's rule: h = 3.49 * std * n^(-1/3) + scotts = 3.49 * std * (n ** (-1 / 3)) if std > 0 else None + + rows.append({ + "column": col, + "n": n, + "std": round(std, 4), + "iqr": round(iqr, 4), + "silverman_bandwidth": round(silverman, 4) if silverman else None, + "scotts_bandwidth": round(scotts, 4) if scotts else None, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Combined summary ────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return a combined advanced distribution analysis summary.""" + result: dict[str, Any] = {} + + try: + bf = self.best_fit() + if not bf.empty: + result["best_fit"] = bf + except Exception as exc: + logger.debug("Best-fit analysis skipped: %s", exc) + + try: + jb = 
self.jarque_bera() + if not jb.empty: + result["jarque_bera"] = jb + except Exception as exc: + logger.debug("Jarque-Bera test skipped: %s", exc) + + try: + pt = self.power_transform_recommendation() + if not pt.empty: + result["power_transform"] = pt + except Exception as exc: + logger.debug("Power transform analysis skipped: %s", exc) + + try: + kde = self.kde_analysis() + if not kde.empty: + result["kde_bandwidth"] = kde + except Exception as exc: + logger.debug("KDE analysis skipped: %s", exc) + + return result diff --git a/f2a/stats/categorical.py b/f2a/stats/categorical.py new file mode 100644 index 0000000..014c86d --- /dev/null +++ b/f2a/stats/categorical.py @@ -0,0 +1,146 @@ +"""Categorical data analysis module. + +Computes entropy, chi-square independence tests, and frequency analytics +for categorical columns. +""" + +from __future__ import annotations + +import numpy as np +import pandas as pd +from scipy.stats import chi2_contingency + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class CategoricalStats: + """Analyse categorical columns in depth. + + Args: + df: Target DataFrame. + schema: Data schema. + """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + # ── Frequency ───────────────────────────────────────── + + def frequency_table(self, column: str, top_n: int = 20) -> pd.DataFrame: + """Return a frequency table for a single column. + + Args: + column: Column name. + top_n: Max categories to show. 
+ """ + series = self._df[column] + vc = series.value_counts() + total = int(series.count()) + + df = pd.DataFrame({ + "value": vc.index[:top_n], + "count": vc.values[:top_n], + "percentage": (vc.values[:top_n] / total * 100).round(2) if total > 0 else 0, + }) + if len(vc) > top_n: + other_count = int(vc.values[top_n:].sum()) + other_row = pd.DataFrame([{ + "value": f"(other {len(vc) - top_n} categories)", + "count": other_count, + "percentage": round(other_count / total * 100, 2) if total > 0 else 0, + }]) + df = pd.concat([df, other_row], ignore_index=True) + return df + + # ── Entropy ─────────────────────────────────────────── + + def entropy_summary(self) -> pd.DataFrame: + """Compute Shannon entropy for each categorical column.""" + cols = self._schema.categorical_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + vc = self._df[col].value_counts(normalize=True) + entropy = float(-np.sum(vc * np.log2(vc + 1e-15))) + max_entropy = float(np.log2(len(vc))) if len(vc) > 1 else 0.0 + rows.append({ + "column": col, + "unique_values": int(self._df[col].nunique()), + "entropy": round(entropy, 4), + "max_entropy": round(max_entropy, 4), + "normalized_entropy": round(entropy / max_entropy, 4) if max_entropy > 0 else 0.0, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Chi-square independence ─────────────────────────── + + def chi_square_matrix(self) -> pd.DataFrame: + """Chi-square independence test p-values between categorical pairs. + + Returns: + Square DataFrame of p-values. A low p-value (<0.05) signals + a statistically significant association between two columns. 
+ """ + cols = self._schema.categorical_columns + if len(cols) < 2: + return pd.DataFrame() + + # Limit to prevent combinatorial explosion + cols = cols[:15] + n = len(cols) + matrix = pd.DataFrame(np.ones((n, n)), index=cols, columns=cols) + + for i in range(n): + for j in range(i + 1, n): + try: + ct = pd.crosstab(self._df[cols[i]], self._df[cols[j]]) + if ct.size > 0 and ct.sum().sum() > 0: + _, p, _, _ = chi2_contingency(ct) + matrix.iloc[i, j] = round(p, 6) + matrix.iloc[j, i] = round(p, 6) + except Exception: + matrix.iloc[i, j] = np.nan + matrix.iloc[j, i] = np.nan + + return matrix + + # ── Combined summary ────────────────────────────────── + + def summary(self) -> pd.DataFrame: + """Return a combined categorical analysis summary table.""" + cols = self._schema.categorical_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col] + vc = series.value_counts() + top = vc.index[0] if len(vc) > 0 else None + top_freq = int(vc.iloc[0]) if len(vc) > 0 else 0 + count = int(series.count()) + + # Entropy + vc_norm = series.value_counts(normalize=True) + entropy = float(-np.sum(vc_norm * np.log2(vc_norm + 1e-15))) + max_entropy = float(np.log2(len(vc_norm))) if len(vc_norm) > 1 else 0.0 + + rows.append({ + "column": col, + "count": count, + "unique": int(series.nunique()), + "top_value": str(top)[:50] if top is not None else None, + "top_frequency": top_freq, + "top_%": round(top_freq / count * 100, 2) if count > 0 else 0.0, + "entropy": round(entropy, 4), + "norm_entropy": round(entropy / max_entropy, 4) if max_entropy > 0 else 0.0, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() diff --git a/f2a/stats/clustering.py b/f2a/stats/clustering.py new file mode 100644 index 0000000..d97bf64 --- /dev/null +++ b/f2a/stats/clustering.py @@ -0,0 +1,354 @@ +"""Clustering analysis module. 
+ +Provides K-Means (with elbow + silhouette), DBSCAN (auto-eps), +hierarchical clustering, and cluster profiling. + +References: + - MacQueen (1967) — K-Means + - Ester et al. (1996) — DBSCAN + - Rousseeuw (1987) — silhouette score +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class ClusteringStats: + """Clustering analysis for numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + max_k: Maximum k for K-Means elbow search. + max_sample: Max rows to sample. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + max_k: int = 10, + max_sample: int = 5000, + ) -> None: + self._df = df + self._schema = schema + self._max_k = max_k + self._max_sample = max_sample + + def _prepare_data(self) -> tuple[np.ndarray, list[str]] | None: + """Scale and sample numeric data for clustering.""" + cols = self._schema.numeric_columns + if len(cols) < 2: + return None + + try: + from sklearn.preprocessing import StandardScaler + except ImportError: + logger.info("scikit-learn not available for clustering.") + return None + + df_clean = self._df[cols].dropna() + if len(df_clean) < 10: + return None + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + return X, cols + + # ── K-Means with elbow & silhouette ─────────────────── + + def kmeans_analysis(self) -> dict[str, Any]: + """Perform K-Means clustering with elbow and silhouette analysis. 
+ + Returns: + Dictionary with: + - ``elbow_data``: DataFrame (k, inertia, silhouette) + - ``optimal_k``: best k by silhouette score + - ``labels``: cluster labels for optimal k + - ``cluster_sizes``: dict of cluster → count + """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, cols = prepared + + try: + from sklearn.cluster import KMeans + from sklearn.metrics import silhouette_score + except ImportError: + return {} + + max_k = min(self._max_k, len(X) - 1) + if max_k < 2: + return {} + + rows: list[dict] = [] + best_score = -1.0 + best_k = 2 + best_labels: np.ndarray | None = None + + for k in range(2, max_k + 1): + try: + km = KMeans(n_clusters=k, random_state=42, n_init=10, max_iter=300) + labels = km.fit_predict(X) + inertia = float(km.inertia_) + sil = float(silhouette_score(X, labels)) + + rows.append({ + "k": k, + "inertia": round(inertia, 2), + "silhouette_score": round(sil, 4), + }) + + if sil > best_score: + best_score = sil + best_k = k + best_labels = labels + except Exception: + continue + + if not rows: + return {} + + elbow_df = pd.DataFrame(rows).set_index("k") + + # Cluster sizes + sizes: dict[str, int] = {} + if best_labels is not None: + unique, counts = np.unique(best_labels, return_counts=True) + sizes = {f"cluster_{int(u)}": int(c) for u, c in zip(unique, counts)} + + return { + "elbow_data": elbow_df, + "optimal_k": best_k, + "best_silhouette": round(best_score, 4), + "cluster_sizes": sizes, + "n_samples": len(X), + } + + # ── DBSCAN with auto-eps ────────────────────────────── + + def dbscan_analysis(self) -> dict[str, Any]: + """Perform DBSCAN clustering with automated eps selection. + + Uses the k-distance graph method to estimate eps. + + Returns: + Dictionary with labels, n_clusters, n_noise, cluster_sizes. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, cols = prepared + + try: + from sklearn.cluster import DBSCAN + from sklearn.neighbors import NearestNeighbors + except ImportError: + return {} + + # Auto-eps via k-distance graph (k = min_samples) + min_samples = max(2, min(5, len(X) // 20)) + try: + nn = NearestNeighbors(n_neighbors=min_samples) + nn.fit(X) + distances, _ = nn.kneighbors(X) + k_distances = np.sort(distances[:, -1]) + + # Estimate eps at the "elbow" using maximum curvature + n = len(k_distances) + if n < 10: + return {} + + # Simple elbow: point of maximum second derivative + second_deriv = np.diff(k_distances, n=2) + elbow_idx = int(np.argmax(second_deriv)) + 1 + eps = float(k_distances[elbow_idx]) + eps = max(eps, 0.1) # minimum eps + + db = DBSCAN(eps=eps, min_samples=min_samples) + labels = db.fit_predict(X) + + n_clusters = len(set(labels) - {-1}) + n_noise = int((labels == -1).sum()) + + sizes: dict[str, int] = {} + unique, counts = np.unique(labels, return_counts=True) + for u, c in zip(unique, counts): + lbl = "noise" if u == -1 else f"cluster_{int(u)}" + sizes[lbl] = int(c) + + return { + "eps": round(eps, 4), + "min_samples": min_samples, + "n_clusters": n_clusters, + "n_noise": n_noise, + "noise_ratio": round(n_noise / len(X), 4), + "cluster_sizes": sizes, + "n_samples": len(X), + } + except Exception as exc: + logger.debug("DBSCAN failed: %s", exc) + return {} + + # ── Hierarchical clustering ─────────────────────────── + + def hierarchical_analysis(self) -> dict[str, Any]: + """Perform hierarchical (agglomerative) clustering. + + Returns: + Dictionary with n_clusters (auto), labels, linkage method, + and dendrogram data. 
+ """ + prepared = self._prepare_data() + if prepared is None: + return {} + + X, cols = prepared + + try: + from sklearn.cluster import AgglomerativeClustering + from sklearn.metrics import silhouette_score + from scipy.cluster.hierarchy import linkage + except ImportError: + return {} + + # Try different n_clusters, pick best silhouette + best_k = 2 + best_score = -1.0 + + max_k = min(self._max_k, len(X) - 1) + for k in range(2, max_k + 1): + try: + agg = AgglomerativeClustering(n_clusters=k, linkage="ward") + labels = agg.fit_predict(X) + score = float(silhouette_score(X, labels)) + if score > best_score: + best_score = score + best_k = k + except Exception: + continue + + # Final fit with best k + try: + agg = AgglomerativeClustering(n_clusters=best_k, linkage="ward") + labels = agg.fit_predict(X) + + sizes: dict[str, int] = {} + unique, counts = np.unique(labels, return_counts=True) + for u, c in zip(unique, counts): + sizes[f"cluster_{int(u)}"] = int(c) + + # Linkage matrix for dendrogram + Z = linkage(X[:min(500, len(X))], method="ward") + + return { + "optimal_k": best_k, + "silhouette_score": round(best_score, 4), + "linkage_method": "ward", + "cluster_sizes": sizes, + "linkage_matrix": Z, + "n_samples": len(X), + } + except Exception as exc: + logger.debug("Hierarchical clustering failed: %s", exc) + return {} + + # ── Cluster profiling ───────────────────────────────── + + def cluster_profiles(self, kmeans_result: dict[str, Any] | None = None) -> pd.DataFrame: + """Profile clusters by computing per-cluster mean of each feature. + + Uses the optimal K-Means clustering result. + + Args: + kmeans_result: Pre-computed K-Means result (avoids re-running). + + Returns: + DataFrame with cluster labels as index, feature means as columns. 
+ """ + if kmeans_result is None: + kmeans_result = self.kmeans_analysis() + if not kmeans_result: + return pd.DataFrame() + + prepared = self._prepare_data() + if prepared is None: + return pd.DataFrame() + + X, cols = prepared + optimal_k = kmeans_result["optimal_k"] + + try: + from sklearn.cluster import KMeans + except ImportError: + return pd.DataFrame() + + try: + km = KMeans(n_clusters=optimal_k, random_state=42, n_init=10) + labels = km.fit_predict(X) + + # Build profiles using original (unscaled) data + df_clean = self._df[cols].dropna() + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + df_clean = df_clean.copy() + df_clean["cluster"] = labels[: len(df_clean)] + + profiles = df_clean.groupby("cluster").mean().round(4) + profiles.index = [f"cluster_{i}" for i in profiles.index] + + return profiles + except Exception: + return pd.DataFrame() + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return combined clustering analysis results.""" + result: dict[str, Any] = {} + + try: + km = self.kmeans_analysis() + if km: + result["kmeans"] = km + except Exception as exc: + logger.debug("K-Means analysis skipped: %s", exc) + + try: + db = self.dbscan_analysis() + if db: + result["dbscan"] = db + except Exception as exc: + logger.debug("DBSCAN analysis skipped: %s", exc) + + try: + hc = self.hierarchical_analysis() + if hc: + result["hierarchical"] = hc + except Exception as exc: + logger.debug("Hierarchical analysis skipped: %s", exc) + + try: + cp = self.cluster_profiles(kmeans_result=km if "kmeans" in result else None) + if not cp.empty: + result["profiles"] = cp + except Exception as exc: + logger.debug("Cluster profiling skipped: %s", exc) + + return result diff --git a/f2a/stats/column_role.py b/f2a/stats/column_role.py new file mode 100644 index 0000000..f48aaf9 --- /dev/null +++ b/f2a/stats/column_role.py @@ -0,0 +1,283 @@ +"""Column role 
classification — auto-detect the semantic role of each column. + +Infers whether a column acts as an ID, timestamp, numeric feature, categorical +feature, ordinal feature, binary variable, text field, constant, or potential +target variable. Each assignment comes with a confidence score and evidence +so downstream consumers (ML readiness, insight engine) can make informed +decisions. +""" + +from __future__ import annotations + +import re +from dataclasses import dataclass, field +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + +# ===================================================================== +# Data classes +# ===================================================================== + +@dataclass +class ColumnRole: + """Role assignment for a single column.""" + + column: str + primary_role: str # id | timestamp | numeric_feature | categorical_feature | ordinal_feature | binary | text | constant | target_candidate + confidence: float # 0-1 + secondary_role: str | None = None + properties: dict[str, Any] = field(default_factory=dict) + + def to_dict(self) -> dict[str, Any]: + return { + "column": self.column, + "primary_role": self.primary_role, + "confidence": self.confidence, + "secondary_role": self.secondary_role, + "properties": self.properties, + } + + +# Regex patterns for name-based heuristics +_ID_PATTERNS = re.compile( + r"(^id$|_id$|^pk$|^key$|^index$|^uid$|^uuid$|^guid$|^row_?num|^seq)", + re.IGNORECASE, +) +_TIME_PATTERNS = re.compile( + r"(date|time|_at$|_ts$|timestamp|created|updated|modified|year|month|day)", + re.IGNORECASE, +) +_TARGET_PATTERNS = re.compile( + r"(^target$|^label$|^y$|^class$|^outcome$|^response$|^result$|^is_|^has_)", + re.IGNORECASE, +) +_ORDINAL_PATTERNS = re.compile( + r"(level|grade|rating|rank|score|priority|stage|phase|tier|degree)", + re.IGNORECASE, +) + + +class ColumnRoleClassifier: + 
"""Automatically assign a semantic role to every column in the dataset. + + Parameters + ---------- + df : pd.DataFrame + The analysis DataFrame. + schema : DataSchema + Column type metadata. + """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + def classify(self) -> list[ColumnRole]: + """Return a role assignment for each column.""" + roles: list[ColumnRole] = [] + for col_info in self._schema.columns: + role = self._classify_single(col_info) + roles.append(role) + return roles + + def summary(self) -> pd.DataFrame: + """Summary table: column × role × confidence.""" + roles = self.classify() + rows = [r.to_dict() for r in roles] + df = pd.DataFrame(rows) + if not df.empty: + df = df.set_index("column") + return df + + # ------------------------------------------------------------------ + + def _classify_single(self, col_info: Any) -> ColumnRole: + col_name = col_info.name + dtype = str(col_info.dtype) + inferred = col_info.inferred_type # "numeric", "categorical", "text", "datetime", "boolean" + n_unique = col_info.n_unique + n_missing = col_info.n_missing + n_total = self._schema.n_rows + + unique_ratio = n_unique / max(n_total, 1) + + # 1. Constant + if n_unique <= 1: + return ColumnRole( + column=col_name, + primary_role="constant", + confidence=1.0, + properties={"n_unique": n_unique}, + ) + + # 2. Binary + if n_unique == 2: + conf = 0.9 + secondary = None + if _TARGET_PATTERNS.search(col_name): + secondary = "target_candidate" + conf = 0.85 + return ColumnRole( + column=col_name, + primary_role="binary", + confidence=conf, + secondary_role=secondary, + properties={"n_unique": 2, "values": self._top_values(col_name, 2)}, + ) + + # 3. 
Datetime / timestamp + if inferred == "datetime": + return ColumnRole( + column=col_name, + primary_role="timestamp", + confidence=0.95, + properties={"dtype": dtype}, + ) + if _TIME_PATTERNS.search(col_name) and inferred == "numeric": + # Possibly an epoch timestamp + series = pd.to_numeric(self._df[col_name], errors="coerce").dropna() + if not series.empty and self._is_monotonic(series): + return ColumnRole( + column=col_name, + primary_role="timestamp", + confidence=0.70, + properties={"dtype": dtype, "hint": "monotonic numeric with time-like name"}, + ) + + # 4. ID-like + if self._is_id_like(col_name, unique_ratio, n_unique, inferred): + conf = 0.6 + if _ID_PATTERNS.search(col_name): + conf = 0.9 + elif unique_ratio > 0.99: + conf = 0.85 + return ColumnRole( + column=col_name, + primary_role="id", + confidence=conf, + properties={"unique_ratio": round(unique_ratio, 4)}, + ) + + # 5. Text + if inferred == "text": + return ColumnRole( + column=col_name, + primary_role="text", + confidence=0.9, + properties={"avg_length": self._avg_str_length(col_name)}, + ) + + # 6. Ordinal feature + if self._is_ordinal(col_name, inferred, n_unique, n_total): + conf = 0.7 + if _ORDINAL_PATTERNS.search(col_name): + conf = 0.85 + return ColumnRole( + column=col_name, + primary_role="ordinal_feature", + confidence=conf, + properties={"n_unique": n_unique}, + ) + + # 7. Target candidate (categorical with specific naming) + if _TARGET_PATTERNS.search(col_name) and n_unique <= 20: + return ColumnRole( + column=col_name, + primary_role="target_candidate", + confidence=0.7, + properties={"n_unique": n_unique, "inferred_type": inferred}, + ) + + # 8. Categorical feature + if inferred == "categorical" or inferred == "boolean": + return ColumnRole( + column=col_name, + primary_role="categorical_feature", + confidence=0.85, + properties={"n_unique": n_unique, "unique_ratio": round(unique_ratio, 4)}, + ) + + # 9. 
Numeric feature (default for numeric) + if inferred == "numeric": + secondary = None + if _TARGET_PATTERNS.search(col_name): + secondary = "target_candidate" + return ColumnRole( + column=col_name, + primary_role="numeric_feature", + confidence=0.85, + secondary_role=secondary, + properties={"dtype": dtype}, + ) + + # Fallback + return ColumnRole( + column=col_name, + primary_role="numeric_feature" if inferred == "numeric" else "categorical_feature", + confidence=0.5, + properties={"inferred_type": inferred}, + ) + + # ------------------------------------------------------------------ + # Heuristics + # ------------------------------------------------------------------ + + def _is_id_like(self, col_name: str, unique_ratio: float, n_unique: int, inferred: str) -> bool: + if _ID_PATTERNS.search(col_name): + return unique_ratio > 0.8 + if unique_ratio > 0.95 and n_unique > 20: + if inferred in ("text", "categorical"): + return True + if inferred == "numeric": + series = pd.to_numeric(self._df[col_name], errors="coerce").dropna() + if not series.empty and self._is_monotonic(series): + return True + return False + + @staticmethod + def _is_monotonic(series: pd.Series) -> bool: + """Check if a numeric series is (roughly) monotonic.""" + if len(series) < 5: + return False + diffs = series.diff().dropna() + if diffs.empty: + return False + pos = (diffs >= 0).sum() + neg = (diffs <= 0).sum() + ratio = max(pos, neg) / len(diffs) + return ratio > 0.95 + + def _is_ordinal(self, col_name: str, inferred: str, n_unique: int, n_total: int) -> bool: + """Heuristic: integer column with small distinct count and ordinal-like name.""" + if n_unique > 20 or n_unique < 3: + return False + if _ORDINAL_PATTERNS.search(col_name): + return True + if inferred == "numeric" and col_name in self._df.columns: + series = pd.to_numeric(self._df[col_name], errors="coerce").dropna() + if not series.empty and (series == series.astype(int)).all(): + vals = sorted(series.unique()) + if len(vals) <= 15: 
+ # Check if values are roughly consecutive + span = vals[-1] - vals[0] + if span > 0 and len(vals) / (span + 1) > 0.5: + return True + return False + + def _top_values(self, col_name: str, n: int = 5) -> list: + if col_name not in self._df.columns: + return [] + return self._df[col_name].dropna().value_counts().head(n).index.tolist() + + def _avg_str_length(self, col_name: str) -> float: + if col_name not in self._df.columns: + return 0.0 + s = self._df[col_name].dropna().astype(str) + return round(float(s.str.len().mean()), 1) if not s.empty else 0.0 diff --git a/f2a/stats/correlation.py b/f2a/stats/correlation.py new file mode 100644 index 0000000..b66b379 --- /dev/null +++ b/f2a/stats/correlation.py @@ -0,0 +1,157 @@ +"""Correlation analysis module.""" + +from __future__ import annotations + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class CorrelationStats: + """Analyze correlations between columns. + + Args: + df: Target DataFrame to analyze. + schema: Data schema. 
+ """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + def pearson(self) -> pd.DataFrame: + """Return the Pearson correlation matrix.""" + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + return self._df[cols].corr(method="pearson") + + def spearman(self) -> pd.DataFrame: + """Return the Spearman rank correlation matrix.""" + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + return self._df[cols].corr(method="spearman") + + def kendall(self) -> pd.DataFrame: + """Return the Kendall tau correlation matrix.""" + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + # Kendall is expensive — limit columns + cols = cols[:15] + return self._df[cols].corr(method="kendall") + + def cramers_v_matrix(self) -> pd.DataFrame: + """Return the Cramer's V matrix for categorical columns.""" + cols = self._schema.categorical_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[:15] + n = len(cols) + matrix = pd.DataFrame(np.ones((n, n)), index=cols, columns=cols) + + for i in range(n): + for j in range(i + 1, n): + v = self._cramers_v(self._df[cols[i]], self._df[cols[j]]) + matrix.iloc[i, j] = v + matrix.iloc[j, i] = v + + return matrix + + def vif(self) -> pd.DataFrame: + """Compute Variance Inflation Factor for numeric columns. + + VIF > 5 suggests moderate multicollinearity; + VIF > 10 suggests severe multicollinearity. + + Uses the inverse-correlation-matrix diagonal method. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + df_clean = self._df[cols].dropna() + if len(df_clean) < len(cols) + 1: + return pd.DataFrame() + + corr = df_clean.corr() + try: + corr_inv = np.linalg.inv(corr.values) + vif_values = np.diag(corr_inv) + except np.linalg.LinAlgError: + logger.warning("Singular correlation matrix; VIF cannot be computed.") + return pd.DataFrame() + + rows: list[dict] = [] + for col, vif_val in zip(cols, vif_values): + severity = ( + "severe" if vif_val > 10 + else "moderate" if vif_val > 5 + else "low" + ) + rows.append({ + "column": col, + "VIF": round(float(vif_val), 2), + "multicollinearity": severity, + }) + + return ( + pd.DataFrame(rows) + .set_index("column") + .sort_values("VIF", ascending=False) + ) + + def high_correlations(self, threshold: float = 0.9) -> list[tuple[str, str, float]]: + """Return pairs with high correlation. + + Args: + threshold: Absolute correlation coefficient threshold. + + Returns: + List of ``(col_a, col_b, correlation)`` tuples. 
+ """ + corr = self.pearson() + if corr.empty: + return [] + + pairs: list[tuple[str, str, float]] = [] + cols = corr.columns + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + val = corr.iloc[i, j] + if abs(val) >= threshold: + pairs.append((cols[i], cols[j], round(float(val), 4))) + + if pairs: + logger.warning( + "Multicollinearity warning: %d column pairs have |r| >= %.2f.", + len(pairs), + threshold, + ) + + return pairs + + # ── Internal helpers ──────────────────────────────── + + @staticmethod + def _cramers_v(x: pd.Series, y: pd.Series) -> float: + """Compute Cramer's V between two categorical variables.""" + confusion = pd.crosstab(x, y) + n = confusion.sum().sum() + if n == 0: + return 0.0 + + from scipy.stats import chi2_contingency + + chi2, _, _, _ = chi2_contingency(confusion) + min_dim = min(confusion.shape) - 1 + if min_dim == 0: + return 0.0 + + return float(np.sqrt(chi2 / (n * min_dim))) diff --git a/f2a/stats/cross_analysis.py b/f2a/stats/cross_analysis.py new file mode 100644 index 0000000..95497ae --- /dev/null +++ b/f2a/stats/cross_analysis.py @@ -0,0 +1,497 @@ +"""Cross-dimensional analysis — discovers patterns across analysis boundaries. + +Instead of treating each analysis (correlation, cluster, outlier, missing, …) +in isolation, this module crosses two or more dimensions to reveal composite +patterns that single-axis analyses miss: + +* **Outlier × Cluster**: Are anomalies concentrated in specific clusters? +* **Missing × Correlation**: Is missingness systematic (MAR) or random (MCAR)? +* **Distribution × Outlier**: Which outlier method is appropriate given tail shape? +* **Cluster × Correlation (Simpson's Paradox)**: Does aggregation mask reversed relationships? +* **Feature Importance × Missing**: Are critical features losing information? +* **Dim-Reduction × Cluster × Anomaly**: Unified 2-D embedding overlay. 
+""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd +from scipy import stats as sp_stats + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class CrossAnalysis: + """Run all cross-dimensional analyses given pre-computed stats. + + Parameters + ---------- + df : pd.DataFrame + The (cleaned) analysis DataFrame. + schema : DataSchema + Inferred schema. + stats : StatsResult + Previously computed statistical results (basic + advanced). + max_cols : int + Maximum numeric columns to consider in expensive pairwise ops. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + stats: Any, + *, + max_cols: int = 20, + ) -> None: + self._df = df + self._schema = schema + self._stats = stats + self._max_cols = max_cols + + # ------------------------------------------------------------------ + # Public + # ------------------------------------------------------------------ + + def summary(self) -> dict[str, Any]: + """Execute all cross-analyses and return a combined dict.""" + result: dict[str, Any] = {} + + try: + r = self.outlier_by_cluster() + if r is not None: + result["outlier_by_cluster"] = r + except Exception as exc: + logger.debug("outlier_by_cluster failed: %s", exc) + + try: + r = self.missing_correlation() + if r is not None: + result["missing_correlation"] = r + except Exception as exc: + logger.debug("missing_correlation failed: %s", exc) + + try: + r = self.distribution_outlier_fitness() + if r is not None: + result["distribution_outlier_fitness"] = r + except Exception as exc: + logger.debug("distribution_outlier_fitness failed: %s", exc) + + try: + r = self.simpson_paradox() + if r is not None: + result["simpson_paradox"] = r + except Exception as exc: + logger.debug("simpson_paradox failed: %s", exc) + + try: + r = self.importance_vs_missing() + if r is not None: + result["importance_vs_missing"] = r + except 
Exception as exc: + logger.debug("importance_vs_missing failed: %s", exc) + + try: + r = self.unified_2d_embedding() + if r is not None: + result["unified_embedding"] = r + except Exception as exc: + logger.debug("unified_2d_embedding failed: %s", exc) + + return result + + # ------------------------------------------------------------------ + # X1. Outlier × Cluster + # ------------------------------------------------------------------ + + def outlier_by_cluster(self) -> dict[str, Any] | None: + """Per-cluster anomaly rates from consensus anomaly + K-Means labels.""" + adv = self._stats.advanced_stats + clustering = adv.get("clustering", {}) + anomaly_full = adv.get("advanced_anomaly_full", {}) + + km = clustering.get("kmeans") + iso = anomaly_full.get("isolation_forest") + if not km or not iso: + return None + + labels_cluster = km.get("labels") + labels_anomaly = iso.get("labels") + if labels_cluster is None or labels_anomaly is None: + return None + + # Align lengths (both should be n_samples after sampling) + n = min(len(labels_cluster), len(labels_anomaly)) + if n == 0: + return None + + c_labels = np.asarray(labels_cluster[:n]) + a_labels = np.asarray(labels_anomaly[:n]) + + anomaly_mask = a_labels == -1 + unique_clusters = np.unique(c_labels) + + rows = [] + for cl in unique_clusters: + cl_mask = c_labels == cl + cl_size = int(cl_mask.sum()) + cl_anomalies = int((cl_mask & anomaly_mask).sum()) + rows.append({ + "cluster": f"cluster_{cl}" if cl >= 0 else "noise", + "size": cl_size, + "anomaly_count": cl_anomalies, + "anomaly_rate": round(cl_anomalies / max(cl_size, 1), 4), + }) + + df_result = pd.DataFrame(rows) + + # Chi-square test for uniform anomaly distribution + expected_rate = anomaly_mask.sum() / max(n, 1) + chi2_p = None + if len(unique_clusters) >= 2 and anomaly_mask.sum() > 0: + observed = df_result["anomaly_count"].values + expected = df_result["size"].values * expected_rate + expected = np.where(expected < 1, 1, expected) + try: + chi2, p = 
sp_stats.chisquare(observed, f_exp=expected) + chi2_p = float(p) + except Exception: + pass + + return { + "table": df_result, + "overall_anomaly_rate": float(expected_rate), + "chi2_uniform_p": chi2_p, + "is_uniform": chi2_p is not None and chi2_p > 0.05, + } + + # ------------------------------------------------------------------ + # X2. Missing × Correlation (MAR detection) + # ------------------------------------------------------------------ + + def missing_correlation(self) -> dict[str, Any] | None: + """Correlate missing-indicators with numeric columns to diagnose MAR.""" + mi = self._stats.missing_info + if mi.empty or "missing_ratio" not in mi.columns: + return None + + # Columns with any missing + miss_cols = mi[mi["missing_ratio"] > 0].index.tolist() + if not miss_cols: + return None + + num_cols = self._schema.numeric_columns[:self._max_cols] + if not num_cols: + return None + + # Build indicator matrix + indicators = pd.DataFrame(index=self._df.index) + for col in miss_cols: + if col in self._df.columns: + indicators[f"{col}_missing"] = self._df[col].isna().astype(int) + + if indicators.empty: + return None + + # Correlate indicators with numeric columns + num_data = self._df[num_cols].apply(pd.to_numeric, errors="coerce") + + corr_matrix = pd.DataFrame( + np.nan, index=indicators.columns, columns=num_cols, + ) + mar_suspects: list[dict[str, Any]] = [] + + for ind_col in indicators.columns: + ind_series = indicators[ind_col] + if ind_series.sum() < 5 or ind_series.sum() == len(ind_series): + continue # too few or all missing + for num_col in num_cols: + valid = num_data[num_col].notna() & ind_series.notna() + if valid.sum() < 10: + continue + try: + r, p = sp_stats.pointbiserialr( + ind_series[valid].values, + num_data[num_col][valid].values, + ) + corr_matrix.loc[ind_col, num_col] = r + if abs(r) > 0.2 and p < 0.05: + mar_suspects.append({ + "missing_column": ind_col.replace("_missing", ""), + "correlated_with": num_col, + "correlation": 
round(float(r), 4), + "p_value": round(float(p), 6), + }) + except Exception: + continue + + # Diagnose MCAR vs MAR + diagnosis = "MCAR_likely" + if mar_suspects: + max_abs_r = max(abs(s["correlation"]) for s in mar_suspects) + if max_abs_r > 0.4: + diagnosis = "MAR_strong" + elif max_abs_r > 0.2: + diagnosis = "MAR_moderate" + + # Imputation strategy recommendation + strategies: dict[str, str] = {} + for col in miss_cols: + ratio = float(mi.loc[col, "missing_ratio"]) if col in mi.index else 0 + is_numeric = col in self._schema.numeric_columns + has_mar = any(s["missing_column"] == col for s in mar_suspects) + + if ratio > 0.5: + strategies[col] = "drop_column" + elif has_mar: + strategies[col] = "knn_or_mice" if is_numeric else "model_based" + elif is_numeric: + strategies[col] = "median" + else: + strategies[col] = "mode" + + return { + "indicator_correlation": corr_matrix.dropna(how="all", axis=0).dropna(how="all", axis=1), + "mar_suspects": pd.DataFrame(mar_suspects) if mar_suspects else pd.DataFrame(), + "diagnosis": diagnosis, + "imputation_strategy": strategies, + } + + # ------------------------------------------------------------------ + # X3. 
Distribution × Outlier Method Fitness + # ------------------------------------------------------------------ + + def distribution_outlier_fitness(self) -> pd.DataFrame | None: + """Recommend the best outlier detection method per column based on distribution shape.""" + dist = self._stats.distribution_info + summary = self._stats.summary + if dist.empty or summary.empty: + return None + + rows = [] + for col in dist.index: + if col not in summary.index: + continue + + skew = dist.loc[col].get("skewness", 0) or 0 + kurt = dist.loc[col].get("kurtosis", 0) or 0 + is_normal = dist.loc[col].get("is_normal_0.05", False) + + abs_skew = abs(skew) + reasons = [] + + if is_normal and abs_skew < 1 and abs(kurt) < 3: + method = "zscore" + reasons.append("approximately normal distribution") + elif abs_skew > 2 or kurt > 7: + method = "isolation_forest" + reasons.append("heavy-tailed or highly skewed distribution") + if abs_skew > 2: + reasons.append(f"skewness={skew:.2f}") + if kurt > 7: + reasons.append(f"kurtosis={kurt:.1f}") + elif abs_skew > 1 or kurt > 3: + method = "iqr" + reasons.append("moderately skewed/heavy-tailed") + else: + method = "iqr" + reasons.append("moderate distribution shape") + + rows.append({ + "column": col, + "skewness": round(float(skew), 3), + "kurtosis": round(float(kurt), 3), + "is_normal": bool(is_normal), + "recommended_method": method, + "reason": "; ".join(reasons), + }) + + if not rows: + return None + return pd.DataFrame(rows).set_index("column") + + # ------------------------------------------------------------------ + # X4. 
Cluster × Correlation — Simpson's Paradox Detection + # ------------------------------------------------------------------ + + def simpson_paradox(self) -> dict[str, Any] | None: + """Detect Simpson's paradox: overall correlation direction reverses within clusters.""" + adv = self._stats.advanced_stats + clustering = adv.get("clustering", {}) + km = clustering.get("kmeans") + if not km: + return None + + cluster_labels = km.get("labels") + if cluster_labels is None: + return None + + pearson = self._stats.correlation_matrix + if pearson.empty: + return None + + num_cols = [c for c in pearson.columns if c in self._df.columns][:self._max_cols] + if len(num_cols) < 2: + return None + + n = min(len(cluster_labels), len(self._df)) + labels = np.asarray(cluster_labels[:n]) + df_sub = self._df.iloc[:n] + + unique_clusters = np.unique(labels) + if len(unique_clusters) < 2: + return None + + paradoxes: list[dict[str, Any]] = [] + + for i, c1 in enumerate(num_cols): + for c2 in num_cols[i + 1:]: + overall_r = pearson.loc[c1, c2] if c1 in pearson.index and c2 in pearson.columns else 0 + if abs(overall_r) < 0.1: + continue # skip negligible correlations + + cluster_corrs = {} + n_reversed = 0 + for cl in unique_clusters: + mask = labels == cl + if mask.sum() < 10: + continue + try: + x = pd.to_numeric(df_sub.loc[mask.nonzero()[0], c1], errors="coerce").dropna() + y = pd.to_numeric(df_sub.loc[mask.nonzero()[0], c2], errors="coerce").dropna() + common_idx = x.index.intersection(y.index) + if len(common_idx) < 10: + continue + r, _ = sp_stats.pearsonr(x.loc[common_idx], y.loc[common_idx]) + cluster_corrs[f"cluster_{cl}"] = round(float(r), 4) + if np.sign(r) != np.sign(overall_r) and abs(r) > 0.1: + n_reversed += 1 + except Exception: + continue + + if n_reversed > 0 and len(cluster_corrs) >= 2: + paradoxes.append({ + "col_a": c1, + "col_b": c2, + "overall_corr": round(float(overall_r), 4), + "cluster_corrs": cluster_corrs, + "n_reversed_clusters": n_reversed, + "is_paradox": 
True, + "paradox_strength": round( + n_reversed / max(len(cluster_corrs), 1), 3 + ), + }) + + if not paradoxes: + return None + + paradoxes.sort(key=lambda x: x["paradox_strength"], reverse=True) + return { + "paradoxes": pd.DataFrame(paradoxes), + "n_paradoxes": len(paradoxes), + } + + # ------------------------------------------------------------------ + # X5. Feature Importance × Missing Rate + # ------------------------------------------------------------------ + + def importance_vs_missing(self) -> pd.DataFrame | None: + """Cross-tabulate feature importance with missing rate.""" + fi = self._stats.feature_importance + mi = self._stats.missing_info + if fi.empty or mi.empty: + return None + + if "missing_ratio" not in mi.columns: + return None + + # Detect the importance column name + imp_col = None + for candidate in ["variance", "cv", "mean_abs_corr", "mutual_info"]: + if candidate in fi.columns: + imp_col = candidate + break + if imp_col is None and len(fi.columns) > 0: + imp_col = fi.columns[0] + if imp_col is None: + return None + + common_cols = list(set(fi.index) & set(mi.index)) + if not common_cols: + return None + + rows = [] + for col in common_cols: + importance = float(fi.loc[col, imp_col]) if col in fi.index else 0 + missing_ratio = float(mi.loc[col, "missing_ratio"]) if col in mi.index else 0 + risk = "none" + if missing_ratio > 0.3 and importance > fi[imp_col].median(): + risk = "high" + elif missing_ratio > 0.1 and importance > fi[imp_col].median(): + risk = "medium" + elif missing_ratio > 0.05: + risk = "low" + rows.append({ + "column": col, + "importance": round(importance, 4), + "missing_ratio": round(missing_ratio, 4), + "information_loss_risk": risk, + }) + + df_result = pd.DataFrame(rows).set_index("column") + df_result = df_result.sort_values("importance", ascending=False) + return df_result + + # ------------------------------------------------------------------ + # X6. 
Unified 2D Embedding (t-SNE/UMAP + Cluster + Anomaly overlay) + # ------------------------------------------------------------------ + + def unified_2d_embedding(self) -> dict[str, Any] | None: + """Prepare a unified 2D scatter dataset with cluster + anomaly labels.""" + adv = self._stats.advanced_stats + dr = adv.get("dimreduction", {}) + clustering = adv.get("clustering", {}) + anomaly_full = adv.get("advanced_anomaly_full", {}) + + # Get 2D coordinates (prefer t-SNE then UMAP) + coords = None + method = None + for key in ["tsne", "umap"]: + emb = dr.get(key) + if emb is not None and isinstance(emb, dict): + c = emb.get("coordinates") + if c is not None and hasattr(c, "shape") and len(c) > 0: + coords = np.asarray(c) + method = key + break + + if coords is None or coords.shape[1] < 2: + return None + + n = coords.shape[0] + result: dict[str, Any] = { + "x": coords[:, 0].tolist(), + "y": coords[:, 1].tolist(), + "method": method, + "n_points": n, + } + + # Add cluster labels + km = clustering.get("kmeans") + if km and km.get("labels") is not None: + cl = np.asarray(km["labels"]) + result["cluster_labels"] = cl[:n].tolist() if len(cl) >= n else cl.tolist() + + # Add anomaly labels + iso = anomaly_full.get("isolation_forest") + if iso and iso.get("labels") is not None: + al = np.asarray(iso["labels"]) + result["anomaly_labels"] = al[:n].tolist() if len(al) >= n else al.tolist() + + return result diff --git a/f2a/stats/descriptive.py b/f2a/stats/descriptive.py new file mode 100644 index 0000000..53cc5f0 --- /dev/null +++ b/f2a/stats/descriptive.py @@ -0,0 +1,125 @@ +"""Descriptive statistics analysis module.""" + +from __future__ import annotations + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.type_inference import ColumnType + + +class DescriptiveStats: + """Compute descriptive statistics. + + Args: + df: Target DataFrame to analyze. + schema: Data schema. 
+ """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + def summary(self) -> pd.DataFrame: + """Return overall summary statistics. + + Generates a unified summary table covering both numeric and categorical columns. + + Returns: + Summary statistics DataFrame. + """ + rows: list[dict] = [] + for col_info in self._schema.columns: + series = self._df[col_info.name] + row: dict = { + "column": col_info.name, + "type": col_info.inferred_type.value, + "count": int(series.count()), + "missing": col_info.n_missing, + "missing_%": round(col_info.missing_ratio * 100, 2), + "unique": col_info.n_unique, + } + + if col_info.inferred_type == ColumnType.NUMERIC: + row.update(self._numeric_stats(series)) + elif col_info.inferred_type in (ColumnType.CATEGORICAL, ColumnType.BOOLEAN): + row.update(self._categorical_stats(series)) + + rows.append(row) + + return pd.DataFrame(rows).set_index("column") + + def numeric_summary(self) -> pd.DataFrame: + """Return summary statistics for numeric columns only.""" + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + return self._df[cols].describe().T + + def categorical_summary(self) -> pd.DataFrame: + """Return summary statistics for categorical columns only.""" + cols = self._schema.categorical_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col] + top_val = series.mode().iloc[0] if not series.mode().empty else None + rows.append( + { + "column": col, + "count": int(series.count()), + "unique": int(series.nunique()), + "top": top_val, + "freq": int(series.value_counts().iloc[0]) if top_val is not None else 0, + } + ) + return pd.DataFrame(rows).set_index("column") + + # ── Internal helpers ──────────────────────────────── + + @staticmethod + def _numeric_stats(series: pd.Series) -> dict: + """Return numeric column statistics as a dictionary.""" + desc = series.describe() + q1 = 
float(desc.get("25%", np.nan)) + q3 = float(desc.get("75%", np.nan)) + mean = float(series.mean()) + std = float(series.std()) + count = int(series.count()) + skew_val = float(series.skew()) if count >= 3 else np.nan + kurt_val = float(series.kurtosis()) if count >= 4 else np.nan + se = std / np.sqrt(count) if count > 0 else np.nan + cv = abs(std / mean) if mean != 0 else np.nan + mad = float((series - series.median()).abs().median()) + + return { + "mean": round(mean, 4), + "median": round(float(series.median()), 4), + "std": round(std, 4), + "se": round(float(se), 4), + "cv": round(float(cv), 4) if not np.isnan(cv) else None, + "mad": round(mad, 4), + "min": float(series.min()), + "max": float(series.max()), + "range": round(float(series.max() - series.min()), 4), + "p5": round(float(series.quantile(0.05)), 4), + "q1": round(q1, 4), + "q3": round(q3, 4), + "p95": round(float(series.quantile(0.95)), 4), + "iqr": round(q3 - q1, 4), + "skewness": round(skew_val, 4) if not np.isnan(skew_val) else None, + "kurtosis": round(kurt_val, 4) if not np.isnan(kurt_val) else None, + } + + @staticmethod + def _categorical_stats(series: pd.Series) -> dict: + """Return categorical column statistics as a dictionary.""" + vc = series.value_counts() + top_val = vc.index[0] if len(vc) > 0 else None + return { + "top": top_val, + "freq": int(vc.iloc[0]) if len(vc) > 0 else 0, + } diff --git a/f2a/stats/distribution.py b/f2a/stats/distribution.py new file mode 100644 index 0000000..e8ce2f4 --- /dev/null +++ b/f2a/stats/distribution.py @@ -0,0 +1,151 @@ +"""Distribution analysis module.""" + +from __future__ import annotations + +import numpy as np +import pandas as pd +from scipy import stats as sp_stats + +from f2a.core.schema import DataSchema + + +class DistributionStats: + """Analyze distribution characteristics of numeric columns. + + Args: + df: Target DataFrame to analyze. + schema: Data schema. 
+ """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + def analyze(self) -> pd.DataFrame: + """Return distribution information for numeric columns. + + Returns: + DataFrame containing skewness, kurtosis, and normality test results. + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 3: + continue + rows.append(self._analyze_column(col, series)) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + def quantile_table(self, quantiles: list[float] | None = None) -> pd.DataFrame: + """Return quantile table for numeric columns. + + Args: + quantiles: List of quantiles to compute. Defaults to + ``[0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95]``. + + Returns: + Quantile DataFrame. + """ + if quantiles is None: + quantiles = [0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95] + + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + return self._df[cols].quantile(quantiles) + + @staticmethod + def _analyze_column(col: str, series: pd.Series) -> dict: + """Analyze the distribution of a single numeric column.""" + skew = float(series.skew()) + kurt = float(series.kurtosis()) + + n = len(series) + + # ── Normality tests ────────────────────────────── + shapiro_p: float | None = None + dagostino_p: float | None = None + ks_p: float | None = None + anderson_stat: float | None = None + anderson_critical: float | None = None + + # Shapiro-Wilk (best for n <= 5000) + if 3 <= n <= 5000: + try: + _, shapiro_p = sp_stats.shapiro(series) + except Exception: + pass + + # D'Agostino-Pearson (good for n > 20) + if n > 20: + try: + _, dagostino_p = sp_stats.normaltest(series) + except Exception: + pass + + # Kolmogorov-Smirnov + if n >= 5: + try: + mean, std = series.mean(), series.std() + if std > 0: + _, ks_p = sp_stats.kstest(series, "norm", args=(mean, 
std)) + except Exception: + pass + + # Anderson-Darling + if n >= 8: + try: + ad = sp_stats.anderson(series, "norm") + anderson_stat = float(ad.statistic) + # Use the 5% significance level critical value + anderson_critical = float(ad.critical_values[2]) # index 2 = 5% + except Exception: + pass + + # Primary normality verdict (prefer Shapiro for small, D'Agostino for large) + primary_p: float | None = None + primary_test: str = "n/a" + if shapiro_p is not None: + primary_p = shapiro_p + primary_test = "shapiro" + elif dagostino_p is not None: + primary_p = dagostino_p + primary_test = "dagostino" + + # Skewness interpretation + if abs(skew) < 0.5: + skew_label = "symmetric" + elif abs(skew) < 1.0: + skew_label = "moderate skew" + else: + skew_label = "high skew" + + # Kurtosis interpretation (excess kurtosis: 0 = normal) + if abs(kurt) < 0.5: + kurt_label = "mesokurtic" + elif kurt > 0: + kurt_label = "leptokurtic" + else: + kurt_label = "platykurtic" + + return { + "column": col, + "n": n, + "skewness": round(skew, 4), + "skew_type": skew_label, + "kurtosis": round(kurt, 4), + "kurt_type": kurt_label, + "normality_test": primary_test, + "normality_p": round(primary_p, 6) if primary_p is not None else None, + "is_normal_0.05": primary_p > 0.05 if primary_p is not None else None, + "shapiro_p": round(shapiro_p, 6) if shapiro_p is not None else None, + "dagostino_p": round(dagostino_p, 6) if dagostino_p is not None else None, + "ks_p": round(ks_p, 6) if ks_p is not None else None, + "anderson_stat": round(anderson_stat, 4) if anderson_stat is not None else None, + "anderson_5pct_cv": round(anderson_critical, 4) if anderson_critical is not None else None, + } diff --git a/f2a/stats/duplicates.py b/f2a/stats/duplicates.py new file mode 100644 index 0000000..10c86ff --- /dev/null +++ b/f2a/stats/duplicates.py @@ -0,0 +1,61 @@ +"""Duplicate detection module.""" + +from __future__ import annotations + +from typing import Any + +import pandas as pd + +from f2a.core.schema 
import DataSchema + + +class DuplicateStats: + """Detect and analyse duplicate rows and column uniqueness. + + Args: + df: Target DataFrame. + schema: Data schema. + """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + def exact_duplicates(self) -> dict[str, Any]: + """Count exact duplicate rows. + + Returns: + Dictionary with ``total_rows``, ``duplicate_rows``, + ``unique_rows``, ``duplicate_ratio``. + """ + n = len(self._df) + n_dup = int(self._df.duplicated().sum()) + return { + "total_rows": n, + "duplicate_rows": n_dup, + "unique_rows": n - n_dup, + "duplicate_ratio": round(n_dup / n, 4) if n > 0 else 0.0, + } + + def column_uniqueness(self) -> pd.DataFrame: + """Return per-column uniqueness statistics. + + Returns: + DataFrame indexed by column name with uniqueness metrics. + """ + rows: list[dict] = [] + for col in self._df.columns: + n_unique = int(self._df[col].nunique()) + n_total = int(self._df[col].count()) + rows.append({ + "column": col, + "unique_values": n_unique, + "total_non_null": n_total, + "uniqueness_ratio": round(n_unique / n_total, 4) if n_total > 0 else 0.0, + "is_unique_key": n_unique == n_total > 0, + }) + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + def summary(self) -> dict[str, Any]: + """Return concise duplicate summary.""" + return self.exact_duplicates() diff --git a/f2a/stats/feature_importance.py b/f2a/stats/feature_importance.py new file mode 100644 index 0000000..d61c852 --- /dev/null +++ b/f2a/stats/feature_importance.py @@ -0,0 +1,140 @@ +"""Feature importance analysis module. + +Ranks features by variance, correlation, and mutual information. 
+""" + +from __future__ import annotations + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class FeatureImportanceStats: + """Compute feature importance rankings for numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + # ── Variance-based ranking ──────────────────────────── + + def variance_ranking(self) -> pd.DataFrame: + """Rank numeric features by normalised variance (coefficient of variation). + + Returns: + DataFrame sorted by variance (descending). + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 2: + continue + mean = float(series.mean()) + std = float(series.std()) + cv = abs(std / mean) if mean != 0 else None + rows.append({ + "column": col, + "variance": round(float(series.var()), 4), + "std": round(std, 4), + "cv": round(cv, 4) if cv is not None else None, + "range": round(float(series.max() - series.min()), 4), + }) + + if not rows: + return pd.DataFrame() + return pd.DataFrame(rows).sort_values("variance", ascending=False).set_index("column") + + # ── Correlation-with-all ranking ────────────────────── + + def mean_abs_correlation(self) -> pd.DataFrame: + """Rank features by mean absolute correlation with all other features. + + Columns with higher mean |r| are more *connected* to the rest of the + dataset and may be more informative (or redundant). 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + corr = self._df[cols].corr(method="pearson").abs() + # Exclude self-correlation + np.fill_diagonal(corr.values, 0) + mean_corr = corr.mean() + + df = pd.DataFrame({ + "column": mean_corr.index, + "mean_abs_corr": mean_corr.values.round(4), + }).sort_values("mean_abs_corr", ascending=False).set_index("column") + + return df + + # ── Mutual information ──────────────────────────────── + + def mutual_information(self) -> pd.DataFrame: + """Compute average mutual-information score per numeric feature. + + Requires ``scikit-learn``. Returns an empty DataFrame if unavailable. + """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + try: + from sklearn.feature_selection import mutual_info_regression + except ImportError: + logger.info("scikit-learn not installed; skipping mutual-information analysis.") + return pd.DataFrame() + + cols = cols[:15] # limit to avoid expensive computation + df_clean = self._df[cols].dropna() + if len(df_clean) < 30: + return pd.DataFrame() + + # For each column, compute MI against all others, then average + mi_scores: dict[str, float] = {col: 0.0 for col in cols} + n_pairs = 0 + for col in cols: + X = df_clean.drop(columns=[col]) + y = df_clean[col] + try: + mi = mutual_info_regression(X, y, random_state=42, n_neighbors=5) + for other_col, mi_val in zip(X.columns, mi): + mi_scores[other_col] += float(mi_val) + mi_scores[col] += float(mi_val) + n_pairs += len(X.columns) + except Exception: + continue + + if n_pairs == 0: + return pd.DataFrame() + + # Average + for col in mi_scores: + mi_scores[col] /= max(1, len(cols) - 1) + + df_result = pd.DataFrame({ + "column": list(mi_scores.keys()), + "avg_mutual_info": [round(v, 4) for v in mi_scores.values()], + }).sort_values("avg_mutual_info", ascending=False).set_index("column") + + return df_result + + # ── Combined summary ────────────────────────────────── + + def 
summary(self) -> pd.DataFrame: + """Return a combined feature-importance summary (variance-based).""" + return self.variance_ranking() diff --git a/f2a/stats/feature_insights.py b/f2a/stats/feature_insights.py new file mode 100644 index 0000000..4c4bd14 --- /dev/null +++ b/f2a/stats/feature_insights.py @@ -0,0 +1,378 @@ +"""Feature engineering insights module. + +Provides interaction detection, monotonic relationship analysis, +optimal binning, cardinality analysis, and data leakage detection. + +References: + - Friedman & Popescu (2008) — interaction detection + - Fayyad & Irani (1993) — entropy-based binning +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class FeatureInsightsStats: + """Feature engineering recommendations and insights. + + Args: + df: Target DataFrame. + schema: Data schema. + max_sample: Max rows to sample. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + max_sample: int = 5000, + ) -> None: + self._df = df + self._schema = schema + self._max_sample = max_sample + + # ── Interaction detection ───────────────────────────── + + def interaction_detection(self) -> pd.DataFrame: + """Detect potential feature interactions. + + For each pair of numeric features, computes the correlation of + their product with each feature individually. High product- + correlation suggests a meaningful interaction term. + + Returns: + DataFrame with col_a, col_b, interaction_strength, and + correlation of the product with each original feature. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[:15] + df_clean = self._df[cols].dropna() + if len(df_clean) < 30: + return pd.DataFrame() + + if len(df_clean) > self._max_sample: + df_clean = df_clean.sample(self._max_sample, random_state=42) + + rows: list[dict] = [] + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + a = df_clean[cols[i]] + b = df_clean[cols[j]] + + # Product interaction + product = a * b + if product.std() == 0 or a.std() == 0 or b.std() == 0: + continue + + # How much does the product correlate beyond individual features? + r_prod_a = float(product.corr(a)) + r_prod_b = float(product.corr(b)) + r_ab = float(a.corr(b)) + + # Interaction strength: residual correlation after removing linear + interaction_strength = max(abs(r_prod_a), abs(r_prod_b)) - abs(r_ab) + + if abs(interaction_strength) > 0.1: + rows.append({ + "col_a": cols[i], + "col_b": cols[j], + "interaction_strength": round(interaction_strength, 4), + "corr_product_a": round(r_prod_a, 4), + "corr_product_b": round(r_prod_b, 4), + "corr_a_b": round(r_ab, 4), + "recommendation": ( + "Strong interaction" + if interaction_strength > 0.3 + else "Moderate interaction" + ), + }) + + if not rows: + return pd.DataFrame() + + return pd.DataFrame(rows).sort_values( + "interaction_strength", ascending=False + ).reset_index(drop=True) + + # ── Monotonic relationship detection ────────────────── + + def monotonic_detection(self) -> pd.DataFrame: + """Detect monotonic relationships using Spearman correlation. + + A high |Spearman| but low |Pearson| suggests a non-linear + monotonic relationship. + + Returns: + DataFrame with col_a, col_b, pearson, spearman, monotonic_gap. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[:20] + df_clean = self._df[cols].dropna() + if len(df_clean) < 20: + return pd.DataFrame() + + pearson = df_clean.corr(method="pearson") + spearman = df_clean.corr(method="spearman") + + rows: list[dict] = [] + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + r_p = float(pearson.iloc[i, j]) + r_s = float(spearman.iloc[i, j]) + gap = abs(r_s) - abs(r_p) + + if gap > 0.05 and abs(r_s) > 0.3: + rows.append({ + "col_a": cols[i], + "col_b": cols[j], + "pearson": round(r_p, 4), + "spearman": round(r_s, 4), + "monotonic_gap": round(gap, 4), + "relationship": ( + "Strong non-linear monotonic" + if gap > 0.15 + else "Moderate non-linear monotonic" + ), + }) + + if not rows: + return pd.DataFrame() + + return pd.DataFrame(rows).sort_values( + "monotonic_gap", ascending=False + ).reset_index(drop=True) + + # ── Binning analysis ────────────────────────────────── + + def binning_analysis(self, n_bins: int = 10) -> pd.DataFrame: + """Analyze optimal binning for numeric columns. + + Computes equal-width and equal-frequency binning, then evaluates + the entropy of each binning to recommend the best strategy. + + Args: + n_bins: Number of bins. + + Returns: + DataFrame with binning statistics per column. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < n_bins: + continue + + # Equal-width binning + try: + ew_bins = pd.cut(series, bins=n_bins) + ew_counts = ew_bins.value_counts(normalize=True) + ew_entropy = float(-np.sum( + ew_counts * np.log2(ew_counts + 1e-15) + )) + except Exception: + ew_entropy = None + + # Equal-frequency binning + try: + ef_bins = pd.qcut(series, q=n_bins, duplicates="drop") + ef_counts = ef_bins.value_counts(normalize=True) + ef_entropy = float(-np.sum( + ef_counts * np.log2(ef_counts + 1e-15) + )) + except Exception: + ef_entropy = None + + max_entropy = float(np.log2(n_bins)) + + recommendation = "equal_frequency" # default + if ew_entropy is not None and ef_entropy is not None: + if ew_entropy > ef_entropy * 0.95: + recommendation = "equal_width" + + rows.append({ + "column": col, + "n_bins": n_bins, + "equal_width_entropy": round(ew_entropy, 4) if ew_entropy else None, + "equal_freq_entropy": round(ef_entropy, 4) if ef_entropy else None, + "max_entropy": round(max_entropy, 4), + "recommended_method": recommendation, + "skewness": round(float(series.skew()), 4), + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Cardinality analysis ────────────────────────────── + + def cardinality_analysis(self) -> pd.DataFrame: + """Analyze cardinality of all columns for encoding recommendations. + + Returns: + DataFrame with cardinality stats and encoding recommendations. 
+ """ + rows: list[dict] = [] + for col in self._df.columns: + series = self._df[col] + n_unique = int(series.nunique()) + n_total = int(series.count()) + ratio = n_unique / n_total if n_total > 0 else 0.0 + + # Determine type recommendation + if ratio > 0.95: + encoding = "id_column (drop or hash)" + elif n_unique <= 2: + encoding = "binary encoding" + elif n_unique <= 10: + encoding = "one-hot encoding" + elif n_unique <= 50: + encoding = "label encoding or target encoding" + elif n_unique <= 500: + encoding = "target encoding or frequency encoding" + else: + encoding = "hash encoding or embeddings" + + rows.append({ + "column": col, + "n_unique": n_unique, + "n_total": n_total, + "cardinality_ratio": round(ratio, 4), + "cardinality_level": ( + "binary" if n_unique <= 2 + else "low" if n_unique <= 10 + else "medium" if n_unique <= 50 + else "high" if n_unique <= 500 + else "very_high" + ), + "recommended_encoding": encoding, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Leakage detection ───────────────────────────────── + + def leakage_detection(self) -> pd.DataFrame: + """Detect potential data leakage indicators. + + Flags columns with: + - Perfect or near-perfect correlation with other columns + - Suspiciously high unique ratio (possible target leak) + - Constant or near-constant values + + Returns: + DataFrame with leakage risk assessment per column. 
+ """ + cols = self._schema.numeric_columns + all_cols = list(self._df.columns) + + rows: list[dict] = [] + + for col in all_cols: + series = self._df[col] + n_total = int(series.count()) + n_unique = int(series.nunique()) + ratio = n_unique / n_total if n_total > 0 else 0 + + risks: list[str] = [] + + # Near-constant + if n_unique <= 1: + risks.append("constant_column") + elif n_unique == 2 and n_total > 100: + top_freq = series.value_counts().iloc[0] / n_total + if top_freq > 0.99: + risks.append("near_constant") + + # ID-like + if ratio > 0.95 and n_total > 100: + risks.append("id_like") + + # Perfect correlation with another column + if col in cols: + for other in cols: + if other == col: + continue + try: + r = abs(float(self._df[col].corr(self._df[other]))) + if r > 0.99: + risks.append(f"perfect_corr_with_{other}") + break + except Exception: + continue + + risk_level = ( + "high" if len(risks) >= 2 + else "medium" if len(risks) == 1 + else "low" + ) + + if risks: + rows.append({ + "column": col, + "risk_level": risk_level, + "risks": "; ".join(risks), + "unique_ratio": round(ratio, 4), + "n_unique": n_unique, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return combined feature insight results.""" + result: dict[str, Any] = {} + + try: + inter = self.interaction_detection() + if not inter.empty: + result["interactions"] = inter + except Exception as exc: + logger.debug("Interaction detection skipped: %s", exc) + + try: + mono = self.monotonic_detection() + if not mono.empty: + result["monotonic"] = mono + except Exception as exc: + logger.debug("Monotonic detection skipped: %s", exc) + + try: + bins = self.binning_analysis() + if not bins.empty: + result["binning"] = bins + except Exception as exc: + logger.debug("Binning analysis skipped: %s", exc) + + try: + card = self.cardinality_analysis() + if not card.empty: 
+ result["cardinality"] = card + except Exception as exc: + logger.debug("Cardinality analysis skipped: %s", exc) + + try: + leak = self.leakage_detection() + if not leak.empty: + result["leakage"] = leak + except Exception as exc: + logger.debug("Leakage detection skipped: %s", exc) + + return result diff --git a/f2a/stats/insight_engine.py b/f2a/stats/insight_engine.py new file mode 100644 index 0000000..8762f07 --- /dev/null +++ b/f2a/stats/insight_engine.py @@ -0,0 +1,1094 @@ +"""Automatic Insight Engine — generates prioritised natural-language insights. + +The engine scans *all* previously computed statistics (basic + advanced) and +applies a comprehensive set of interpretive rules to surface: + +* ``FINDING`` — notable data patterns or facts +* ``WARNING`` — data quality or integrity concerns +* ``RECOMMENDATION`` — actionable preprocessing / modelling suggestions +* ``OPPORTUNITY`` — exploitable patterns or segmentation opportunities + +Every insight carries a severity (critical / high / medium / low), a +priority score for ranking, related column names, and concrete action items. 
+""" + +from __future__ import annotations + +import math +from dataclasses import dataclass, field +from enum import Enum +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + +# ===================================================================== +# Data classes +# ===================================================================== + +class InsightType(str, Enum): + FINDING = "finding" + WARNING = "warning" + RECOMMENDATION = "recommendation" + OPPORTUNITY = "opportunity" + + +class Severity(str, Enum): + CRITICAL = "critical" + HIGH = "high" + MEDIUM = "medium" + LOW = "low" + + +# Numeric weight for priority scoring +_SEV_WEIGHT = {Severity.CRITICAL: 1.0, Severity.HIGH: 0.75, Severity.MEDIUM: 0.5, Severity.LOW: 0.25} + + +@dataclass +class Insight: + """A single auto-generated insight.""" + + type: InsightType + severity: Severity + category: str # distribution | correlation | cluster | anomaly | missing | quality | feature | general + title: str + description: str + affected_columns: list[str] = field(default_factory=list) + evidence: dict[str, Any] = field(default_factory=dict) + action_items: list[str] = field(default_factory=list) + priority_score: float = 0.0 + + # Computed after instantiation + def __post_init__(self) -> None: + if self.priority_score == 0.0: + col_factor = min(len(self.affected_columns) / 5.0, 1.0) if self.affected_columns else 0.3 + actionable = 1.0 if self.action_items else 0.6 + self.priority_score = round( + _SEV_WEIGHT.get(self.severity, 0.5) * 0.5 + + col_factor * 0.3 + + actionable * 0.2, + 4, + ) + + def to_dict(self) -> dict[str, Any]: + return { + "type": self.type.value, + "severity": self.severity.value, + "category": self.category, + "title": self.title, + "description": self.description, + "affected_columns": self.affected_columns, + "evidence": {k: _safe_serialize(v) for k, v in 
self.evidence.items()}, + "action_items": self.action_items, + "priority_score": self.priority_score, + } + + +def _safe_serialize(v: Any) -> Any: + """Convert numpy / pandas types to JSON-safe Python primitives.""" + if isinstance(v, np.integer): + return int(v) + if isinstance(v, np.floating): + return float(v) + if isinstance(v, np.bool_): + return bool(v) + if isinstance(v, np.ndarray): + return v.tolist() + if isinstance(v, pd.DataFrame): + return v.to_dict() + if isinstance(v, pd.Series): + return v.to_dict() + return v + + +# ===================================================================== +# Insight Engine +# ===================================================================== + +class InsightEngine: + """Generate, rank, and present actionable insights from ``StatsResult``. + + Usage:: + + engine = InsightEngine(stats_result, data_schema) + insights = engine.generate() # list[Insight] + executive = engine.executive_summary() # str + """ + + def __init__(self, stats: Any, schema: DataSchema) -> None: + self._stats = stats + self._schema = schema + self._insights: list[Insight] = [] + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + def generate(self) -> list[Insight]: + """Run all rule sets and return insights sorted by priority (desc).""" + self._insights.clear() + + try: + self._distribution_insights() + except Exception as exc: + logger.debug("Distribution insight rules failed: %s", exc) + + try: + self._correlation_insights() + except Exception as exc: + logger.debug("Correlation insight rules failed: %s", exc) + + try: + self._missing_insights() + except Exception as exc: + logger.debug("Missing insight rules failed: %s", exc) + + try: + self._outlier_insights() + except Exception as exc: + logger.debug("Outlier insight rules failed: %s", exc) + + try: + self._quality_insights() + except Exception as exc: + logger.debug("Quality insight 
rules failed: %s", exc) + + try: + self._clustering_insights() + except Exception as exc: + logger.debug("Clustering insight rules failed: %s", exc) + + try: + self._anomaly_insights() + except Exception as exc: + logger.debug("Anomaly insight rules failed: %s", exc) + + try: + self._feature_insights() + except Exception as exc: + logger.debug("Feature insight rules failed: %s", exc) + + try: + self._pca_insights() + except Exception as exc: + logger.debug("PCA insight rules failed: %s", exc) + + try: + self._duplicate_insights() + except Exception as exc: + logger.debug("Duplicate insight rules failed: %s", exc) + + try: + self._advanced_distribution_insights() + except Exception as exc: + logger.debug("Adv distribution insight rules failed: %s", exc) + + try: + self._advanced_correlation_insights() + except Exception as exc: + logger.debug("Adv correlation insight rules failed: %s", exc) + + try: + self._general_insights() + except Exception as exc: + logger.debug("General insight rules failed: %s", exc) + + self._insights.sort(key=lambda i: i.priority_score, reverse=True) + return self._insights + + def executive_summary(self) -> str: + """One-paragraph natural-language summary of the dataset.""" + if not self._insights: + self.generate() + + n = self._schema.n_rows + d = self._schema.n_cols + num = len(self._schema.numeric_columns) + cat = len(self._schema.categorical_columns) + + crit = sum(1 for i in self._insights if i.severity == Severity.CRITICAL) + high = sum(1 for i in self._insights if i.severity == Severity.HIGH) + med = sum(1 for i in self._insights if i.severity == Severity.MEDIUM) + + parts = [ + f"Dataset contains {n:,} rows and {d} columns ({num} numeric, {cat} categorical).", + ] + if crit: + parts.append(f"{crit} critical issue(s) require immediate attention.") + if high: + parts.append(f"{high} high-priority finding(s) detected.") + if med: + parts.append(f"{med} moderate observations noted.") + + # Top 3 headlines + top3 = self._insights[:3] + 
if top3: + parts.append("Key highlights:") + for idx, ins in enumerate(top3, 1): + parts.append(f" {idx}. {ins.title}") + + return " ".join(parts) + + def summary_dict(self) -> dict[str, Any]: + """Serialize all insights for storage / HTML rendering.""" + if not self._insights: + self.generate() + return { + "executive_summary": self.executive_summary(), + "total_count": len(self._insights), + "by_severity": { + s.value: sum(1 for i in self._insights if i.severity == s) + for s in Severity + }, + "by_type": { + t.value: sum(1 for i in self._insights if i.type == t) + for t in InsightType + }, + "insights": [i.to_dict() for i in self._insights], + } + + # ------------------------------------------------------------------ + # Helper + # ------------------------------------------------------------------ + + def _add(self, **kwargs: Any) -> None: + self._insights.append(Insight(**kwargs)) + + # ================================================================== + # Rule Sets + # ================================================================== + + # -- 1. Distribution -------------------------------------------------- + + def _distribution_insights(self) -> None: + summary = self._stats.summary + dist = self._stats.distribution_info + if summary.empty: + return + + numeric_rows = summary[summary.get("type", pd.Series(dtype=str)) == "numeric"] if "type" in summary.columns else summary + if numeric_rows.empty: + return + + # Extreme skewness + if "skewness" in numeric_rows.columns: + skewed = numeric_rows[numeric_rows["skewness"].abs() > 2.0].dropna(subset=["skewness"]) + if not skewed.empty: + cols = list(skewed.index) + worst = skewed["skewness"].abs().idxmax() + worst_val = skewed.loc[worst, "skewness"] + self._add( + type=InsightType.RECOMMENDATION, + severity=Severity.HIGH, + category="distribution", + title=f"{len(cols)} column(s) with extreme skewness", + description=( + f"Columns {cols[:5]} have |skewness| > 2, indicating " + f"heavily asymmetric distributions. 
" + f"Worst: '{worst}' (skewness={worst_val:.2f})." + ), + affected_columns=cols, + evidence={"worst_column": worst, "worst_skewness": float(worst_val)}, + action_items=[ + "Apply log or Box-Cox transform to reduce skewness", + "Consider robust statistics (median, MAD) instead of mean/std", + ], + ) + + # High kurtosis (heavy tails) + if "kurtosis" in numeric_rows.columns: + heavy = numeric_rows[numeric_rows["kurtosis"] > 7.0].dropna(subset=["kurtosis"]) + if not heavy.empty: + cols = list(heavy.index) + self._add( + type=InsightType.WARNING, + severity=Severity.MEDIUM, + category="distribution", + title=f"{len(cols)} column(s) with extreme kurtosis (heavy tails)", + description=( + f"Columns {cols[:5]} have kurtosis > 7, meaning very heavy tails. " + "Outliers may dominate summary statistics." + ), + affected_columns=cols, + evidence={"kurtosis_values": {c: float(numeric_rows.loc[c, "kurtosis"]) for c in cols[:5]}}, + action_items=[ + "Use winsorization or robust estimators", + "Check these columns for extreme outliers", + ], + ) + + # Normality summary + if not dist.empty and "is_normal_0.05" in dist.columns: + normal = dist[dist["is_normal_0.05"] == True] + non_normal = dist[dist["is_normal_0.05"] == False] + total = len(dist) + if len(non_normal) > total * 0.8 and total >= 3: + self._add( + type=InsightType.FINDING, + severity=Severity.MEDIUM, + category="distribution", + title=f"{len(non_normal)}/{total} numeric columns are non-normal", + description=( + "Most numeric columns fail normality tests (α=0.05). " + "Non-parametric methods may be more appropriate." 
+ ), + affected_columns=list(non_normal.index), + action_items=[ + "Prefer non-parametric tests (Kruskal-Wallis, Mann-Whitney) over t-tests/ANOVA", + "Consider power transforms if normality is needed for downstream models", + ], + ) + + # Low variability + if "cv" in numeric_rows.columns: + low_var = numeric_rows[(numeric_rows["cv"].notna()) & (numeric_rows["cv"].abs() < 0.05)] + if not low_var.empty: + cols = list(low_var.index) + self._add( + type=InsightType.FINDING, + severity=Severity.LOW, + category="distribution", + title=f"{len(cols)} column(s) with very low variability (CV < 5%)", + description=( + f"Columns {cols[:5]} have coefficient of variation < 5%, " + "meaning values are tightly clustered. These may be near-constant." + ), + affected_columns=cols, + action_items=["Evaluate whether these columns carry useful information"], + ) + + # -- 2. Correlation --------------------------------------------------- + + def _correlation_insights(self) -> None: + corr = self._stats.correlation_matrix + vif = self._stats.vif_table + spearman = self._stats.spearman_matrix + + # Multicollinearity via VIF + if not vif.empty and "VIF" in vif.columns: + severe = vif[vif["VIF"] > 10] + if not severe.empty: + cols = list(severe.index) + worst = severe["VIF"].idxmax() + self._add( + type=InsightType.WARNING, + severity=Severity.CRITICAL if len(severe) > 3 else Severity.HIGH, + category="correlation", + title=f"{len(severe)} column(s) with severe multicollinearity (VIF>10)", + description=( + f"VIF > 10 detected for: {cols[:5]}. " + f"Worst: '{worst}' (VIF={severe.loc[worst, 'VIF']:.1f}). " + "Redundant information may cause model instability." 
+ ), + affected_columns=cols, + evidence={"vif_values": {c: float(severe.loc[c, "VIF"]) for c in cols[:5]}}, + action_items=[ + "Remove one column from each highly correlated pair", + "Apply PCA or regularization (Ridge/Lasso) to handle collinearity", + ], + ) + + # High pearson correlation pairs + if not corr.empty: + pairs: list[tuple[str, str, float]] = [] + cols_list = corr.columns.tolist() + for i, c1 in enumerate(cols_list): + for c2 in cols_list[i + 1:]: + v = corr.loc[c1, c2] + if abs(v) > 0.9: + pairs.append((c1, c2, float(v))) + if pairs: + affected = list({c for p in pairs for c in p[:2]}) + self._add( + type=InsightType.WARNING, + severity=Severity.HIGH, + category="correlation", + title=f"{len(pairs)} column pair(s) with |r| > 0.9", + description=( + "Near-perfect linear relationships detected. " + f"Top pair: '{pairs[0][0]}' ↔ '{pairs[0][1]}' (r={pairs[0][2]:.3f})." + ), + affected_columns=affected, + evidence={"pairs": [(p[0], p[1], p[2]) for p in pairs[:5]]}, + action_items=[ + "Consider dropping one column from each pair to reduce redundancy", + "Verify these are not data leakage or duplicate columns", + ], + ) + + # No correlations at all (independent features) + if not corr.empty and corr.shape[0] >= 3: + upper = corr.where(np.triu(np.ones(corr.shape, dtype=bool), k=1)) + max_abs = upper.abs().max().max() + if max_abs < 0.3: + self._add( + type=InsightType.FINDING, + severity=Severity.LOW, + category="correlation", + title="All numeric columns are weakly correlated (max |r| < 0.3)", + description=( + "No strong linear relationships found between any pair of numeric columns. " + "Features appear largely independent." + ), + affected_columns=list(corr.columns), + action_items=["Check for non-linear relationships via MI or distance correlation"], + ) + + # -- 3. 
# -- 3. Missing -------------------------------------------------------

def _missing_insights(self) -> None:
    """Generate insights from per-column missing-value statistics.

    Reads ``self._stats.missing_info`` (indexed by column, expects a
    ``missing_ratio`` column) and registers findings via ``self._add``.
    """
    mi = self._stats.missing_info
    if mi.empty or "missing_ratio" not in mi.columns:
        return

    # NOTE(review): removed an unused `total_ratio` (mean missing ratio)
    # computation here — it was dead code.
    high_miss = mi[mi["missing_ratio"] > 0.5]
    moderate_miss = mi[(mi["missing_ratio"] > 0.1) & (mi["missing_ratio"] <= 0.5)]
    no_miss = mi[mi["missing_ratio"] == 0]

    # Columns with >50% missing
    if not high_miss.empty:
        cols = list(high_miss.index)
        self._add(
            type=InsightType.WARNING,
            severity=Severity.CRITICAL,
            category="missing",
            title=f"{len(cols)} column(s) with >50% missing values",
            description=(
                f"Columns {cols[:5]} are more than half empty. "
                "These columns may not be usable without strong imputation."
            ),
            affected_columns=cols,
            evidence={"missing_ratios": {c: float(high_miss.loc[c, "missing_ratio"]) for c in cols[:5]}},
            action_items=[
                "Consider dropping these columns unless domain-critical",
                "If kept, use model-based imputation (KNN, MICE) rather than simple mean/median",
            ],
        )

    # Moderate missing
    if not moderate_miss.empty:
        cols = list(moderate_miss.index)
        self._add(
            type=InsightType.RECOMMENDATION,
            severity=Severity.MEDIUM,
            category="missing",
            title=f"{len(cols)} column(s) with 10-50% missing values",
            description=(
                f"Columns {cols[:5]} have noticeable missing rates. "
                "Imputation strategy should be chosen carefully."
            ),
            affected_columns=cols,
            action_items=[
                "Check if missingness is random (MCAR) or systematic (MAR/MNAR)",
                "For numeric columns: median or KNN imputation; for categorical: mode or indicator variable",
            ],
        )

    # Completely clean
    if len(no_miss) == len(mi) and len(mi) > 0:
        self._add(
            type=InsightType.FINDING,
            severity=Severity.LOW,
            category="missing",
            title="No missing values detected in any column",
            description="All columns are fully populated — no imputation needed.",
            affected_columns=[],
        )

# -- 4. Outlier -------------------------------------------------------

def _outlier_insights(self) -> None:
    """Generate insights from the per-column outlier summary.

    Reads ``self._stats.outlier_summary`` (expects an ``outlier_%``
    column) and flags extreme (>15%) and notable (5-15%) outlier rates.
    """
    out = self._stats.outlier_summary
    if out.empty or "outlier_%" not in out.columns:
        return

    extreme = out[out["outlier_%"] > 15]
    moderate = out[(out["outlier_%"] > 5) & (out["outlier_%"] <= 15)]

    if not extreme.empty:
        cols = list(extreme.index)
        self._add(
            type=InsightType.WARNING,
            severity=Severity.HIGH,
            category="anomaly",
            title=f"{len(cols)} column(s) with extreme outlier rate (>15%)",
            description=(
                f"Columns {cols[:5]} have very high outlier percentages. "
                "This may indicate data quality issues or heavy-tailed distributions."
            ),
            affected_columns=cols,
            evidence={"outlier_rates": {c: float(extreme.loc[c, "outlier_%"]) for c in cols[:5]}},
            action_items=[
                "Check if the distribution is truly heavy-tailed (in which case outliers are expected)",
                "Apply winsorization or log-transform if outliers are distorting analysis",
                "Consider using robust methods (median, MAD, IQR-based)",
            ],
        )

    if not moderate.empty:
        cols = list(moderate.index)
        self._add(
            type=InsightType.FINDING,
            severity=Severity.MEDIUM,
            category="anomaly",
            title=f"{len(cols)} column(s) with notable outlier rate (5-15%)",
            description=f"Columns {cols[:5]} have moderate outlier rates.",
            affected_columns=cols,
            action_items=["Review outlier boundaries and adjust if domain knowledge warrants"],
        )

# -- 5. Quality -------------------------------------------------------

def _quality_insights(self) -> None:
    """Generate insights from the composite quality scores.

    Reads ``self._stats.quality_scores`` (a dict with an ``overall``
    key plus per-dimension scores in [0, 1]).
    """
    qs = self._stats.quality_scores
    if not qs:
        return

    overall = qs.get("overall", 1.0)
    if overall < 0.5:
        self._add(
            type=InsightType.WARNING,
            severity=Severity.CRITICAL,
            category="quality",
            title=f"Overall data quality is poor ({overall * 100:.0f}%)",
            description=(
                "The combined quality score across completeness, uniqueness, "
                "consistency, and validity is below 50%."
            ),
            evidence=qs,
            action_items=[
                "Address missing values and inconsistencies before analysis",
                "Review data collection pipeline for systematic issues",
            ],
        )
    elif overall < 0.75:
        self._add(
            type=InsightType.RECOMMENDATION,
            severity=Severity.MEDIUM,
            category="quality",
            title=f"Data quality is moderate ({overall * 100:.0f}%)",
            description="Some quality dimensions need attention before production use.",
            evidence=qs,
            action_items=["Focus on the lowest-scoring quality dimension"],
        )

    # Per-dimension alerts
    for dim, label in [("completeness", "Completeness"), ("uniqueness", "Uniqueness"),
                       ("consistency", "Consistency"), ("validity", "Validity")]:
        score = qs.get(dim, 1.0)
        if score < 0.6:
            self._add(
                type=InsightType.WARNING,
                severity=Severity.HIGH,
                category="quality",
                title=f"{label} score is low ({score * 100:.0f}%)",
                description=f"The {label.lower()} dimension scored {score * 100:.0f}%, dragging down overall quality.",
                evidence={dim: score},
                action_items=[f"Investigate and improve {label.lower()} issues"],
            )
# -- 6. Clustering ----------------------------------------------------

def _clustering_insights(self) -> None:
    """Generate insights from K-Means and DBSCAN clustering results.

    Reads the ``clustering`` entry of ``self._stats.advanced_stats``.
    """
    adv = self._stats.advanced_stats
    clustering = adv.get("clustering")
    if not clustering:
        return

    km = clustering.get("kmeans")
    if km:
        k = km.get("optimal_k", 0)
        sil = km.get("best_silhouette", 0)
        sizes = km.get("cluster_sizes", {})

        if k >= 2 and sil > 0.4:
            self._add(
                type=InsightType.OPPORTUNITY,
                severity=Severity.HIGH,
                category="cluster",
                title=f"Clear cluster structure found (k={k}, silhouette={sil:.2f})",
                description=(
                    f"K-Means identifies {k} well-separated clusters "
                    f"(silhouette={sil:.2f}). Cluster sizes: {sizes}."
                ),
                evidence={"optimal_k": k, "silhouette": sil, "sizes": sizes},
                action_items=[
                    "Profile each cluster to understand segment characteristics",
                    "Use cluster labels as a feature for downstream modelling",
                ],
            )
        elif k >= 2 and sil > 0.2:
            self._add(
                type=InsightType.FINDING,
                severity=Severity.MEDIUM,
                category="cluster",
                title=f"Moderate cluster structure (k={k}, silhouette={sil:.2f})",
                description=(
                    f"Some grouping exists but clusters overlap. "
                    f"Silhouette={sil:.2f} suggests partial separation."
                ),
                evidence={"optimal_k": k, "silhouette": sil},
                action_items=["Consider density-based methods (DBSCAN) for better cluster boundaries"],
            )

        # Check for imbalanced clusters.
        # NOTE(review): removed an unused `max_pct` local; renamed the
        # comprehension variable so it no longer shadows the outer `k`.
        if sizes:
            total = sum(sizes.values())
            if total > 0:
                min_pct = min(sizes.values()) / total
                if min_pct < 0.05:
                    tiny_clusters = [label for label, size in sizes.items() if size / total < 0.05]
                    self._add(
                        type=InsightType.FINDING,
                        severity=Severity.MEDIUM,
                        category="cluster",
                        title="Highly imbalanced clusters detected",
                        description=(
                            f"Cluster(s) {tiny_clusters} contain <5% of data. "
                            "These may represent anomalous sub-populations."
                        ),
                        evidence={"tiny_clusters": tiny_clusters, "min_pct": min_pct},
                        action_items=["Inspect small clusters — they may be anomalies or niche segments"],
                    )

    dbscan = clustering.get("dbscan")
    if dbscan:
        noise_ratio = dbscan.get("noise_ratio", 0)
        if noise_ratio > 0.2:
            self._add(
                type=InsightType.WARNING,
                severity=Severity.MEDIUM,
                category="cluster",
                title=f"DBSCAN labels {noise_ratio * 100:.0f}% of data as noise",
                description=(
                    "A high proportion of data points don't belong to any density cluster. "
                    "This may indicate dispersed data or sub-optimal epsilon."
                ),
                evidence={"noise_ratio": noise_ratio, "eps": dbscan.get("eps")},
                action_items=["Try adjusting eps parameter or use HDBSCAN for adaptive density"],
            )

# -- 7. Anomaly -------------------------------------------------------

def _anomaly_insights(self) -> None:
    """Generate insights from multi-method anomaly consensus results.

    Reads ``advanced_anomaly.consensus`` from
    ``self._stats.advanced_stats``.
    """
    adv = self._stats.advanced_stats
    anomaly = adv.get("advanced_anomaly", {})
    consensus = anomaly.get("consensus")
    if not consensus:
        return

    # NOTE(review): removed an unused `n = consensus.get("n_samples", 1)`.
    ratio = consensus.get("consensus_ratio", 0)
    count = consensus.get("consensus_count", 0)
    agreement = consensus.get("agreement_matrix", {})

    if ratio > 0.05:
        self._add(
            type=InsightType.WARNING,
            severity=Severity.HIGH,
            category="anomaly",
            title=f"Multi-method consensus: {count} anomalies ({ratio * 100:.1f}%)",
            description=(
                f"{count} rows flagged as anomalous by ≥2 independent methods "
                f"(IF + LOF + Mahalanobis). "
                f"All-agree: {agreement.get('all_agree_anomaly', 0)}, "
                f"majority: {agreement.get('majority_anomaly', 0)}."
            ),
            evidence={"consensus_ratio": ratio, "agreement": agreement},
            action_items=[
                "Investigate consensus anomalies — they are high-confidence outliers",
                "Consider removing or winsorizing before modelling",
            ],
        )
    elif ratio > 0.01:
        self._add(
            type=InsightType.FINDING,
            severity=Severity.MEDIUM,
            category="anomaly",
            title=f"Multi-method anomalies: {count} rows ({ratio * 100:.1f}%)",
            description=(
                f"A small fraction of rows are flagged by multiple anomaly detection methods."
            ),
            evidence={"consensus_ratio": ratio},
            action_items=["Review flagged rows for data entry errors or special cases"],
        )

# -- 8. Feature Insights ----------------------------------------------

def _feature_insights(self) -> None:
    """Generate insights from leakage, interaction and cardinality checks.

    Reads the ``feature_insights`` entry of ``self._stats.advanced_stats``.
    """
    adv = self._stats.advanced_stats
    fi = adv.get("feature_insights", {})
    if not fi:
        return

    # Leakage detection
    leakage = fi.get("leakage")
    if leakage is not None and not leakage.empty:
        # NOTE(review): simplified the original `.get(..., pd.Series())`
        # dance — the column-presence check already guards the access.
        if "risk_level" in leakage.columns:
            high_risk = leakage[leakage["risk_level"] == "high"]
        else:
            high_risk = pd.DataFrame()
        if not high_risk.empty:
            cols = list(high_risk.index)
            self._add(
                type=InsightType.WARNING,
                severity=Severity.CRITICAL,
                category="feature",
                title=f"{len(cols)} column(s) flagged for potential data leakage",
                description=(
                    f"Columns {cols[:5]} show high leakage risk "
                    "(constant, ID-like, or perfectly correlated with others)."
                ),
                affected_columns=cols,
                action_items=[
                    "Remove these columns before building any ML model",
                    "Verify they are not derived from the target variable",
                ],
            )

    # Strong interactions
    interactions = fi.get("interactions")
    if interactions is not None and not interactions.empty:
        if "interaction_strength" in interactions.columns:
            strong = interactions[interactions["interaction_strength"] > 0.7]
        else:
            strong = pd.DataFrame()
        # `not strong.empty` already implies len(strong) > 0.
        if not strong.empty:
            top = strong.iloc[0]
            self._add(
                type=InsightType.OPPORTUNITY,
                severity=Severity.MEDIUM,
                category="feature",
                title=f"{len(strong)} strong feature interaction(s) detected",
                description=(
                    f"Top interaction: '{top.get('col_a', '?')}' × '{top.get('col_b', '?')}' "
                    f"(strength={top.get('interaction_strength', 0):.2f}). "
                    "Product features may improve model performance."
                ),
                affected_columns=[str(top.get("col_a", "")), str(top.get("col_b", ""))],
                action_items=["Create interaction (product) features for the top pairs"],
            )

    # Cardinality / encoding
    card = fi.get("cardinality")
    if card is not None and not card.empty and "recommended_encoding" in card.columns:
        hash_cols = card[card["recommended_encoding"] == "hashing"]
        if not hash_cols.empty:
            cols = list(hash_cols.index)
            self._add(
                type=InsightType.RECOMMENDATION,
                severity=Severity.MEDIUM,
                category="feature",
                title=f"{len(cols)} high-cardinality column(s) need special encoding",
                description=(
                    f"Columns {cols[:5]} have very high cardinality. "
                    "One-hot encoding would create too many features."
                ),
                affected_columns=cols,
                action_items=[
                    "Use target encoding, hashing, or embedding for these columns",
                    "Consider grouping rare categories into 'Other'",
                ],
            )
PCA ----------------------------------------------------------- + + def _pca_insights(self) -> None: + pca_sum = self._stats.pca_summary + pca_var = self._stats.pca_variance + if not pca_sum: + return + + comp90 = pca_sum.get("components_for_90pct", 0) + n_orig = len(self._schema.numeric_columns) + if n_orig > 0 and comp90 > 0: + reduction = 1 - comp90 / n_orig + if reduction > 0.5: + self._add( + type=InsightType.OPPORTUNITY, + severity=Severity.MEDIUM, + category="feature", + title=f"High dimensionality reduction potential: {n_orig} → {comp90} components for 90% variance", + description=( + f"PCA shows that {comp90} components explain 90% of variance " + f"from {n_orig} original features ({reduction * 100:.0f}% reduction)." + ), + evidence={"original_features": n_orig, "pca_components": comp90, "reduction": reduction}, + action_items=[ + "Consider PCA projection for dimensionality reduction in ML pipelines", + "Examine top PCA loadings to understand dominant variance directions", + ], + ) + + # First PC dominance + if not pca_var.empty and "variance_ratio" in pca_var.columns: + first_pc = pca_var.iloc[0]["variance_ratio"] if len(pca_var) > 0 else 0 + if first_pc > 0.6: + self._add( + type=InsightType.FINDING, + severity=Severity.MEDIUM, + category="feature", + title=f"First principal component explains {first_pc * 100:.0f}% of variance", + description=( + "A single axis captures most of the data's variability. " + "This suggests a dominant latent factor." + ), + evidence={"pc1_variance_ratio": first_pc}, + action_items=["Inspect PC1 loadings to identify the driving variables"], + ) + + # -- 10. 
Duplicates --------------------------------------------------- + + def _duplicate_insights(self) -> None: + dup = self._stats.duplicate_stats + if not dup: + return + + ratio = dup.get("duplicate_ratio", 0) + count = dup.get("duplicate_rows", 0) + + if ratio > 0.1: + self._add( + type=InsightType.WARNING, + severity=Severity.HIGH, + category="quality", + title=f"{count} duplicate rows ({ratio * 100:.1f}% of dataset)", + description="Significant portion of data is duplicated, which may bias analysis and modelling.", + evidence={"duplicate_rows": count, "duplicate_ratio": ratio}, + action_items=[ + "Remove exact duplicates before analysis", + "Check if duplicates are legitimate (e.g. repeated measurements) or data errors", + ], + ) + elif ratio > 0.01: + self._add( + type=InsightType.FINDING, + severity=Severity.LOW, + category="quality", + title=f"{count} duplicate rows ({ratio * 100:.1f}%)", + description="A small number of duplicate rows exist.", + evidence={"duplicate_rows": count, "duplicate_ratio": ratio}, + action_items=["Review whether duplicates should be removed for your use case"], + ) + + # -- 11. 
Advanced Distribution ---------------------------------------- + + def _advanced_distribution_insights(self) -> None: + adv = self._stats.advanced_stats + adv_dist = adv.get("advanced_distribution", {}) + if not adv_dist: + return + + # Best-fit distribution + best_fit = adv_dist.get("best_fit") + if best_fit is not None and not best_fit.empty and "best_distribution" in best_fit.columns: + non_normal = best_fit[best_fit["best_distribution"] != "norm"] + if not non_normal.empty: + dist_counts: dict[str, int] = {} + for d in non_normal["best_distribution"]: + dist_counts[d] = dist_counts.get(d, 0) + 1 + most_common = max(dist_counts, key=dist_counts.get) + self._add( + type=InsightType.FINDING, + severity=Severity.MEDIUM, + category="distribution", + title=f"{len(non_normal)} column(s) best fit by non-normal distributions", + description=( + f"Distribution fitting reveals non-Normal best fits. " + f"Most common: {most_common} ({dist_counts[most_common]} columns). " + f"Others: {dict(list(dist_counts.items())[:5])}." + ), + affected_columns=list(non_normal.index), + evidence={"distribution_counts": dist_counts}, + action_items=[ + "Use the identified distributions for parametric modeling or simulation", + "Transform columns toward normality if Gaussian assumptions are needed", + ], + ) + + # Power transform recommendations + pt = adv_dist.get("power_transform") + if pt is not None and not pt.empty and "needs_transform" in pt.columns: + needs = pt[pt["needs_transform"] == True] + if not needs.empty: + cols = list(needs.index) + self._add( + type=InsightType.RECOMMENDATION, + severity=Severity.MEDIUM, + category="distribution", + title=f"{len(cols)} column(s) benefit from power transformation", + description=( + f"Box-Cox / Yeo-Johnson transforms can significantly reduce skewness " + f"for columns: {cols[:5]}." + ), + affected_columns=cols, + action_items=[ + "Apply the recommended transform (Box-Cox or Yeo-Johnson) in preprocessing", + ], + ) + + # -- 12. 
# -- 12. Advanced Correlation -----------------------------------------

def _advanced_correlation_insights(self) -> None:
    """Generate insights from MI, partial-correlation and bootstrap-CI results.

    Reads the ``advanced_correlation`` entry of
    ``self._stats.advanced_stats`` and compares it against the plain
    Pearson matrix in ``self._stats.correlation_matrix``.
    """
    adv = self._stats.advanced_stats
    adv_corr = adv.get("advanced_correlation", {})
    if not adv_corr:
        return

    # Non-linear dependencies via MI
    mi = adv_corr.get("mutual_information")
    pearson = self._stats.correlation_matrix
    if mi is not None and not mi.empty and not pearson.empty:
        # Find pairs with high MI but low Pearson (non-linear relationship)
        mi_cols = set(mi.columns) & set(pearson.columns)
        nonlinear_pairs = []
        for c1 in mi_cols:
            for c2 in mi_cols:
                if c1 >= c2:  # visit each unordered pair once
                    continue
                mi_val = mi.loc[c1, c2] if c1 in mi.index and c2 in mi.columns else 0
                p_val = abs(pearson.loc[c1, c2]) if c1 in pearson.index and c2 in pearson.columns else 0
                if mi_val > 0.3 and p_val < 0.3:
                    nonlinear_pairs.append((c1, c2, float(mi_val), float(p_val)))

        if nonlinear_pairs:
            nonlinear_pairs.sort(key=lambda x: x[2], reverse=True)
            top = nonlinear_pairs[0]
            self._add(
                type=InsightType.FINDING,
                severity=Severity.HIGH,
                category="correlation",
                title=f"{len(nonlinear_pairs)} non-linear dependency pair(s) detected",
                description=(
                    f"High mutual information but low Pearson correlation suggests non-linear "
                    f"relationships. Top: '{top[0]}' ↔ '{top[1]}' (MI={top[2]:.2f}, r={top[3]:.2f})."
                ),
                affected_columns=[top[0], top[1]],
                evidence={"nonlinear_pairs": nonlinear_pairs[:5]},
                action_items=[
                    "Use non-linear models (tree-based, kernel) to capture these relationships",
                    "Consider polynomial or interaction features",
                ],
            )

    # Confounded correlations (partial vs raw)
    pcorr = adv_corr.get("partial_correlation")
    if pcorr is not None and not pcorr.empty and not pearson.empty:
        confounded = []
        pcorr_cols = set(pcorr.columns) & set(pearson.columns)
        for c1 in pcorr_cols:
            for c2 in pcorr_cols:
                if c1 >= c2:
                    continue
                raw = pearson.loc[c1, c2] if c1 in pearson.index and c2 in pearson.columns else 0
                part = pcorr.loc[c1, c2] if c1 in pcorr.index and c2 in pcorr.columns else 0
                if abs(raw) > 0.5 and abs(raw - part) > 0.3:
                    confounded.append((c1, c2, float(raw), float(part)))

        if confounded:
            confounded.sort(key=lambda x: abs(x[2] - x[3]), reverse=True)
            top = confounded[0]
            self._add(
                type=InsightType.FINDING,
                severity=Severity.HIGH,
                category="correlation",
                title=f"{len(confounded)} likely confounded correlation(s) detected",
                description=(
                    f"Raw correlation differs significantly from partial correlation, "
                    f"suggesting confounding variables. "
                    f"Top: '{top[0]}' ↔ '{top[1]}' (raw r={top[2]:.2f}, partial r={top[3]:.2f})."
                ),
                affected_columns=[top[0], top[1]],
                evidence={"confounded_pairs": confounded[:5]},
                action_items=[
                    "Do not assume causal relationship from raw correlation for these pairs",
                    "Investigate which variables are confounders",
                ],
            )

    # Bootstrap CI stability
    bci = adv_corr.get("bootstrap_ci")
    if bci is not None and not bci.empty and "ci_width" in bci.columns:
        unstable = bci[bci["ci_width"] > 0.4]
        if not unstable.empty:
            self._add(
                type=InsightType.WARNING,
                severity=Severity.MEDIUM,
                category="correlation",
                title=f"{len(unstable)} correlation estimate(s) with wide bootstrap CI",
                description=(
                    "Correlation confidence intervals wider than 0.4 indicate "
                    "unreliable estimates — possibly due to small sample or outliers."
                ),
                evidence={"unstable_count": len(unstable)},
                action_items=[
                    "Treat these correlations with caution",
                    "Consider collecting more data or removing outliers",
                ],
            )

# -- 13. General / Cross-Cutting --------------------------------------

def _general_insights(self) -> None:
    """Dataset-level sanity insights: size, dimensionality and type mix."""
    n_rows = self._schema.n_rows
    n_cols = self._schema.n_cols
    n_num = len(self._schema.numeric_columns)
    n_cat = len(self._schema.categorical_columns)

    # Curse of dimensionality
    if n_cols > 0 and n_rows / n_cols < 10:
        self._add(
            type=InsightType.WARNING,
            severity=Severity.HIGH,
            category="general",
            title=f"Low sample-to-feature ratio ({n_rows / n_cols:.1f}:1)",
            description=(
                f"With {n_rows} rows and {n_cols} columns, the sample-to-feature ratio is low. "
                "This raises overfitting risk in ML models."
            ),
            evidence={"n_rows": n_rows, "n_cols": n_cols, "ratio": n_rows / n_cols},
            action_items=[
                "Apply dimensionality reduction (PCA, feature selection) before modelling",
                "Use regularization (L1/L2) or simpler models",
                "Collect more data if possible",
            ],
        )

    # Very small dataset
    if n_rows < 50:
        self._add(
            type=InsightType.WARNING,
            severity=Severity.HIGH,
            category="general",
            title=f"Very small dataset ({n_rows} rows)",
            description=(
                "Statistical tests and ML models may be unreliable with so few samples. "
                "Confidence intervals will be wide."
            ),
            evidence={"n_rows": n_rows},
            action_items=[
                "Use cross-validation with appropriate folds (e.g., leave-one-out for very small n)",
                "Prefer non-parametric or Bayesian approaches",
            ],
        )

    # All-numeric or all-categorical
    if n_num > 0 and n_cat == 0 and n_cols > 3:
        self._add(
            type=InsightType.FINDING,
            severity=Severity.LOW,
            category="general",
            title="Dataset is fully numeric (no categorical columns)",
            description="All columns are numeric, which simplifies preprocessing but may miss categorical patterns.",
            action_items=["Verify no categorical data was inadvertently coded as integers"],
        )
    elif n_cat > 0 and n_num == 0 and n_cols > 3:
        self._add(
            type=InsightType.FINDING,
            severity=Severity.LOW,
            category="general",
            title="Dataset is fully categorical (no numeric columns)",
            description="All columns are categorical. Numeric encoding will be needed for most ML algorithms.",
            action_items=["Plan encoding strategy (one-hot, target, ordinal) for all columns"],
        )


# ── f2a/stats/missing.py ──────────────────────────────────────────────
"""Missing data analysis module."""

import pandas as pd
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # NOTE(review): DataSchema is only used in type hints; importing it
    # under TYPE_CHECKING avoids a hard runtime dependency.
    from f2a.core.schema import DataSchema


class MissingStats:
    """Analyze missing data patterns.

    Args:
        df: Target DataFrame to analyze.
        schema: Data schema.
    """

    def __init__(self, df: pd.DataFrame, schema: "DataSchema") -> None:
        self._df = df
        self._schema = schema

    def column_summary(self) -> pd.DataFrame:
        """Return per-column missing data summary.

        Returns:
            DataFrame with missing count, ratio, and dtype per column,
            sorted by missing count (descending).
        """
        rows: list[dict] = []
        for col_info in self._schema.columns:
            rows.append(
                {
                    "column": col_info.name,
                    "missing_count": col_info.n_missing,
                    "missing_ratio": col_info.missing_ratio,
                    "missing_%": round(col_info.missing_ratio * 100, 2),
                    "dtype": col_info.dtype,
                }
            )

        result = pd.DataFrame(rows).set_index("column")
        return result.sort_values("missing_count", ascending=False)

    def row_missing_distribution(self) -> pd.DataFrame:
        """Return per-row missing count distribution.

        Returns:
            Frequency table of missing counts per row.
        """
        row_missing = self._df.isna().sum(axis=1)
        dist = row_missing.value_counts().sort_index()
        return pd.DataFrame(
            {
                "missing_per_row": dist.index,
                "row_count": dist.values,
                "row_%": (dist.values / len(self._df) * 100).round(2),
            }
        )

    def missing_matrix(self) -> pd.DataFrame:
        """Return missing data matrix (boolean).

        Boolean matrix used for visualizing missing data patterns.

        Returns:
            Boolean DataFrame where True indicates missing.
        """
        return self._df.isna()

    def total_missing_ratio(self) -> float:
        """Return the overall missing data ratio (0.0 for an empty frame)."""
        total_cells = self._df.shape[0] * self._df.shape[1]
        if total_cells == 0:
            return 0.0
        return round(float(self._df.isna().sum().sum() / total_cells), 4)
+ +Evaluates a dataset across six dimensions to produce a composite *readiness +score* and letter grade, together with blocking issues that **must** be resolved +and improvement suggestions that **should** be considered before feeding the +data into a machine learning pipeline. + +Dimensions +---------- +1. **Completeness** — missing value burden +2. **Consistency** — type homogeneity, value-range sanity +3. **Balance** — class / category imbalance, outlier skew +4. **Informativeness** — variance, uniqueness, MI content +5. **Independence** — multicollinearity (VIF / high-r) +6. **Scale** — sample-to-feature ratio, curse of dimensionality +""" + +from __future__ import annotations + +import math +from dataclasses import dataclass, field +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + +# Grade thresholds +_GRADES = [ + (95, "A+"), (90, "A"), (85, "B+"), (80, "B"), + (75, "C+"), (70, "C"), (60, "D"), (0, "F"), +] + + +def _to_grade(score: float) -> str: + for threshold, grade in _GRADES: + if score >= threshold: + return grade + return "F" + + +@dataclass +class ReadinessScore: + """ML readiness evaluation result.""" + + overall: float # 0-100 + grade: str # A+, A, B+, B, C+, C, D, F + dimensions: dict[str, float] # each 0-100 + blocking_issues: list[str] = field(default_factory=list) + suggestions: list[str] = field(default_factory=list) + details: dict[str, Any] = field(default_factory=dict) + + def to_dict(self) -> dict[str, Any]: + return { + "overall": round(self.overall, 1), + "grade": self.grade, + "dimensions": {k: round(v, 1) for k, v in self.dimensions.items()}, + "blocking_issues": self.blocking_issues, + "suggestions": self.suggestions, + "details": self.details, + } + + +class MLReadinessEvaluator: + """Evaluate the ML-readiness of a dataset from pre-computed stats. 

    Parameters
    ----------
    df : pd.DataFrame
        The (cleaned) analysis DataFrame.
    schema : DataSchema
        Type metadata.
    stats : StatsResult
        All pre-computed statistical results (basic + advanced).
    column_roles : pd.DataFrame | None
        Output of ``ColumnRoleClassifier.summary()`` (optional).
    """

    # Dimension weights — must sum to 1.0
    _WEIGHTS = {
        "completeness": 0.25,
        "consistency": 0.15,
        "balance": 0.15,
        "informativeness": 0.20,
        "independence": 0.15,
        "scale": 0.10,
    }

    def __init__(
        self,
        df: pd.DataFrame,
        schema: DataSchema,
        stats: Any,
        column_roles: pd.DataFrame | None = None,
    ) -> None:
        self._df = df
        self._schema = schema
        self._stats = stats
        self._roles = column_roles
        # Accumulated across dimension scorers during evaluate().
        self._blocking: list[str] = []
        self._suggestions: list[str] = []

    def evaluate(self) -> ReadinessScore:
        """Compute the overall ML readiness score."""
        dims: dict[str, float] = {}
        details: dict[str, Any] = {}

        for name, method in [
            ("completeness", self._completeness),
            ("consistency", self._consistency),
            ("balance", self._balance),
            ("informativeness", self._informativeness),
            ("independence", self._independence),
            ("scale", self._scale),
        ]:
            try:
                score, det = method()
                # Clamp each dimension score into [0, 100].
                dims[name] = max(0.0, min(100.0, score))
                details[name] = det
            except Exception as exc:
                # A failing dimension must not sink the whole evaluation.
                logger.debug("ML readiness dimension '%s' failed: %s", name, exc)
                dims[name] = 50.0  # neutral fallback
                details[name] = {"error": str(exc)}

        # Weighted sum over all six dimensions.
        overall = sum(dims[d] * self._WEIGHTS[d] for d in dims)
        grade = _to_grade(overall)

        return ReadinessScore(
            overall=round(overall, 1),
            grade=grade,
            dimensions=dims,
            blocking_issues=self._blocking,
            suggestions=self._suggestions,
            details=details,
        )

    # ==================================================================
    # Dimension scorers — each returns (score_0_100, detail_dict)
    # ==================================================================

    def _completeness(self) -> tuple[float, dict]:
        # Missing-value burden. 100 with no missing info; degrades
        # linearly so that a 50% average missing rate scores 0.
        mi = self._stats.missing_info
        detail: dict[str, Any] = {}

        if mi.empty or "missing_ratio" not in mi.columns:
            return 100.0, {"no_missing_info": True}

        ratios = mi["missing_ratio"]
        overall_miss = float(ratios.mean())
        high_miss_cols = list(mi[ratios > 0.5].index)
        mod_miss_cols = list(mi[(ratios > 0.1) & (ratios <= 0.5)].index)

        detail["overall_missing_rate"] = round(overall_miss, 4)
        detail["high_missing_columns"] = high_miss_cols[:10]
        detail["moderate_missing_columns"] = mod_miss_cols[:10]

        if high_miss_cols:
            self._blocking.append(
                f"{len(high_miss_cols)} column(s) have >50% missing — drop or impute: "
                f"{', '.join(high_miss_cols[:5])}"
            )

        if mod_miss_cols:
            self._suggestions.append(
                f"{len(mod_miss_cols)} column(s) have 10-50% missing — plan imputation strategy"
            )

        # Score: 100 if 0 missing, linearly degrade
        score = max(0, 100 * (1 - overall_miss * 2))  # 50% average missing → 0
        return score, detail

    def _consistency(self) -> tuple[float, dict]:
        # Type homogeneity: penalise mixed types, infinities, constants.
        detail: dict[str, Any] = {}
        penalties = 0.0
        n_cols = self._schema.n_cols

        # Mixed types from preprocessing
        pp = self._stats.preprocessing
        mixed = len(pp.mixed_type_columns) if pp else 0
        inf_cols = len(pp.infinite_value_columns) if pp else 0

        detail["mixed_type_columns"] = mixed
        detail["infinite_value_columns"] = inf_cols

        if mixed > 0:
            penalties += (mixed / max(n_cols, 1)) * 40
            self._suggestions.append(f"{mixed} mixed-type column(s) — cast to consistent types")

        if inf_cols > 0:
            penalties += (inf_cols / max(n_cols, 1)) * 20
            self._suggestions.append(f"{inf_cols} column(s) contain infinity values — replace with NaN or cap")

        # ID-like columns that shouldn't be features
        if self._roles is not None and not self._roles.empty:
            ids = self._roles[self._roles["primary_role"] == "id"]
            if not ids.empty:
                detail["id_columns"] = list(ids.index)
                self._suggestions.append(
                    f"Remove {len(ids)} ID-like column(s) before modelling: "
                    f"{', '.join(list(ids.index)[:5])}"
                )

        # Constants
        if self._roles is not None and not self._roles.empty:
            constants = self._roles[self._roles["primary_role"] == "constant"]
            if not constants.empty:
                penalties += len(constants) / max(n_cols, 1) * 20
                self._blocking.append(
                    f"{len(constants)} constant column(s) — remove before modelling"
                )

        score = max(0, 100 - penalties)
        return score, detail

    def _balance(self) -> tuple[float, dict]:
        # Outlier skew plus categorical imbalance.
        detail: dict[str, Any] = {}
        penalties = 0.0

        # Outlier ratio
        out = self._stats.outlier_summary
        if not out.empty and "outlier_%" in out.columns:
            avg_outlier = float(out["outlier_%"].mean())
            detail["avg_outlier_pct"] = round(avg_outlier, 2)
            if avg_outlier > 20:
                penalties += 30
                self._suggestions.append("High average outlier rate — consider winsorization or robust methods")
            elif avg_outlier > 10:
                penalties += 15

        # Categorical imbalance (Gini index) — only the first 20
        # categorical columns are sampled to bound the cost.
        cat_cols = self._schema.categorical_columns[:20]
        if cat_cols:
            ginis = []
            for col in cat_cols:
                if col in self._df.columns:
                    vc = self._df[col].value_counts(normalize=True).values
                    gini = 1 - np.sum(vc ** 2)
                    ginis.append(gini)
            if ginis:
                avg_gini = float(np.mean(ginis))
                detail["avg_categorical_gini"] = round(avg_gini, 4)
                # Low Gini means imbalanced
                if avg_gini < 0.3:
                    penalties += 20
                    self._suggestions.append(
                        "Categorical columns are highly imbalanced — consider SMOTE or class weighting"
                    )

        score = max(0, 100 - penalties)
        return score, detail

    def _informativeness(self) -> tuple[float, dict]:
        # Information content: penalise duplicates, near-constant
        # features and highly compressible (redundant) feature sets.
        detail: dict[str, Any] = {}
        penalties = 0.0
        n_cols = self._schema.n_cols

        # Duplicate ratio
        dup = self._stats.duplicate_stats
        dup_ratio = dup.get("duplicate_ratio", 0) if dup else 0
        detail["duplicate_ratio"] = round(dup_ratio, 4)
        if dup_ratio > 0.2:
            penalties += 25
            self._blocking.append(f"{dup_ratio * 100:.0f}% duplicate rows — remove before modelling")
        elif dup_ratio > 0.05:
            penalties += 10
            self._suggestions.append("Some duplicate rows exist — verify they are intentional")

        # Low-variance features (constant or near-constant)
        summary = self._stats.summary
        if not summary.empty and "cv" in summary.columns:
            near_const = summary[(summary["cv"].notna()) & (summary["cv"].abs() < 0.01)]
            if not near_const.empty:
                penalties += (len(near_const) / max(n_cols, 1)) * 30
                detail["near_constant_columns"] = list(near_const.index)[:10]
                self._suggestions.append(
                    f"{len(near_const)} near-constant column(s) carry very little information"
                )

        # PCA compressibility (high reduction = redundancy penalty, but also okay)
        pca_sum = self._stats.pca_summary
        if pca_sum:
            comp90 = pca_sum.get("components_for_90pct", 0)
            n_num = len(self._schema.numeric_columns)
            if n_num > 0 and comp90 > 0:
                compression = comp90 / n_num
                detail["pca_compression"] = round(compression, 3)
                if compression < 0.3:
                    # very compressible → lots of redundancy
                    penalties += 10
                    self._suggestions.append(
                        f"90% variance in just {comp90}/{n_num} PCs — consider PCA for dimensionality reduction"
                    )

        score = max(0, 100 - penalties)
        return score, detail

    def _independence(self) -> tuple[float, dict]:
        # Multicollinearity: VIF table plus near-perfect Pearson pairs.
        detail: dict[str, Any] = {}
        penalties = 0.0

        vif = self._stats.vif_table
        if not vif.empty and "VIF" in vif.columns:
            severe = vif[vif["VIF"] > 10]
            moderate = vif[(vif["VIF"] > 5) & (vif["VIF"] <= 10)]
            detail["severe_vif_columns"] = list(severe.index)[:10]
            detail["moderate_vif_columns"] = list(moderate.index)[:10]

            if not severe.empty:
                worst_vif = float(severe["VIF"].max())
                penalties += min(50, len(severe) * 10)
                if worst_vif > 100:
                    self._blocking.append(
                        f"Extreme multicollinearity: VIF={worst_vif:.0f} for '{severe['VIF'].idxmax()}' — remove or combine"
                    )
                else:
                    self._suggestions.append(
                        f"{len(severe)} column(s) with VIF>10 — consider regularization or PCA"
                    )

            if not moderate.empty:
                penalties += len(moderate) * 3

        # High-correlation pairs
        corr = self._stats.correlation_matrix
        if not corr.empty:
            n_high = 0
            cols_list = corr.columns.tolist()
            # Upper-triangle scan; |r| > 0.95 counts as near-perfect.
            for i, c1 in enumerate(cols_list):
                for c2 in cols_list[i + 1:]:
                    if abs(corr.loc[c1, c2]) > 0.95:
                        n_high += 1
            if n_high > 0:
                detail["near_perfect_pairs"] = n_high
                penalties += min(30, n_high * 5)

        score = max(0, 100 - penalties)
        return score, detail

    def _scale(self) -> tuple[float, dict]:
        # Sample-to-feature ratio, mapped piecewise onto 0-100.
        detail: dict[str, Any] = {}
        n_rows = self._schema.n_rows
        n_features = len(self._schema.numeric_columns) + len(self._schema.categorical_columns)

        ratio = n_rows / max(n_features, 1)
        detail["sample_feature_ratio"] = round(ratio, 1)
        detail["n_rows"] = n_rows
        detail["n_features"] = n_features

        if ratio < 5:
            self._blocking.append(
                f"Sample-to-feature ratio is {ratio:.1f}:1 — very high overfitting risk"
            )
            score = max(0, ratio / 5 * 50)
        elif ratio < 10:
            self._suggestions.append(
                f"Sample-to-feature ratio ({ratio:.0f}:1) is low — use regularization"
            )
            score = 50 + (ratio - 5) / 5 * 30
        elif ratio < 20:
            score = 80 + (ratio - 10) / 10 * 15
        else:
            score = min(100, 95 + min(ratio / 100, 1) * 5)

        return score, detail


# ── f2a/stats/outlier.py ──────────────────────────────────────────────
"""Outlier detection module.

Provides IQR-based and Z-score-based outlier detection for numeric columns.
"""

from __future__ import annotations

import numpy as np
import pandas as pd

from f2a.core.schema import DataSchema


class OutlierStats:
    """Detect and summarise outliers in numeric columns.

    Args:
        df: Target DataFrame.
        schema: Data schema.
    """
+ """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + # ── IQR method ──────────────────────────────────────── + + def iqr_summary(self, multiplier: float = 1.5) -> pd.DataFrame: + """Detect outliers using the IQR fence method. + + Args: + multiplier: IQR multiplier (default 1.5 for moderate outliers, + 3.0 for extreme outliers). + + Returns: + Per-column outlier summary DataFrame. + """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) == 0: + continue + + q1 = float(series.quantile(0.25)) + q3 = float(series.quantile(0.75)) + iqr = q3 - q1 + lower = q1 - multiplier * iqr + upper = q3 + multiplier * iqr + + outlier_mask = (series < lower) | (series > upper) + outliers = series[outlier_mask] + n_outliers = len(outliers) + + rows.append({ + "column": col, + "q1": round(q1, 4), + "q3": round(q3, 4), + "iqr": round(iqr, 4), + "lower_bound": round(lower, 4), + "upper_bound": round(upper, 4), + "outlier_count": n_outliers, + "outlier_%": round(n_outliers / len(series) * 100, 2), + "min_outlier": round(float(outliers.min()), 4) if n_outliers > 0 else None, + "max_outlier": round(float(outliers.max()), 4) if n_outliers > 0 else None, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Z-score method ──────────────────────────────────── + + def zscore_summary(self, threshold: float = 3.0) -> pd.DataFrame: + """Detect outliers using the Z-score method. + + Args: + threshold: Z-score absolute threshold (default 3.0). + + Returns: + Per-column outlier summary DataFrame. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 3: + continue + + mean = float(series.mean()) + std = float(series.std()) + if std == 0: + continue + + z = np.abs((series - mean) / std) + n_outliers = int((z > threshold).sum()) + + rows.append({ + "column": col, + "mean": round(mean, 4), + "std": round(std, 4), + "threshold": threshold, + "outlier_count": n_outliers, + "outlier_%": round(n_outliers / len(series) * 100, 2), + "max_zscore": round(float(z.max()), 4), + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Convenience ─────────────────────────────────────── + + def summary(self, method: str = "iqr", **kwargs) -> pd.DataFrame: + """Return outlier summary using the specified *method*. + + Args: + method: ``"iqr"`` (default) or ``"zscore"``. + **kwargs: Passed to the underlying method. + """ + if method == "zscore": + return self.zscore_summary(**kwargs) + return self.iqr_summary(**kwargs) + + def outlier_mask(self, method: str = "iqr", **kwargs) -> pd.DataFrame: + """Return a boolean DataFrame where ``True`` marks an outlier. + + Useful for downstream visualisation. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + mask = pd.DataFrame(False, index=self._df.index, columns=cols) + + if method == "zscore": + threshold = kwargs.get("threshold", 3.0) + for col in cols: + series = self._df[col].dropna() + if len(series) < 3 or series.std() == 0: + continue + z = np.abs((series - series.mean()) / series.std()) + mask.loc[z.index, col] = z > threshold + else: + multiplier = kwargs.get("multiplier", 1.5) + for col in cols: + series = self._df[col].dropna() + if len(series) == 0: + continue + q1 = series.quantile(0.25) + q3 = series.quantile(0.75) + iqr = q3 - q1 + mask.loc[series.index, col] = (series < q1 - multiplier * iqr) | ( + series > q3 + multiplier * iqr + ) + + return mask diff --git a/f2a/stats/pca_analysis.py b/f2a/stats/pca_analysis.py new file mode 100644 index 0000000..b167bff --- /dev/null +++ b/f2a/stats/pca_analysis.py @@ -0,0 +1,159 @@ +"""PCA (Principal Component Analysis) module. + +Computes variance explained, loadings, and transformed coordinates +for numeric columns. Requires ``scikit-learn``. +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + + +class PCAStats: + """Perform PCA on numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + max_components: Maximum number of components to compute. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + max_components: int = 10, + ) -> None: + self._df = df + self._schema = schema + self._max_components = max_components + + self._fitted = False + self._pca: Any = None + self._X_scaled: np.ndarray | None = None + self._feature_names: list[str] = [] + self._n_components = 0 + + # ── Lazy fitting ────────────────────────────────────── + + def _fit(self) -> bool: + """Fit PCA model. 
Returns ``True`` on success.""" + if self._fitted: + return self._pca is not None + + self._fitted = True + + cols = self._schema.numeric_columns + if len(cols) < 2: + return False + + try: + from sklearn.decomposition import PCA + from sklearn.preprocessing import StandardScaler + except ImportError: + logger.info("scikit-learn not installed; skipping PCA analysis.") + return False + + df_clean = self._df[cols].dropna() + if len(df_clean) < max(10, len(cols)): + return False + + try: + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + + self._n_components = min(self._max_components, len(cols), len(df_clean) - 1) + if self._n_components < 1: + return False + + self._pca = PCA(n_components=self._n_components) + self._pca.fit(X) + self._X_scaled = X + self._feature_names = list(cols) + return True + except Exception as exc: + logger.warning("PCA failed: %s", exc) + return False + + # ── Variance explained ──────────────────────────────── + + def variance_explained(self) -> pd.DataFrame: + """Return variance explained by each principal component. + + Returns: + DataFrame with variance ratio, cumulative ratio, and eigenvalue + per component. + """ + if not self._fit(): + return pd.DataFrame() + + rows: list[dict] = [] + cum = np.cumsum(self._pca.explained_variance_ratio_) + for i in range(self._n_components): + rows.append({ + "component": f"PC{i + 1}", + "variance_ratio": round(float(self._pca.explained_variance_ratio_[i]), 4), + "cumulative_ratio": round(float(cum[i]), 4), + "eigenvalue": round(float(self._pca.explained_variance_[i]), 4), + }) + + return pd.DataFrame(rows).set_index("component") + + # ── Loadings ────────────────────────────────────────── + + def loadings(self) -> pd.DataFrame: + """Return PCA loadings (feature weights per component). + + Returns: + DataFrame with features as rows and ``PC1 .. PCn`` as columns. 
+ """ + if not self._fit(): + return pd.DataFrame() + + n_show = min(5, self._n_components) + cols = [f"PC{i + 1}" for i in range(n_show)] + return pd.DataFrame( + self._pca.components_[:n_show].T, + index=self._feature_names, + columns=cols, + ).round(4) + + # ── Transformed coordinates ─────────────────────────── + + def transformed(self, n_components: int = 2) -> pd.DataFrame: + """Return data projected onto the first *n_components* principal components.""" + if not self._fit() or self._X_scaled is None: + return pd.DataFrame() + + n = min(n_components, self._n_components) + coords = self._pca.transform(self._X_scaled)[:, :n] + cols = [f"PC{i + 1}" for i in range(n)] + return pd.DataFrame(coords, columns=cols) + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return a concise PCA summary.""" + ve = self.variance_explained() + if ve.empty: + return {} + + # Number of components to reach 90 % variance + cum = ve["cumulative_ratio"] + above_90 = cum[cum >= 0.90] + n_for_90 = int(above_90.index[0].replace("PC", "")) if len(above_90) > 0 else len(cum) + + return { + "n_components": len(ve), + "total_variance_explained": round(float(cum.iloc[-1]), 4), + "components_for_90pct": n_for_90, + "top_component_variance": round(float(ve["variance_ratio"].iloc[0]), 4), + } diff --git a/f2a/stats/quality.py b/f2a/stats/quality.py new file mode 100644 index 0000000..461fe02 --- /dev/null +++ b/f2a/stats/quality.py @@ -0,0 +1,236 @@ +"""Data quality scoring module. + +Computes per-column and overall quality scores across **six** dimensions: + +1. **Completeness** — proportion of non-missing cells. +2. **Uniqueness** — proportion of non-duplicate rows. +3. **Consistency** — dtype-based type-uniformity check (fast). +4. **Validity** — proportion of finite numeric values (no ``inf``). +5. **Timeliness** — recency of datetime columns (optional). +6. **Conformity** — value-range and pattern compliance. 
+ +The ``overall_score`` is a weighted average of whichever dimensions apply +to the dataset, ensuring the score adapts to the data's characteristics. +""" + +from __future__ import annotations + +import re +from typing import Any + +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema + + +class QualityStats: + """Compute data quality scores. + + Args: + df: Target DataFrame. + schema: Data schema. + """ + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + # ── Dimension scores ────────────────────────────────── + + def completeness(self) -> float: + """Proportion of non-missing cells.""" + total = self._df.shape[0] * self._df.shape[1] + if total == 0: + return 1.0 + return round(1.0 - float(self._df.isna().sum().sum() / total), 4) + + def uniqueness(self) -> float: + """Proportion of non-duplicate rows.""" + n = len(self._df) + if n == 0: + return 1.0 + return round(1.0 - float(self._df.duplicated().sum() / n), 4) + + def consistency(self) -> float: + """Type-consistency score — fraction of columns with uniform dtype. + + Uses ``dtype.kind`` instead of the slow per-element ``apply(type)`` + approach, checking whether object-typed columns are truly mixed-type. + """ + ncol = len(self._df.columns) + if ncol == 0: + return 1.0 + + consistent = 0 + for col in self._df.columns: + kind = self._df[col].dtype.kind + if kind != "O": + # Non-object dtypes (int, float, bool, datetime, …) are + # inherently type-consistent. + consistent += 1 + continue + # For object columns, sample up to 1 000 values and check types. 
+ non_null = self._df[col].dropna() + if len(non_null) == 0: + consistent += 1 + continue + sample = non_null.head(1_000) + types_seen = set(type(v).__name__ for v in sample.values) + if len(types_seen) <= 1: + consistent += 1 + + return round(consistent / ncol, 4) + + def validity(self) -> float: + """Proportion of finite numeric values (excludes ``inf`` / ``-inf``).""" + num_cols = self._schema.numeric_columns + if not num_cols: + return 1.0 + + total = 0 + valid = 0 + for col in num_cols: + series = self._df[col].dropna() + total += len(series) + valid += int(np.isfinite(series).sum()) + + return round(valid / total, 4) if total > 0 else 1.0 + + def timeliness(self) -> float | None: + """Recency score for datetime columns (0 = ancient, 1 = fresh). + + If no datetime columns exist, returns ``None`` and the dimension + is excluded from the overall score. + + Heuristic: score = mean(exp(−days_since / 365)) across datetime cols. + """ + dt_cols = self._schema.datetime_columns + if not dt_cols: + return None + + now = pd.Timestamp.now() + scores: list[float] = [] + for col in dt_cols: + series = pd.to_datetime(self._df[col], errors="coerce").dropna() + if series.empty: + continue + max_ts = series.max() + if pd.isna(max_ts): + continue + days_since = max((now - max_ts).days, 0) + # exponential decay with half-life ≈ 253 days + scores.append(float(np.exp(-days_since / 365.0))) + + if not scores: + return None + return round(float(np.mean(scores)), 4) + + def conformity(self) -> float: + """Pattern-and-range compliance score. + + Checks: + * Numeric columns: values within [μ ± 4σ] (i.e. no extreme outliers). + * String columns: no excessively long / short values or embedded + control characters. + + Returns: + Score in [0, 1]. 1.0 = fully conforming. 
+ """ + scores: list[float] = [] + + # ── Numeric: fraction within ±4σ + for col in self._schema.numeric_columns: + series = self._df[col].dropna() + if len(series) < 10: + scores.append(1.0) + continue + mu, sigma = float(series.mean()), float(series.std()) + if sigma == 0: + scores.append(1.0) + continue + in_range = ((series >= mu - 4 * sigma) & (series <= mu + 4 * sigma)).sum() + scores.append(float(in_range) / len(series)) + + # ── String: no control characters (ASCII 0-31 except \n\r\t) + _CTRL_RE = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f]") + for col in self._schema.categorical_columns: + series = self._df[col].dropna().astype(str).head(2_000) + if series.empty: + scores.append(1.0) + continue + has_ctrl = series.apply(lambda v: bool(_CTRL_RE.search(v))) + scores.append(1.0 - float(has_ctrl.mean())) + + if not scores: + return 1.0 + return round(float(np.mean(scores)), 4) + + def overall_score(self) -> float: + """Weighted average of all applicable quality dimensions. + + Base weights (always active): + completeness 30 %, uniqueness 20 %, consistency 15 %, + validity 15 %, conformity 10 %. + If timeliness is available, it receives 10 % and the others + are proportionally reduced. 
+ """ + dims: dict[str, tuple[float, float]] = { + "completeness": (0.30, self.completeness()), + "uniqueness": (0.20, self.uniqueness()), + "consistency": (0.15, self.consistency()), + "validity": (0.15, self.validity()), + "conformity": (0.10, self.conformity()), + } + + timeliness_val = self.timeliness() + if timeliness_val is not None: + dims["timeliness"] = (0.10, timeliness_val) + + total_weight = sum(w for w, _ in dims.values()) + score = sum(w * v for w, v in dims.values()) / total_weight + return round(score, 4) + + # ── Summaries ───────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return all quality dimension scores.""" + result: dict[str, Any] = { + "completeness": self.completeness(), + "uniqueness": self.uniqueness(), + "consistency": self.consistency(), + "validity": self.validity(), + "conformity": self.conformity(), + } + timeliness_val = self.timeliness() + if timeliness_val is not None: + result["timeliness"] = timeliness_val + result["overall"] = self.overall_score() + return result + + def column_quality(self) -> pd.DataFrame: + """Return per-column quality scores. + + Returns: + DataFrame indexed by column name with completeness, uniqueness, + type, and composite quality_score. 
+ """ + rows: list[dict] = [] + for col_info in self._schema.columns: + col = col_info.name + series = self._df[col] + compl = 1.0 - col_info.missing_ratio + + n_total = int(series.count()) + n_unique = int(series.nunique()) + uniqueness = n_unique / n_total if n_total > 0 else 1.0 + + rows.append({ + "column": col, + "completeness": round(compl, 4), + "uniqueness": round(min(uniqueness, 1.0), 4), + "type": col_info.inferred_type.value, + "quality_score": round((compl + min(uniqueness, 1.0)) / 2, 4), + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() diff --git a/f2a/stats/statistical_tests.py b/f2a/stats/statistical_tests.py new file mode 100644 index 0000000..a0cbb5e --- /dev/null +++ b/f2a/stats/statistical_tests.py @@ -0,0 +1,494 @@ +"""Statistical hypothesis tests module. + +Provides Levene, Kruskal-Wallis, Mann-Whitney, Chi-Square goodness-of-fit, +Grubbs outlier test, and Augmented Dickey-Fuller stationarity test. + +**Enhancements over v1**: + +* **Kruskal-Wallis** now uses categorical columns as grouping variables + so each test compares one numeric column across groups of a factor — the + semantically correct usage. +* **Benjamini-Hochberg FDR** correction is applied to all pairwise / + multi-test batteries (Levene, Mann-Whitney, Kruskal-Wallis). 
+* **Effect sizes** are reported alongside every test: + - rank-biserial *r* for Mann-Whitney U + - η² (eta-squared) for Kruskal-Wallis H + - Cohen's *d* proxy for Levene (log-variance difference) + - Cramér's *V* for Chi-Square + +References: + - Levene (1960) — equality of variances + - Kruskal & Wallis (1952) — non-parametric one-way ANOVA + - Mann & Whitney (1947) — two-sample rank test + - Grubbs (1950) — single-outlier test + - Dickey & Fuller (1979) — stationarity test + - Benjamini & Hochberg (1995) — FDR control + - Rosenthal (1991) — rank-biserial correlation + - Cohen (1988) — effect size conventions +""" + +from __future__ import annotations + +from typing import Any + +import numpy as np +import pandas as pd +from scipy import stats as sp_stats + +from f2a.core.schema import DataSchema +from f2a.utils.logging import get_logger + +logger = get_logger(__name__) + +# ── Utility: Benjamini-Hochberg FDR correction ─────────── + + +def _bh_adjust(p_values: list[float]) -> list[float]: + """Return Benjamini-Hochberg adjusted p-values. + + Args: + p_values: Raw p-values (same order as rows). + + Returns: + Adjusted p-values clipped to [0, 1]. + """ + m = len(p_values) + if m == 0: + return [] + arr = np.asarray(p_values, dtype=float) + order = np.argsort(arr) + ranked = np.empty_like(arr) + ranked[order] = np.arange(1, m + 1) + + adjusted = arr * m / ranked + # enforce monotonicity (descending by rank order) + sorted_idx = np.argsort(ranked)[::-1] + cum_min = np.minimum.accumulate(adjusted[sorted_idx]) + adjusted[sorted_idx] = cum_min + return np.clip(adjusted, 0.0, 1.0).tolist() + + +def _significance_stars(p: float) -> str: + """Return significance star annotation.""" + if p < 0.001: + return "***" + if p < 0.01: + return "**" + if p < 0.05: + return "*" + if p < 0.1: + return "†" + return "ns" + + +class StatisticalTests: + """Perform various statistical hypothesis tests. + + Args: + df: Target DataFrame. + schema: Data schema. 
+ """ + + _MAX_PAIRWISE = 15 + _MAX_CATEGORIES = 20 + _MIN_GROUP_SIZE = 5 + + def __init__(self, df: pd.DataFrame, schema: DataSchema) -> None: + self._df = df + self._schema = schema + + # ── Levene's test (homogeneity of variances) ────────── + + def levene_test(self) -> pd.DataFrame: + """Levene's test for equality of variances across numeric columns. + + Tests whether pairs of numeric columns have equal variances. + Results include BH-adjusted p-values and a log-variance-ratio + effect size proxy. + + Returns: + DataFrame with pairwise Levene test results. + """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[: self._MAX_PAIRWISE] + rows: list[dict] = [] + + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + a = self._df[cols[i]].dropna().values + b = self._df[cols[j]].dropna().values + if len(a) < 3 or len(b) < 3: + continue + try: + stat, p = sp_stats.levene(a, b) + # Effect size: absolute log-variance ratio + var_a = float(np.var(a, ddof=1)) if len(a) > 1 else 1e-12 + var_b = float(np.var(b, ddof=1)) if len(b) > 1 else 1e-12 + log_var_ratio = abs( + float(np.log(max(var_a, 1e-12) / max(var_b, 1e-12))) + ) + rows.append({ + "col_a": cols[i], + "col_b": cols[j], + "levene_stat": round(float(stat), 4), + "p_value": round(float(p), 6), + "log_var_ratio": round(log_var_ratio, 4), + }) + except Exception: + continue + + if not rows: + return pd.DataFrame() + + # BH-adjusted p-values + raw_p = [r["p_value"] for r in rows] + adj_p = _bh_adjust(raw_p) + for r, ap in zip(rows, adj_p): + r["adjusted_p"] = round(ap, 6) + r["significant_0.05"] = ap < 0.05 + r["stars"] = _significance_stars(ap) + + return pd.DataFrame(rows) + + # ── Kruskal-Wallis test ─────────────────────────────── + + def kruskal_wallis(self) -> pd.DataFrame: + """Kruskal-Wallis H-test: numeric column grouped by categorical factor. 
+ + For each (categorical, numeric) pair the test checks whether the + numeric distribution differs across the levels of the factor. + Reports η² (eta-squared) effect size and BH-adjusted p-values. + + Returns: + DataFrame with one row per (grouping_col, numeric_col) pair. + """ + num_cols = self._schema.numeric_columns + cat_cols = self._schema.categorical_columns + + if not num_cols or not cat_cols: + return pd.DataFrame() + + # Limit to manageable size + cat_cols = cat_cols[:10] + num_cols = num_cols[:15] + + rows: list[dict] = [] + + for cat in cat_cols: + groups_series = self._df[cat] + unique_vals = groups_series.dropna().unique() + # skip useless groupings (1 group, or >50 levels) + if len(unique_vals) < 2 or len(unique_vals) > 50: + continue + + for num in num_cols: + sub = self._df[[cat, num]].dropna() + grouped = [ + grp[num].values + for _, grp in sub.groupby(cat) + if len(grp) >= self._MIN_GROUP_SIZE + ] + if len(grouped) < 2: + continue + + try: + stat, p = sp_stats.kruskal(*grouped) + n_total = sum(len(g) for g in grouped) + k = len(grouped) + # η² = (H - k + 1) / (n - k) + eta_sq = max( + 0.0, (float(stat) - k + 1) / (n_total - k) + ) if n_total > k else 0.0 + rows.append({ + "grouping_col": cat, + "numeric_col": num, + "n_groups": k, + "h_statistic": round(float(stat), 4), + "p_value": round(float(p), 6), + "eta_squared": round(eta_sq, 4), + "effect_magnitude": ( + "large" if eta_sq >= 0.14 + else "medium" if eta_sq >= 0.06 + else "small" + ), + }) + except Exception: + continue + + if not rows: + return pd.DataFrame() + + # BH correction + raw_p = [r["p_value"] for r in rows] + adj_p = _bh_adjust(raw_p) + for r, ap in zip(rows, adj_p): + r["adjusted_p"] = round(ap, 6) + r["reject_h0_0.05"] = ap < 0.05 + r["stars"] = _significance_stars(ap) + r["interpretation"] = ( + f"Significant (η²={r['eta_squared']}, {r['effect_magnitude']})" + if ap < 0.05 + else "No significant difference" + ) + + return pd.DataFrame(rows) + + # ── Mann-Whitney U test 
─────────────────────────────── + + def mann_whitney(self) -> pd.DataFrame: + """Pairwise Mann-Whitney U tests between numeric columns. + + Reports rank-biserial *r* effect size (Rosenthal, 1991) and + BH-adjusted p-values. + + Returns: + DataFrame with col_a, col_b, U-stat, p-value, effect size. + """ + cols = self._schema.numeric_columns + if len(cols) < 2: + return pd.DataFrame() + + cols = cols[: self._MAX_PAIRWISE] + rows: list[dict] = [] + + for i in range(len(cols)): + for j in range(i + 1, len(cols)): + a = self._df[cols[i]].dropna().values + b = self._df[cols[j]].dropna().values + if len(a) < self._MIN_GROUP_SIZE or len(b) < self._MIN_GROUP_SIZE: + continue + try: + stat, p = sp_stats.mannwhitneyu(a, b, alternative="two-sided") + n1, n2 = len(a), len(b) + # rank-biserial r = 1 - 2U / (n1 * n2) + r_rb = 1.0 - 2.0 * float(stat) / (n1 * n2) + rows.append({ + "col_a": cols[i], + "col_b": cols[j], + "u_statistic": round(float(stat), 2), + "p_value": round(float(p), 6), + "rank_biserial_r": round(r_rb, 4), + "effect_magnitude": ( + "large" if abs(r_rb) >= 0.5 + else "medium" if abs(r_rb) >= 0.3 + else "small" + ), + }) + except Exception: + continue + + if not rows: + return pd.DataFrame() + + # BH correction + raw_p = [r["p_value"] for r in rows] + adj_p = _bh_adjust(raw_p) + for r, ap in zip(rows, adj_p): + r["adjusted_p"] = round(ap, 6) + r["significant_0.05"] = ap < 0.05 + r["stars"] = _significance_stars(ap) + + return pd.DataFrame(rows) + + # ── Chi-square goodness-of-fit ──────────────────────── + + def chi_square_goodness(self) -> pd.DataFrame: + """Chi-square goodness-of-fit test for categorical columns. + + Tests whether observed frequencies differ from expected uniform. + Reports Cramér's *V* effect size. + + Returns: + DataFrame with test results per categorical column. 
+ """ + cols = self._schema.categorical_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols[: self._MAX_CATEGORIES]: + vc = self._df[col].value_counts() + if len(vc) < 2 or len(vc) > 100: + continue + + observed = vc.values.astype(float) + expected = np.full_like(observed, observed.mean()) + n_obs = float(observed.sum()) + k = len(vc) + + try: + stat, p = sp_stats.chisquare(observed, f_exp=expected) + # Cramér's V for goodness-of-fit: sqrt(chi2 / (n*(k-1))) + cramers_v = float(np.sqrt(stat / (n_obs * (k - 1)))) if k > 1 else 0.0 + rows.append({ + "column": col, + "n_categories": k, + "chi2_stat": round(float(stat), 4), + "p_value": round(float(p), 6), + "cramers_v": round(cramers_v, 4), + "effect_magnitude": ( + "large" if cramers_v >= 0.5 + else "medium" if cramers_v >= 0.3 + else "small" + ), + "uniform_0.05": float(p) > 0.05, + "interpretation": ( + "Approximately uniform" + if float(p) > 0.05 + else "Non-uniform distribution" + ), + }) + except Exception: + continue + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Grubbs' outlier test ────────────────────────────── + + def grubbs_test(self, alpha: float = 0.05) -> pd.DataFrame: + """Grubbs' test for a single outlier in each numeric column. + + Tests whether the maximum or minimum value is an outlier + assuming normal distribution. + + Args: + alpha: Significance level. + + Returns: + DataFrame with test results per column. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + n = len(series) + if n < 7: + continue + + mean = float(series.mean()) + std = float(series.std()) + if std == 0: + continue + + # Test statistic = max(|x_i - mean|) / std + max_diff_idx = (series - mean).abs().idxmax() + max_val = float(series.loc[max_diff_idx]) + g_stat = abs(max_val - mean) / std + + # Critical value (t-distribution) + t_crit = float(sp_stats.t.ppf(1 - alpha / (2 * n), n - 2)) + g_crit = (n - 1) / np.sqrt(n) * np.sqrt(t_crit**2 / (n - 2 + t_crit**2)) + + is_outlier = g_stat > g_crit + + rows.append({ + "column": col, + "suspect_value": round(max_val, 4), + "grubbs_statistic": round(float(g_stat), 4), + "critical_value": round(float(g_crit), 4), + "is_outlier": is_outlier, + "n": n, + }) + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Augmented Dickey-Fuller (stationarity) ──────────── + + def adf_test(self) -> pd.DataFrame: + """Augmented Dickey-Fuller test for stationarity. + + Tests whether a numeric time-series is stationary. + H0: The series has a unit root (non-stationary). + + Returns: + DataFrame with ADF results per numeric column. 
+ """ + cols = self._schema.numeric_columns + if not cols: + return pd.DataFrame() + + try: + from statsmodels.tsa.stattools import adfuller + except ImportError: + logger.info("statsmodels not available; skipping ADF test.") + return pd.DataFrame() + + rows: list[dict] = [] + for col in cols: + series = self._df[col].dropna() + if len(series) < 20: + continue + try: + result = adfuller(series, autolag="AIC") + adf_stat, p_val, used_lag, nobs, critical_values, ic_best = result + rows.append({ + "column": col, + "adf_statistic": round(float(adf_stat), 4), + "p_value": round(float(p_val), 6), + "used_lag": int(used_lag), + "n_observations": int(nobs), + "critical_1%": round(float(critical_values["1%"]), 4), + "critical_5%": round(float(critical_values["5%"]), 4), + "critical_10%": round(float(critical_values["10%"]), 4), + "is_stationary_0.05": float(p_val) < 0.05, + }) + except Exception: + continue + + return pd.DataFrame(rows).set_index("column") if rows else pd.DataFrame() + + # ── Summary ─────────────────────────────────────────── + + def summary(self) -> dict[str, Any]: + """Return combined statistical test results.""" + result: dict[str, Any] = {} + + try: + lev = self.levene_test() + if not lev.empty: + result["levene"] = lev + except Exception as exc: + logger.debug("Levene test skipped: %s", exc) + + try: + kw = self.kruskal_wallis() + if not kw.empty: + result["kruskal_wallis"] = kw + except Exception as exc: + logger.debug("Kruskal-Wallis skipped: %s", exc) + + try: + mw = self.mann_whitney() + if not mw.empty: + result["mann_whitney"] = mw + except Exception as exc: + logger.debug("Mann-Whitney skipped: %s", exc) + + try: + csq = self.chi_square_goodness() + if not csq.empty: + result["chi_square_goodness"] = csq + except Exception as exc: + logger.debug("Chi-square goodness skipped: %s", exc) + + try: + grb = self.grubbs_test() + if not grb.empty: + result["grubbs"] = grb + except Exception as exc: + logger.debug("Grubbs test skipped: %s", exc) + + 
try: + adf = self.adf_test() + if not adf.empty: + result["adf"] = adf + except Exception as exc: + logger.debug("ADF test skipped: %s", exc) + + return result diff --git a/f2a/utils/__init__.py b/f2a/utils/__init__.py new file mode 100644 index 0000000..e650a02 --- /dev/null +++ b/f2a/utils/__init__.py @@ -0,0 +1 @@ +"""Utilities module — type inference, validation, and logging.""" diff --git a/f2a/utils/exceptions.py b/f2a/utils/exceptions.py new file mode 100644 index 0000000..2d3b068 --- /dev/null +++ b/f2a/utils/exceptions.py @@ -0,0 +1,32 @@ +"""Custom exception definitions.""" + + +class F2AError(Exception): + """Base exception for the f2a library.""" + + +class UnsupportedFormatError(F2AError): + """Unsupported file format.""" + + def __init__(self, source: str, detected: str | None = None) -> None: + msg = f"Unsupported file format: {source}" + if detected: + msg += f" (detected: {detected})" + super().__init__(msg) + + +class DataLoadError(F2AError): + """Data loading failure.""" + + def __init__(self, source: str, reason: str = "") -> None: + msg = f"Failed to load data: {source}" + if reason: + msg += f" — {reason}" + super().__init__(msg) + + +class EmptyDataError(F2AError): + """Empty dataset.""" + + def __init__(self, source: str) -> None: + super().__init__(f"Dataset is empty: {source}") diff --git a/f2a/utils/logging.py b/f2a/utils/logging.py new file mode 100644 index 0000000..a0f77f4 --- /dev/null +++ b/f2a/utils/logging.py @@ -0,0 +1,23 @@ +"""f2a logging configuration.""" + +import logging + +_LOG_FORMAT = "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + + +def get_logger(name: str) -> logging.Logger: + """Return a module-level logger. + + Args: + name: Logger name (typically ``__name__``). + + Returns: + Configured :class:`logging.Logger` instance. 
+ """ + logger = logging.getLogger(f"f2a.{name}") + if not logger.handlers: + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(_LOG_FORMAT)) + logger.addHandler(handler) + logger.setLevel(logging.INFO) + return logger diff --git a/f2a/utils/type_inference.py b/f2a/utils/type_inference.py new file mode 100644 index 0000000..1607a54 --- /dev/null +++ b/f2a/utils/type_inference.py @@ -0,0 +1,95 @@ +"""Automatic data type inference utilities.""" + +from __future__ import annotations + +from enum import Enum + +import pandas as pd + + +class ColumnType(str, Enum): + """Column type classification.""" + + NUMERIC = "numeric" + CATEGORICAL = "categorical" + TEXT = "text" + DATETIME = "datetime" + BOOLEAN = "boolean" + + +# Max unique value ratio to consider a column categorical +_CATEGORICAL_RATIO_THRESHOLD = 0.05 # 5% +# Max absolute unique count to consider a column categorical +_CATEGORICAL_UNIQUE_THRESHOLD = 50 +# Min average string length to consider a column text +_TEXT_LENGTH_THRESHOLD = 50 + + +def infer_column_type(series: pd.Series) -> ColumnType: + """Infer the semantic type of a single column. + + Args: + series: Target pandas Series to analyze. + + Returns: + Inferred :class:`ColumnType`. + """ + # Boolean check + try: + if series.dtype == "bool" or set(series.dropna().unique()) <= {True, False, 0, 1}: + return ColumnType.BOOLEAN + except TypeError: + # Column contains unhashable types (e.g. 
numpy arrays, lists) + return ColumnType.TEXT + + # Datetime check + if pd.api.types.is_datetime64_any_dtype(series): + return ColumnType.DATETIME + + # Numeric check + if pd.api.types.is_numeric_dtype(series): + n_unique = series.nunique() + n_total = len(series) + # Treat as categorical if very few unique values + if n_unique <= 10 and n_total > 100: + return ColumnType.CATEGORICAL + return ColumnType.NUMERIC + + # String types + if pd.api.types.is_string_dtype(series) or pd.api.types.is_object_dtype(series): + n_unique = series.nunique() + n_total = len(series.dropna()) + + if n_total == 0: + return ColumnType.TEXT + + # Attempt datetime parsing + try: + pd.to_datetime(series.dropna().head(20)) + return ColumnType.DATETIME + except (ValueError, TypeError): + pass + + # Determine text vs categorical by unique ratio and string length + ratio = n_unique / n_total if n_total > 0 else 1.0 + avg_len = series.dropna().astype(str).str.len().mean() + + if avg_len > _TEXT_LENGTH_THRESHOLD: + return ColumnType.TEXT + if n_unique <= _CATEGORICAL_UNIQUE_THRESHOLD or ratio <= _CATEGORICAL_RATIO_THRESHOLD: + return ColumnType.CATEGORICAL + return ColumnType.TEXT + + return ColumnType.TEXT + + +def infer_all_types(df: pd.DataFrame) -> dict[str, ColumnType]: + """Infer types for all columns in a DataFrame. + + Args: + df: Target DataFrame to analyze. + + Returns: + Column name → :class:`ColumnType` mapping. + """ + return {col: infer_column_type(df[col]) for col in df.columns} diff --git a/f2a/utils/validators.py b/f2a/utils/validators.py new file mode 100644 index 0000000..03ede7d --- /dev/null +++ b/f2a/utils/validators.py @@ -0,0 +1,280 @@ +"""Input validation utilities.""" + +from __future__ import annotations + +import re +from pathlib import Path + +from f2a.utils.exceptions import UnsupportedFormatError + +# ── Supported extensions → source type mapping ──────── +# Register new formats here; they will be auto-routed. 
+SUPPORTED_EXTENSIONS: dict[str, str] = { + # CSV / delimited text + ".csv": "csv", + ".tsv": "tsv", + ".txt": "delimited", # auto-detect delimiter + ".dat": "delimited", + ".tab": "tsv", + # JSON family + ".json": "json", + ".jsonl": "jsonl", + ".ndjson": "jsonl", + # Spreadsheets + ".xlsx": "excel", + ".xls": "excel", + ".xlsm": "excel", + ".xlsb": "excel", + ".ods": "ods", + # Binary / columnar formats + ".parquet": "parquet", + ".pq": "parquet", + ".feather": "feather", + ".ftr": "feather", + ".arrow": "arrow_ipc", + ".ipc": "arrow_ipc", + ".orc": "orc", + ".hdf": "hdf5", + ".hdf5": "hdf5", + ".h5": "hdf5", + ".pkl": "pickle", + ".pickle": "pickle", + # Statistical packages + ".sas7bdat": "sas", + ".xpt": "sas_xport", + ".dta": "stata", + ".sav": "spss", + ".zsav": "spss", + ".por": "spss", + # Databases + ".db": "sqlite", + ".sqlite": "sqlite", + ".sqlite3": "sqlite", + ".ddb": "duckdb", + ".duckdb": "duckdb", + # Markup / structured text + ".xml": "xml", + ".html": "html", + ".htm": "html", + # Fixed-width + ".fwf": "fwf", +} + +HF_PREFIXES = ("hf://", "huggingface://") +HF_URL_PATTERN = re.compile( + r"^https?://huggingface\.co/datasets/" + r"(?P<repo_id>[^/?#]+(?:/[^/?#]+)?)" + r"(?:/viewer(?:/(?P<config>[^/?#]+))?(?:/(?P<split>[^/?#]+))?)?", + re.IGNORECASE, +) +URL_PREFIXES = ("http://", "https://", "ftp://") + + +def detect_source_type(source: str) -> str: + """Detect data source type from a source string. + + Detection priority: + 1. HuggingFace URL (https://huggingface.co/datasets/...) + 2. URL prefix (http/https/ftp) + 3. HuggingFace prefix (hf://, huggingface://) + 4. HuggingFace org/dataset pattern + 5. File extension matching + 6. Multi-extension matching (e.g., .sas7bdat) + 7. Content sniffing (if file exists) + + Args: + source: File path, URL, or HuggingFace address. + + Returns: + Source type string (``"csv"``, ``"json"``, ``"hf"``, ``"url"``, etc.). + + Raises: + UnsupportedFormatError: If the format is not supported. + """ + # 1. 
HuggingFace URL detection (before generic URL handling) + if HF_URL_PATTERN.match(source): + return "hf" + + # 2. URL detection + for prefix in URL_PREFIXES: + if source.lower().startswith(prefix): + return _detect_url_type(source) + + # 3. HuggingFace prefix detection (hf://, huggingface://) + for prefix in HF_PREFIXES: + if source.startswith(prefix): + return "hf" + + # 4. org/dataset pattern detection (contains slash, no extension) + if "/" in source and not Path(source).suffix: + parts = source.split("/") + if len(parts) == 2 and all( + re.match(r"^[a-zA-Z0-9_-]+$", part) for part in parts + ): + return "hf" + + # 5. File extension-based detection + path = Path(source) + ext = path.suffix.lower() + + # Multi-extension handling (.tar.gz, .sas7bdat, etc.) + full_suffixes = "".join(path.suffixes).lower() + if full_suffixes in SUPPORTED_EXTENSIONS: + return SUPPORTED_EXTENSIONS[full_suffixes] + + if ext in SUPPORTED_EXTENSIONS: + return SUPPORTED_EXTENSIONS[ext] + + # 6. Attempt content sniffing if file exists + if path.exists() and path.is_file(): + sniffed = _sniff_content(path) + if sniffed: + return sniffed + + raise UnsupportedFormatError(source, detected=ext if ext else None) + + +def _detect_url_type(url: str) -> str: + """Extract file type from URL. + + Check the URL path extension; defaults to ``"url_auto"`` if none found. + """ + from urllib.parse import urlparse + + parsed = urlparse(url) + path = parsed.path + ext = Path(path).suffix.lower() + + if ext in SUPPORTED_EXTENSIONS: + return SUPPORTED_EXTENSIONS[ext] + + # No extension found — mark as URL for auto-detection + return "url_auto" + + +def _sniff_content(path: Path, peek_bytes: int = 8192) -> str | None: + """Read the first few bytes of a file to guess its format. + + Args: + path: File path. + peek_bytes: Number of bytes to read. + + Returns: + Detected source type string, or None. 
+ """ + try: + with open(path, "rb") as f: + header = f.read(peek_bytes) + except (OSError, PermissionError): + return None + + # ── Binary magic numbers ── + # Parquet: "PAR1" + if header[:4] == b"PAR1": + return "parquet" + + # Apache Arrow IPC: "ARROW1" + if header[:6] == b"ARROW1": + return "arrow_ipc" + + # ORC: "ORC" + if header[:3] == b"ORC": + return "orc" + + # HDF5: "\x89HDF\r\n\x1a\n" + if header[:8] == b"\x89HDF\r\n\x1a\n": + return "hdf5" + + # Feather (Arrow IPC v2): "ARROW1" or FEA1 + if header[:4] == b"FEA1": + return "feather" + + # SQLite: "SQLite format 3\x00" + if header[:16] == b"SQLite format 3\x00": + return "sqlite" + + # Pickle: various protocol magic bytes + if header[:2] in (b"\x80\x02", b"\x80\x03", b"\x80\x04", b"\x80\x05"): + return "pickle" + + # Excel XLSX (ZIP): "PK\x03\x04" + if header[:4] == b"PK\x03\x04": + # ZIP file — could be XLSX + if b"xl/" in header or b"[Content_Types].xml" in header: + return "excel" + return None + + # Excel XLS (OLE2): "\xd0\xcf\x11\xe0" + if header[:4] == b"\xd0\xcf\x11\xe0": + return "excel" + + # ── Text-based sniffing ── + try: + text = header.decode("utf-8", errors="replace") + except Exception: + return None + + text_stripped = text.strip() + + # JSON + if text_stripped.startswith(("{", "[")): + # JSONL: multi-line JSON objects + lines = text_stripped.split("\n", 5) + if len(lines) > 1 and all( + line.strip().startswith("{") for line in lines[:3] if line.strip() + ): + return "jsonl" + return "json" + + # XML / HTML + if text_stripped.startswith(" comma_count: + return "tsv" + + if "," in text_stripped: + return "csv" + + # Default: try as delimited text + if "\n" in text_stripped and len(text_stripped.split("\n")) > 1: + return "delimited" + + return None + + +def get_supported_formats() -> dict[str, list[str]]: + """Return supported formats and their file extensions. + + Returns: + Format name → extension list mapping. 
+ """ + result: dict[str, list[str]] = {} + for ext, fmt in SUPPORTED_EXTENSIONS.items(): + result.setdefault(fmt, []).append(ext) + result["hf"] = ["hf://...", "org/dataset", "https://huggingface.co/datasets/..."] + result["url"] = ["http://...", "https://..."] + return result + + +def validate_source(source: str) -> str: + """Validate and normalize a source string. + + Args: + source: Input source string. + + Returns: + Normalized source string. + + Raises: + ValueError: If the source string is empty. + """ + if not source or not source.strip(): + raise ValueError("Source string is empty.") + return source.strip() diff --git a/f2a/viz/__init__.py b/f2a/viz/__init__.py new file mode 100644 index 0000000..4de355f --- /dev/null +++ b/f2a/viz/__init__.py @@ -0,0 +1,43 @@ +"""Viz module — visualization engine.""" + +from f2a.viz.categorical_plots import CategoricalPlotter +from f2a.viz.corr_plots import CorrelationPlotter +from f2a.viz.dist_plots import DistributionPlotter +from f2a.viz.missing_plots import MissingPlotter +from f2a.viz.outlier_plots import OutlierPlotter +from f2a.viz.pca_plots import PCAPlotter +from f2a.viz.plots import BasicPlotter +from f2a.viz.quality_plots import QualityPlotter +from f2a.viz.theme import F2ATheme + +# Advanced viz modules +from f2a.viz.advanced_anomaly_plots import AdvancedAnomalyPlotter +from f2a.viz.advanced_corr_plots import AdvancedCorrPlotter +from f2a.viz.advanced_dist_plots import AdvancedDistPlotter +from f2a.viz.cluster_plots import ClusterPlotter + +# New viz modules (enhancement) +from f2a.viz.cross_plots import CrossPlotter +from f2a.viz.dimreduction_plots import DimReductionPlotter +from f2a.viz.insight_plots import InsightPlotter + +__all__ = [ + "BasicPlotter", + "CategoricalPlotter", + "CorrelationPlotter", + "DistributionPlotter", + "MissingPlotter", + "OutlierPlotter", + "PCAPlotter", + "QualityPlotter", + "F2ATheme", + # Advanced + "AdvancedAnomalyPlotter", + "AdvancedCorrPlotter", + "AdvancedDistPlotter", + 
"ClusterPlotter", + # Enhancement + "CrossPlotter", + "DimReductionPlotter", + "InsightPlotter", +] diff --git a/f2a/viz/advanced_anomaly_plots.py b/f2a/viz/advanced_anomaly_plots.py new file mode 100644 index 0000000..f9d8850 --- /dev/null +++ b/f2a/viz/advanced_anomaly_plots.py @@ -0,0 +1,211 @@ +"""Advanced anomaly detection visualization module. + +Provides anomaly scatter plot, Mahalanobis distance histogram, +consensus comparison chart, and t-SNE / UMAP anomaly overlay. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class AdvancedAnomalyPlotter: + """Visualise advanced anomaly detection results. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def anomaly_scatter_2d( + self, + df: pd.DataFrame, + numeric_cols: list[str], + anomaly_result: dict[str, Any], + max_sample: int = 2000, + ) -> plt.Figure: + """Scatter plot of anomalies in 2D PCA space. + + Args: + df: Original DataFrame. + numeric_cols: Numeric column names. + anomaly_result: Result dict with 'labels' key (-1 = anomaly). + max_sample: Max points to plot. 
+ """ + labels = anomaly_result.get("labels") + if labels is None or len(numeric_cols) < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No anomaly data", ha="center", va="center") + return fig + + labels = np.asarray(labels) # ensure ndarray for element-wise comparison + + try: + from sklearn.decomposition import PCA + from sklearn.preprocessing import StandardScaler + except ImportError: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "scikit-learn required", ha="center", va="center") + return fig + + df_clean = df[numeric_cols].dropna() + # Align labels with df_clean by truncating to min length first, then sample + n = min(len(df_clean), len(labels)) + df_clean = df_clean.iloc[:n] + labels = labels[:n] + if n > max_sample: + rng = np.random.RandomState(42) + idx = rng.choice(n, size=max_sample, replace=False) + df_clean = df_clean.iloc[idx] + labels = labels[idx] + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + pca = PCA(n_components=2) + X_2d = pca.fit_transform(X) + + fig, ax = plt.subplots(figsize=(10, 8)) + + normal_mask = labels == 1 + anomaly_mask = labels == -1 + + ax.scatter(X_2d[normal_mask, 0], X_2d[normal_mask, 1], + c="#3498db", alpha=0.3, s=15, label="Normal") + ax.scatter(X_2d[anomaly_mask, 0], X_2d[anomaly_mask, 1], + c="#e74c3c", alpha=0.8, s=40, marker="x", + label=f"Anomaly ({anomaly_mask.sum()})") + + method = anomaly_result.get("method", "Unknown") + var_explained = pca.explained_variance_ratio_ + ax.set_xlabel(f"PC1 ({var_explained[0] * 100:.1f}%)") + ax.set_ylabel(f"PC2 ({var_explained[1] * 100:.1f}%)") + ax.set_title(f"Anomaly Detection: {method}", fontsize=self._theme.title_size) + ax.legend() + fig.tight_layout() + return fig + + def mahalanobis_histogram( + self, + maha_result: dict[str, Any], + ) -> plt.Figure: + """Histogram of Mahalanobis distances with threshold line. + + Args: + maha_result: Result dict from mahalanobis_distance(). 
+ """ + distances = maha_result.get("distances") + if distances is None: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No Mahalanobis data", ha="center", va="center") + return fig + + threshold = maha_result.get("threshold", 0) + + fig, ax = plt.subplots(figsize=(10, 5)) + ax.hist(distances, bins=50, color="#3498db", alpha=0.7, edgecolor="#2980b9") + ax.axvline(x=threshold, color="#e74c3c", linestyle="--", linewidth=2, + label=f"Threshold ({threshold:.2f})") + + n_anomaly = maha_result.get("anomaly_count", 0) + ax.set_xlabel("Mahalanobis Distance") + ax.set_ylabel("Frequency") + ax.set_title( + f"Mahalanobis Distance Distribution ({n_anomaly} anomalies)", + fontsize=self._theme.title_size, + ) + ax.legend() + fig.tight_layout() + return fig + + def consensus_comparison( + self, + consensus_result: dict[str, Any], + ) -> plt.Figure: + """Bar chart comparing anomaly counts across methods. + + Args: + consensus_result: Result dict from consensus_anomaly(). + """ + per_method = consensus_result.get("per_method_counts", {}) + if not per_method: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No consensus data", ha="center", va="center") + return fig + + # Add consensus count + counts = dict(per_method) + counts["consensus"] = consensus_result.get("consensus_count", 0) + + labels = list(counts.keys()) + values = list(counts.values()) + colors = ["#3498db"] * len(per_method) + ["#e74c3c"] + + fig, ax = plt.subplots(figsize=(8, 5)) + bars = ax.bar(labels, values, color=colors, width=0.6) + + for bar, v in zip(bars, values): + ax.text(bar.get_x() + bar.get_width() / 2, bar.get_height() + 0.5, + str(v), ha="center", fontsize=10, fontweight="bold") + + n_samples = consensus_result.get("n_samples", 0) + ax.set_ylabel("Anomaly Count") + ax.set_title( + f"Anomaly Detection Method Comparison (n={n_samples})", + fontsize=self._theme.title_size, + ) + plt.xticks(rotation=15, ha="right") + fig.tight_layout() + return fig + + def tsne_anomaly_overlay( + self, + embedding: 
pd.DataFrame, + anomaly_labels: np.ndarray, + ) -> plt.Figure: + """Overlay anomaly labels on t-SNE / UMAP embedding. + + Args: + embedding: DataFrame with 2 columns (e.g. tsne_1, tsne_2). + anomaly_labels: Array of -1 (anomaly) or 1 (normal). + """ + if embedding.empty or anomaly_labels is None: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No embedding data", ha="center", va="center") + return fig + + anomaly_labels = np.asarray(anomaly_labels) # ensure ndarray + if embedding.shape[1] < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Need ≥ 2 embedding dims", ha="center", va="center") + return fig + + n = min(len(embedding), len(anomaly_labels)) + X = embedding.values[:n] + labels = anomaly_labels[:n] + + fig, ax = plt.subplots(figsize=(10, 8)) + + normal = labels == 1 + anomaly = labels == -1 + + ax.scatter(X[normal, 0], X[normal, 1], c="#3498db", alpha=0.3, s=15, label="Normal") + ax.scatter(X[anomaly, 0], X[anomaly, 1], c="#e74c3c", alpha=0.8, s=40, + marker="x", label=f"Anomaly ({anomaly.sum()})") + + col_names = embedding.columns.tolist() + ax.set_xlabel(col_names[0]) + ax.set_ylabel(col_names[1]) + ax.set_title("Anomaly Overlay on Embedding", fontsize=self._theme.title_size) + ax.legend() + fig.tight_layout() + return fig diff --git a/f2a/viz/advanced_corr_plots.py b/f2a/viz/advanced_corr_plots.py new file mode 100644 index 0000000..6a1e2a3 --- /dev/null +++ b/f2a/viz/advanced_corr_plots.py @@ -0,0 +1,240 @@ +"""Advanced correlation visualization module. + +Provides partial correlation heatmap, MI heatmap, bootstrap CI plot, +correlation network graph, and distance correlation heatmap. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class AdvancedCorrPlotter: + """Visualise advanced correlation analysis results. + + Args: + theme: Visualisation theme. 
+ """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def partial_correlation_heatmap( + self, + pcorr: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Heatmap of partial correlations. + + Args: + pcorr: Partial correlation matrix DataFrame. + """ + if pcorr.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No partial correlation data", ha="center", va="center") + return fig + + n = len(pcorr) + fig, ax = plt.subplots(figsize=(max(8, n * 0.7), max(6, n * 0.6))) + kwargs.setdefault("annot", n <= 15) + kwargs.setdefault("fmt", ".2f") + kwargs.setdefault("cmap", "RdBu_r") + kwargs.setdefault("center", 0) + kwargs.setdefault("vmin", -1) + kwargs.setdefault("vmax", 1) + kwargs.setdefault("square", True) + + sns.heatmap(pcorr, ax=ax, **kwargs) + ax.set_title("Partial Correlation Matrix", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + def mi_heatmap( + self, + mi_matrix: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Heatmap of mutual information values. + + Args: + mi_matrix: MI matrix DataFrame. + """ + if mi_matrix.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No MI data", ha="center", va="center") + return fig + + n = len(mi_matrix) + fig, ax = plt.subplots(figsize=(max(8, n * 0.7), max(6, n * 0.6))) + kwargs.setdefault("annot", n <= 15) + kwargs.setdefault("fmt", ".3f") + kwargs.setdefault("cmap", "YlOrRd") + kwargs.setdefault("square", True) + + sns.heatmap(mi_matrix, ax=ax, **kwargs) + ax.set_title("Mutual Information Matrix", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + def bootstrap_ci_plot( + self, + ci_df: pd.DataFrame, + max_pairs: int = 20, + ) -> plt.Figure: + """Plot bootstrap confidence intervals for correlations. + + Args: + ci_df: DataFrame from bootstrap_correlation_ci(). + max_pairs: Max number of pairs to show. 
+ """ + if ci_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No bootstrap CI data", ha="center", va="center") + return fig + + required_cols = {"col_a", "col_b", "pearson_r", "ci_lower", "ci_upper", "significant"} + if not required_cols.issubset(ci_df.columns): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Incomplete CI data", ha="center", va="center") + return fig + + df = ci_df.head(max_pairs).copy() + df["label"] = df["col_a"] + " ↔ " + df["col_b"] + df = df.sort_values("pearson_r") + + fig, ax = plt.subplots(figsize=(10, max(5, len(df) * 0.35))) + + y = range(len(df)) + colors = ["#27ae60" if sig else "#95a5a6" for sig in df["significant"]] + + ax.barh(y, df["pearson_r"], height=0.4, color=colors, alpha=0.7, label="Point estimate") + + for i, (_, row) in enumerate(df.iterrows()): + ax.plot( + [row["ci_lower"], row["ci_upper"]], + [i, i], + color="#2c3e50", + linewidth=2, + solid_capstyle="round", + ) + + ax.axvline(x=0, color="#e74c3c", linestyle="--", alpha=0.5) + ax.set_yticks(y) + ax.set_yticklabels(df["label"], fontsize=8) + ax.set_xlabel("Pearson r") + ax.set_title("Bootstrap 95% CI for Correlations", fontsize=self._theme.title_size) + ax.legend(fontsize=8) + fig.tight_layout() + return fig + + def correlation_network( + self, + network_data: dict[str, Any], + ) -> plt.Figure: + """Draw a correlation network graph. + + Args: + network_data: Dictionary from correlation_network(). 
+ """ + nodes = network_data.get("nodes", []) + edges = network_data.get("edges", []) + + if not nodes or not edges: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No network data", ha="center", va="center") + return fig + + fig, ax = plt.subplots(figsize=(10, 8)) + + # Layout nodes in a circle + n = len(nodes) + angles = np.linspace(0, 2 * np.pi, n, endpoint=False) + pos = {node: (np.cos(a), np.sin(a)) for node, a in zip(nodes, angles)} + + # Draw edges + for edge in edges: + src = edge.get("source") + tgt = edge.get("target") + if src not in pos or tgt not in pos: + continue + w = edge.get("abs_weight", 0.5) + color = "#e74c3c" if edge.get("weight", 0) < 0 else "#27ae60" + ax.plot( + [pos[src][0], pos[tgt][0]], + [pos[src][1], pos[tgt][1]], + color=color, + alpha=min(w, 1.0), + linewidth=w * 3, + ) + + # Draw nodes + for node in nodes: + x, y = pos[node] + ax.scatter(x, y, s=200, c="#3498db", zorder=5, edgecolors="#2c3e50") + ax.annotate( + node, (x, y), + textcoords="offset points", + xytext=(0, 12), + ha="center", + fontsize=8, + fontweight="bold", + ) + + threshold = network_data.get("threshold", 0.5) + ax.set_title( + f"Correlation Network (|r| ≥ {threshold}, {len(edges)} edges)", + fontsize=self._theme.title_size, + ) + ax.set_xlim(-1.5, 1.5) + ax.set_ylim(-1.5, 1.5) + ax.set_aspect("equal") + ax.axis("off") + + # Legend + from matplotlib.lines import Line2D + + legend_elements = [ + Line2D([0], [0], color="#27ae60", linewidth=2, label="Positive"), + Line2D([0], [0], color="#e74c3c", linewidth=2, label="Negative"), + ] + ax.legend(handles=legend_elements, loc="lower right", fontsize=8) + + fig.tight_layout() + return fig + + def distance_correlation_heatmap( + self, + dcorr: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Heatmap of distance correlations. + + Args: + dcorr: Distance correlation matrix DataFrame. 
+ """ + if dcorr.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No distance correlation data", ha="center", va="center") + return fig + + n = len(dcorr) + fig, ax = plt.subplots(figsize=(max(8, n * 0.7), max(6, n * 0.6))) + kwargs.setdefault("annot", n <= 15) + kwargs.setdefault("fmt", ".3f") + kwargs.setdefault("cmap", "YlOrRd") + kwargs.setdefault("vmin", 0) + kwargs.setdefault("vmax", 1) + kwargs.setdefault("square", True) + + sns.heatmap(dcorr, ax=ax, **kwargs) + ax.set_title("Distance Correlation Matrix", fontsize=self._theme.title_size) + fig.tight_layout() + return fig diff --git a/f2a/viz/advanced_dist_plots.py b/f2a/viz/advanced_dist_plots.py new file mode 100644 index 0000000..5dac054 --- /dev/null +++ b/f2a/viz/advanced_dist_plots.py @@ -0,0 +1,237 @@ +"""Advanced distribution visualization module. + +Provides best-fit overlay plots, ECDF plots, power transform comparison, +and KDE bandwidth comparison plots. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd +from scipy import stats as sp_stats + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class AdvancedDistPlotter: + """Visualise advanced distribution analysis results. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def best_fit_overlay( + self, + df: pd.DataFrame, + best_fit_df: pd.DataFrame, + max_cols: int = 9, + ) -> plt.Figure: + """Overlay best-fit distribution on histograms. + + Args: + df: Original DataFrame with numeric columns. + best_fit_df: DataFrame from AdvancedDistributionStats.best_fit(). + max_cols: Max columns to plot. 
+ """ + if best_fit_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No best-fit data", ha="center", va="center") + return fig + + cols = list(best_fit_df.index[:max_cols]) + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + series = df[col].dropna() + if len(series) < 5: + ax.set_visible(False) + continue + + # Histogram + ax.hist(series, bins=30, density=True, alpha=0.5, color="#3498db", label="Data") + + # Best-fit curve overlay + row = best_fit_df.loc[col] + dist_name = row["best_distribution"] + try: + dist = getattr(sp_stats, dist_name) + params = dist.fit(series) + x = np.linspace(series.min(), series.max(), 200) + pdf = dist.pdf(x, *params) + ax.plot(x, pdf, "r-", linewidth=2, + label=f"{dist_name} (AIC={row['aic']:.0f})") + except Exception: + pass + + ax.set_title(col, fontsize=10) + ax.legend(fontsize=7) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Best-Fit Distribution Overlay", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def ecdf_plot( + self, + ecdf_data: dict[str, pd.DataFrame], + max_cols: int = 9, + ) -> plt.Figure: + """Plot Empirical Cumulative Distribution Functions. + + Args: + ecdf_data: Dictionary mapping column name to ECDF DataFrame. + max_cols: Max columns to plot. 
+ """ + if not ecdf_data: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No ECDF data", ha="center", va="center") + return fig + + cols = list(ecdf_data.keys())[:max_cols] + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + edf = ecdf_data[col] + ax.step(edf["x"], edf["ecdf"], where="post", color="#2980B9", linewidth=1.5) + ax.fill_between(edf["x"], edf["ecdf"], step="post", alpha=0.15, color="#3498DB") + ax.set_title(col, fontsize=10) + ax.set_ylabel("ECDF") + ax.set_ylim(0, 1.05) + ax.axhline(y=0.5, color="#95A5A6", linestyle="--", alpha=0.5) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Empirical CDF", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def power_transform_plot( + self, + df: pd.DataFrame, + power_df: pd.DataFrame, + max_cols: int = 6, + ) -> plt.Figure: + """Compare original vs. power-transformed distributions. + + Args: + df: Original DataFrame. + power_df: DataFrame from power_transform_recommendation(). + max_cols: Max columns to show. 
+ """ + if power_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No power transform data", ha="center", va="center") + return fig + + # Only show columns that need transformation + need_transform = power_df[power_df["needs_transform"] == True] # noqa: E712 + if need_transform.empty: + need_transform = power_df.head(max_cols) + cols = list(need_transform.index[:max_cols]) + n = len(cols) + + fig, axes = plt.subplots(n, 2, figsize=(10, 3.5 * n), squeeze=False) + + for idx, col in enumerate(cols): + row = power_df.loc[col] + series = df[col].dropna() + if len(series) < 10: + axes[idx][0].set_visible(False) + axes[idx][1].set_visible(False) + continue + + ax_orig = axes[idx][0] + ax_trans = axes[idx][1] + + # Original + ax_orig.hist(series, bins=30, color="#e74c3c", alpha=0.6) + ax_orig.set_title(f"{col} (original, skew={row['original_skewness']:.2f})", + fontsize=9) + + # Transformed + method = row["recommended_method"] + if method == "box-cox" and (series > 0).all(): + try: + transformed, _ = sp_stats.boxcox(series.values) + ax_trans.hist(transformed, bins=30, color="#27ae60", alpha=0.6) + ax_trans.set_title( + f"{col} (Box-Cox, skew={row['transformed_skewness']:.2f})", + fontsize=9, + ) + except Exception: + ax_trans.text(0.5, 0.5, "Transform failed", ha="center", va="center") + elif method == "yeo-johnson": + try: + transformed, _ = sp_stats.yeojohnson(series.values) + ax_trans.hist(transformed, bins=30, color="#27ae60", alpha=0.6) + ax_trans.set_title( + f"{col} (Yeo-Johnson, skew={row['transformed_skewness']:.2f})", + fontsize=9, + ) + except Exception: + ax_trans.text(0.5, 0.5, "Transform failed", ha="center", va="center") + else: + ax_trans.text(0.5, 0.5, "No transform needed", ha="center", va="center", + transform=ax_trans.transAxes) + + fig.suptitle("Power Transform Comparison", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def jarque_bera_summary(self, jb_df: pd.DataFrame) -> plt.Figure: + """Visualize 
Jarque-Bera test results as a bar chart. + + Args: + jb_df: DataFrame from jarque_bera(). + """ + if jb_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No JB test data", ha="center", va="center") + return fig + + fig, ax = plt.subplots(figsize=(max(8, len(jb_df) * 0.8), 5)) + + cols = jb_df.index.tolist() + p_vals = jb_df["p_value"].values + colors = ["#27ae60" if p > 0.05 else "#e74c3c" for p in p_vals] + + bars = ax.barh(range(len(cols)), -np.log10(p_vals + 1e-15), color=colors) + ax.set_yticks(range(len(cols))) + ax.set_yticklabels(cols) + ax.invert_yaxis() + ax.set_xlabel("-log10(p-value)") + ax.axvline(x=-np.log10(0.05), color="#f39c12", linestyle="--", + label="α = 0.05", alpha=0.7) + ax.legend() + ax.set_title("Jarque-Bera Normality Test", fontsize=self._theme.title_size) + fig.tight_layout() + return fig diff --git a/f2a/viz/categorical_plots.py b/f2a/viz/categorical_plots.py new file mode 100644 index 0000000..b020956 --- /dev/null +++ b/f2a/viz/categorical_plots.py @@ -0,0 +1,125 @@ +"""Categorical visualization module. + +Bar charts, pie/donut charts, and chi-square heatmaps for categorical columns. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class CategoricalPlotter: + """Visualise categorical column distributions. + + Args: + df: Target DataFrame. + schema: Data schema. + theme: Visualisation theme. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def frequency_bars( + self, + columns: list[str] | None = None, + top_n: int = 15, + max_cols: int = 20, + **kwargs: Any, + ) -> plt.Figure: + """Horizontal frequency bar charts for categorical columns. 
+ + Args: + columns: Columns to plot. Defaults to all categorical columns. + top_n: Max categories per column. + max_cols: Max subplot count. + **kwargs: Passed to ``seaborn.barplot``. + """ + cols = (columns or self._schema.categorical_columns)[:max_cols] + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No categorical columns", ha="center", va="center") + return fig + + n = len(cols) + ncols_grid = min(2, n) + nrows = (n + ncols_grid - 1) // ncols_grid + + fig, axes = plt.subplots(nrows, ncols_grid, figsize=(7 * ncols_grid, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + palette = self._theme.get_colors(top_n) + + for idx, col in enumerate(cols): + ax = axes[idx] + vc = self._df[col].value_counts().head(top_n) + colors = palette[:len(vc)] + ax.barh(range(len(vc)), vc.values, color=colors) + ax.set_yticks(range(len(vc))) + ax.set_yticklabels([str(v)[:30] for v in vc.index], fontsize=9) + ax.invert_yaxis() + ax.set_xlabel("Frequency") + ax.set_title(f"{col} (top {min(top_n, len(vc))})") + + # Annotate with count + for i, v in enumerate(vc.values): + ax.text(v + max(vc.values) * 0.01, i, str(v), va="center", fontsize=8) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Categorical Column Frequencies", + fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def chi_square_heatmap( + self, + chi_sq_matrix: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Render the chi-square p-value matrix as a heatmap. + + Args: + chi_sq_matrix: Square DataFrame of p-values from + :meth:`CategoricalStats.chi_square_matrix`. 
+ """ + if chi_sq_matrix.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Not enough categorical columns", ha="center", va="center") + return fig + + n = len(chi_sq_matrix) + fig, ax = plt.subplots(figsize=(max(7, n * 0.8), max(6, n * 0.7))) + + kwargs.setdefault("annot", True) + kwargs.setdefault("fmt", ".4f") + kwargs.setdefault("cmap", "YlOrRd_r") + kwargs.setdefault("vmin", 0) + kwargs.setdefault("vmax", 1) + kwargs.setdefault("square", True) + + sns.heatmap(chi_sq_matrix, ax=ax, **kwargs) + ax.set_title("Chi-Square Independence Test p-values\n(low = significant association)", + fontsize=self._theme.title_size) + fig.tight_layout() + return fig diff --git a/f2a/viz/cluster_plots.py b/f2a/viz/cluster_plots.py new file mode 100644 index 0000000..339dc1e --- /dev/null +++ b/f2a/viz/cluster_plots.py @@ -0,0 +1,230 @@ +"""Clustering visualization module. + +Provides elbow/silhouette plots, cluster scatter (2D PCA), DBSCAN results, +dendrogram, and cluster profile heatmap. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class ClusterPlotter: + """Visualise clustering analysis results. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def elbow_silhouette(self, kmeans_result: dict[str, Any]) -> plt.Figure: + """Plot elbow curve and silhouette scores. + + Args: + kmeans_result: Dictionary from ClusteringStats.kmeans_analysis(). 
+ """ + elbow_df = kmeans_result.get("elbow_data") + if elbow_df is None or elbow_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No K-Means data", ha="center", va="center") + return fig + + fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) + + ks = elbow_df.index.tolist() + inertias = elbow_df["inertia"].values + sil_scores = elbow_df["silhouette_score"].values + optimal_k = kmeans_result.get("optimal_k", 0) + + # Elbow curve + ax1.plot(ks, inertias, "bo-", linewidth=2, markersize=6) + if optimal_k in ks: + idx = ks.index(optimal_k) + ax1.axvline(x=optimal_k, color="#e74c3c", linestyle="--", + alpha=0.7, label=f"Optimal k={optimal_k}") + ax1.plot(optimal_k, inertias[idx], "r*", markersize=15, zorder=5) + ax1.set_xlabel("k (number of clusters)") + ax1.set_ylabel("Inertia") + ax1.set_title("Elbow Method", fontsize=self._theme.title_size) + ax1.legend() + + # Silhouette scores + colors = ["#e74c3c" if k == optimal_k else "#3498db" for k in ks] + ax2.bar(ks, sil_scores, color=colors, width=0.6) + ax2.set_xlabel("k (number of clusters)") + ax2.set_ylabel("Silhouette Score") + ax2.set_title("Silhouette Score per k", fontsize=self._theme.title_size) + + for k, s in zip(ks, sil_scores): + ax2.text(k, s + 0.01, f"{s:.3f}", ha="center", fontsize=8) + + fig.suptitle("K-Means Clustering Analysis", + fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def cluster_scatter_2d( + self, + df: pd.DataFrame, + numeric_cols: list[str], + kmeans_result: dict[str, Any], + max_sample: int = 2000, + ) -> plt.Figure: + """Scatter plot of clusters in 2D PCA space. + + Args: + df: Original DataFrame. + numeric_cols: Numeric column names. + kmeans_result: K-Means result dict. + max_sample: Max points to plot. 
+ """ + if not kmeans_result or len(numeric_cols) < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No cluster data", ha="center", va="center") + return fig + + try: + from sklearn.cluster import KMeans + from sklearn.decomposition import PCA + from sklearn.preprocessing import StandardScaler + except ImportError: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "scikit-learn required", ha="center", va="center") + return fig + + df_clean = df[numeric_cols].dropna() + if len(df_clean) < 10: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Insufficient data", ha="center", va="center") + return fig + + if len(df_clean) > max_sample: + df_clean = df_clean.sample(max_sample, random_state=42) + + scaler = StandardScaler() + X = scaler.fit_transform(df_clean) + + # K-Means fit + optimal_k = kmeans_result.get("optimal_k", 3) + km = KMeans(n_clusters=optimal_k, random_state=42, n_init=10) + labels = km.fit_predict(X) + + # PCA to 2D + pca = PCA(n_components=2) + X_2d = pca.fit_transform(X) + + fig, ax = plt.subplots(figsize=(10, 8)) + palette = sns.color_palette("husl", optimal_k) + + for cluster_id in range(optimal_k): + mask = labels == cluster_id + ax.scatter( + X_2d[mask, 0], X_2d[mask, 1], + c=[palette[cluster_id]], label=f"Cluster {cluster_id}", + alpha=0.6, s=20, + ) + + # Centroids + centroids_2d = pca.transform(km.cluster_centers_) + ax.scatter( + centroids_2d[:, 0], centroids_2d[:, 1], + c="black", marker="X", s=150, zorder=5, + label="Centroids", + ) + + var_explained = pca.explained_variance_ratio_ + ax.set_xlabel(f"PC1 ({var_explained[0] * 100:.1f}% var)") + ax.set_ylabel(f"PC2 ({var_explained[1] * 100:.1f}% var)") + ax.set_title( + f"K-Means Clusters (k={optimal_k}) in PCA Space", + fontsize=self._theme.title_size, + ) + ax.legend(fontsize=8) + fig.tight_layout() + return fig + + def dendrogram( + self, + hierarchical_result: dict[str, Any], + max_leaf: int = 30, + ) -> plt.Figure: + """Draw a dendrogram from hierarchical clustering. 
+ + Args: + hierarchical_result: Dictionary from hierarchical_analysis(). + max_leaf: Max leaf nodes shown. + """ + Z = hierarchical_result.get("linkage_matrix") + if Z is None: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No hierarchical data", ha="center", va="center") + return fig + + try: + from scipy.cluster.hierarchy import dendrogram as scipy_dendro + except ImportError: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "scipy required", ha="center", va="center") + return fig + + fig, ax = plt.subplots(figsize=(12, 6)) + scipy_dendro( + Z, + ax=ax, + truncate_mode="lastp" if len(Z) > max_leaf else None, + p=max_leaf, + leaf_rotation=90, + leaf_font_size=8, + color_threshold=0, + ) + ax.set_title("Hierarchical Clustering Dendrogram (Ward)", + fontsize=self._theme.title_size) + ax.set_xlabel("Sample index or cluster size") + ax.set_ylabel("Distance") + fig.tight_layout() + return fig + + def cluster_profile_heatmap( + self, + profiles_df: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Heatmap of cluster profiles (mean feature values per cluster). + + Args: + profiles_df: DataFrame from ClusteringStats.cluster_profiles(). 
+ """ + if profiles_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No cluster profile data", ha="center", va="center") + return fig + + # Normalize for heatmap (z-score per column) + normed = (profiles_df - profiles_df.mean()) / (profiles_df.std() + 1e-15) + n_clusters = len(normed) + n_features = len(normed.columns) + + fig, ax = plt.subplots( + figsize=(max(8, n_features * 0.6), max(4, n_clusters * 1.2)) + ) + kwargs.setdefault("cmap", "RdYlGn") + kwargs.setdefault("center", 0) + kwargs.setdefault("annot", True) + kwargs.setdefault("fmt", ".2f") + kwargs.setdefault("linewidths", 0.5) + + sns.heatmap(normed, ax=ax, **kwargs) + ax.set_title("Cluster Profiles (z-scored)", fontsize=self._theme.title_size) + ax.set_ylabel("Cluster") + fig.tight_layout() + return fig diff --git a/f2a/viz/corr_plots.py b/f2a/viz/corr_plots.py new file mode 100644 index 0000000..dcd663e --- /dev/null +++ b/f2a/viz/corr_plots.py @@ -0,0 +1,78 @@ +"""Correlation visualization module.""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import seaborn as sns +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class CorrelationPlotter: + """Generate correlation visualizations.""" + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + + def heatmap(self, method: str = "pearson", **kwargs: Any) -> plt.Figure: + """Generate a correlation coefficient heatmap. + + Args: + method: Correlation method (``"pearson"`` or ``"spearman"``). + **kwargs: Additional arguments passed to ``seaborn.heatmap``. + + Returns: + matplotlib Figure. 
+ """ + cols = self._schema.numeric_columns + if len(cols) < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Not enough numeric columns for correlation analysis", ha="center", va="center") + return fig + + corr = self._df[cols].corr(method=method) + + fig, ax = plt.subplots(figsize=(max(8, len(cols)), max(6, len(cols) * 0.8))) + kwargs.setdefault("annot", True) + kwargs.setdefault("fmt", ".2f") + kwargs.setdefault("cmap", "coolwarm") + kwargs.setdefault("center", 0) + kwargs.setdefault("vmin", -1) + kwargs.setdefault("vmax", 1) + kwargs.setdefault("square", True) + + sns.heatmap(corr, ax=ax, **kwargs) + ax.set_title(f"Correlation Heatmap ({method.title()})", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + def pairplot(self, columns: list[str] | None = None, max_cols: int = 6, **kwargs: Any) -> sns.PairGrid: + """Generate pairplot for numeric columns. + + Args: + columns: Target columns. ``None`` for top ``max_cols`` numeric columns. + max_cols: Maximum number of columns. + **kwargs: Additional arguments passed to ``seaborn.pairplot``. + + Returns: + seaborn PairGrid. + """ + cols = columns or self._schema.numeric_columns[:max_cols] + if len(cols) < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Not enough columns for pairplot", ha="center", va="center") + return fig + + kwargs.setdefault("diag_kind", "kde") + return sns.pairplot(self._df[cols], **kwargs) diff --git a/f2a/viz/cross_plots.py b/f2a/viz/cross_plots.py new file mode 100644 index 0000000..462af9a --- /dev/null +++ b/f2a/viz/cross_plots.py @@ -0,0 +1,309 @@ +"""Cross-analysis visualization module. + +Provides charts that surface cross-dimensional patterns discovered by +``CrossAnalysis``: + +* **anomaly_by_cluster_bar** — per-cluster anomaly rate comparison. +* **missing_correlation_heatmap** — correlation of missingness indicators. +* **simpson_paradox_highlight** — Simpson's paradox direction-reversal plot. +* **importance_vs_missing_scatter** — feature importance vs. 
missing rate. +* **unified_2d_scatter** — 2-D embedding coloured by cluster + anomaly. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import matplotlib.patches as mpatches +import numpy as np +import pandas as pd +import seaborn as sns + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class CrossPlotter: + """Visualise cross-dimensional analysis results. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + # ── anomaly by cluster ──────────────────────────────── + + def anomaly_by_cluster_bar(self, cross_result: dict[str, Any]) -> plt.Figure: + """Grouped bar chart of anomaly rates per cluster. + + Args: + cross_result: Dict from ``CrossAnalysis.outlier_by_cluster()``. + """ + df = cross_result.get("per_cluster") + if df is None or (isinstance(df, pd.DataFrame) and df.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No cluster-anomaly data", ha="center", va="center") + return fig + + if isinstance(df, pd.DataFrame): + data = df + else: + data = pd.DataFrame(df) + + fig, ax = plt.subplots(figsize=(10, 5)) + + clusters = data.get("cluster", data.index if "cluster" not in data.columns else data["cluster"]) + anomaly_rates = data.get("anomaly_rate", [0] * len(clusters)) + + colors = ["#e74c3c" if r > 0.15 else "#f39c12" if r > 0.05 else "#2ecc71" + for r in anomaly_rates] + ax.bar(range(len(clusters)), anomaly_rates, color=colors, edgecolor="white") + ax.set_xticks(range(len(clusters))) + ax.set_xticklabels([f"C{c}" for c in clusters], fontsize=9) + ax.set_ylabel("Anomaly Rate") + ax.set_title("Anomaly Rate by Cluster", fontsize=self._theme.title_size) + ax.axhline(y=float(np.mean(list(anomaly_rates))), color="#7f8c8d", + linestyle="--", alpha=0.6, label="Mean") + ax.legend() + fig.tight_layout() + return fig + + # ── missing-correlation heatmap ─────────────────────── + + def 
missing_correlation_heatmap(self, cross_result: dict[str, Any]) -> plt.Figure: + """Heatmap of point-biserial correlations between missingness flags. + + Args: + cross_result: Dict from ``CrossAnalysis.missing_correlation()``. + """ + df = cross_result.get("correlations") + if df is None or (isinstance(df, pd.DataFrame) and df.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No missing-correlation data", ha="center", va="center") + return fig + + if not isinstance(df, pd.DataFrame): + df = pd.DataFrame(df) + + # Build a pivot: missing_col × numeric_col → correlation + if "missing_col" in df.columns and "numeric_col" in df.columns and "correlation" in df.columns: + pivot = df.pivot_table( + index="missing_col", + columns="numeric_col", + values="correlation", + aggfunc="first", + ) + else: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Unexpected data format", ha="center", va="center") + return fig + + fig, ax = plt.subplots(figsize=(max(8, 0.6 * pivot.shape[1] + 2), + max(5, 0.5 * pivot.shape[0] + 2))) + sns.heatmap( + pivot.astype(float), + ax=ax, + cmap="RdBu_r", + center=0, + annot=True, + fmt=".2f", + linewidths=0.5, + cbar_kws={"label": "Point-biserial r"}, + ) + ax.set_title("Missingness ↔ Numeric Correlation", + fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + # ── Simpson's paradox highlight ─────────────────────── + + def simpson_paradox_scatter(self, cross_result: dict[str, Any]) -> plt.Figure: + """Scatter plot highlighting Simpson's paradox direction reversals. + + Shows overall correlation line vs. per-cluster regression lines for + the most prominent reversal. + + Args: + cross_result: Dict from ``CrossAnalysis.simpson_paradox()``. 
+ """ + cases = cross_result.get("cases", []) + if not cases: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No Simpson's paradox detected", ha="center", va="center") + return fig + + # Show the strongest case + case = cases[0] if isinstance(cases, list) else cases + + fig, ax = plt.subplots(figsize=(10, 6)) + + overall_r = case.get("overall_corr", 0) + col_a = case.get("col_a", "X") + col_b = case.get("col_b", "Y") + cluster_corrs = case.get("cluster_correlations", {}) + + # Draw overall trend arrow + ax.annotate( + "", + xy=(0.85, 0.5 + 0.3 * np.sign(overall_r)), + xytext=(0.15, 0.5 - 0.3 * np.sign(overall_r)), + xycoords="axes fraction", + arrowprops=dict(arrowstyle="->", color="#e74c3c", lw=3, alpha=0.5), + ) + ax.text(0.5, 0.5 + 0.35 * np.sign(overall_r), + f"Overall r = {overall_r:+.3f}", + transform=ax.transAxes, ha="center", fontsize=12, + color="#e74c3c", fontweight="bold") + + # Cluster-level arrows + palette = self._theme.get_colors(max(len(cluster_corrs), 1)) + y_start = 0.2 + for idx, (cid, r) in enumerate(cluster_corrs.items()): + y = y_start + idx * 0.08 + color = palette[idx % len(palette)] + direction = "→" if r > 0 else "←" + ax.text(0.15, y, f"Cluster {cid}: r = {r:+.3f} {direction}", + transform=ax.transAxes, fontsize=10, color=color) + + ax.set_xlim(0, 1) + ax.set_ylim(0, 1) + ax.axis("off") + ax.set_title( + f"Simpson's Paradox: {col_a} vs {col_b}", + fontsize=self._theme.title_size, + ) + fig.tight_layout() + return fig + + # ── importance vs. missing scatter ──────────────────── + + def importance_vs_missing_scatter(self, cross_result: dict[str, Any]) -> plt.Figure: + """Scatter: feature importance (x) vs. missing rate (y). + + High-importance + high-missing = information loss risk. + + Args: + cross_result: Dict from ``CrossAnalysis.importance_vs_missing()``. 
+ """ + df = cross_result.get("risk_table") + if df is None or (isinstance(df, pd.DataFrame) and df.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No importance–missing data", ha="center", va="center") + return fig + + if not isinstance(df, pd.DataFrame): + df = pd.DataFrame(df) + + fig, ax = plt.subplots(figsize=(10, 7)) + + importance = df.get("importance", pd.Series(dtype=float)) + missing_rate = df.get("missing_rate", pd.Series(dtype=float)) + risk = df.get("risk_score", importance * missing_rate if len(importance) else pd.Series(dtype=float)) + + scatter = ax.scatter( + importance, + missing_rate, + c=risk, + cmap="YlOrRd", + s=80, + edgecolors="white", + linewidths=0.5, + alpha=0.8, + ) + plt.colorbar(scatter, ax=ax, label="Risk Score") + + # Label top-risk points + if "column" in df.columns: + top_risk = df.nlargest(5, "risk_score") if "risk_score" in df.columns else df.head(5) + for _, row in top_risk.iterrows(): + ax.annotate( + row["column"], + (row.get("importance", 0), row.get("missing_rate", 0)), + fontsize=8, + alpha=0.8, + xytext=(5, 5), + textcoords="offset points", + ) + + ax.set_xlabel("Feature Importance") + ax.set_ylabel("Missing Rate") + ax.set_title("Importance vs. Missing (Information Loss Risk)", + fontsize=self._theme.title_size) + + # Danger zone shading + ax.axhspan(0.3, 1.0, xmin=0.5, xmax=1.0, alpha=0.08, color="red") + ax.text(0.78, 0.85, "⚠ Danger Zone", transform=ax.transAxes, + fontsize=10, color="#e74c3c", alpha=0.6) + + fig.tight_layout() + return fig + + # ── unified 2-D embedding scatter ───────────────────── + + def unified_2d_scatter(self, cross_result: dict[str, Any]) -> plt.Figure: + """2-D embedding scatter coloured by cluster ID, anomaly shape. + + Args: + cross_result: Dict from ``CrossAnalysis.unified_2d_embedding()``. 
+ """ + df = cross_result.get("embedding_df") + if df is None or (isinstance(df, pd.DataFrame) and df.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No embedding data", ha="center", va="center") + return fig + + if not isinstance(df, pd.DataFrame): + df = pd.DataFrame(df) + + fig, ax = plt.subplots(figsize=(10, 8)) + + x_col = "x" if "x" in df.columns else df.columns[0] + y_col = "y" if "y" in df.columns else df.columns[1] + cluster_col = "cluster" if "cluster" in df.columns else None + anomaly_col = "is_anomaly" if "is_anomaly" in df.columns else None + + clusters = df[cluster_col].values if cluster_col else np.zeros(len(df)) + unique_clusters = sorted(set(clusters)) + palette = self._theme.get_colors(max(len(unique_clusters), 1)) + + # Normal points + for idx, cid in enumerate(unique_clusters): + mask = clusters == cid + if anomaly_col: + mask = mask & ~df[anomaly_col].astype(bool).values + ax.scatter( + df.loc[mask, x_col], + df.loc[mask, y_col], + c=[palette[idx % len(palette)]], + label=f"Cluster {cid}", + s=30, + alpha=0.6, + edgecolors="none", + ) + + # Anomaly points overlaid + if anomaly_col and df[anomaly_col].any(): + anom_mask = df[anomaly_col].astype(bool).values + ax.scatter( + df.loc[anom_mask, x_col], + df.loc[anom_mask, y_col], + c="none", + edgecolors="#e74c3c", + marker="x", + s=50, + linewidths=1.5, + label="Anomaly", + zorder=5, + ) + + ax.set_xlabel("Dimension 1") + ax.set_ylabel("Dimension 2") + method = cross_result.get("method", "Embedding") + ax.set_title(f"Unified {method} (Cluster + Anomaly)", + fontsize=self._theme.title_size) + ax.legend(fontsize=8, bbox_to_anchor=(1.01, 1), loc="upper left") + fig.tight_layout() + return fig diff --git a/f2a/viz/dimreduction_plots.py b/f2a/viz/dimreduction_plots.py new file mode 100644 index 0000000..68620a1 --- /dev/null +++ b/f2a/viz/dimreduction_plots.py @@ -0,0 +1,375 @@ +"""Dimension-reduction visualization module. + +Provides charts missing from the original report for the **Dim. 
Reduction** +advanced sub-tab: + +* **tsne_scatter** — t-SNE 2-D scatter. +* **umap_scatter** — UMAP 2-D scatter (graceful degradation if umap-learn + unavailable). +* **explained_variance_curve** — PCA cumulative explained variance. +* **factor_loadings_heatmap** — per-component feature loadings. +* **feature_contribution_bar** — top feature contributions per component. +* **biplot** — PCA biplot with loading arrows. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import seaborn as sns + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class DimReductionPlotter: + """Visualise dimension-reduction results (PCA / t-SNE / UMAP). + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + # ── t-SNE scatter ───────────────────────────────────── + + def tsne_scatter( + self, + df: pd.DataFrame, + numeric_cols: list[str], + perplexity: float = 30.0, + max_sample: int = 3000, + color_col: str | None = None, + ) -> plt.Figure: + """2-D t-SNE scatter plot. + + Args: + df: Source DataFrame. + numeric_cols: Numeric column names to embed. + perplexity: t-SNE perplexity parameter. + max_sample: Maximum sample size for efficiency. + color_col: Optional column name to colour points by. + + Returns: + matplotlib Figure. 
+ """ + try: + from sklearn.manifold import TSNE + from sklearn.preprocessing import StandardScaler + except ImportError: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "scikit-learn required", ha="center", va="center") + return fig + + sub = df[numeric_cols].dropna() + if len(sub) < 10: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Insufficient data for t-SNE", ha="center", va="center") + return fig + + if len(sub) > max_sample: + sub = sub.sample(max_sample, random_state=42) + + X = StandardScaler().fit_transform(sub.values) + perp = min(perplexity, max(5.0, len(X) / 4)) + emb = TSNE(n_components=2, perplexity=perp, random_state=42, init="pca").fit_transform(X) + + fig, ax = plt.subplots(figsize=(10, 8)) + + if color_col and color_col in df.columns: + c = df.loc[sub.index, color_col] + if c.dtype.kind in ("i", "f"): + scatter = ax.scatter(emb[:, 0], emb[:, 1], c=c, cmap="viridis", + s=15, alpha=0.6, edgecolors="none") + plt.colorbar(scatter, ax=ax, label=color_col) + else: + for cat in c.unique()[:12]: + mask = c == cat + ax.scatter(emb[mask, 0], emb[mask, 1], label=str(cat)[:20], + s=15, alpha=0.6) + ax.legend(fontsize=7, bbox_to_anchor=(1.01, 1), loc="upper left") + else: + ax.scatter(emb[:, 0], emb[:, 1], s=10, alpha=0.5, c="#3498db") + + ax.set_xlabel("t-SNE 1") + ax.set_ylabel("t-SNE 2") + ax.set_title("t-SNE 2-D Embedding", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + # ── UMAP scatter ────────────────────────────────────── + + def umap_scatter( + self, + df: pd.DataFrame, + numeric_cols: list[str], + n_neighbors: int = 15, + max_sample: int = 5000, + color_col: str | None = None, + ) -> plt.Figure: + """2-D UMAP scatter plot. + + Falls back to t-SNE if umap-learn is not installed. + + Args: + df: Source DataFrame. + numeric_cols: Numeric column names. + n_neighbors: UMAP neighbourhood parameter. + max_sample: Maximum sample size. + color_col: Optional column for colouring. + + Returns: + matplotlib Figure. 
+ """ + try: + from umap import UMAP + from sklearn.preprocessing import StandardScaler + except ImportError: + # Graceful fallback + return self.tsne_scatter(df, numeric_cols, max_sample=max_sample, + color_col=color_col) + + sub = df[numeric_cols].dropna() + if len(sub) < 10: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Insufficient data for UMAP", ha="center", va="center") + return fig + + if len(sub) > max_sample: + sub = sub.sample(max_sample, random_state=42) + + X = StandardScaler().fit_transform(sub.values) + emb = UMAP(n_components=2, n_neighbors=min(n_neighbors, len(X) - 1), + random_state=42).fit_transform(X) + + fig, ax = plt.subplots(figsize=(10, 8)) + + if color_col and color_col in df.columns: + c = df.loc[sub.index, color_col] + if c.dtype.kind in ("i", "f"): + scatter = ax.scatter(emb[:, 0], emb[:, 1], c=c, cmap="viridis", + s=15, alpha=0.6, edgecolors="none") + plt.colorbar(scatter, ax=ax, label=color_col) + else: + for cat in c.unique()[:12]: + mask = c == cat + ax.scatter(emb[mask, 0], emb[mask, 1], label=str(cat)[:20], + s=15, alpha=0.6) + ax.legend(fontsize=7, bbox_to_anchor=(1.01, 1), loc="upper left") + else: + ax.scatter(emb[:, 0], emb[:, 1], s=10, alpha=0.5, c="#2ecc71") + + ax.set_xlabel("UMAP 1") + ax.set_ylabel("UMAP 2") + ax.set_title("UMAP 2-D Embedding", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + # ── cumulative explained variance ───────────────────── + + def explained_variance_curve(self, pca_result: dict[str, Any]) -> plt.Figure: + """PCA cumulative explained variance curve. + + Args: + pca_result: Dict from ``PCAAnalysis.summary()`` containing + ``explained_variance_df``. + + Returns: + matplotlib Figure. 
+ """ + ev_df = pca_result.get("explained_variance_df") + if ev_df is None or (isinstance(ev_df, pd.DataFrame) and ev_df.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No PCA variance data", ha="center", va="center") + return fig + + if not isinstance(ev_df, pd.DataFrame): + ev_df = pd.DataFrame(ev_df) + + fig, ax = plt.subplots(figsize=(10, 5)) + + components = range(1, len(ev_df) + 1) + individual = ev_df.get("explained_variance_ratio", + ev_df.get("variance_ratio", pd.Series(dtype=float))) + cumulative = ev_df.get("cumulative_variance", + individual.cumsum() if len(individual) else pd.Series(dtype=float)) + + ax.bar(components, individual, color="#3498db", alpha=0.7, label="Individual") + ax.plot(components, cumulative, "ro-", linewidth=2, markersize=5, label="Cumulative") + ax.axhline(y=0.95, color="#e74c3c", linestyle="--", alpha=0.5, label="95% threshold") + ax.set_xlabel("Component") + ax.set_ylabel("Explained Variance Ratio") + ax.set_title("PCA Explained Variance", fontsize=self._theme.title_size) + ax.legend(fontsize=9) + ax.set_xticks(list(components)) + fig.tight_layout() + return fig + + # ── factor loadings heatmap ─────────────────────────── + + def factor_loadings_heatmap(self, pca_result: dict[str, Any]) -> plt.Figure: + """Heatmap of PCA factor loadings (components × features). + + Args: + pca_result: Dict with ``loadings_df`` (components as rows, + features as columns). + + Returns: + matplotlib Figure. 
+ """ + loadings = pca_result.get("loadings_df") + if loadings is None or (isinstance(loadings, pd.DataFrame) and loadings.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No loadings data", ha="center", va="center") + return fig + + if not isinstance(loadings, pd.DataFrame): + loadings = pd.DataFrame(loadings) + + # Limit to first 15 features × 8 components for readability + loadings = loadings.iloc[:8, :15] + + fig, ax = plt.subplots(figsize=(max(8, 0.6 * loadings.shape[1] + 2), + max(4, 0.6 * loadings.shape[0] + 2))) + sns.heatmap( + loadings.astype(float), + ax=ax, + cmap="RdBu_r", + center=0, + annot=True, + fmt=".2f", + linewidths=0.5, + cbar_kws={"label": "Loading"}, + ) + ax.set_title("PCA Factor Loadings", fontsize=self._theme.title_size) + ax.set_ylabel("Component") + ax.set_xlabel("Feature") + fig.tight_layout() + return fig + + # ── top feature contributions per component ─────────── + + def feature_contribution_bar( + self, + pca_result: dict[str, Any], + n_components: int = 3, + n_features: int = 10, + ) -> plt.Figure: + """Bar chart of top contributing features per PCA component. + + Args: + pca_result: Dict with ``loadings_df``. + n_components: Number of components to show. + n_features: Number of features per component. + + Returns: + matplotlib Figure. 
+ """ + loadings = pca_result.get("loadings_df") + if loadings is None or (isinstance(loadings, pd.DataFrame) and loadings.empty): + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No loadings data", ha="center", va="center") + return fig + + if not isinstance(loadings, pd.DataFrame): + loadings = pd.DataFrame(loadings) + + n_comp = min(n_components, len(loadings)) + fig, axes = plt.subplots(1, n_comp, figsize=(5 * n_comp, 5)) + if n_comp == 1: + axes = [axes] + + palette = self._theme.get_colors(2) + + for idx in range(n_comp): + ax = axes[idx] + row = loadings.iloc[idx].abs().sort_values(ascending=False).head(n_features) + signs = loadings.iloc[idx].loc[row.index] + colors = [palette[0] if s >= 0 else palette[1] for s in signs] + + ax.barh(range(len(row)), row.values, color=colors, edgecolor="white") + ax.set_yticks(range(len(row))) + ax.set_yticklabels(row.index, fontsize=8) + ax.invert_yaxis() + ax.set_xlabel("|Loading|") + ax.set_title(f"PC{idx + 1}", fontsize=self._theme.title_size - 1) + + fig.suptitle("Top Feature Contributions", fontsize=self._theme.title_size + 1, y=1.02) + fig.tight_layout() + return fig + + # ── PCA biplot ──────────────────────────────────────── + + def biplot( + self, + df: pd.DataFrame, + numeric_cols: list[str], + n_arrows: int = 8, + max_sample: int = 2000, + ) -> plt.Figure: + """PCA biplot: scatter of PC1 vs PC2 with loading arrows. + + Args: + df: Source DataFrame. + numeric_cols: Numeric column names. + n_arrows: Number of loading arrows to draw. + max_sample: Maximum sample size. + + Returns: + matplotlib Figure. 
+ """ + try: + from sklearn.decomposition import PCA + from sklearn.preprocessing import StandardScaler + except ImportError: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "scikit-learn required", ha="center", va="center") + return fig + + sub = df[numeric_cols].dropna() + if len(sub) < 10 or len(numeric_cols) < 2: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "Insufficient data for biplot", ha="center", va="center") + return fig + + if len(sub) > max_sample: + sub = sub.sample(max_sample, random_state=42) + + X = StandardScaler().fit_transform(sub.values) + pca = PCA(n_components=2) + scores = pca.fit_transform(X) + + fig, ax = plt.subplots(figsize=(10, 8)) + ax.scatter(scores[:, 0], scores[:, 1], s=10, alpha=0.3, c="#3498db") + + # Loading arrows + loadings = pca.components_.T # (features, 2) + scale_factor = max(abs(scores[:, 0]).max(), abs(scores[:, 1]).max()) * 0.9 + magnitudes = np.sqrt((loadings ** 2).sum(axis=1)) + top_idx = magnitudes.argsort()[-n_arrows:] + + for i in top_idx: + lx, ly = loadings[i] * scale_factor + ax.annotate( + numeric_cols[i], + xy=(lx, ly), + xytext=(0, 0), + arrowprops=dict(arrowstyle="->", color="#e74c3c", lw=1.5), + fontsize=8, + color="#e74c3c", + ha="center", + ) + + var1 = pca.explained_variance_ratio_[0] * 100 + var2 = pca.explained_variance_ratio_[1] * 100 + ax.set_xlabel(f"PC1 ({var1:.1f}%)") + ax.set_ylabel(f"PC2 ({var2:.1f}%)") + ax.set_title("PCA Biplot", fontsize=self._theme.title_size) + ax.axhline(0, color="grey", linewidth=0.5, alpha=0.5) + ax.axvline(0, color="grey", linewidth=0.5, alpha=0.5) + fig.tight_layout() + return fig diff --git a/f2a/viz/dist_plots.py b/f2a/viz/dist_plots.py new file mode 100644 index 0000000..1861a0d --- /dev/null +++ b/f2a/viz/dist_plots.py @@ -0,0 +1,130 @@ +"""Distribution visualization module.""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import seaborn as sns +import numpy as np +import pandas as pd +from scipy import stats as 
sp_stats + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class DistributionPlotter: + """Generate distribution-related visualizations.""" + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + + def violin_plots(self, columns: list[str] | None = None, max_cols: int = 20, **kwargs: Any) -> plt.Figure: + """Generate violin plots for numeric columns.""" + cols = (columns or self._schema.numeric_columns)[:max_cols] + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + sns.violinplot(data=self._df, y=col, ax=ax, **kwargs) + ax.set_title(col) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Violin Plots", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def kde_plots(self, columns: list[str] | None = None, max_cols: int = 20, **kwargs: Any) -> plt.Figure: + """Generate KDE (Kernel Density Estimation) plots for numeric columns.""" + cols = (columns or self._schema.numeric_columns)[:max_cols] + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + sns.kdeplot(data=self._df, x=col, ax=ax, fill=True, **kwargs) + ax.set_title(col) + + for idx in range(n, 
len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Kernel Density Estimation", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def qq_plots(self, columns: list[str] | None = None, max_cols: int = 20, **kwargs: Any) -> plt.Figure: + """Generate Q-Q (Quantile-Quantile) plots for numeric columns. + + Points close to the diagonal indicate normal distribution. + """ + cols = (columns or self._schema.numeric_columns)[:max_cols] + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + series = self._df[col].dropna() + if len(series) < 3: + ax.text(0.5, 0.5, f"Not enough data\n(n={len(series)})", + ha="center", va="center", transform=ax.transAxes) + ax.set_title(col) + continue + + sp_stats.probplot(series, dist="norm", plot=ax) + ax.set_title(col) + ax.get_lines()[0].set_markersize(3) + ax.get_lines()[0].set_alpha(0.5) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Q-Q Plots (Normal Distribution)", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig diff --git a/f2a/viz/insight_plots.py b/f2a/viz/insight_plots.py new file mode 100644 index 0000000..c2b9026 --- /dev/null +++ b/f2a/viz/insight_plots.py @@ -0,0 +1,230 @@ +"""Insight-engine visualization module. + +Provides charts that turn InsightEngine output into actionable visuals: + +* **severity_bar** — horizontal bar chart of insights by severity. +* **category_sunburst** — category breakdown (tree-map fallback). +* **top_insights_table_fig** — top-N insights as a table figure. +* **action_items_summary** — action-item bubble chart. 
+""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import matplotlib.patches as mpatches +import numpy as np +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + +# Severity → (colour, z-order) +_SEV_PALETTE: dict[str, str] = { + "critical": "#e74c3c", + "warning": "#f39c12", + "info": "#3498db", + "opportunity": "#2ecc71", +} + + +class InsightPlotter: + """Visualise auto-generated insights. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + # ── severity bar chart ──────────────────────────────── + + def severity_bar(self, insights: list[dict[str, Any]]) -> plt.Figure: + """Horizontal bar chart: insight count by severity. + + Args: + insights: List of insight dicts (must contain ``severity`` key). + + Returns: + matplotlib Figure. + """ + if not insights: + fig, ax = plt.subplots(figsize=(8, 4)) + ax.text(0.5, 0.5, "No insights generated", ha="center", va="center") + return fig + + sev_order = ["critical", "warning", "info", "opportunity"] + counts = pd.Series([i.get("severity", "info") for i in insights]).value_counts() + counts = counts.reindex(sev_order).fillna(0).astype(int) + + fig, ax = plt.subplots(figsize=(8, 4)) + colors = [_SEV_PALETTE.get(s, "#95a5a6") for s in counts.index] + bars = ax.barh(counts.index, counts.values, color=colors, edgecolor="white") + + for bar, val in zip(bars, counts.values): + if val > 0: + ax.text( + bar.get_width() + 0.3, + bar.get_y() + bar.get_height() / 2, + str(int(val)), + va="center", + fontweight="bold", + ) + + ax.set_xlabel("Count") + ax.set_title("Insights by Severity", fontsize=self._theme.title_size) + ax.invert_yaxis() + fig.tight_layout() + return fig + + # ── category breakdown (treemap-style) ──────────────── + + def category_treemap(self, insights: list[dict[str, Any]]) -> plt.Figure: + """Category-breakdown chart 
(squarified treemap approximation). + + Args: + insights: List of insight dicts (must contain ``category`` key). + + Returns: + matplotlib Figure. + """ + if not insights: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No insights", ha="center", va="center") + return fig + + cats = pd.Series([i.get("category", "other") for i in insights]).value_counts() + + fig, ax = plt.subplots(figsize=(10, 6)) + palette = self._theme.get_colors(len(cats)) + wedges, texts, pcts = ax.pie( + cats.values, + labels=cats.index, + autopct=lambda p: f"{p:.0f}%" if p > 5 else "", + colors=palette, + startangle=140, + pctdistance=0.8, + ) + for t in texts: + t.set_fontsize(9) + ax.set_title("Insights by Category", fontsize=self._theme.title_size) + fig.tight_layout() + return fig + + # ── top-N insights as a table figure ────────────────── + + def top_insights_table( + self, + insights: list[dict[str, Any]], + n: int = 10, + ) -> plt.Figure: + """Render top-N insights as a matplotlib table figure. + + Args: + insights: List of insight dicts, sorted by priority_score desc. + n: Number of insights to show. + + Returns: + matplotlib Figure. 
+ """ + if not insights: + fig, ax = plt.subplots(figsize=(12, 2)) + ax.text(0.5, 0.5, "No insights", ha="center", va="center") + ax.axis("off") + return fig + + top = sorted(insights, key=lambda i: i.get("priority_score", 0), reverse=True)[:n] + + cell_text = [] + for rank, ins in enumerate(top, 1): + cell_text.append([ + str(rank), + ins.get("severity", "")[:4].upper(), + ins.get("category", ""), + ins.get("title", "")[:60], + f"{ins.get('priority_score', 0):.1f}", + ]) + + col_labels = ["#", "Sev", "Category", "Title", "Score"] + + nrows = len(cell_text) + fig_h = max(2.5, 0.45 * nrows + 1.2) + fig, ax = plt.subplots(figsize=(14, fig_h)) + ax.axis("off") + + tbl = ax.table( + cellText=cell_text, + colLabels=col_labels, + cellLoc="left", + colWidths=[0.05, 0.07, 0.15, 0.60, 0.08], + loc="center", + ) + tbl.auto_set_font_size(False) + tbl.set_fontsize(9) + tbl.scale(1.0, 1.4) + + # Color header + for j in range(len(col_labels)): + tbl[0, j].set_facecolor("#34495e") + tbl[0, j].set_text_props(color="white", fontweight="bold") + + # Severity-based row coloring + for i, row in enumerate(cell_text, 1): + sev = row[1].lower()[:4] + color = _SEV_PALETTE.get( + {"crit": "critical", "warn": "warning", "info": "info", "oppo": "opportunity"}.get(sev, "info"), + "#ecf0f1", + ) + for j in range(len(col_labels)): + tbl[i, j].set_facecolor(color + "22") # translucent + + ax.set_title(f"Top {n} Insights", fontsize=self._theme.title_size, pad=20) + fig.tight_layout() + return fig + + # ── action items summary ────────────────────────────── + + def action_items_chart(self, insights: list[dict[str, Any]]) -> plt.Figure: + """Aggregated action-item frequency bar chart. + + Collects all ``action_items`` across insights and shows top-15 most + common actions. + + Args: + insights: List of insight dicts. + + Returns: + matplotlib Figure. 
+ """ + actions: list[str] = [] + for ins in insights: + actions.extend(ins.get("action_items", [])) + + if not actions: + fig, ax = plt.subplots(figsize=(8, 3)) + ax.text(0.5, 0.5, "No action items", ha="center", va="center") + ax.axis("off") + return fig + + action_counts = pd.Series(actions).value_counts().head(15) + + fig, ax = plt.subplots(figsize=(12, max(4, 0.4 * len(action_counts)))) + palette = self._theme.get_colors(len(action_counts)) + ax.barh( + range(len(action_counts)), + action_counts.values, + color=palette, + edgecolor="white", + ) + ax.set_yticks(range(len(action_counts))) + ax.set_yticklabels( + [a[:70] for a in action_counts.index], + fontsize=8, + ) + ax.invert_yaxis() + ax.set_xlabel("Frequency") + ax.set_title("Recommended Actions", fontsize=self._theme.title_size) + fig.tight_layout() + return fig diff --git a/f2a/viz/missing_plots.py b/f2a/viz/missing_plots.py new file mode 100644 index 0000000..86b045c --- /dev/null +++ b/f2a/viz/missing_plots.py @@ -0,0 +1,78 @@ +"""Missing data visualization module.""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import seaborn as sns +import numpy as np +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class MissingPlotter: + """Generate missing data pattern visualizations.""" + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + + def matrix(self, max_rows: int = 500, **kwargs: Any) -> plt.Figure: + """Generate a missing data matrix. + + White = missing, color = present. + + Args: + max_rows: Maximum rows to display (sampled). + **kwargs: Additional arguments. + + Returns: + matplotlib Figure. 
+ """ + df_sample = self._df.head(max_rows) + missing = df_sample.isna() + + fig, ax = plt.subplots(figsize=(max(10, len(self._df.columns) * 0.5), 6)) + ax.imshow( + ~missing.values, + aspect="auto", + cmap="RdYlGn", + interpolation="nearest", + ) + ax.set_xticks(range(len(missing.columns))) + ax.set_xticklabels(missing.columns, rotation=45, ha="right") + ax.set_ylabel("Row Index") + ax.set_title("Missing Data Matrix (green=present, red=missing)") + fig.tight_layout() + return fig + + def bar(self, **kwargs: Any) -> plt.Figure: + """Generate a per-column missing ratio bar chart.""" + missing_ratio = self._df.isna().mean().sort_values(ascending=False) + missing_ratio = missing_ratio[missing_ratio > 0] + + if missing_ratio.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No missing data!", ha="center", va="center", fontsize=14) + return fig + + fig, ax = plt.subplots(figsize=(max(8, len(missing_ratio) * 0.5), 5)) + colors = ["#e74c3c" if v > 0.5 else "#f39c12" if v > 0.1 else "#2ecc71" for v in missing_ratio] + ax.bar(range(len(missing_ratio)), missing_ratio.values * 100, color=colors) + ax.set_xticks(range(len(missing_ratio))) + ax.set_xticklabels(missing_ratio.index, rotation=45, ha="right") + ax.set_ylabel("Missing Ratio (%)") + ax.set_title("Missing Ratio by Column") + ax.axhline(y=50, color="red", linestyle="--", alpha=0.5, label="50%") + ax.legend() + fig.tight_layout() + return fig diff --git a/f2a/viz/outlier_plots.py b/f2a/viz/outlier_plots.py new file mode 100644 index 0000000..47ecd6e --- /dev/null +++ b/f2a/viz/outlier_plots.py @@ -0,0 +1,120 @@ +"""Outlier visualization module. + +Provides boxplots with strip overlay to highlight outlier points. 
+""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class OutlierPlotter: + """Visualise outliers in numeric columns. + + Args: + df: Target DataFrame. + schema: Data schema. + theme: Visualisation theme. + """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def box_strip( + self, + columns: list[str] | None = None, + max_cols: int = 20, + multiplier: float = 1.5, + **kwargs: Any, + ) -> plt.Figure: + """Box-and-strip plot with outlier points highlighted. + + Args: + columns: Columns to plot. Defaults to all numeric columns. + max_cols: Maximum number of columns (avoids overly large figures). + multiplier: IQR multiplier for outlier classification. + **kwargs: Passed to ``seaborn.boxplot``. + + Returns: + matplotlib Figure. 
+ """ + cols = (columns or self._schema.numeric_columns)[:max_cols] + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + if n == 1: + axes = [axes] + else: + axes = list(axes.flat) + + for idx, col in enumerate(cols): + ax = axes[idx] + series = self._df[col].dropna() + + # Boxplot + sns.boxplot(data=self._df, y=col, ax=ax, width=0.4, + color="#AED6F1", **kwargs) + + # Overlay strip/scatter — highlight outliers + if len(series) > 0: + q1 = series.quantile(0.25) + q3 = series.quantile(0.75) + iqr = q3 - q1 + lower = q1 - multiplier * iqr + upper = q3 + multiplier * iqr + is_outlier = (series < lower) | (series > upper) + + normal = series[~is_outlier] + outliers = series[is_outlier] + + # Normal points (light / small) + if len(normal) > 0: + sample = normal.sample(min(100, len(normal)), random_state=42) + ax.scatter( + np.random.normal(0, 0.04, len(sample)), + sample, + alpha=0.3, s=8, color="#2980B9", zorder=3, + ) + + # Outlier points (red / larger) + if len(outliers) > 0: + ax.scatter( + np.random.normal(0, 0.04, len(outliers)), + outliers, + alpha=0.7, s=25, color="#E74C3C", zorder=4, + label=f"outliers ({len(outliers)})", + ) + ax.legend(fontsize=8) + + ax.set_title(col) + + for idx in range(n, len(axes)): + axes[idx].set_visible(False) + + fig.suptitle("Outlier Detection (IQR Method)", + fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig diff --git a/f2a/viz/pca_plots.py b/f2a/viz/pca_plots.py new file mode 100644 index 0000000..b467c1a --- /dev/null +++ b/f2a/viz/pca_plots.py @@ -0,0 +1,112 @@ +"""PCA visualization module. + +Scree plot (variance explained) and loadings heatmap. 
+""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class PCAPlotter: + """Visualise PCA results. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def scree_plot(self, variance_df: pd.DataFrame, **kwargs: Any) -> plt.Figure: + """Draw a scree plot (variance explained per component). + + Args: + variance_df: DataFrame from :meth:`PCAStats.variance_explained`. + """ + if variance_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "PCA not available", ha="center", va="center") + return fig + + components = variance_df.index.tolist() + var_ratio = variance_df["variance_ratio"].values + cum_ratio = variance_df["cumulative_ratio"].values + n = len(components) + + fig, ax1 = plt.subplots(figsize=(max(8, n * 0.8), 5)) + color1 = "#3498DB" + color2 = "#E74C3C" + + # Bar — individual variance + bars = ax1.bar(range(n), var_ratio * 100, color=color1, alpha=0.7, label="Individual") + ax1.set_xlabel("Principal Component") + ax1.set_ylabel("Variance Explained (%)", color=color1) + ax1.set_xticks(range(n)) + ax1.set_xticklabels(components, rotation=45 if n > 6 else 0) + ax1.tick_params(axis="y", labelcolor=color1) + + # Line — cumulative variance + ax2 = ax1.twinx() + ax2.plot(range(n), cum_ratio * 100, color=color2, marker="o", + linewidth=2, label="Cumulative") + ax2.set_ylabel("Cumulative Variance (%)", color=color2) + ax2.tick_params(axis="y", labelcolor=color2) + ax2.set_ylim(0, 105) + + # 90% threshold line + ax2.axhline(y=90, color="#95A5A6", linestyle="--", alpha=0.7, label="90% threshold") + + # Annotate bars + for i, (bar, v) in enumerate(zip(bars, var_ratio)): + ax1.text(bar.get_x() + bar.get_width() / 2, bar.get_height() + 0.5, + f"{v * 100:.1f}%", ha="center", 
va="bottom", fontsize=8) + + # Combined legend + lines1, labels1 = ax1.get_legend_handles_labels() + lines2, labels2 = ax2.get_legend_handles_labels() + ax1.legend(lines1 + lines2, labels1 + labels2, loc="center right") + + fig.suptitle("PCA Scree Plot", fontsize=self._theme.title_size + 2) + fig.tight_layout() + return fig + + def loadings_heatmap(self, loadings_df: pd.DataFrame, **kwargs: Any) -> plt.Figure: + """Draw a heatmap of PCA loadings. + + Args: + loadings_df: DataFrame from :meth:`PCAStats.loadings`. + """ + if loadings_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "PCA not available", ha="center", va="center") + return fig + + n_features = len(loadings_df) + n_components = len(loadings_df.columns) + + fig, ax = plt.subplots( + figsize=(max(7, n_components * 1.5), max(5, n_features * 0.4)) + ) + + kwargs.setdefault("annot", True) + kwargs.setdefault("fmt", ".3f") + kwargs.setdefault("cmap", "RdBu_r") + kwargs.setdefault("center", 0) + kwargs.setdefault("vmin", -1) + kwargs.setdefault("vmax", 1) + + sns.heatmap(loadings_df, ax=ax, **kwargs) + ax.set_title("PCA Loadings", fontsize=self._theme.title_size) + ax.set_ylabel("Feature") + ax.set_xlabel("Component") + fig.tight_layout() + return fig diff --git a/f2a/viz/plots.py b/f2a/viz/plots.py new file mode 100644 index 0000000..b2f7483 --- /dev/null +++ b/f2a/viz/plots.py @@ -0,0 +1,143 @@ +"""Basic plots — histograms, boxplots, bar charts.""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import seaborn as sns +import pandas as pd + +from f2a.core.schema import DataSchema +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class BasicPlotter: + """Generate basic visualizations. + + Args: + df: Target DataFrame for visualization. + schema: Data schema. + theme: Visualization theme (default: ``DEFAULT_THEME``). 
+ """ + + def __init__( + self, + df: pd.DataFrame, + schema: DataSchema, + theme: F2ATheme | None = None, + ) -> None: + self._df = df + self._schema = schema + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def histograms(self, columns: list[str] | None = None, **kwargs: Any) -> plt.Figure: + """Generate histograms for numeric columns. + + Args: + columns: Target column list. ``None`` for all numeric columns. + **kwargs: Additional arguments passed to ``seaborn.histplot``. + + Returns: + matplotlib Figure. + """ + cols = columns or self._schema.numeric_columns + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + axes = axes.flat if n > 1 else [axes] + + for idx, col in enumerate(cols): + ax = axes[idx] + kwargs.setdefault("kde", True) + sns.histplot(data=self._df, x=col, ax=ax, **kwargs) + ax.set_title(col) + + # Hide empty subplots + for idx in range(n, len(list(axes))): + axes[idx].set_visible(False) + + fig.suptitle("Numeric Column Distributions", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def boxplots(self, columns: list[str] | None = None, **kwargs: Any) -> plt.Figure: + """Generate boxplots for numeric columns. + + Args: + columns: Target column list. + **kwargs: Additional arguments passed to ``seaborn.boxplot``. + + Returns: + matplotlib Figure. 
+ """ + cols = columns or self._schema.numeric_columns + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(3, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(5 * ncols, 4 * nrows)) + axes = axes.flat if n > 1 else [axes] + + for idx, col in enumerate(cols): + ax = axes[idx] + sns.boxplot(data=self._df, y=col, ax=ax, **kwargs) + ax.set_title(col) + + for idx in range(n, len(list(axes))): + axes[idx].set_visible(False) + + fig.suptitle("Numeric Column Boxplots", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig + + def bar_charts(self, columns: list[str] | None = None, top_n: int = 15, **kwargs: Any) -> plt.Figure: + """Generate frequency bar charts for categorical columns. + + Args: + columns: Target column list. ``None`` for all categorical columns. + top_n: Maximum categories to display per column. + **kwargs: Additional arguments passed to ``seaborn.barplot``. + + Returns: + matplotlib Figure. 
+ """ + cols = columns or self._schema.categorical_columns + if not cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No categorical columns found", ha="center", va="center") + return fig + + n = len(cols) + ncols = min(2, n) + nrows = (n + ncols - 1) // ncols + + fig, axes = plt.subplots(nrows, ncols, figsize=(6 * ncols, 4 * nrows)) + axes = axes.flat if n > 1 else [axes] + + for idx, col in enumerate(cols): + ax = axes[idx] + vc = self._df[col].value_counts().head(top_n) + sns.barplot(x=vc.values, y=vc.index, ax=ax, **kwargs) + ax.set_title(f"{col} (top {min(top_n, len(vc))})") + ax.set_xlabel("Frequency") + + for idx in range(n, len(list(axes))): + axes[idx].set_visible(False) + + fig.suptitle("Categorical Column Frequencies", fontsize=self._theme.title_size + 2, y=1.02) + fig.tight_layout() + return fig diff --git a/f2a/viz/quality_plots.py b/f2a/viz/quality_plots.py new file mode 100644 index 0000000..c73dfa3 --- /dev/null +++ b/f2a/viz/quality_plots.py @@ -0,0 +1,159 @@ +"""Data quality visualization module. + +Bar chart and heatmap of data-quality scores. +""" + +from __future__ import annotations + +from typing import Any + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns +import pandas as pd + +from f2a.viz.theme import DEFAULT_THEME, F2ATheme + + +class QualityPlotter: + """Visualise data-quality scores. + + Args: + theme: Visualisation theme. + """ + + def __init__(self, theme: F2ATheme | None = None) -> None: + self._theme = theme or DEFAULT_THEME + self._theme.apply() + + def dimension_bar(self, scores: dict[str, float], **kwargs: Any) -> plt.Figure: + """Bar chart of quality dimension scores. + + Args: + scores: Dict from :meth:`QualityStats.summary` containing + ``completeness``, ``uniqueness``, ``consistency``, + ``validity``, ``overall``. 
+ """ + if not scores: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No quality scores", ha="center", va="center") + return fig + + dims = ["completeness", "uniqueness", "consistency", "validity", "overall"] + labels = ["Completeness", "Uniqueness", "Consistency", "Validity", "Overall"] + values = [scores.get(d, 0) * 100 for d in dims] + + fig, ax = plt.subplots(figsize=(8, 5)) + + colors = [] + for v in values: + if v >= 90: + colors.append("#27AE60") + elif v >= 70: + colors.append("#F39C12") + else: + colors.append("#E74C3C") + + bars = ax.barh(range(len(labels)), values, color=colors, height=0.6) + ax.set_yticks(range(len(labels))) + ax.set_yticklabels(labels) + ax.invert_yaxis() + ax.set_xlim(0, 105) + ax.set_xlabel("Score (%)") + ax.set_title("Data Quality Scores", fontsize=self._theme.title_size) + + # Threshold lines + ax.axvline(x=90, color="#27AE60", linestyle="--", alpha=0.4, label="Good (90%)") + ax.axvline(x=70, color="#F39C12", linestyle="--", alpha=0.4, label="Fair (70%)") + ax.legend(fontsize=8, loc="lower right") + + # Value labels + for bar, v in zip(bars, values): + ax.text(bar.get_width() + 1, bar.get_y() + bar.get_height() / 2, + f"{v:.1f}%", va="center", fontsize=10, fontweight="bold") + + fig.tight_layout() + return fig + + def column_quality_heatmap( + self, + quality_df: pd.DataFrame, + **kwargs: Any, + ) -> plt.Figure: + """Heatmap of per-column quality scores. + + Args: + quality_df: DataFrame from :meth:`QualityStats.column_quality`. 
+ """ + if quality_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No column quality data", ha="center", va="center") + return fig + + numeric_cols = [c for c in quality_df.columns if c in ("completeness", "uniqueness", "quality_score")] + if not numeric_cols: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric quality columns", ha="center", va="center") + return fig + + plot_df = quality_df[numeric_cols] + + # Limit rows for readability + if len(plot_df) > 40: + plot_df = plot_df.head(40) + + fig, ax = plt.subplots(figsize=(max(6, len(numeric_cols) * 2), max(5, len(plot_df) * 0.3))) + + kwargs.setdefault("annot", True) + kwargs.setdefault("fmt", ".2f") + kwargs.setdefault("cmap", "RdYlGn") + kwargs.setdefault("vmin", 0) + kwargs.setdefault("vmax", 1) + + sns.heatmap(plot_df, ax=ax, **kwargs) + ax.set_title("Column Quality Scores", fontsize=self._theme.title_size) + ax.set_ylabel("") + fig.tight_layout() + return fig + + def feature_importance_bar( + self, + importance_df: pd.DataFrame, + title: str = "Feature Importance (Variance Ranking)", + **kwargs: Any, + ) -> plt.Figure: + """Horizontal bar chart of feature importance scores. + + Args: + importance_df: DataFrame with numeric importance values. + title: Chart title. 
+ """ + if importance_df.empty: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No feature importance data", ha="center", va="center") + return fig + + # Use first numeric column as the value + val_col = None + for c in importance_df.columns: + if importance_df[c].dtype in ("float64", "float32", "int64", "int32"): + val_col = c + break + if val_col is None: + fig, ax = plt.subplots() + ax.text(0.5, 0.5, "No numeric column found", ha="center", va="center") + return fig + + df = importance_df.head(20).copy() + df = df.sort_values(val_col, ascending=True) + + fig, ax = plt.subplots(figsize=(8, max(4, len(df) * 0.35))) + colors = sns.color_palette("viridis", len(df)) + ax.barh(range(len(df)), df[val_col].values, color=colors, height=0.7) + ax.set_yticks(range(len(df))) + ax.set_yticklabels(df.index) + ax.set_xlabel(val_col) + ax.set_title(title, fontsize=self._theme.title_size) + + fig.tight_layout() + return fig diff --git a/f2a/viz/theme.py b/f2a/viz/theme.py new file mode 100644 index 0000000..1c3a8f7 --- /dev/null +++ b/f2a/viz/theme.py @@ -0,0 +1,85 @@ +"""Visualization theme and style management.""" + +from __future__ import annotations + +import platform +from dataclasses import dataclass, field + +import matplotlib.pyplot as plt +import matplotlib.font_manager as fm +import seaborn as sns + + +def _get_korean_font() -> str | None: + """Find an available Korean font on the system.""" + system = platform.system() + candidates: list[str] = [] + + if system == "Windows": + candidates = ["Malgun Gothic", "맑은 고딕", "NanumGothic", "NanumBarunGothic"] + elif system == "Darwin": + candidates = ["AppleGothic", "Apple SD Gothic Neo", "NanumGothic"] + else: + candidates = ["NanumGothic", "NanumBarunGothic", "UnDotum", "Noto Sans CJK KR"] + + available = {f.name for f in fm.fontManager.ttflist} + for font in candidates: + if font in available: + return font + return None + + +@dataclass +class F2ATheme: + """f2a visualization theme configuration. 
+ + Attributes: + palette: Seaborn color palette name. + figsize: Default figure size. + title_size: Title font size. + label_size: Label font size. + dpi: Output resolution. + style: Seaborn style. + """ + + palette: str = "husl" + figsize: tuple[float, float] = (10, 6) + title_size: int = 14 + label_size: int = 11 + dpi: int = 100 + style: str = "whitegrid" + context: str = "notebook" + font_scale: float = 1.0 + _colors: list[str] = field(default_factory=list) + + def apply(self) -> None: + """Apply the current theme to matplotlib/seaborn.""" + sns.set_theme( + style=self.style, + context=self.context, + font_scale=self.font_scale, + palette=self.palette, + ) + + rc_params: dict = { + "figure.figsize": self.figsize, + "figure.dpi": self.dpi, + "axes.titlesize": self.title_size, + "axes.labelsize": self.label_size, + } + + # Auto-configure Korean font + korean_font = _get_korean_font() + if korean_font: + rc_params["font.family"] = korean_font + rc_params["axes.unicode_minus"] = False # Prevent minus sign rendering issues + + plt.rcParams.update(rc_params) + + def get_colors(self, n: int = 10) -> list[str]: + """Return n colors from the palette.""" + return [str(c) for c in sns.color_palette(self.palette, n)] + + +# Default theme instance +DEFAULT_THEME = F2ATheme() diff --git a/tests/__init__.py b/git_action/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to git_action/tests/__init__.py diff --git a/git_action/tests/conftest.py b/git_action/tests/conftest.py new file mode 100644 index 0000000..a1ee376 --- /dev/null +++ b/git_action/tests/conftest.py @@ -0,0 +1,62 @@ +"""pytest fixtures for f2a tests.""" + +from __future__ import annotations + +from pathlib import Path + +import pandas as pd +import numpy as np +import pytest + + +@pytest.fixture +def sample_numeric_df() -> pd.DataFrame: + """Sample DataFrame with primarily numeric columns.""" + np.random.seed(42) + n = 200 + return pd.DataFrame( + { + "age": np.random.randint(18, 80, 
n), + "income": np.random.normal(50000, 15000, n).round(2), + "score": np.random.uniform(0, 100, n).round(2), + "height": np.random.normal(170, 10, n).round(1), + } + ) + + +@pytest.fixture +def sample_mixed_df() -> pd.DataFrame: + """Sample DataFrame with numeric, categorical, and missing values.""" + np.random.seed(42) + n = 150 + df = pd.DataFrame( + { + "id": range(n), + "name": [f"user_{i}" for i in range(n)], + "age": np.random.randint(18, 80, n), + "city": np.random.choice(["Seoul", "Busan", "Daegu", "Incheon", "Gwangju"], n), + "salary": np.random.normal(50000, 15000, n).round(2), + "rating": np.random.uniform(1, 5, n).round(1), + } + ) + # Insert missing values + mask = np.random.random(n) < 0.1 + df.loc[mask, "salary"] = np.nan + df.loc[np.random.random(n) < 0.05, "city"] = np.nan + return df + + +@pytest.fixture +def sample_csv_path(tmp_path: Path, sample_mixed_df: pd.DataFrame) -> Path: + """Temporary CSV file path.""" + csv_path = tmp_path / "test_data.csv" + sample_mixed_df.to_csv(csv_path, index=False) + return csv_path + + +@pytest.fixture +def sample_json_path(tmp_path: Path, sample_mixed_df: pd.DataFrame) -> Path: + """Temporary JSON file path.""" + json_path = tmp_path / "test_data.json" + sample_mixed_df.to_json(json_path, orient="records", force_ascii=False) + return json_path diff --git a/git_action/tests/test_descriptive.py b/git_action/tests/test_descriptive.py new file mode 100644 index 0000000..6487402 --- /dev/null +++ b/git_action/tests/test_descriptive.py @@ -0,0 +1,71 @@ +"""Descriptive statistics tests.""" + +from __future__ import annotations + +import pandas as pd +import pytest + +from f2a.core.schema import infer_schema +from f2a.stats.descriptive import DescriptiveStats +from f2a.stats.correlation import CorrelationStats +from f2a.stats.missing import MissingStats + + +class TestDescriptiveStats: + """DescriptiveStats tests.""" + + def test_summary_returns_dataframe(self, sample_mixed_df: pd.DataFrame) -> None: + schema = 
infer_schema(sample_mixed_df) + stats = DescriptiveStats(sample_mixed_df, schema) + result = stats.summary() + assert isinstance(result, pd.DataFrame) + assert len(result) == len(sample_mixed_df.columns) + + def test_numeric_summary(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + stats = DescriptiveStats(sample_numeric_df, schema) + result = stats.numeric_summary() + assert isinstance(result, pd.DataFrame) + assert "mean" in result.columns + + def test_summary_contains_expected_columns(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + stats = DescriptiveStats(sample_mixed_df, schema) + result = stats.summary() + assert "type" in result.columns + assert "count" in result.columns + assert "missing" in result.columns + + +class TestCorrelationStats: + """CorrelationStats tests.""" + + def test_pearson(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + corr = CorrelationStats(sample_numeric_df, schema) + result = corr.pearson() + assert isinstance(result, pd.DataFrame) + assert result.shape[0] == result.shape[1] + + def test_spearman(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + corr = CorrelationStats(sample_numeric_df, schema) + result = corr.spearman() + assert isinstance(result, pd.DataFrame) + + +class TestMissingStats: + """MissingStats tests.""" + + def test_column_summary(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + miss = MissingStats(sample_mixed_df, schema) + result = miss.column_summary() + assert isinstance(result, pd.DataFrame) + assert "missing_count" in result.columns + + def test_total_missing_ratio(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + miss = MissingStats(sample_mixed_df, schema) + ratio = miss.total_missing_ratio() + assert 0.0 <= ratio <= 1.0 diff --git a/git_action/tests/test_loader.py 
b/git_action/tests/test_loader.py new file mode 100644 index 0000000..221bcc7 --- /dev/null +++ b/git_action/tests/test_loader.py @@ -0,0 +1,499 @@ +"""DataLoader tests — auto-detection and loading of various formats.""" + +from __future__ import annotations + +import json +import sqlite3 +from pathlib import Path + +import numpy as np +import pandas as pd +import pytest + +from f2a.core.loader import DataLoader +from f2a.utils.exceptions import DataLoadError, EmptyDataError, UnsupportedFormatError +from f2a.utils.validators import detect_source_type, get_supported_formats + + +# ── fixtures: test file creation for various formats ────────────────── + +@pytest.fixture +def base_df() -> pd.DataFrame: + """Base DataFrame used for all format tests.""" + np.random.seed(42) + return pd.DataFrame( + { + "id": range(1, 51), + "name": [f"item_{i}" for i in range(1, 51)], + "value": np.random.normal(100, 20, 50).round(2), + "category": np.random.choice(["A", "B", "C"], 50), + "score": np.random.uniform(0, 100, 50).round(1), + } + ) + + +@pytest.fixture +def csv_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.csv" + base_df.to_csv(p, index=False) + return p + + +@pytest.fixture +def tsv_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.tsv" + base_df.to_csv(p, index=False, sep="\t") + return p + + +@pytest.fixture +def json_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.json" + base_df.to_json(p, orient="records", force_ascii=False) + return p + + +@pytest.fixture +def jsonl_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.jsonl" + base_df.to_json(p, orient="records", lines=True, force_ascii=False) + return p + + +@pytest.fixture +def ndjson_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + """ndjson extension test.""" + p = tmp_path / "test.ndjson" + base_df.to_json(p, orient="records", lines=True, force_ascii=False) + return p + + +@pytest.fixture +def 
delimited_pipe_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + """Pipe (|) delimited text file.""" + p = tmp_path / "test.txt" + base_df.to_csv(p, index=False, sep="|") + return p + + +@pytest.fixture +def delimited_semicolon_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + """Semicolon (;) delimited dat file.""" + p = tmp_path / "test.dat" + base_df.to_csv(p, index=False, sep=";") + return p + + +@pytest.fixture +def nested_json_file(tmp_path: Path) -> Path: + """Nested JSON file.""" + data = { + "metadata": {"source": "test", "version": 1}, + "records": [ + {"id": 1, "name": "a", "val": 10}, + {"id": 2, "name": "b", "val": 20}, + {"id": 3, "name": "c", "val": 30}, + ], + } + p = tmp_path / "nested.json" + p.write_text(json.dumps(data, ensure_ascii=False), encoding="utf-8") + return p + + +@pytest.fixture +def sqlite_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.db" + conn = sqlite3.connect(str(p)) + base_df.to_sql("main_table", conn, index=False) + base_df.head(10).to_sql("small_table", conn, index=False) + conn.close() + return p + + +@pytest.fixture +def stata_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.dta" + base_df.to_stata(p, write_index=False) + return p + + +@pytest.fixture +def pickle_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.pkl" + base_df.to_pickle(p) + return p + + +@pytest.fixture +def html_file(tmp_path: Path, base_df: pd.DataFrame) -> Path: + p = tmp_path / "test.html" + html_content = f""" +

    Test

    +
    ignore
    + {base_df.to_html(index=False)} + """ + p.write_text(html_content, encoding="utf-8") + return p + + +@pytest.fixture +def xml_file(tmp_path: Path) -> Path: + p = tmp_path / "test.xml" + xml_content = """ + + 1alpha10.5 + 2beta20.3 + 3gamma30.1 +""" + p.write_text(xml_content, encoding="utf-8") + return p + + +@pytest.fixture +def cp949_csv_file(tmp_path: Path) -> Path: + """CP949-encoded CSV (Korean).""" + p = tmp_path / "korean.csv" + df = pd.DataFrame({"이름": ["홍길동", "김철수"], "나이": [30, 25]}) + df.to_csv(p, index=False, encoding="cp949") + return p + + +@pytest.fixture +def fwf_file(tmp_path: Path) -> Path: + """Fixed-width file.""" + p = tmp_path / "test.fwf" + content = """Name Age Score +Alice 28 95.5 +Bob 34 87.3 +Charlie 22 91.0 +""" + p.write_text(content, encoding="utf-8") + return p + + +# ================================================================ +# Source type detection tests +# ================================================================ + + +class TestDetectSourceType: + """Source type detection tests.""" + + # ── Extension-based ── + def test_csv(self) -> None: + assert detect_source_type("data.csv") == "csv" + + def test_tsv(self) -> None: + assert detect_source_type("data.tsv") == "tsv" + + def test_tab(self) -> None: + assert detect_source_type("data.tab") == "tsv" + + def test_txt(self) -> None: + assert detect_source_type("data.txt") == "delimited" + + def test_dat(self) -> None: + assert detect_source_type("data.dat") == "delimited" + + def test_json(self) -> None: + assert detect_source_type("data.json") == "json" + + def test_jsonl(self) -> None: + assert detect_source_type("data.jsonl") == "jsonl" + + def test_ndjson(self) -> None: + assert detect_source_type("data.ndjson") == "jsonl" + + def test_parquet(self) -> None: + assert detect_source_type("data.parquet") == "parquet" + + def test_pq(self) -> None: + assert detect_source_type("data.pq") == "parquet" + + def test_excel_xlsx(self) -> None: + assert 
detect_source_type("data.xlsx") == "excel" + + def test_excel_xls(self) -> None: + assert detect_source_type("data.xls") == "excel" + + def test_excel_xlsm(self) -> None: + assert detect_source_type("data.xlsm") == "excel" + + def test_excel_xlsb(self) -> None: + assert detect_source_type("data.xlsb") == "excel" + + def test_ods(self) -> None: + assert detect_source_type("data.ods") == "ods" + + def test_feather(self) -> None: + assert detect_source_type("data.feather") == "feather" + + def test_arrow_ipc(self) -> None: + assert detect_source_type("data.arrow") == "arrow_ipc" + + def test_orc(self) -> None: + assert detect_source_type("data.orc") == "orc" + + def test_hdf5(self) -> None: + assert detect_source_type("data.h5") == "hdf5" + + def test_hdf5_ext(self) -> None: + assert detect_source_type("data.hdf5") == "hdf5" + + def test_pickle(self) -> None: + assert detect_source_type("data.pkl") == "pickle" + + def test_pickle_ext(self) -> None: + assert detect_source_type("data.pickle") == "pickle" + + def test_sas(self) -> None: + assert detect_source_type("data.sas7bdat") == "sas" + + def test_sas_xport(self) -> None: + assert detect_source_type("data.xpt") == "sas_xport" + + def test_stata(self) -> None: + assert detect_source_type("data.dta") == "stata" + + def test_spss(self) -> None: + assert detect_source_type("data.sav") == "spss" + + def test_sqlite(self) -> None: + assert detect_source_type("data.db") == "sqlite" + + def test_sqlite3(self) -> None: + assert detect_source_type("data.sqlite3") == "sqlite" + + def test_duckdb(self) -> None: + assert detect_source_type("data.duckdb") == "duckdb" + + def test_xml(self) -> None: + assert detect_source_type("data.xml") == "xml" + + def test_html(self) -> None: + assert detect_source_type("data.html") == "html" + + def test_htm(self) -> None: + assert detect_source_type("data.htm") == "html" + + def test_fwf(self) -> None: + assert detect_source_type("data.fwf") == "fwf" + + # ── HuggingFace ── + def 
test_hf_prefix(self) -> None: + assert detect_source_type("hf://imdb") == "hf" + + def test_hf_huggingface_prefix(self) -> None: + assert detect_source_type("huggingface://squad") == "hf" + + def test_hf_org_pattern(self) -> None: + assert detect_source_type("openai/gsm8k") == "hf" + + # ── URL ── + def test_url_csv(self) -> None: + result = detect_source_type("https://example.com/data.csv") + assert result == "csv" + + def test_url_json(self) -> None: + result = detect_source_type("https://example.com/api/data.json") + assert result == "json" + + def test_url_no_ext(self) -> None: + result = detect_source_type("https://example.com/api/data") + assert result == "url_auto" + + # ── Errors ── + def test_unsupported(self) -> None: + with pytest.raises(UnsupportedFormatError): + detect_source_type("data.xyz") + + # ── Utilities ── + def test_supported_formats_returns_dict(self) -> None: + result = get_supported_formats() + assert isinstance(result, dict) + assert "csv" in result + assert "hf" in result + assert "url" in result + + def test_loader_supported_formats(self) -> None: + formats = DataLoader.supported_formats() + assert "csv" in formats + assert "parquet" in formats + assert "sqlite" in formats + assert "hf" in formats + + +# ================================================================ +# Content sniffing tests +# ================================================================ + + +class TestContentSniffing: + """Content-based detection tests for files without extensions.""" + + def test_sniff_csv_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext" + pd.DataFrame({"a": [1, 2], "b": [3, 4]}).to_csv(p, index=False) + assert detect_source_type(str(p)) == "csv" + + def test_sniff_tsv_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext_tsv" + pd.DataFrame({"a": [1, 2], "b": [3, 4]}).to_csv(p, index=False, sep="\t") + assert detect_source_type(str(p)) == "tsv" + + def test_sniff_json_content(self, tmp_path: Path) -> None: + p = tmp_path 
/ "noext_json" + p.write_text('[{"a": 1}, {"a": 2}]', encoding="utf-8") + assert detect_source_type(str(p)) == "json" + + def test_sniff_jsonl_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext_jsonl" + p.write_text('{"a": 1}\n{"a": 2}\n{"a": 3}\n', encoding="utf-8") + assert detect_source_type(str(p)) == "jsonl" + + def test_sniff_xml_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext_xml" + p.write_text('1', encoding="utf-8") + assert detect_source_type(str(p)) == "xml" + + def test_sniff_html_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext_html" + p.write_text('
    1
    ', encoding="utf-8") + assert detect_source_type(str(p)) == "html" + + def test_sniff_sqlite_content(self, tmp_path: Path) -> None: + p = tmp_path / "noext_db" + conn = sqlite3.connect(str(p)) + pd.DataFrame({"x": [1]}).to_sql("t", conn, index=False) + conn.close() + assert detect_source_type(str(p)) == "sqlite" + + +# ================================================================ +# DataLoader — file loading tests +# ================================================================ + + +class TestDataLoaderCSV: + """CSV loading tests.""" + + def test_load_csv(self, csv_file: Path) -> None: + df = DataLoader().load(str(csv_file)) + assert len(df) == 50 + assert "id" in df.columns + + def test_load_csv_cp949(self, cp949_csv_file: Path) -> None: + """CP949-encoded CSV auto-handling test.""" + df = DataLoader().load(str(cp949_csv_file)) + assert len(df) == 2 + assert "이름" in df.columns + + +class TestDataLoaderTSV: + def test_load_tsv(self, tsv_file: Path) -> None: + df = DataLoader().load(str(tsv_file)) + assert len(df) == 50 + + +class TestDataLoaderJSON: + def test_load_json(self, json_file: Path) -> None: + df = DataLoader().load(str(json_file)) + assert len(df) == 50 + + def test_load_jsonl(self, jsonl_file: Path) -> None: + df = DataLoader().load(str(jsonl_file)) + assert len(df) == 50 + + def test_load_ndjson(self, ndjson_file: Path) -> None: + df = DataLoader().load(str(ndjson_file)) + assert len(df) == 50 + + def test_load_nested_json(self, nested_json_file: Path) -> None: + """Nested JSON auto-flatten test.""" + df = DataLoader().load(str(nested_json_file)) + assert len(df) == 3 + assert "id" in df.columns + + +class TestDataLoaderDelimited: + def test_load_pipe_delimited(self, delimited_pipe_file: Path) -> None: + """Pipe delimiter auto-detection test.""" + df = DataLoader().load(str(delimited_pipe_file)) + assert len(df) == 50 + assert len(df.columns) >= 5 + + def test_load_semicolon_delimited(self, delimited_semicolon_file: Path) -> None: + 
"""Semicolon delimiter auto-detection test.""" + df = DataLoader().load(str(delimited_semicolon_file)) + assert len(df) == 50 + assert len(df.columns) >= 5 + + def test_load_fwf(self, fwf_file: Path) -> None: + df = DataLoader().load(str(fwf_file)) + assert len(df) >= 3 + + +class TestDataLoaderSQLite: + def test_load_sqlite_auto(self, sqlite_file: Path) -> None: + """First table auto-selection test.""" + df = DataLoader().load(str(sqlite_file)) + assert len(df) == 50 + + def test_load_sqlite_specific_table(self, sqlite_file: Path) -> None: + df = DataLoader().load(str(sqlite_file), table="small_table") + assert len(df) == 10 + + def test_load_sqlite_query(self, sqlite_file: Path) -> None: + df = DataLoader().load( + str(sqlite_file), query="SELECT * FROM main_table WHERE value > 100" + ) + assert len(df) > 0 + + def test_load_sqlite_missing_table(self, sqlite_file: Path) -> None: + with pytest.raises(DataLoadError, match="Table"): + DataLoader().load(str(sqlite_file), table="nonexistent") + + +class TestDataLoaderStata: + def test_load_stata(self, stata_file: Path) -> None: + df = DataLoader().load(str(stata_file)) + assert len(df) == 50 + + +class TestDataLoaderPickle: + def test_load_pickle(self, pickle_file: Path) -> None: + df = DataLoader().load(str(pickle_file)) + assert len(df) == 50 + + +class TestDataLoaderHTML: + def test_load_html_largest_table(self, html_file: Path) -> None: + """HTML largest table auto-selection test.""" + df = DataLoader().load(str(html_file)) + assert len(df) == 50 + + def test_load_html_specific_table(self, html_file: Path) -> None: + df = DataLoader().load(str(html_file), table_index=1) + assert len(df) == 50 + + +class TestDataLoaderXML: + def test_load_xml(self, xml_file: Path) -> None: + df = DataLoader().load(str(xml_file)) + assert len(df) == 3 + assert "name" in df.columns + + +class TestDataLoaderErrors: + def test_nonexistent_file(self) -> None: + with pytest.raises(Exception): + DataLoader().load("nonexistent_file.csv") + 
+ def test_empty_source(self) -> None: + with pytest.raises(ValueError): + from f2a.utils.validators import validate_source + validate_source("") + + def test_unsupported_format(self) -> None: + with pytest.raises(UnsupportedFormatError): + DataLoader().load("file.xyz") diff --git a/git_action/tests/test_report.py b/git_action/tests/test_report.py new file mode 100644 index 0000000..76fbbd6 --- /dev/null +++ b/git_action/tests/test_report.py @@ -0,0 +1,43 @@ +"""Report generation tests.""" + +from __future__ import annotations + +import matplotlib +matplotlib.use("Agg") + +from pathlib import Path + +import pandas as pd +import pytest + +from f2a.core.analyzer import analyze + + +class TestAnalysisReport: + """Integration analysis report tests.""" + + def test_analyze_csv(self, sample_csv_path: Path) -> None: + report = analyze(str(sample_csv_path)) + assert report.shape[0] > 0 + assert report.shape[1] > 0 + assert not report.stats.summary.empty + + def test_report_show(self, sample_csv_path: Path, capsys: pytest.CaptureFixture) -> None: + report = analyze(str(sample_csv_path)) + report.show() + captured = capsys.readouterr() + assert "f2a Analysis Report" in captured.out + + def test_report_to_html(self, sample_csv_path: Path, tmp_path: Path) -> None: + report = analyze(str(sample_csv_path)) + html_path = report.to_html(str(tmp_path)) + assert html_path.exists() + content = html_path.read_text(encoding="utf-8") + assert "f2a Analysis Report" in content + + def test_report_to_dict(self, sample_csv_path: Path) -> None: + report = analyze(str(sample_csv_path)) + d = report.to_dict() + assert "dataset_name" in d + assert "shape" in d + assert "schema" in d diff --git a/git_action/tests/test_viz.py b/git_action/tests/test_viz.py new file mode 100644 index 0000000..5deaf50 --- /dev/null +++ b/git_action/tests/test_viz.py @@ -0,0 +1,69 @@ +"""Visualization tests.""" + +from __future__ import annotations + +import matplotlib +matplotlib.use("Agg") # Render without GUI 
during tests + +import matplotlib.pyplot as plt +import pandas as pd +import pytest + +from f2a.core.schema import infer_schema +from f2a.viz.plots import BasicPlotter +from f2a.viz.corr_plots import CorrelationPlotter +from f2a.viz.missing_plots import MissingPlotter + + +class TestBasicPlotter: + """BasicPlotter tests.""" + + def test_histograms(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + plotter = BasicPlotter(sample_numeric_df, schema) + fig = plotter.histograms() + assert isinstance(fig, plt.Figure) + plt.close(fig) + + def test_boxplots(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + plotter = BasicPlotter(sample_numeric_df, schema) + fig = plotter.boxplots() + assert isinstance(fig, plt.Figure) + plt.close(fig) + + def test_bar_charts(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + plotter = BasicPlotter(sample_mixed_df, schema) + fig = plotter.bar_charts() + assert isinstance(fig, plt.Figure) + plt.close(fig) + + +class TestCorrelationPlotter: + """CorrelationPlotter tests.""" + + def test_heatmap(self, sample_numeric_df: pd.DataFrame) -> None: + schema = infer_schema(sample_numeric_df) + plotter = CorrelationPlotter(sample_numeric_df, schema) + fig = plotter.heatmap() + assert isinstance(fig, plt.Figure) + plt.close(fig) + + +class TestMissingPlotter: + """MissingPlotter tests.""" + + def test_bar(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + plotter = MissingPlotter(sample_mixed_df, schema) + fig = plotter.bar() + assert isinstance(fig, plt.Figure) + plt.close(fig) + + def test_matrix(self, sample_mixed_df: pd.DataFrame) -> None: + schema = infer_schema(sample_mixed_df) + plotter = MissingPlotter(sample_mixed_df, schema) + fig = plotter.matrix() + assert isinstance(fig, plt.Figure) + plt.close(fig) diff --git a/img/cluster.png b/img/cluster.png new file mode 100644 index 
0000000..66686cd Binary files /dev/null and b/img/cluster.png differ diff --git a/img/overview.png b/img/overview.png new file mode 100644 index 0000000..6b94b9c Binary files /dev/null and b/img/overview.png differ diff --git a/lerobot_test_library.py b/lerobot_test_library.py new file mode 100644 index 0000000..b342710 --- /dev/null +++ b/lerobot_test_library.py @@ -0,0 +1,11 @@ +"""Quick test script: generate f2a report for lerobot/roboturk.""" + +import warnings + +warnings.filterwarnings("ignore") + +import f2a + +result = f2a.analyze("lerobot/roboturk") +path = result.to_html("output") +print(f"Report saved: {path}") diff --git a/lerobot_test_local.py b/lerobot_test_local.py new file mode 100644 index 0000000..ef0f16b --- /dev/null +++ b/lerobot_test_local.py @@ -0,0 +1,16 @@ +"""Quick test script: generate f2a report for lerobot/roboturk (local version).""" + +import sys +import warnings +from pathlib import Path + +warnings.filterwarnings("ignore") + +# Ensure the local f2a package is used instead of the installed one +sys.path.insert(0, str(Path(__file__).resolve().parent)) + +import f2a # noqa: E402 + +result = f2a.analyze("lerobot/roboturk") +path = result.to_html("output") +print(f"Report saved: {path}") diff --git a/pyproject.toml b/pyproject.toml index ab6fa78..565b457 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,68 +1,63 @@ [build-system] -requires = ["maturin>=1.7,<2.0"] -build-backend = "maturin" +requires = ["hatchling"] +build-backend = "hatchling.build" [project] name = "f2a" -version = "1.0.3" -description = "File to Analysis -- Automatically perform statistical analysis from any data source (Rust-powered)" -license = { text = "Apache-2.0" } +version = "1.1.0" +description = "File to Analysis — Automatically perform descriptive statistical analysis and visualization from any data source" readme = "README.md" +license = { text = "MIT" } requires-python = ">=3.10" +authors = [ + { name = "CocoRoF" }, +] +keywords = ["statistics", 
"visualization", "data-analysis", "eda", "huggingface"] classifiers = [ "Development Status :: 3 - Alpha", - "Intended Audience :: Science/Research", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Rust", - "Programming Language :: Python :: Implementation :: CPython", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Visualization", "Topic :: Scientific/Engineering :: Information Analysis", ] -authors = [ - { name = "CocoRoF" }, -] -keywords = ["statistics", "visualization", "data-analysis", "eda", "rust", "performance"] + dependencies = [ "pandas>=2.0", "numpy>=1.24", "matplotlib>=3.7", "seaborn>=0.13", "scipy>=1.11", - "pyarrow>=12.0", - "rich>=13.0", - "jinja2>=3.1", -] - -[project.optional-dependencies] -io = [ + "datasets>=2.14", "openpyxl>=3.1", + "pyarrow>=12.0", "pyreadstat>=1.2", "tables>=3.8", "odfpy>=1.4", "lxml>=4.9", "duckdb>=0.9", - "datasets>=2.14", + "rich>=13.0", + "jinja2>=3.1", + "scikit-learn>=1.3", ] + +[project.optional-dependencies] advanced = [ - "scikit-learn>=1.3", "networkx>=3.0", "umap-learn>=0.5", "statsmodels>=0.14", ] dev = [ "pytest>=7.0", - "pytest-benchmark", - "ruff", - "black", - "isort", - "mypy", - "maturin>=1.7", + "pytest-cov>=4.0", + "ruff>=0.1", + "black>=23.0", + "isort>=5.12", + "mypy>=1.5", ] [project.urls] @@ -71,14 +66,30 @@ Documentation = "https://github.com/CocoRoF/f2a#readme" Repository = "https://github.com/CocoRoF/f2a" Issues = "https://github.com/CocoRoF/f2a/issues" -[tool.maturin] -python-source = "python" -module-name = "f2a._core" -features = ["pyo3/extension-module"] +[tool.hatch.build.targets.wheel] +packages = ["f2a"] + 
+# ── Ruff ────────────────────────────────────────────── +[tool.ruff] +target-version = "py310" +line-length = 100 + +[tool.ruff.lint] +select = ["E", "F", "W", "I", "N", "UP", "B", "SIM"] + +# ── Black ───────────────────────────────────────────── +[tool.black] +target-version = ["py310"] +line-length = 100 + +# ── isort ───────────────────────────────────────────── +[tool.isort] +profile = "black" +line_length = 100 -[tool.pytest.ini_options] -testpaths = ["tests"] -python_files = "test_*.py" -python_classes = "Test*" -python_functions = "test_*" -addopts = "-v --tb=short" +# ── mypy ────────────────────────────────────────────── +[tool.mypy] +python_version = "3.10" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true diff --git a/python/f2a/__init__.py b/python/f2a/__init__.py deleted file mode 100644 index e0cf49e..0000000 --- a/python/f2a/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -f2a -- File to Analysis (Rust-powered) -========================================= - -High-performance data analysis library with Rust computation core. 
- -Usage:: - - import f2a - - report = f2a.analyze("data.csv") - report.show() # console summary - report.to_html("./out") # self-contained HTML report -""" - -from f2a._version import __version__ -from f2a.api import AnalysisConfig, analyze - -__all__ = ["__version__", "analyze", "AnalysisConfig"] diff --git a/python/f2a/_version.py b/python/f2a/_version.py deleted file mode 100644 index 86f2862..0000000 --- a/python/f2a/_version.py +++ /dev/null @@ -1,7 +0,0 @@ -from importlib.metadata import version, PackageNotFoundError - -try: - __version__: str = version("f2a") -except PackageNotFoundError: - # Fallback for editable / dev installs where metadata isn't available yet - __version__ = "0.0.0-dev" diff --git a/python/f2a/api.py b/python/f2a/api.py deleted file mode 100644 index 62a27d3..0000000 --- a/python/f2a/api.py +++ /dev/null @@ -1,275 +0,0 @@ -""" -Public API for f2a -- mirrors the original f2a interface. - - report = f2a.analyze("data.csv", config=AnalysisConfig(advanced=False)) - report.show() - report.to_html("./output") -""" - -from __future__ import annotations - -import json -import time -from dataclasses import dataclass, field, asdict -from datetime import datetime -from pathlib import Path -from typing import Any, Optional - -from rich.console import Console -from rich.table import Table - -# Import the Rust _core extension -from f2a import _core - - -# ─── AnalysisConfig ────────────────────────────────────────────────── - -@dataclass -class AnalysisConfig: - """Configuration for analysis. 
Mirrors the Rust AnalysisConfig.""" - - # Basic toggles - descriptive: bool = True - correlation: bool = True - distribution: bool = True - missing: bool = True - outlier: bool = True - categorical: bool = True - feature_importance: bool = True - pca: bool = True - duplicates: bool = True - quality: bool = True - preprocessing: bool = True - - # Advanced toggles - advanced: bool = True - advanced_distribution: bool = True - advanced_correlation: bool = True - clustering: bool = True - advanced_dimreduction: bool = True - feature_insights: bool = True - advanced_anomaly: bool = True - statistical_tests: bool = True - data_profiling: bool = True - insight_engine: bool = True - cross_analysis: bool = True - column_role: bool = True - ml_readiness: bool = True - - # Parameters - outlier_threshold: float = 1.5 - outlier_method: str = "iqr" - correlation_threshold: float = 0.9 - pca_max_components: int = 10 - max_categories: int = 50 - max_plot_columns: int = 20 - max_cluster_k: int = 10 - tsne_perplexity: float = 30.0 - bootstrap_iterations: int = 1000 - max_sample_for_advanced: int = 5000 - n_distribution_fits: int = 7 - - def to_json(self) -> str: - """Serialize to JSON for the Rust core.""" - return json.dumps(asdict(self)) - - @classmethod - def minimal(cls) -> "AnalysisConfig": - """Only descriptive statistics.""" - cfg_json = _core.minimal_config() - return cls._from_json(cfg_json) - - @classmethod - def fast(cls) -> "AnalysisConfig": - """Skip heavy analyses (PCA, feature importance, all advanced).""" - cfg_json = _core.fast_config() - return cls._from_json(cfg_json) - - @classmethod - def basic_only(cls) -> "AnalysisConfig": - """All basic on, all advanced off.""" - cfg_json = _core.basic_only_config() - return cls._from_json(cfg_json) - - @classmethod - def _from_json(cls, json_str: str) -> "AnalysisConfig": - d = json.loads(json_str) - return cls(**{k: v for k, v in d.items() if k in cls.__dataclass_fields__}) - - -# ─── AnalysisReport 
────────────────────────────────────────────────── - -@dataclass -class AnalysisReport: - """Result of ``f2a.analyze()``.""" - - source: str - schema: dict - config: dict - results: dict - preprocessing: Optional[dict] = None - analysis_started_at: str = "" - analysis_duration_sec: float = 0.0 - - # ── Console output ─────────────────────────────────────────── - - def show(self) -> None: - """Print a rich summary to the console.""" - console = Console() - - # Header - console.print(f"\n[bold cyan]=== f2a Analysis Report ===[/bold cyan]") - console.print(f" Source: [green]{self.source}[/green]") - console.print( - f" Shape: {self.schema.get('n_rows', '?')} rows x {self.schema.get('n_cols', '?')} cols" - ) - console.print( - f" Duration: {self.analysis_duration_sec:.2f}s\n" - ) - - # Schema overview - if "columns" in self.schema: - table = Table(title="Schema", show_lines=False) - table.add_column("Column", style="bold") - table.add_column("DType") - table.add_column("Inferred") - table.add_column("Unique", justify="right") - table.add_column("Missing", justify="right") - - for col in self.schema["columns"][:30]: - missing_str = f"{col.get('n_missing', 0)} ({col.get('missing_ratio', 0)*100:.1f}%)" - table.add_row( - col["name"], - col.get("dtype", ""), - col.get("inferred_type", ""), - str(col.get("n_unique", "")), - missing_str, - ) - console.print(table) - - # Result sections - sections = list(self.results.keys()) - console.print(f"\n [bold]Analysis sections:[/bold] {', '.join(sections)}") - - # Insight summary - if "insight_engine" in self.results: - ie = self.results["insight_engine"] - summary = ie.get("summary", {}) - console.print( - f"\n [bold]Insights:[/bold] {summary.get('total', 0)} total " - f"({summary.get('critical', 0)} critical, " - f"{summary.get('warning', 0)} warning, " - f"{summary.get('info', 0)} info)" - ) - - # ML readiness - if "ml_readiness" in self.results: - ml = self.results["ml_readiness"] - console.print( - f" [bold]ML Readiness:[/bold] 
{ml.get('grade', '?')} " - f"({ml.get('overall_score', 0)*100:.0f}%)" - ) - - # Quality - if "quality" in self.results: - q = self.results["quality"] - console.print( - f" [bold]Quality Score:[/bold] {q.get('overall_score', 0)*100:.0f}%" - ) - - console.print() - - # ── HTML report ────────────────────────────────────────────── - - def to_html(self, output_dir: str = ".", lang: str = "en") -> Path: - """Generate a self-contained HTML report. - - Parameters - ---------- - output_dir : str - Directory to write the HTML file (created if needed). - lang : str - Language code for the report ('en', 'ko', 'ja', 'zh', 'de', 'fr'). - - Returns - ------- - pathlib.Path - Path to the generated HTML file. - """ - from f2a.report.generator import ReportGenerator - - output_path = Path(output_dir) - output_path.mkdir(parents=True, exist_ok=True) - - dataset_name = Path(self.source).stem - timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") - filename = f"{dataset_name}_{timestamp}_report.html" - filepath = output_path / filename - - generator = ReportGenerator(lang=lang) - generator.save_html( - output_path=filepath, - report=self, - ) - - return filepath - - # ── Convenience accessors ──────────────────────────────────── - - def get(self, section: str) -> Optional[dict]: - """Get results for a named section.""" - return self.results.get(section) - - @property - def sections(self) -> list[str]: - """List of computed analysis sections.""" - return list(self.results.keys()) - - -# ─── analyze() entry-point ─────────────────────────────────────────── - -def analyze( - source: str, - config: Optional[AnalysisConfig] = None, -) -> AnalysisReport: - """Run a full analysis on a data file. - - Parameters - ---------- - source : str - Path to a data file (CSV, TSV, Parquet, JSON, JSONL, Feather). - config : AnalysisConfig, optional - Configuration overrides. Defaults to all analyses enabled. 
- - Returns - ------- - AnalysisReport - Rich object with ``.show()``, ``.to_html()``, and dict-like access. - - Examples - -------- - >>> import f2a - >>> report = f2a.analyze("data.csv") - >>> report.show() - >>> report.to_html("./output") - """ - if config is None: - config = AnalysisConfig() - - config_json = config.to_json() - - start = time.perf_counter() - raw_json = _core.analyze(source, config_json) - duration = time.perf_counter() - start - - raw = json.loads(raw_json) - - return AnalysisReport( - source=raw.get("source", source), - schema=raw.get("schema", {}), - config=raw.get("config", {}), - results=raw.get("results", {}), - preprocessing=raw.get("preprocessing"), - analysis_started_at=datetime.now().isoformat(), - analysis_duration_sec=duration, - ) diff --git a/python/f2a/report/__init__.py b/python/f2a/report/__init__.py deleted file mode 100644 index 4ee8439..0000000 --- a/python/f2a/report/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from f2a.report.generator import ReportGenerator -from f2a.report.i18n import t, SUPPORTED_LANGUAGES - -__all__ = ["ReportGenerator", "t", "SUPPORTED_LANGUAGES"] diff --git a/python/f2a/report/generator.py b/python/f2a/report/generator.py deleted file mode 100644 index 8bdaa74..0000000 --- a/python/f2a/report/generator.py +++ /dev/null @@ -1,612 +0,0 @@ -""" -HTML report generator for f2a. - -Produces a self-contained single-file HTML report with embedded charts -(base64 PNG) and interactive navigation. 
-""" - -from __future__ import annotations - -import base64 -import io -import json -from pathlib import Path -from typing import Any, Optional - -from f2a.report.i18n import t - -try: - import matplotlib - matplotlib.use("Agg") - import matplotlib.pyplot as plt - import seaborn as sns - HAS_VIZ = True -except ImportError: - HAS_VIZ = False - - -class ReportGenerator: - """Generates a self-contained HTML report from an AnalysisReport.""" - - def __init__(self, lang: str = "en"): - self.lang = lang - - @staticmethod - def _get_version() -> str: - from f2a._version import __version__ - return __version__ - - def save_html(self, output_path: Path, report: Any) -> None: - """Write the report as a single HTML file.""" - html = self._build_html(report) - with open(output_path, "w", encoding="utf-8") as f: - f.write(html) - - # ── Main HTML builder ──────────────────────────────────────── - - def _build_html(self, report: Any) -> str: - sections_html = [] - - # Overview - sections_html.append(self._section_overview(report)) - - # Schema - if "columns" in report.schema: - sections_html.append(self._section_schema(report)) - - # Preprocessing - if report.preprocessing: - sections_html.append(self._section_preprocessing(report)) - - # Each analysis section - section_order = [ - "descriptive", "missing", "distribution", "outlier", - "correlation", "categorical", "duplicates", "quality", - "feature_importance", "pca", "statistical_tests", - "clustering", "advanced_anomaly", "advanced_correlation", - "advanced_distribution", "advanced_dimreduction", - "feature_insights", "insight_engine", "column_role", - "cross_analysis", "ml_readiness", - ] - - for section_key in section_order: - if section_key in report.results: - title = t(section_key, self.lang) - data = report.results[section_key] - sections_html.append( - self._generic_section(section_key, title, data) - ) - - nav_items = self._build_nav(report, section_order) - body = "\n".join(sections_html) - - return self._wrap_html( 
- title=f"{t('report_title', self.lang)} — {Path(report.source).stem}", - nav=nav_items, - body=body, - ) - - # ── Overview section ───────────────────────────────────────── - - def _section_overview(self, report: Any) -> str: - schema = report.schema - n_rows = schema.get("n_rows", "?") - n_cols = schema.get("n_cols", "?") - mem_bytes = schema.get("memory_usage_bytes", 0) - mem_str = self._format_bytes(mem_bytes) - - cards = f""" -
    -
    -
    {t('rows', self.lang)}
    -
    {n_rows:,}
    -
    -
    -
    {t('columns', self.lang)}
    -
    {n_cols}
    -
    -
    -
    {t('memory', self.lang)}
    -
    {mem_str}
    -
    -
    -
    {t('duration', self.lang)}
    -
    {report.analysis_duration_sec:.2f}s
    -
    -
    - """ - - # ML readiness badge - ml_html = "" - if "ml_readiness" in report.results: - ml = report.results["ml_readiness"] - grade = ml.get("grade", "?") - score = ml.get("overall_score", 0) * 100 - color = {"A": "#22c55e", "B": "#84cc16", "C": "#eab308", "D": "#f97316", "F": "#ef4444"}.get(grade, "#888") - ml_html = f""" -
    - {t('ml_readiness', self.lang)}: {grade} ({score:.0f}%) -
    - """ - - return f""" -
    -

    {t('overview', self.lang)}

    - {cards} - {ml_html} -
    - """ - - # ── Schema section ─────────────────────────────────────────── - - def _section_schema(self, report: Any) -> str: - columns = report.schema.get("columns", []) - rows = "" - for col in columns: - missing_pct = col.get("missing_ratio", 0) * 100 - bar_color = "#22c55e" if missing_pct < 5 else "#eab308" if missing_pct < 30 else "#ef4444" - rows += f""" - - {col['name']} - {col.get('dtype', '')} - {col.get('inferred_type', '')} - {col.get('n_unique', '')} - - {col.get('n_missing', 0)} - ({missing_pct:.1f}%) -
    - - - """ - - return f""" -
    -

    {t('schema', self.lang)}

    -
    - - - - - - - - {rows} -
    ColumnDTypeInferredUniqueMissing
    -
    -
    - """ - - # ── Preprocessing section ──────────────────────────────────── - - def _section_preprocessing(self, report: Any) -> str: - pp = report.preprocessing - if not pp: - return "" - - items = [ - f"Rows: {pp.get('rows_before', '?')} → {pp.get('rows_after', '?')}", - f"Cols: {pp.get('cols_before', '?')} → {pp.get('cols_after', '?')}", - f"Duplicates: {pp.get('duplicate_row_count', 0)} ({pp.get('duplicate_row_ratio', 0)*100:.1f}%)", - ] - - const_cols = pp.get("constant_columns", []) - if const_cols: - items.append(f"Constant columns: {', '.join(const_cols)}") - - id_cols = pp.get("id_like_columns", []) - if id_cols: - items.append(f"ID-like columns: {', '.join(id_cols)}") - - li = "".join(f"
  • {item}
  • " for item in items) - - return f""" -
    -

    {t('preprocessing', self.lang)}

    -
      {li}
    -
    - """ - - # ── Generic JSON-to-HTML section ───────────────────────────── - - def _generic_section(self, key: str, title: str, data: Any) -> str: - """Render any analysis section as collapsible JSON + summary tables.""" - content_parts = [] - - # Special renderers - if key == "insight_engine": - content_parts.append(self._render_insights(data)) - elif key == "ml_readiness": - content_parts.append(self._render_ml_readiness(data)) - elif key == "quality": - content_parts.append(self._render_quality(data)) - elif key == "descriptive": - content_parts.append(self._render_descriptive(data)) - elif key == "missing": - content_parts.append(self._render_missing(data)) - else: - # Fallback: render as pretty-printed JSON - content_parts.append(self._render_json(data)) - - content = "\n".join(content_parts) - - return f""" -
    -

    {title}

    - {content} -
    - """ - - # ── Specialized renderers ──────────────────────────────────── - - def _render_insights(self, data: dict) -> str: - summary = data.get("summary", {}) - insights = data.get("insights", []) - - header = f""" -
    - {summary.get('critical', 0)} {t('critical', self.lang)} - {summary.get('warning', 0)} {t('warning', self.lang)} - {summary.get('info', 0)} {t('info', self.lang)} -
    - """ - - rows = "" - for ins in insights: - sev = ins.get("severity", "Info") - sev_class = sev.lower() - rows += f""" - - {sev} - {ins.get('column', '—')} - {ins.get('message', '')} - {ins.get('recommendation', '')} - - """ - - table = f""" -
    - - - {rows} -
    SeverityColumnMessageRecommendation
    -
    - """ - return header + table - - def _render_ml_readiness(self, data: dict) -> str: - grade = data.get("grade", "?") - score = data.get("overall_score", 0) - dims = data.get("dimensions", []) - recs = data.get("recommendations", []) - - # Dimensions table - dim_rows = "" - for d in dims: - bar_width = d.get("score", 0) * 100 - color = "#22c55e" if bar_width >= 80 else "#eab308" if bar_width >= 60 else "#ef4444" - dim_rows += f""" - - {d['name']} - {d.get('score', 0)*100:.0f}% -
    - - {d.get('detail', '')} - - """ - - # Recommendations - rec_html = "" - if recs: - rec_items = "".join(f"
  • {r}
  • " for r in recs) - rec_html = f"

    {t('recommendations', self.lang)}

      {rec_items}
    " - - return f""" -
    {grade} ({score*100:.0f}%)
    -
    - - - {dim_rows} -
    DimensionScoreDetail
    -
    - {rec_html} - """ - - def _render_quality(self, data: dict) -> str: - overall = data.get("overall_score", 0) - dims = data.get("dimensions", []) - - dim_rows = "" - for d in dims: - s = d.get("score", 0) - bar_width = s * 100 - color = "#22c55e" if bar_width >= 80 else "#eab308" if bar_width >= 60 else "#ef4444" - dim_rows += f""" - - {d.get('name', '')} - {s*100:.0f}% -
    - - - """ - - return f""" -
    {t('overall_score', self.lang)}: {overall*100:.0f}%
    -
    - - - {dim_rows} -
    DimensionScore
    -
    - """ - - def _render_descriptive(self, data: dict) -> str: - num = data.get("numeric", []) - cat = data.get("categorical", []) - parts = [] - - if num: - rows = "" - for col_data in num[:30]: - rows += f""" - - {col_data.get('column', '')} - {col_data.get('count', '')} - {self._fmt(col_data.get('mean'))} - {self._fmt(col_data.get('std'))} - {self._fmt(col_data.get('min'))} - {self._fmt(col_data.get('median'))} - {self._fmt(col_data.get('max'))} - {self._fmt(col_data.get('skewness'))} - {self._fmt(col_data.get('kurtosis'))} - - """ - parts.append(f""" -

    Numeric

    -
    - - - - - - - - - {rows} -
    ColumnCountMeanStdMinMedianMaxSkewKurt
    -
    - """) - - if cat: - rows = "" - for col_data in cat[:30]: - rows += f""" - - {col_data.get('column', '')} - {col_data.get('count', '')} - {col_data.get('unique', '')} - {col_data.get('top', '')} - {col_data.get('freq', '')} - - """ - parts.append(f""" -

    Categorical

    -
    - - - - - - - {rows} -
    ColumnCountUniqueTopFreq
    -
    - """) - - return "\n".join(parts) if parts else self._render_json(data) - - def _render_missing(self, data: dict) -> str: - per_col = data.get("per_column", []) - if not per_col: - return "

    No missing values detected.

    " - - rows = "" - for col_data in per_col: - n = col_data.get("n_missing", 0) - ratio = col_data.get("missing_ratio", 0) - if n > 0: - bar_width = ratio * 100 - color = "#22c55e" if bar_width < 5 else "#eab308" if bar_width < 30 else "#ef4444" - rows += f""" - - {col_data.get('column', '')} - {n} - {ratio*100:.1f}% -
    - - - """ - - if not rows: - return "

    No missing values detected.

    " - - return f""" -
    - - - {rows} -
    ColumnMissingRatio
    -
    - """ - - def _render_json(self, data: Any) -> str: - """Fallback: render as collapsible JSON.""" - json_str = json.dumps(data, indent=2, ensure_ascii=False, default=str) - # Truncate very long JSON - if len(json_str) > 50_000: - json_str = json_str[:50_000] + "\n... (truncated)" - return f""" -
    - Raw JSON data -
    {self._escape_html(json_str)}
    -
    - """ - - # ── Navigation ─────────────────────────────────────────────── - - def _build_nav(self, report: Any, section_order: list[str]) -> str: - items = [f'{t("overview", self.lang)}'] - - if "columns" in report.schema: - items.append(f'{t("schema", self.lang)}') - - if report.preprocessing: - items.append(f'{t("preprocessing", self.lang)}') - - for key in section_order: - if key in report.results: - title = t(key, self.lang) - items.append(f'{title}') - - return "\n".join(items) - - # ── Full HTML wrapper ──────────────────────────────────────── - - def _wrap_html(self, title: str, nav: str, body: str) -> str: - return f""" - - - - - {self._escape_html(title)} - - - - -
    -

    {self._escape_html(title)}

    -

    {t('generated_by', self.lang)} v{self._get_version()}

    - {body} -
    - - -""" - - # ── CSS ────────────────────────────────────────────────────── - - def _css(self) -> str: - return """ -:root { - --bg: #0d1117; --surface: #161b22; --border: #30363d; - --text: #e6edf3; --text2: #8b949e; --accent: #58a6ff; - --success: #22c55e; --warn: #eab308; --danger: #ef4444; - --font: 'Segoe UI', -apple-system, BlinkMacSystemFont, sans-serif; - --mono: 'Cascadia Code', 'Fira Code', 'Consolas', monospace; -} -* { margin: 0; padding: 0; box-sizing: border-box; } -body { font-family: var(--font); background: var(--bg); color: var(--text); display: flex; min-height: 100vh; } -.sidebar { - position: fixed; top: 0; left: 0; width: 240px; height: 100vh; - background: var(--surface); border-right: 1px solid var(--border); - padding: 1rem 0; overflow-y: auto; z-index: 100; -} -.sidebar a { - display: block; padding: 8px 16px; color: var(--text2); - text-decoration: none; font-size: 13px; transition: all .15s; -} -.sidebar a:hover { color: var(--accent); background: rgba(88,166,255,.08); } -.content { margin-left: 240px; padding: 2rem 3rem; max-width: 1200px; width: 100%; } -h1 { font-size: 1.8rem; margin-bottom: .25rem; } -h2 { font-size: 1.3rem; margin-top: 2rem; margin-bottom: 1rem; padding-bottom: .5rem; border-bottom: 1px solid var(--border); } -h3 { font-size: 1.1rem; margin-top: 1.5rem; margin-bottom: .75rem; color: var(--text2); } -.subtitle { color: var(--text2); margin-bottom: 2rem; } -section { margin-bottom: 2rem; } -.card-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(180px, 1fr)); gap: 1rem; margin: 1rem 0; } -.card { - background: var(--surface); border: 1px solid var(--border); border-radius: 8px; - padding: 1rem; text-align: center; -} -.card-label { font-size: .8rem; color: var(--text2); margin-bottom: .25rem; } -.card-value { font-size: 1.4rem; font-weight: 600; } -.table-wrap { overflow-x: auto; margin: .75rem 0; } -table { width: 100%; border-collapse: collapse; font-size: .85rem; } -thead { background: 
var(--surface); } -th, td { padding: 8px 12px; text-align: left; border-bottom: 1px solid var(--border); } -th { font-weight: 600; color: var(--text2); font-size: .8rem; text-transform: uppercase; letter-spacing: .5px; } -tr:hover { background: rgba(88,166,255,.04); } -.right { text-align: right; } -.pct { color: var(--text2); font-size: .8em; margin-left: 4px; } -.bar { height: 3px; border-radius: 2px; margin-top: 4px; transition: width .3s; } -code { font-family: var(--mono); font-size: .85em; background: rgba(110,118,129,.15); padding: 2px 6px; border-radius: 4px; } -pre.json-pre { background: var(--surface); border: 1px solid var(--border); border-radius: 8px; padding: 1rem; overflow-x: auto; font-size: .8rem; max-height: 400px; overflow-y: auto; } -details { margin: .75rem 0; } -details summary { cursor: pointer; color: var(--accent); font-size: .9rem; } -ul { padding-left: 1.5rem; margin: .5rem 0; } -li { margin: .25rem 0; } -.badge { - display: inline-block; padding: 3px 10px; border-radius: 12px; font-size: .75rem; font-weight: 600; -} -.badge-critical { background: rgba(239,68,68,.15); color: var(--danger); } -.badge-warning { background: rgba(234,179,8,.15); color: var(--warn); } -.badge-info { background: rgba(88,166,255,.15); color: var(--accent); } -.insight-summary { display: flex; gap: 1rem; margin-bottom: 1rem; } -.ml-badge { display: inline-block; padding: 6px 16px; border-radius: 8px; color: #fff; font-weight: 700; margin: .5rem 0; } -.grade-badge { font-size: 1.2rem; font-weight: 700; margin: .5rem 0; } -.grade-a { color: var(--success); } -.grade-b { color: #84cc16; } -.grade-c { color: var(--warn); } -.grade-d { color: #f97316; } -.grade-f { color: var(--danger); } -@media (max-width: 768px) { - .sidebar { width: 100%; height: auto; position: relative; display: flex; flex-wrap: wrap; } - .sidebar a { padding: 6px 12px; } - .content { margin-left: 0; padding: 1rem; } -} -""" - - # ── JS ─────────────────────────────────────────────────────── - - 
def _js(self) -> str: - return """ -// Smooth scroll and active state -document.querySelectorAll('.sidebar a').forEach(a => { - a.addEventListener('click', e => { - e.preventDefault(); - const target = document.querySelector(a.getAttribute('href')); - if (target) target.scrollIntoView({ behavior: 'smooth', block: 'start' }); - }); -}); -""" - - # ── Utilities ──────────────────────────────────────────────── - - @staticmethod - def _escape_html(text: str) -> str: - return text.replace("&", "&").replace("<", "<").replace(">", ">").replace('"', """) - - @staticmethod - def _format_bytes(n: int) -> str: - for unit in ("B", "KB", "MB", "GB"): - if n < 1024: - return f"{n:.1f} {unit}" if unit != "B" else f"{n} {unit}" - n /= 1024 - return f"{n:.1f} TB" - - @staticmethod - def _fmt(val: Any) -> str: - if val is None: - return "—" - if isinstance(val, float): - if abs(val) >= 1e6: - return f"{val:.2e}" - return f"{val:.4f}" - return str(val) - - @staticmethod - def fig_to_base64(fig) -> str: - """Convert a matplotlib figure to a base64-encoded PNG.""" - buf = io.BytesIO() - fig.savefig(buf, format="png", dpi=100, bbox_inches="tight", facecolor="#0d1117") - buf.seek(0) - b64 = base64.b64encode(buf.read()).decode("utf-8") - plt.close(fig) - return f"data:image/png;base64,{b64}" diff --git a/python/f2a/report/i18n.py b/python/f2a/report/i18n.py deleted file mode 100644 index 83fde5d..0000000 --- a/python/f2a/report/i18n.py +++ /dev/null @@ -1,155 +0,0 @@ -""" -Internationalisation support for f2a reports. 
-""" - -SUPPORTED_LANGUAGES = [ - {"code": "en", "name": "English"}, - {"code": "ko", "name": "한국어"}, - {"code": "ja", "name": "日本語"}, - {"code": "zh", "name": "中文"}, - {"code": "de", "name": "Deutsch"}, - {"code": "fr", "name": "Français"}, -] - -DEFAULT_LANG = "en" - -TRANSLATIONS: dict[str, dict[str, str]] = { - "en": { - "report_title": "Data Analysis Report", - "overview": "Overview", - "schema": "Schema", - "descriptive": "Descriptive Statistics", - "correlation": "Correlation Analysis", - "distribution": "Distribution Analysis", - "missing": "Missing Values", - "outlier": "Outlier Detection", - "categorical": "Categorical Analysis", - "duplicates": "Duplicate Analysis", - "quality": "Data Quality", - "feature_importance": "Feature Importance", - "pca": "PCA Analysis", - "statistical_tests": "Statistical Tests", - "clustering": "Clustering", - "advanced_anomaly": "Anomaly Detection", - "advanced_correlation": "Advanced Correlation", - "advanced_distribution": "Advanced Distribution", - "advanced_dimreduction": "Dimensionality Reduction", - "feature_insights": "Feature Insights", - "insight_engine": "Insight Engine", - "column_role": "Column Roles", - "cross_analysis": "Cross Analysis", - "ml_readiness": "ML Readiness", - "preprocessing": "Preprocessing", - "rows": "Rows", - "columns": "Columns", - "memory": "Memory", - "duration": "Duration", - "generated_by": "Generated by f2a", - "numeric": "Numeric", - "categorical_type": "Categorical", - "datetime": "DateTime", - "text": "Text", - "boolean": "Boolean", - "overall_score": "Overall Score", - "grade": "Grade", - "recommendations": "Recommendations", - "insights": "Insights", - "critical": "Critical", - "warning": "Warning", - "info": "Info", - }, - "ko": { - "report_title": "데이터 분석 리포트", - "overview": "개요", - "schema": "스키마", - "descriptive": "기술 통계", - "correlation": "상관관계 분석", - "distribution": "분포 분석", - "missing": "결측치 분석", - "outlier": "이상치 탐지", - "categorical": "범주형 분석", - "duplicates": "중복 분석", - 
"quality": "데이터 품질", - "feature_importance": "피처 중요도", - "pca": "PCA 분석", - "statistical_tests": "통계 검정", - "clustering": "클러스터링", - "advanced_anomaly": "이상 탐지", - "advanced_correlation": "고급 상관관계", - "advanced_distribution": "고급 분포", - "advanced_dimreduction": "차원 축소", - "feature_insights": "피처 인사이트", - "insight_engine": "인사이트 엔진", - "column_role": "컬럼 역할", - "cross_analysis": "교차 분석", - "ml_readiness": "ML 준비도", - "preprocessing": "전처리", - "rows": "행", - "columns": "열", - "memory": "메모리", - "duration": "소요시간", - "generated_by": "f2a 생성", - "numeric": "수치형", - "categorical_type": "범주형", - "datetime": "날짜/시간", - "text": "텍스트", - "boolean": "불리언", - "overall_score": "종합 점수", - "grade": "등급", - "recommendations": "권장 사항", - "insights": "인사이트", - "critical": "심각", - "warning": "경고", - "info": "정보", - }, - "ja": { - "report_title": "データ分析レポート", - "overview": "概要", - "schema": "スキーマ", - "descriptive": "記述統計", - "correlation": "相関分析", - "distribution": "分布分析", - "missing": "欠損値", - "outlier": "外れ値検出", - "categorical": "カテゴリ分析", - "duplicates": "重複分析", - "quality": "データ品質", - "feature_importance": "特徴量重要度", - "pca": "PCA分析", - "statistical_tests": "統計検定", - "clustering": "クラスタリング", - "advanced_anomaly": "異常検出", - "advanced_correlation": "高度な相関", - "advanced_distribution": "高度な分布", - "advanced_dimreduction": "次元削減", - "feature_insights": "特徴量インサイト", - "insight_engine": "インサイトエンジン", - "column_role": "カラム役割", - "cross_analysis": "クロス分析", - "ml_readiness": "ML準備度", - "preprocessing": "前処理", - "rows": "行", - "columns": "列", - "memory": "メモリ", - "duration": "所要時間", - "generated_by": "f2a生成", - "numeric": "数値", - "categorical_type": "カテゴリ", - "datetime": "日時", - "text": "テキスト", - "boolean": "ブーリアン", - "overall_score": "総合スコア", - "grade": "グレード", - "recommendations": "推奨事項", - "insights": "インサイト", - "critical": "重大", - "warning": "警告", - "info": "情報", - }, -} - - -def t(key: str, lang: str = DEFAULT_LANG) -> str: - """Translate a key to the requested language, with English 
fallback.""" - lang_dict = TRANSLATIONS.get(lang, TRANSLATIONS["en"]) - return lang_dict.get(key, TRANSLATIONS["en"].get(key, key)) diff --git a/python/f2a/viz/__init__.py b/python/f2a/viz/__init__.py deleted file mode 100644 index b211589..0000000 --- a/python/f2a/viz/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Visualization module for f2a. - -Charts are generated using matplotlib/seaborn and embedded as base64 PNG -in the HTML report. This module re-exports key plotting utilities. -""" - -from f2a.viz.theme import apply_dark_theme, F2A_PALETTE -from f2a.viz.plots import ( - plot_correlation_heatmap, - plot_distribution_grid, - plot_missing_heatmap, - plot_outlier_boxplots, - plot_pca_variance, - plot_quality_radar, -) - -__all__ = [ - "apply_dark_theme", - "F2A_PALETTE", - "plot_correlation_heatmap", - "plot_distribution_grid", - "plot_missing_heatmap", - "plot_outlier_boxplots", - "plot_pca_variance", - "plot_quality_radar", -] diff --git a/python/f2a/viz/plots.py b/python/f2a/viz/plots.py deleted file mode 100644 index b44d5cf..0000000 --- a/python/f2a/viz/plots.py +++ /dev/null @@ -1,228 +0,0 @@ -""" -Core plot functions for f2a reports. - -All functions return a matplotlib Figure object. 
-""" - -from __future__ import annotations - -from typing import Any, Optional - -import numpy as np - -try: - import matplotlib - matplotlib.use("Agg") - import matplotlib.pyplot as plt - import seaborn as sns - HAS_VIZ = True -except ImportError: - HAS_VIZ = False - -from f2a.viz.theme import apply_dark_theme, F2A_PALETTE - - -def _ensure_viz(): - if not HAS_VIZ: - raise ImportError("matplotlib and seaborn are required for visualization") - apply_dark_theme() - - -# ─── Correlation Heatmap ───────────────────────────────────────────── - -def plot_correlation_heatmap( - matrix: list[list[float]], - labels: list[str], - title: str = "Correlation Matrix", - figsize: tuple[int, int] = (10, 8), -) -> Any: - """Plot a correlation matrix heatmap.""" - _ensure_viz() - - arr = np.array(matrix) - fig, ax = plt.subplots(figsize=figsize) - - mask = np.triu(np.ones_like(arr, dtype=bool), k=1) - sns.heatmap( - arr, - mask=mask, - annot=True if len(labels) <= 15 else False, - fmt=".2f", - cmap="RdBu_r", - center=0, - vmin=-1, - vmax=1, - xticklabels=labels, - yticklabels=labels, - ax=ax, - linewidths=0.5, - square=True, - ) - ax.set_title(title, fontsize=14, pad=12) - fig.tight_layout() - return fig - - -# ─── Distribution Grid ────────────────────────────────────────────── - -def plot_distribution_grid( - columns_data: dict[str, list[float]], - cols_per_row: int = 3, - figsize_per_subplot: tuple[float, float] = (4, 3), -) -> Any: - """Plot histograms for multiple numeric columns in a grid.""" - _ensure_viz() - - n = len(columns_data) - if n == 0: - fig, ax = plt.subplots(figsize=(6, 4)) - ax.text(0.5, 0.5, "No numeric columns", ha="center", va="center") - return fig - - n_rows = (n + cols_per_row - 1) // cols_per_row - w = figsize_per_subplot[0] * cols_per_row - h = figsize_per_subplot[1] * n_rows - fig, axes = plt.subplots(n_rows, cols_per_row, figsize=(w, h)) - axes = np.array(axes).flatten() if n > 1 else [axes] - - for idx, (col_name, values) in 
enumerate(columns_data.items()): - ax = axes[idx] - vals = [v for v in values if v is not None and not np.isnan(v)] - if vals: - ax.hist(vals, bins=30, color=F2A_PALETTE[idx % len(F2A_PALETTE)], alpha=0.75, edgecolor="none") - ax.set_title(col_name, fontsize=10) - ax.tick_params(labelsize=8) - - # Hide empty subplots - for idx in range(n, len(axes)): - axes[idx].set_visible(False) - - fig.suptitle("Distribution Overview", fontsize=14, y=1.02) - fig.tight_layout() - return fig - - -# ─── Missing Values Heatmap ───────────────────────────────────────── - -def plot_missing_heatmap( - missing_matrix: list[list[bool]], - column_names: list[str], - figsize: tuple[int, int] = (12, 6), -) -> Any: - """Plot a binary heatmap of missing values.""" - _ensure_viz() - - arr = np.array(missing_matrix, dtype=float) - fig, ax = plt.subplots(figsize=figsize) - - ax.imshow(arr.T, aspect="auto", cmap="YlOrRd", interpolation="nearest") - ax.set_yticks(range(len(column_names))) - ax.set_yticklabels(column_names, fontsize=8) - ax.set_xlabel("Row index") - ax.set_title("Missing Values Pattern", fontsize=14, pad=12) - fig.tight_layout() - return fig - - -# ─── Outlier Boxplots ─────────────────────────────────────────────── - -def plot_outlier_boxplots( - columns_data: dict[str, list[float]], - cols_per_row: int = 4, - figsize_per_subplot: tuple[float, float] = (3, 4), -) -> Any: - """Box plots for outlier visualization.""" - _ensure_viz() - - n = len(columns_data) - if n == 0: - fig, ax = plt.subplots(figsize=(6, 4)) - ax.text(0.5, 0.5, "No numeric columns", ha="center", va="center") - return fig - - n_rows = (n + cols_per_row - 1) // cols_per_row - w = figsize_per_subplot[0] * cols_per_row - h = figsize_per_subplot[1] * n_rows - fig, axes = plt.subplots(n_rows, cols_per_row, figsize=(w, h)) - axes = np.array(axes).flatten() if n > 1 else [axes] - - for idx, (col_name, values) in enumerate(columns_data.items()): - ax = axes[idx] - vals = [v for v in values if v is not None and not 
np.isnan(v)] - if vals: - bp = ax.boxplot(vals, vert=True, patch_artist=True) - for patch in bp["boxes"]: - patch.set_facecolor(F2A_PALETTE[idx % len(F2A_PALETTE)]) - patch.set_alpha(0.6) - ax.set_title(col_name, fontsize=10) - ax.tick_params(labelsize=8) - - for idx in range(n, len(axes)): - axes[idx].set_visible(False) - - fig.suptitle("Outlier Detection (Boxplots)", fontsize=14, y=1.02) - fig.tight_layout() - return fig - - -# ─── PCA Variance Plot ────────────────────────────────────────────── - -def plot_pca_variance( - explained_variance: list[float], - cumulative_variance: list[float], - figsize: tuple[int, int] = (8, 5), -) -> Any: - """Scree plot + cumulative variance explained.""" - _ensure_viz() - - n = len(explained_variance) - x = list(range(1, n + 1)) - - fig, ax1 = plt.subplots(figsize=figsize) - - ax1.bar(x, [v * 100 for v in explained_variance], color=F2A_PALETTE[0], alpha=0.7, label="Individual") - ax1.set_xlabel("Principal Component") - ax1.set_ylabel("Variance Explained (%)") - - ax2 = ax1.twinx() - ax2.plot(x, [v * 100 for v in cumulative_variance], "o-", color=F2A_PALETTE[1], label="Cumulative") - ax2.axhline(y=90, color=F2A_PALETTE[3], linestyle="--", alpha=0.5, label="90% Threshold") - ax2.set_ylabel("Cumulative (%)") - - lines1, labels1 = ax1.get_legend_handles_labels() - lines2, labels2 = ax2.get_legend_handles_labels() - ax1.legend(lines1 + lines2, labels1 + labels2, loc="center right") - - ax1.set_title("PCA — Variance Explained", fontsize=14, pad=12) - fig.tight_layout() - return fig - - -# ─── Quality Radar Chart ──────────────────────────────────────────── - -def plot_quality_radar( - dimensions: list[dict], - figsize: tuple[int, int] = (7, 7), -) -> Any: - """Radar chart for quality dimensions.""" - _ensure_viz() - - labels = [d["name"] for d in dimensions] - values = [d["score"] for d in dimensions] - - n = len(labels) - angles = np.linspace(0, 2 * np.pi, n, endpoint=False).tolist() - values_plot = values + [values[0]] - angles += 
[angles[0]] - - fig, ax = plt.subplots(figsize=figsize, subplot_kw=dict(polar=True)) - ax.fill(angles, values_plot, color=F2A_PALETTE[0], alpha=0.2) - ax.plot(angles, values_plot, "o-", color=F2A_PALETTE[0], linewidth=2) - - ax.set_xticks(angles[:-1]) - ax.set_xticklabels(labels, fontsize=10) - ax.set_ylim(0, 1) - ax.set_title("Data Quality Dimensions", fontsize=14, pad=20) - - fig.tight_layout() - return fig diff --git a/python/f2a/viz/theme.py b/python/f2a/viz/theme.py deleted file mode 100644 index 35efab3..0000000 --- a/python/f2a/viz/theme.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Dark theme configuration for matplotlib/seaborn charts.""" - -import matplotlib.pyplot as plt -import matplotlib as mpl - -F2A_PALETTE = [ - "#58a6ff", "#3fb950", "#f97316", "#d2a8ff", - "#79c0ff", "#56d364", "#e3b341", "#ff7b72", - "#a5d6ff", "#7ee787", "#d29922", "#ffa198", -] - -BG_COLOR = "#0d1117" -SURFACE_COLOR = "#161b22" -TEXT_COLOR = "#e6edf3" -GRID_COLOR = "#30363d" - - -def apply_dark_theme() -> None: - """Apply the f2a dark theme to matplotlib globally.""" - plt.rcParams.update({ - "figure.facecolor": BG_COLOR, - "axes.facecolor": SURFACE_COLOR, - "axes.edgecolor": GRID_COLOR, - "axes.labelcolor": TEXT_COLOR, - "axes.grid": True, - "grid.color": GRID_COLOR, - "grid.alpha": 0.3, - "text.color": TEXT_COLOR, - "xtick.color": TEXT_COLOR, - "ytick.color": TEXT_COLOR, - "legend.facecolor": SURFACE_COLOR, - "legend.edgecolor": GRID_COLOR, - "font.family": "sans-serif", - "font.size": 10, - "figure.dpi": 100, - "savefig.dpi": 100, - "savefig.facecolor": BG_COLOR, - }) diff --git a/sample/lerobot_roboturk_20260317_090024_report.html b/sample/lerobot_roboturk_20260317_090024_report.html new file mode 100644 index 0000000..91dfd26 --- /dev/null +++ b/sample/lerobot_roboturk_20260317_090024_report.html @@ -0,0 +1,2335 @@ + + + + + +f2a Report - lerobot/roboturk + + + +
    +
    +

    f2a Analysis Report

    +

    lerobot/roboturk

    +
    Analysis Time: 2026-03-16T23:59:57+00:00 — Duration: 26.8s
    +
    +
    +
    + Total: 187,507 rows across + 1 subsets / splits +
    +
    +

    default / train

    Overview

    187,507
    Rows
    +
    9
    Columns
    +
    4
    Numeric
    +
    1
    Categorical
    +
    2
    Text
    +
    0
    Datetime
    +
    50
    Memory Mb

    Descriptive Statistics

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columntypecountmissingmissing_%uniquemeanmedianstdsecvmadminmaxrangep5q1q3p95iqrskewnesskurtosistopfreq
    observation.statetext18750700.0000187507nannannannannannannannannannannannannannannannannannan
    actiontext18750700.0000187507nannannannannannannannannannannannannannannannannannan
    timestampnumeric18750700.00003526.31215.30004.86590.01120.77093.30000.000035.100035.10000.40002.40009.300015.00006.90001.05301.4028nannan
    episode_indexnumeric18750700.000019951004.9688998.0000573.21261.32380.5704500.00000.00001994.00001994.0000108.0000501.00001503.00001890.00001002.0000-0.0093-1.2072nannan
    frame_indexnumeric18750700.000035263.120953.000048.65860.11240.770933.00000.0000351.0000351.00004.000024.000093.0000150.000069.00001.05301.4028nannan
    next.rewardboolean18750700.00001nannannannannannannannannannannannannannannannan0.0187507.0000
    next.doneboolean18750700.00002nannannannannannannannannannannannannannannannanFalse185512.0000
    indexnumeric18750700.000018750793753.000093753.000054128.7528125.00270.577446877.00000.0000187506.0000187506.00009375.300046876.5000140629.5000178130.700093753.0000-0.0000-1.2000nannan
    task_indexcategorical18750700.00003nannannannannannannannannannannannannannannannan267436.0000

    Distribution Histograms

    Distribution Histograms
    +

    Boxplots

    Boxplots

    Distribution Analysis

    Normality Tests & Shape

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnnskewnessskew_typekurtosiskurt_typenormality_testnormality_pis_normal_0.05shapiro_pdagostino_pks_panderson_statanderson_5pct_cv
    timestamp1875071.0530high skew1.4028leptokurticdagostino0.0000FalseNaN0.00000.00003223.82220.7520
    episode_index187507-0.0093symmetric-1.2072platykurticdagostino0.0000FalseNaN0.00000.00002148.62680.7520
    frame_index1875071.0530high skew1.4028leptokurticdagostino0.0000FalseNaN0.00000.00003223.82050.7520
    index187507-0.0000symmetric-1.2000platykurticdagostino0.0000FalseNaN0.00000.00002084.92740.7520

    Violin Plots

    Violin Plots
    +

    Q-Q Plots

    Q-Q Plots

    Correlation Analysis

    Correlation Heatmap (Pearson)

    Correlation Heatmap (Pearson)
    +

    Correlation Heatmap (Spearman)

    Correlation Heatmap (Spearman)

    Variance Inflation Factor (VIF)

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnVIFmulticollinearity
    episode_index266.2900severe
    index-0.0000low
    frame_index-57054521769846057730048.0000low
    timestamp-57054521769871039004672.0000low

    Missing Data

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnmissing_countmissing_ratiomissing_%dtype
    observation.state00.00000.0000object
    action00.00000.0000object
    timestamp00.00000.0000float32
    episode_index00.00000.0000int64
    frame_index00.00000.0000int64
    next.reward00.00000.0000float32
    next.done00.00000.0000bool
    index00.00000.0000int64
    task_index00.00000.0000int64

    Missing Data

    Missing Data
    +

    Missing Data Matrix

    Missing Data Matrix

    Outlier Detection

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnq1q3iqrlower_boundupper_boundoutlier_countoutlier_%min_outliermax_outlier
    timestamp2.40009.30006.9000-7.950019.65002712.00001.450019.700035.1000
    episode_index501.00001503.00001002.0000-1002.00003006.00000.00000.0000nannan
    frame_index24.000093.000069.0000-79.5000196.50002712.00001.4500197.0000351.0000
    index46876.5000140629.500093753.0000-93753.0000281259.00000.00000.0000nannan

    Outlier Detection

    Outlier Detection

    Categorical Analysis

    Summary

    + + + + + + + + + + + + + + + + + + + + + +
    columncountuniquetop_valuetop_frequencytop_%entropynorm_entropy
    task_index187507326743635.96001.58060.9973

    Feature Importance

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnvariancestdcvrange
    index2929921879.666754128.75280.5774187506.0000
    episode_index328572.6540573.21260.57041994.0000
    frame_index2367.660448.65860.7709351.0000
    timestamp23.67664.86590.770935.1000

    Feature Importance

    Feature Importance

    PCA Analysis

    4
    N Components
    +
    100.0%
    Total Variance Explained
    +
    2
    Components For 90Pct
    +
    1
    Top Component Variance

    Variance Explained

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    componentvariance_ratiocumulative_ratioeigenvalue
    PC10.50580.50582.0232
    PC20.49421.00001.9768
    PC30.00001.00000.0001
    PC40.00001.00000.0000

    Loadings

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    PC1PC2PC3PC4
    timestamp0.5003-0.4997-0.00010.7071
    episode_index0.49960.5004-0.70710.0000
    frame_index0.5003-0.4997-0.0001-0.7071
    index0.49980.50020.7071-0.0000

    PCA Scree Plot

    PCA Scree Plot
    +

    PCA Loadings

    PCA Loadings

    Warnings

    • High correlation: timestamp <-> frame_index (r=1.0)
    • High correlation: episode_index <-> index (r=0.9999)

    Auto-Generated InsightsADV

    Executive Summary

    Dataset contains 187,507 rows and 9 columns (4 numeric, 1 categorical). 4 high-priority finding(s) detected. 5 moderate observations noted. Key highlights: 1. 2 column pair(s) with |r| > 0.9 2. 2 likely confounded correlation(s) detected 3. 4/4 numeric columns are non-normal

    Total Insights0
    Critical0
    High4
    Medium5
    Low1

    Insight Details

    2 column pair(s) with |r| > 0.9HIGH · 0.8
    correlation

    Near-perfect linear relationships detected. Top pair: 'timestamp' ↔ 'frame_index' (r=1.000).

    • Consider dropping one column from each pair to reduce redundancy
    • Verify these are not data leakage or duplicate columns
    2 likely confounded correlation(s) detectedHIGH · 0.7
    correlation

    Raw correlation differs significantly from partial correlation, suggesting confounding variables. Top: 'episode_index' ↔ 'index' (raw r=1.00, partial r=8183723376125764.00).

    • Do not assume causal relationship from raw correlation for these pairs
    • Investigate which variables are confounders
    4/4 numeric columns are non-normalMEDIUM · 0.7
    distribution

    Most numeric columns fail normality tests (α=0.05). Non-parametric methods may be more appropriate.

    • Prefer non-parametric tests (Kruskal-Wallis, Mann-Whitney) over t-tests/ANOVA
    • Consider power transforms if normality is needed for downstream models
    4 column(s) best fit by non-normal distributionsMEDIUM · 0.7
    distribution

    Distribution fitting reveals non-Normal best fits. Most common: beta (2 columns). Others: {'beta': 2, 'lognorm': 1, 'uniform': 1}.

    • Use the identified distributions for parametric modeling or simulation
    • Transform columns toward normality if Gaussian assumptions are needed
    Clear cluster structure found (k=3, silhouette=0.40)HIGH · 0.7
    cluster

    K-Means identifies 3 well-separated clusters (silhouette=0.40). Cluster sizes: {'cluster_0': 1895, 'cluster_1': 1882, 'cluster_2': 1223}.

    • Profile each cluster to understand segment characteristics
    • Use cluster labels as a feature for downstream modelling
    1 column(s) with severe multicollinearity (VIF>10)HIGH · 0.6
    correlation

    VIF > 10 detected for: ['episode_index']. Worst: 'episode_index' (VIF=266.3). Redundant information may cause model instability.

    • Remove one column from each highly correlated pair
    • Apply PCA or regularization (Ridge/Lasso) to handle collinearity
    4 strong feature interaction(s) detectedMEDIUM · 0.6
    feature

    Top interaction: 'timestamp' × 'episode_index' (strength=0.73). Product features may improve model performance.

    • Create interaction (product) features for the top pairs
    2 column(s) benefit from power transformationMEDIUM · 0.6
    distribution

    Box-Cox / Yeo-Johnson transforms can significantly reduce skewness for columns: ['timestamp', 'frame_index'].

    • Apply the recommended transform (Box-Cox or Yeo-Johnson) in preprocessing
    Multi-method anomalies: 120 rows (2.4%)MEDIUM · 0.5
    anomaly

    A small fraction of rows are flagged by multiple anomaly detection methods.

    • Review flagged rows for data entry errors or special cases
    No missing values detected in any columnLOW · 0.3
    missing

    All columns are fully populated — no imputation needed.

    Insight Severity Distribution

    Insight Severity Distribution
    +

    Top Insights

    Top Insights

    Advanced Distribution AnalysisADV

    Best-Fit Distribution

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnbest_distributionaicbicks_statisticks_p_valuefit_quality
    timestampbeta28275.560028301.63000.03630.0000poor
    episode_indexbeta75977.650076003.72000.01500.2102good
    frame_indexlognorm20261.000020280.55000.50980.0000poor
    indexuniform121419.0200121432.06000.01360.3124good

    Jarque-Bera Normality Test

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnjb_statisticp_valueis_normal_0.05skewnesskurtosis
    timestamp50024.39840.0000False1.05301.4028
    episode_index11389.02540.0000False-0.0093-1.2072
    frame_index50024.36840.0000False1.05301.4028
    index11250.42000.0000False-0.0000-1.2000

    Power Transform Recommendation

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnoriginal_skewnessrecommended_methodoptimal_lambdatransformed_skewnessneeds_transformimprovement
    timestamp1.0530yeo-johnson0.2569-0.0496True1.0034
    episode_index-0.0093yeo-johnson0.7184-0.2834False-0.2741
    frame_index1.0530yeo-johnson0.3990-0.0965True0.9565
    index-0.0000yeo-johnson0.7071-0.2916False-0.2916

    KDE Bandwidth Analysis

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnnstdiqrsilverman_bandwidthscotts_bandwidth
    timestamp187507.00004.86596.90000.38620.2967
    episode_index187507.0000573.21261002.000045.494134.9517
    frame_index187507.000048.658669.00003.86192.9670
    index187507.000054128.752893753.00004296.02733300.5092

    Best-Fit Distribution Overlay

    Best-Fit Distribution Overlay
    +

    ECDF Plot

    ECDF Plot
    +

    Power Transform Comparison

    Power Transform Comparison
    +

    Jarque-Bera Normality Test

    Jarque-Bera Normality Test

    Advanced Correlation AnalysisADV

    Partial Correlation Matrix

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    timestampepisode_indexframe_indexindex
    timestamp1.00006.0442-1.0000-99974086307298640.0000
    episode_index6.04421.0000-6.04428183723376125764.0000
    frame_index-1.0000-6.04421.000099974086307314160.0000
    index-99974086307298656.00008183723376125764.000099974086307314160.00001.0000

    Mutual Information Matrix

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    timestampepisode_indexframe_indexindex
    timestamp0.00000.03625.06930.0353
    episode_index0.03620.00000.00006.3049
    frame_index5.06930.00000.00000.0000
    index0.03536.30490.00000.0000

    Bootstrap Correlation 95% CI

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    col_acol_bpearson_rci_lowerci_upperci_widthsignificant
    0timestampepisode_index0.0221-0.00390.04850.0524False
    1timestampframe_index1.00001.00001.00000.0000True
    2timestampindex0.0225-0.00480.05080.0556False
    3episode_indexframe_index0.0221-0.00450.04960.0541False
    4episode_indexindex0.99990.99991.00000.0000True
    5frame_indexindex0.0225-0.00540.05000.0554False

    Distance Correlation Matrix

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    timestampepisode_indexframe_indexindex
    timestamp1.00000.03601.00000.0361
    episode_index0.03601.00000.03600.9999
    frame_index1.00000.03601.00000.0361
    index0.03610.99990.03611.0000

    Partial Correlation Heatmap

    Partial Correlation Heatmap
    +

    Mutual Information Heatmap

    Mutual Information Heatmap
    +

    Bootstrap Correlation CI

    Bootstrap Correlation CI
    +

    Correlation Network

    Correlation Network
    +

    Distance Correlation Heatmap

    Distance Correlation Heatmap

    Clustering AnalysisADV

    K-Means Summary

    3
    Optimal K
    +
    0
    Best Silhouette
    +
    1,895
    Largest Cluster

    DBSCAN Summary

    1
    N Clusters Dbscan
    +
    0.0%
    Noise Ratio
    +
    1
    Eps

    Hierarchical Clustering

    3
    Optimal K
    +
    0
    Best Silhouette

    Cluster Profiles

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    timestampepisode_indexframe_indexindex
    cluster_04.14731508.901341.4728141229.8786
    cluster_14.3556481.747643.556344448.0032
    cluster_213.21531064.5127132.152999369.0989

    Elbow & Silhouette

    Elbow & Silhouette
    +

    Cluster Scatter

    Cluster Scatter
    +

    Dendrogram

    Dendrogram
    +

    Cluster Profiles

    Cluster Profiles

    Dimensionality ReductionADV

    t-SNE Embedding

    1
    Kl Divergence
    +
    5,000
    N Points

    Factor Analysis

    2
    N Factors

    Factor Loadings

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    factor_1factor_2
    timestamp1.0000-0.0000
    episode_index0.0221-0.9997
    frame_index1.00000.0000
    index0.0225-0.9997

    PCA-Weighted Feature Contribution

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columncontribution_scorerank
    timestamp0.50004.0000
    episode_index0.50002.0000
    frame_index0.50003.0000
    index0.50001.0000

    PCA Biplot

    PCA Biplot
    +

    Explained Variance Curve

    Explained Variance Curve
    +

    Factor Loadings Heatmap

    Factor Loadings Heatmap

    Feature Engineering InsightsADV

    Interaction Detection

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    col_acol_binteraction_strengthcorr_product_acorr_product_bcorr_a_brecommendation
    0timestampepisode_index0.72590.74800.54590.0221Strong interaction
    1episode_indexframe_index0.72590.54590.74800.0221Strong interaction
    2timestampindex0.72170.74420.54950.0225Strong interaction
    3frame_indexindex0.72170.74420.54950.0225Strong interaction

    Binning Analysis

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnn_binsequal_width_entropyequal_freq_entropymax_entropyrecommended_methodskewness
    timestamp102.23003.32113.3219equal_frequency1.0530
    episode_index103.32083.32193.3219equal_width-0.0093
    frame_index102.23003.32113.3219equal_frequency1.0530
    index103.32193.32193.3219equal_width-0.0000

    Advanced Anomaly DetectionADV

    Isolation Forest

    250
    Anomaly Count
    +
    5.0%
    Anomaly Ratio

    Local Outlier Factor

    250
    Anomaly Count
    +
    5.0%
    Anomaly Ratio

    Consensus (>=2/3 agree)

    120
    Consensus Count
    +
    2.4%
    Consensus Ratio

    Anomaly Scatter

    Anomaly Scatter
    +

    Consensus Anomaly Comparison

    Consensus Anomaly Comparison

    Statistical TestsADV

    Levene's Test (Equality of Variances)

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    col_acol_blevene_statp_valuelog_var_ratioadjusted_psignificant_0.05stars
    0timestampepisode_index564897.51970.00009.53800.0000True***
    1timestampframe_index222079.03320.00004.60520.0000True***
    2timestampindex562425.89150.000018.63380.0000True***
    3episode_indexframe_index482754.03260.00004.93290.0000True***
    4episode_indexindex550576.43330.00009.09570.0000True***
    5frame_indexindex561596.56660.000014.02860.0000True***

    Kruskal-Wallis Test

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    grouping_colnumeric_coln_groupsh_statisticp_valueeta_squaredeffect_magnitudeadjusted_preject_h0_0.05starsinterpretation
    0task_indextimestamp310663.74320.00000.0569small0.0000True***Significant (η²=0.0569, small)
    1task_indexepisode_index3625.02410.00000.0033small0.0000True***Significant (η²=0.0033, small)
    2task_indexframe_index310663.68520.00000.0569small0.0000True***Significant (η²=0.0569, small)
    3task_indexindex3625.02380.00000.0033small0.0000True***Significant (η²=0.0033, small)

    Mann-Whitney U Test

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    col_acol_bu_statisticp_valuerank_biserial_reffect_magnitudeadjusted_psignificant_0.05stars
    0timestampepisode_index95185168.00000.00000.9946large0.0000True***
    1timestampframe_index2516704372.00000.00000.8568large0.0000True***
    2timestampindex1278510.00000.00000.9999large0.0000True***
    3episode_indexframe_index34126631417.00000.0000-0.9413large0.0000True***
    4episode_indexindex188532431.50000.00000.9893large0.0000True***
    5frame_indexindex11929357.50000.00000.9993large0.0000True***

    Chi-Square Goodness of Fit

    + + + + + + + + + + + + + + + + + + + + + +
    columnn_categorieschi2_statp_valuecramers_veffect_magnitudeuniform_0.05interpretation
    task_index31112.78920.00000.0545smallFalseNon-uniform distribution

    Grubbs Outlier Test

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnsuspect_valuegrubbs_statisticcritical_valueis_outliern
    timestamp35.10005.91635.1454True187507
    episode_index0.00001.75325.1454False187507
    frame_index351.00005.91635.1454True187507
    index0.00001.73205.1454False187507

    Data Profiling SummaryADV

    Column Roles

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    columnprimary_roleconfidencesecondary_roleproperties
    observation.stateid0.8500NaN{'unique_ratio': 1.0}
    actionid0.8500NaN{'unique_ratio': 1.0}
    timestamptimestamp0.7000NaN{'dtype': 'float32', 'hint': 'monotonic numeric with time-like name'}
    episode_indexnumeric_feature0.8500NaN{'dtype': 'int64'}
    frame_indexnumeric_feature0.8500NaN{'dtype': 'int64'}
    next.rewardconstant1.0000NaN{'n_unique': 1}
    next.donebinary0.9000NaN{'n_unique': 2, 'values': [False, True]}
    indexid0.9000NaN{'unique_ratio': 1.0}
    task_indexcategorical_feature0.8500NaN{'n_unique': 3, 'unique_ratio': 0.0}

    ML Readiness

    Overall Score97/100 (A+)
    completeness100.0
    consistency97.8
    balance100.0
    informativeness100.0
    independence80.0
    scale100.0

    Blocking Issues

    • 1 constant column(s) — remove before modelling
    • Extreme multicollinearity: VIF=266 for 'episode_index' — remove or combine

    Suggestions

    • Remove 3 ID-like column(s) before modelling: observation.state, action, index
    +
    +
    Generated by f2a (File to Analysis)
    + + + + + + + + + + + \ No newline at end of file diff --git a/src/core/analyzer.rs b/src/core/analyzer.rs deleted file mode 100644 index e15cb51..0000000 --- a/src/core/analyzer.rs +++ /dev/null @@ -1,274 +0,0 @@ -//! Top-level analysis orchestrator. -//! -//! `Analyzer::run` executes the entire pipeline: -//! load → schema → preprocess → basic stats → advanced stats → collect results -//! -//! Results are returned as a `serde_json::Value` tree so that the PyO3 -//! boundary can convert it straight into a Python dict. - -use std::collections::HashMap; - -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::config::AnalysisConfig; -use crate::core::loader::DataLoader; -use crate::core::preprocessor::{PreprocessingResult, Preprocessor}; -use crate::core::schema::DataSchema; -use crate::utils::errors::F2aResult; - -// Individual stats modules -use crate::stats::advanced_anomaly::AdvancedAnomalyStats; -use crate::stats::advanced_correlation::AdvancedCorrelationStats; -use crate::stats::advanced_dimreduction::AdvancedDimReductionStats; -use crate::stats::advanced_distribution::AdvancedDistributionStats; -use crate::stats::categorical::CategoricalStats; -use crate::stats::clustering::ClusteringStats; -use crate::stats::column_role::ColumnRoleClassifier; -use crate::stats::correlation::CorrelationStats; -use crate::stats::cross_analysis::CrossAnalysisStats; -use crate::stats::descriptive::DescriptiveStats; -use crate::stats::distribution::DistributionStats; -use crate::stats::duplicates::DuplicateStats; -use crate::stats::feature_importance::FeatureImportanceStats; -use crate::stats::feature_insights::FeatureInsightsStats; -use crate::stats::insight_engine::InsightEngine; -use crate::stats::missing::MissingStats; -use crate::stats::ml_readiness::MlReadinessStats; -use crate::stats::outlier::OutlierStats; -use crate::stats::pca::PcaStats; -use crate::stats::quality::QualityStats; -use crate::stats::statistical_tests::StatisticalTests; - 
-// ─── AnalysisReport ───────────────────────────────────────────────── - -/// Full analysis report – serialisable to JSON for the Python layer. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AnalysisReport { - pub source: String, - pub schema: DataSchema, - #[serde(skip_serializing_if = "Option::is_none")] - pub preprocessing: Option, - pub config: AnalysisConfig, - pub results: HashMap, -} - -// ─── Analyzer ─────────────────────────────────────────────────────── - -pub struct Analyzer; - -impl Analyzer { - /// Run the full analysis pipeline on a file path. - pub fn run_from_file(source: &str, config: &AnalysisConfig) -> F2aResult { - let df = DataLoader::load(source)?; - Self::run(source, df, config) - } - - /// Run the full analysis pipeline on an already-loaded DataFrame. - pub fn run(source: &str, df: DataFrame, config: &AnalysisConfig) -> F2aResult { - // ── 1. Schema ─────────────────────────────────────────── - let schema = DataSchema::from_dataframe(&df); - - // ── 2. Preprocessing ──────────────────────────────────── - let (work_df, preprocess_result) = if config.preprocessing { - let (cleaned, result) = Preprocessor::process(&df, &schema, 0.95); - (cleaned, Some(result)) - } else { - (df.clone(), None) - }; - - // Re-derive schema on the cleaned frame - let work_schema = if config.preprocessing { - DataSchema::from_dataframe(&work_df) - } else { - schema.clone() - }; - - // ── 3. 
Compute all enabled analyses ───────────────────── - let mut results: HashMap = HashMap::new(); - - // ── Basic analyses ────────────────────────────────────── - if config.descriptive { - let s = DescriptiveStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("descriptive".into(), val); - } - } - - if config.correlation { - let s = CorrelationStats::new(&work_df, &work_schema, config.correlation_threshold); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("correlation".into(), val); - } - } - - if config.distribution { - let s = DistributionStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("distribution".into(), val); - } - } - - if config.missing { - let s = MissingStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("missing".into(), val); - } - } - - if config.outlier { - let s = OutlierStats::new( - &work_df, - &work_schema, - config.outlier_method, - config.outlier_threshold, - ); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("outlier".into(), val); - } - } - - if config.categorical { - let s = CategoricalStats::new(&work_df, &work_schema, config.max_categories); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("categorical".into(), val); - } - } - - if config.duplicates { - let s = DuplicateStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("duplicates".into(), val); - } - } - - if config.quality { - let s = QualityStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("quality".into(), val); - } - } - - if config.feature_importance { - let s = FeatureImportanceStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("feature_importance".into(), val); - } - } - - if 
config.pca { - let s = PcaStats::new(&work_df, &work_schema, config.pca_max_components); - if let Some(pca_result) = s.compute() { - if let Ok(val) = serde_json::to_value(pca_result) { - results.insert("pca".into(), val); - } - } - } - - if config.statistical_tests { - let s = StatisticalTests::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("statistical_tests".into(), val); - } - } - - // ── Advanced analyses (gated by `config.advanced`) ────── - if config.advanced { - let max_sample = config.max_sample_for_advanced; - - if config.clustering { - let s = - ClusteringStats::new(&work_df, &work_schema, config.max_cluster_k, max_sample); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("clustering".into(), val); - } - } - - if config.advanced_anomaly { - let s = AdvancedAnomalyStats::new(&work_df, &work_schema, max_sample, 0.05); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("advanced_anomaly".into(), val); - } - } - - if config.advanced_correlation { - let s = AdvancedCorrelationStats::new( - &work_df, - &work_schema, - config.bootstrap_iterations, - max_sample, - ); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("advanced_correlation".into(), val); - } - } - - if config.advanced_distribution { - let s = AdvancedDistributionStats::new( - &work_df, - &work_schema, - config.n_distribution_fits, - max_sample, - ); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("advanced_distribution".into(), val); - } - } - - if config.advanced_dimreduction { - let s = AdvancedDimReductionStats::new( - &work_df, - &work_schema, - config.tsne_perplexity, - max_sample, - ); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("advanced_dimreduction".into(), val); - } - } - - if config.feature_insights { - let s = FeatureInsightsStats::new(&work_df, &work_schema, max_sample); - if let Ok(val) = 
serde_json::to_value(s.compute()) { - results.insert("feature_insights".into(), val); - } - } - - if config.insight_engine { - let s = InsightEngine::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("insight_engine".into(), val); - } - } - - if config.column_role { - let s = ColumnRoleClassifier::new(&work_df, &work_schema, None); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("column_role".into(), val); - } - } - - if config.cross_analysis { - let s = CrossAnalysisStats::new(&work_df, &work_schema, 30); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("cross_analysis".into(), val); - } - } - - if config.ml_readiness { - let s = MlReadinessStats::new(&work_df, &work_schema); - if let Ok(val) = serde_json::to_value(s.compute()) { - results.insert("ml_readiness".into(), val); - } - } - } - - Ok(AnalysisReport { - source: source.to_string(), - schema, - preprocessing: preprocess_result, - config: config.clone(), - results, - }) - } -} diff --git a/src/core/config.rs b/src/core/config.rs deleted file mode 100644 index 2dd61f0..0000000 --- a/src/core/config.rs +++ /dev/null @@ -1,173 +0,0 @@ -use serde::{Deserialize, Serialize}; - -/// Master configuration for analysis – mirrors `f2a.AnalysisConfig`. 
-#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AnalysisConfig { - // ── Basic analysis toggles ────────────────────────────────── - pub descriptive: bool, - pub correlation: bool, - pub distribution: bool, - pub missing: bool, - pub outlier: bool, - pub categorical: bool, - pub feature_importance: bool, - pub pca: bool, - pub duplicates: bool, - pub quality: bool, - pub preprocessing: bool, - - // ── Advanced analysis toggles ─────────────────────────────── - pub advanced: bool, - pub advanced_distribution: bool, - pub advanced_correlation: bool, - pub clustering: bool, - pub advanced_dimreduction: bool, - pub feature_insights: bool, - pub advanced_anomaly: bool, - pub statistical_tests: bool, - pub data_profiling: bool, - pub insight_engine: bool, - pub cross_analysis: bool, - pub column_role: bool, - pub ml_readiness: bool, - - // ── Parameters ────────────────────────────────────────────── - pub outlier_threshold: f64, - pub outlier_method: OutlierMethod, - pub correlation_threshold: f64, - pub pca_max_components: usize, - pub max_categories: usize, - pub max_plot_columns: usize, - pub max_cluster_k: usize, - pub tsne_perplexity: f64, - pub bootstrap_iterations: usize, - pub max_sample_for_advanced: usize, - pub n_distribution_fits: usize, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum OutlierMethod { - Iqr, - Zscore, -} - -impl Default for AnalysisConfig { - fn default() -> Self { - Self { - // Basic – all on by default - descriptive: true, - correlation: true, - distribution: true, - missing: true, - outlier: true, - categorical: true, - feature_importance: true, - pca: true, - duplicates: true, - quality: true, - preprocessing: true, - // Advanced – all on by default - advanced: true, - advanced_distribution: true, - advanced_correlation: true, - clustering: true, - advanced_dimreduction: true, - feature_insights: true, - advanced_anomaly: true, - statistical_tests: true, 
- data_profiling: true, - insight_engine: true, - cross_analysis: true, - column_role: true, - ml_readiness: true, - // Parameters - outlier_threshold: 1.5, - outlier_method: OutlierMethod::Iqr, - correlation_threshold: 0.9, - pca_max_components: 10, - max_categories: 50, - max_plot_columns: 20, - max_cluster_k: 10, - tsne_perplexity: 30.0, - bootstrap_iterations: 1000, - max_sample_for_advanced: 5000, - n_distribution_fits: 7, - } - } -} - -impl AnalysisConfig { - /// Minimal config – only descriptive stats. - pub fn minimal() -> Self { - Self { - descriptive: true, - correlation: false, - distribution: false, - missing: false, - outlier: false, - categorical: false, - feature_importance: false, - pca: false, - duplicates: false, - quality: false, - preprocessing: false, - advanced: false, - advanced_distribution: false, - advanced_correlation: false, - clustering: false, - advanced_dimreduction: false, - feature_insights: false, - advanced_anomaly: false, - statistical_tests: false, - data_profiling: false, - insight_engine: false, - cross_analysis: false, - column_role: false, - ml_readiness: false, - ..Default::default() - } - } - - /// Fast config – skip heavy analyses (PCA, feature importance, advanced). - pub fn fast() -> Self { - Self { - pca: false, - feature_importance: false, - advanced: false, - advanced_distribution: false, - advanced_correlation: false, - clustering: false, - advanced_dimreduction: false, - feature_insights: false, - advanced_anomaly: false, - statistical_tests: false, - data_profiling: false, - insight_engine: false, - cross_analysis: false, - column_role: false, - ml_readiness: false, - ..Default::default() - } - } - - /// Basic-only – all basic analyses on, all advanced off. 
- pub fn basic_only() -> Self { - Self { - advanced: false, - advanced_distribution: false, - advanced_correlation: false, - clustering: false, - advanced_dimreduction: false, - feature_insights: false, - advanced_anomaly: false, - statistical_tests: false, - data_profiling: false, - insight_engine: false, - cross_analysis: false, - column_role: false, - ml_readiness: false, - ..Default::default() - } - } -} diff --git a/src/core/loader.rs b/src/core/loader.rs deleted file mode 100644 index cdba52f..0000000 --- a/src/core/loader.rs +++ /dev/null @@ -1,125 +0,0 @@ -use std::path::Path; - -use polars::prelude::*; - -use crate::utils::errors::{F2aError, F2aResult}; - -/// Supported file formats. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum FileFormat { - Csv, - Tsv, - Parquet, - Json, - JsonLines, - Excel, - Feather, // Arrow IPC -} - -impl FileFormat { - /// Detect format from file extension. - pub fn from_path(path: &Path) -> F2aResult { - let ext = path - .extension() - .and_then(|e| e.to_str()) - .map(|e| e.to_lowercase()) - .unwrap_or_default(); - - match ext.as_str() { - "csv" => Ok(FileFormat::Csv), - "tsv" | "tab" => Ok(FileFormat::Tsv), - "parquet" | "pq" => Ok(FileFormat::Parquet), - "json" => Ok(FileFormat::Json), - "jsonl" | "ndjson" => Ok(FileFormat::JsonLines), - "xlsx" | "xls" | "xlsm" | "xlsb" => Ok(FileFormat::Excel), - "feather" | "arrow" | "ipc" => Ok(FileFormat::Feather), - _ => Err(F2aError::UnsupportedFormat(ext)), - } - } -} - -// ─── DataLoader ───────────────────────────────────────────────────── - -/// Fast data loader backed by Polars. -pub struct DataLoader; - -impl DataLoader { - /// Load a file into a Polars `DataFrame`. - /// - /// Automatically detects format from the file extension. 
- pub fn load(source: &str) -> F2aResult { - let path = Path::new(source); - - if !path.exists() { - return Err(F2aError::DataLoadError(format!( - "File not found: {}", - source - ))); - } - - let fmt = FileFormat::from_path(path)?; - - let df = match fmt { - FileFormat::Csv => Self::load_csv(path)?, - FileFormat::Tsv => Self::load_tsv(path)?, - FileFormat::Parquet => Self::load_parquet(path)?, - FileFormat::Json => Self::load_json(path)?, - FileFormat::JsonLines => Self::load_jsonlines(path)?, - FileFormat::Feather => Self::load_feather(path)?, - FileFormat::Excel => { - return Err(F2aError::UnsupportedFormat( - "Excel loading requires the Python layer (openpyxl)".into(), - )); - } - }; - - if df.height() == 0 || df.width() == 0 { - return Err(F2aError::EmptyData); - } - - Ok(df) - } - - fn load_csv(path: &Path) -> F2aResult { - let df = CsvReadOptions::default() - .with_has_header(true) - .with_infer_schema_length(Some(10000)) - .try_into_reader_with_file_path(Some(path.into()))? - .finish()?; - Ok(df) - } - - fn load_tsv(path: &Path) -> F2aResult { - let df = CsvReadOptions::default() - .with_has_header(true) - .with_parse_options(CsvParseOptions::default().with_separator(b'\t')) - .with_infer_schema_length(Some(10000)) - .try_into_reader_with_file_path(Some(path.into()))? 
- .finish()?; - Ok(df) - } - - fn load_parquet(path: &Path) -> F2aResult { - let file = std::fs::File::open(path)?; - let df = ParquetReader::new(file).finish()?; - Ok(df) - } - - fn load_json(path: &Path) -> F2aResult { - let file = std::fs::File::open(path)?; - let df = JsonReader::new(file).finish()?; - Ok(df) - } - - fn load_jsonlines(path: &Path) -> F2aResult { - let file = std::fs::File::open(path)?; - let df = JsonLineReader::new(file).finish()?; - Ok(df) - } - - fn load_feather(path: &Path) -> F2aResult { - let file = std::fs::File::open(path)?; - let df = IpcReader::new(file).finish()?; - Ok(df) - } -} diff --git a/src/core/mod.rs b/src/core/mod.rs deleted file mode 100644 index 741e009..0000000 --- a/src/core/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod analyzer; -pub mod config; -pub mod loader; -pub mod preprocessor; -pub mod schema; diff --git a/src/core/preprocessor.rs b/src/core/preprocessor.rs deleted file mode 100644 index 93b9832..0000000 --- a/src/core/preprocessor.rs +++ /dev/null @@ -1,136 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; - -/// Detected preprocessing issues. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -pub struct PreprocessingResult { - pub constant_columns: Vec, - pub duplicate_row_count: usize, - pub duplicate_row_ratio: f64, - pub high_missing_columns: Vec<(String, f64)>, // (col, ratio) - pub id_like_columns: Vec, - pub mixed_type_columns: Vec, - pub infinite_value_columns: Vec<(String, usize)>, // (col, count) - pub rows_before: usize, - pub rows_after: usize, - pub cols_before: usize, - pub cols_after: usize, -} - -/// Preprocessor – detects data quality issues and optionally cleans the DataFrame. -pub struct Preprocessor; - -impl Preprocessor { - /// Analyse the DataFrame for issues; return (cleaned_df, issues). - /// - /// Cleaning is **non-destructive**: the original DataFrame is not mutated. 
- /// The cleaned DataFrame has: - /// - Constant columns removed - /// - Duplicate rows removed - /// - Columns with ≥ `missing_threshold` missing ratio removed - pub fn process( - df: &DataFrame, - schema: &DataSchema, - missing_threshold: f64, - ) -> (DataFrame, PreprocessingResult) { - let mut result = PreprocessingResult { - rows_before: df.height(), - cols_before: df.width(), - ..Default::default() - }; - - // ── 1. Detect constant columns ────────────────────────── - for col in df.get_columns() { - let n_unique = col.n_unique().unwrap_or(0); - // A column is constant if it has 0 or 1 unique non-null values - if n_unique <= 1 { - result.constant_columns.push(col.name().to_string()); - } - } - - // ── 2. Detect duplicate rows ──────────────────────────── - let dup_mask = df - .is_duplicated() - .unwrap_or_else(|_| BooleanChunked::new("dup".into(), vec![false; df.height()])); - let dup_count = dup_mask.sum().unwrap_or(0) as usize; - result.duplicate_row_count = dup_count; - result.duplicate_row_ratio = if df.height() > 0 { - dup_count as f64 / df.height() as f64 - } else { - 0.0 - }; - - // ── 3. High-missing columns ───────────────────────────── - for info in &schema.columns { - if info.missing_ratio >= missing_threshold { - result - .high_missing_columns - .push((info.name.clone(), info.missing_ratio)); - } - } - - // ── 4. ID-like columns (unique ratio ≥ 0.95, string/int) ─ - for col in df.get_columns() { - let n_unique = col.n_unique().unwrap_or(0); - if df.height() > 20 { - let unique_ratio = n_unique as f64 / df.height() as f64; - if unique_ratio >= 0.95 { - let name_lower = col.name().to_lowercase(); - let is_id_name = name_lower.contains("id") - || name_lower.ends_with("_id") - || name_lower == "index" - || name_lower == "key"; - if is_id_name || unique_ratio >= 0.99 { - result.id_like_columns.push(col.name().to_string()); - } - } - } - } - - // ── 5. 
Infinite value columns (numeric only) ──────────── - for col in df.get_columns() { - if col.dtype().is_float() { - if let Ok(float_col) = col.f64() { - let inf_count = float_col - .into_iter() - .filter(|v| v.map_or(false, |x| x.is_infinite())) - .count(); - if inf_count > 0 { - result - .infinite_value_columns - .push((col.name().to_string(), inf_count)); - } - } - } - } - - // ── Build cleaned DataFrame ───────────────────────────── - let mut cleaned = df.clone(); - - // Drop constant columns - for col_name in &result.constant_columns { - let _ = cleaned.drop_in_place(col_name.as_str().into()); - } - - // Drop high-missing columns - for (col_name, _) in &result.high_missing_columns { - let _ = cleaned.drop_in_place(col_name.as_str().into()); - } - - // Remove duplicate rows - if dup_count > 0 { - if let Ok(deduped) = - cleaned.unique::<&str, PlSmallStr>(None, UniqueKeepStrategy::First, None) - { - cleaned = deduped; - } - } - - result.rows_after = cleaned.height(); - result.cols_after = cleaned.width(); - - (cleaned, result) - } -} diff --git a/src/core/schema.rs b/src/core/schema.rs deleted file mode 100644 index a2d03a0..0000000 --- a/src/core/schema.rs +++ /dev/null @@ -1,135 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::utils::types::{infer_column_type, ColumnType}; - -// ─── Per-column metadata ──────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnInfo { - pub name: String, - pub dtype: String, - pub inferred_type: ColumnType, - pub n_unique: usize, - pub n_missing: usize, - pub missing_ratio: f64, -} - -// ─── Dataset-level schema ─────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DataSchema { - pub n_rows: usize, - pub n_cols: usize, - pub columns: Vec, - pub memory_usage_bytes: usize, -} - -impl DataSchema { - /// Build a `DataSchema` from a Polars `DataFrame`. 
- pub fn from_dataframe(df: &DataFrame) -> Self { - let n_rows = df.height(); - let n_cols = df.width(); - - let columns: Vec = df - .get_columns() - .iter() - .map(|col| { - let name = col.name().to_string(); - let dtype = format!("{:?}", col.dtype()); - let n_missing = col.null_count(); - let missing_ratio = if n_rows > 0 { - n_missing as f64 / n_rows as f64 - } else { - 0.0 - }; - - let n_unique = col.n_unique().unwrap_or(0); - - // Compute avg string length for text classification heuristic - let avg_str_len = if col.dtype() == &DataType::String { - let lengths = col.str().ok().map(|ca| { - let total: usize = - ca.into_iter().filter_map(|opt| opt.map(|s| s.len())).sum(); - let count = ca.into_iter().filter(|o| o.is_some()).count(); - if count > 0 { - total as f64 / count as f64 - } else { - 0.0 - } - }); - lengths - } else { - None - }; - - let inferred_type = infer_column_type(col.dtype(), n_unique, n_rows, avg_str_len); - - ColumnInfo { - name, - dtype, - inferred_type, - n_unique, - n_missing, - missing_ratio, - } - }) - .collect(); - - // Rough memory estimation - let memory_usage_bytes = df.estimated_size(); - - DataSchema { - n_rows, - n_cols, - columns, - memory_usage_bytes, - } - } - - // ── Convenience accessors ─────────────────────────────────── - - pub fn numeric_columns(&self) -> Vec<&str> { - self.columns - .iter() - .filter(|c| c.inferred_type == ColumnType::Numeric) - .map(|c| c.name.as_str()) - .collect() - } - - pub fn categorical_columns(&self) -> Vec<&str> { - self.columns - .iter() - .filter(|c| c.inferred_type == ColumnType::Categorical) - .map(|c| c.name.as_str()) - .collect() - } - - pub fn text_columns(&self) -> Vec<&str> { - self.columns - .iter() - .filter(|c| c.inferred_type == ColumnType::Text) - .map(|c| c.name.as_str()) - .collect() - } - - pub fn datetime_columns(&self) -> Vec<&str> { - self.columns - .iter() - .filter(|c| c.inferred_type == ColumnType::DateTime) - .map(|c| c.name.as_str()) - .collect() - } - - pub fn 
boolean_columns(&self) -> Vec<&str> { - self.columns - .iter() - .filter(|c| c.inferred_type == ColumnType::Boolean) - .map(|c| c.name.as_str()) - .collect() - } - - pub fn column_info(&self, name: &str) -> Option<&ColumnInfo> { - self.columns.iter().find(|c| c.name == name) - } -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index 3967a48..0000000 --- a/src/lib.rs +++ /dev/null @@ -1,298 +0,0 @@ -//! PyO3 module entry-point for `f2a._core`. -//! -//! Exposes the Rust analysis engine to Python. -//! -// Many Rust items (fields, methods, structs) are intentionally unused from Rust -// because the consumer is Python via PyO3 bindings, not other Rust code. -// Some coding patterns were generated for consistency across 21 stats modules. -#![allow(dead_code)] -#![allow( - clippy::useless_conversion, - clippy::double_parens, - clippy::filter_map_identity, - clippy::implicit_saturating_sub, - clippy::let_and_return, - clippy::manual_clamp, - clippy::manual_flatten, - clippy::manual_is_multiple_of, - clippy::needless_range_loop, - clippy::question_mark, - clippy::redundant_closure, - clippy::unnecessary_map_or -)] - -//! Main entry: -//! `_core.analyze(source, config_json=None) -> str` (JSON string) -//! -//! Individual module functions are also exposed for fine-grained usage. 
- -use pyo3::prelude::*; -use pyo3::types::PyDict; - -mod core; -mod stats; -mod utils; - -use crate::core::analyzer::Analyzer; -use crate::core::config::AnalysisConfig; -use crate::core::loader::DataLoader; -use crate::core::preprocessor::Preprocessor; -use crate::core::schema::DataSchema; - -// ─── Helper: Python dict → AnalysisConfig ─────────────────────────── - -fn config_from_pydict(py: Python<'_>, dict: &Bound<'_, PyDict>) -> PyResult { - // Serialize the Python dict → JSON string → Rust struct - let json_mod = py.import_bound("json")?; - let json_str: String = json_mod.call_method1("dumps", (dict,))?.extract()?; - let config: AnalysisConfig = serde_json::from_str(&json_str) - .map_err(|e| pyo3::exceptions::PyValueError::new_err(format!("Invalid config: {}", e)))?; - Ok(config) -} - -fn config_from_json(json_str: &str) -> PyResult { - serde_json::from_str(json_str) - .map_err(|e| pyo3::exceptions::PyValueError::new_err(format!("Invalid config JSON: {}", e))) -} - -// ─── Core functions ───────────────────────────────────────────────── - -/// Run a full analysis on a file and return the results as a JSON string. -/// -/// Parameters -/// ---------- -/// source : str -/// Path to a data file (CSV, Parquet, JSON, etc.) -/// config_json : str, optional -/// JSON string of AnalysisConfig overrides. -/// Omit to use the default configuration (all analyses enabled). -/// -/// Returns -/// ------- -/// str -/// JSON string with the full AnalysisReport. 
-#[pyfunction] -#[pyo3(signature = (source, config_json=None))] -fn analyze(source: &str, config_json: Option<&str>) -> PyResult { - let config = match config_json { - Some(json) => config_from_json(json)?, - None => AnalysisConfig::default(), - }; - - let report = Analyzer::run_from_file(source, &config)?; - let json = serde_json::to_string(&report).map_err(|e| { - pyo3::exceptions::PyRuntimeError::new_err(format!("Serialization error: {}", e)) - })?; - Ok(json) -} - -/// Run analysis with a Python dict config and return a Python dict. -/// -/// Parameters -/// ---------- -/// py : Python -/// GIL token -/// source : str -/// Path to a data file -/// config_dict : dict, optional -/// Config overrides as a Python dict -/// -/// Returns -/// ------- -/// dict -/// Python dict with the full AnalysisReport. -#[pyfunction] -#[pyo3(signature = (source, config_dict=None))] -fn analyze_to_dict( - py: Python<'_>, - source: &str, - config_dict: Option<&Bound<'_, PyDict>>, -) -> PyResult { - let config = match config_dict { - Some(d) => config_from_pydict(py, d)?, - None => AnalysisConfig::default(), - }; - - let report = Analyzer::run_from_file(source, &config)?; - let json_str = serde_json::to_string(&report).map_err(|e| { - pyo3::exceptions::PyRuntimeError::new_err(format!("Serialization error: {}", e)) - })?; - - // Parse JSON → Python dict via json.loads - let json_mod = py.import_bound("json")?; - let result = json_mod.call_method1("loads", (json_str,))?; - Ok(result.into_py(py)) -} - -/// Load a data file and return schema info as JSON. -#[pyfunction] -fn load_schema(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let json = serde_json::to_string(&schema) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string()))?; - Ok(json) -} - -/// Return the default config as a JSON string. 
-#[pyfunction] -fn default_config() -> PyResult { - let config = AnalysisConfig::default(); - serde_json::to_string(&config) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -/// Return a minimal config (only descriptive) as JSON. -#[pyfunction] -fn minimal_config() -> PyResult { - let config = AnalysisConfig::minimal(); - serde_json::to_string(&config) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -/// Return a fast config (basic analyses only) as JSON. -#[pyfunction] -fn fast_config() -> PyResult { - let config = AnalysisConfig::fast(); - serde_json::to_string(&config) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -/// Return the basic-only config as JSON. -#[pyfunction] -fn basic_only_config() -> PyResult { - let config = AnalysisConfig::basic_only(); - serde_json::to_string(&config) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -/// Preprocess a data file and return preprocessing report as JSON. -#[pyfunction] -#[pyo3(signature = (source, missing_threshold=0.95))] -fn preprocess(source: &str, missing_threshold: f64) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let (_, result) = Preprocessor::process(&df, &schema, missing_threshold); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -/// Library version. 
-#[pyfunction] -fn version() -> &'static str { - env!("CARGO_PKG_VERSION") -} - -// ─── Individual stat functions (for fine-grained Python access) ───── - -#[pyfunction] -fn compute_descriptive(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::descriptive::DescriptiveStats::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -#[pyo3(signature = (source, threshold=0.9))] -fn compute_correlation(source: &str, threshold: f64) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = - crate::stats::correlation::CorrelationStats::new(&df, &schema, threshold).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -fn compute_distribution(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::distribution::DistributionStats::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -fn compute_missing(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::missing::MissingStats::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -#[pyo3(signature = (source, method="iqr", threshold=1.5))] -fn compute_outlier(source: &str, method: &str, threshold: f64) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let m = match method { - "zscore" | "z" => crate::stats::outlier::OutlierMethod::Zscore, - _ => 
crate::stats::outlier::OutlierMethod::Iqr, - }; - let result = crate::stats::outlier::OutlierStats::new(&df, &schema, m, threshold).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -fn compute_quality(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::quality::QualityStats::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -fn compute_insight_engine(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::insight_engine::InsightEngine::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -#[pyfunction] -fn compute_ml_readiness(source: &str) -> PyResult { - let df = DataLoader::load(source)?; - let schema = DataSchema::from_dataframe(&df); - let result = crate::stats::ml_readiness::MlReadinessStats::new(&df, &schema).compute(); - serde_json::to_string(&result) - .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string())) -} - -// ─── Module definition ────────────────────────────────────────────── - -/// The `_core` native extension module. 
-#[pymodule] -fn _core(m: &Bound<'_, PyModule>) -> PyResult<()> { - // Core functions - m.add_function(wrap_pyfunction!(analyze, m)?)?; - m.add_function(wrap_pyfunction!(analyze_to_dict, m)?)?; - m.add_function(wrap_pyfunction!(load_schema, m)?)?; - m.add_function(wrap_pyfunction!(preprocess, m)?)?; - - // Config helpers - m.add_function(wrap_pyfunction!(default_config, m)?)?; - m.add_function(wrap_pyfunction!(minimal_config, m)?)?; - m.add_function(wrap_pyfunction!(fast_config, m)?)?; - m.add_function(wrap_pyfunction!(basic_only_config, m)?)?; - - // Individual stats - m.add_function(wrap_pyfunction!(compute_descriptive, m)?)?; - m.add_function(wrap_pyfunction!(compute_correlation, m)?)?; - m.add_function(wrap_pyfunction!(compute_distribution, m)?)?; - m.add_function(wrap_pyfunction!(compute_missing, m)?)?; - m.add_function(wrap_pyfunction!(compute_outlier, m)?)?; - m.add_function(wrap_pyfunction!(compute_quality, m)?)?; - m.add_function(wrap_pyfunction!(compute_insight_engine, m)?)?; - m.add_function(wrap_pyfunction!(compute_ml_readiness, m)?)?; - - // Meta - m.add_function(wrap_pyfunction!(version, m)?)?; - - Ok(()) -} diff --git a/src/stats/advanced_anomaly.rs b/src/stats/advanced_anomaly.rs deleted file mode 100644 index 6d25e25..0000000 --- a/src/stats/advanced_anomaly.rs +++ /dev/null @@ -1,507 +0,0 @@ -use ndarray::Array2; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AnomalyMethodResult { - pub method: String, - pub n_anomalies: usize, - pub anomaly_ratio: f64, - pub scores: Vec, - pub labels: Vec, // true = anomaly -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ConsensusAnomaly { - pub n_anomalies: usize, - pub anomaly_ratio: f64, - pub labels: Vec, - pub vote_counts: Vec, -} - -#[derive(Debug, 
Clone, Serialize, Deserialize)] -pub struct AdvancedAnomalyResult { - pub isolation_forest: Option, - pub local_outlier_factor: Option, - pub mahalanobis: Option, - pub consensus: Option, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct AdvancedAnomalyStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_sample: usize, - contamination: f64, -} - -impl<'a> AdvancedAnomalyStats<'a> { - pub fn new( - df: &'a DataFrame, - schema: &'a DataSchema, - max_sample: usize, - contamination: f64, - ) -> Self { - Self { - df, - schema, - max_sample, - contamination, - } - } - - pub fn compute(&self) -> AdvancedAnomalyResult { - let matrix = match self.prepare_data() { - Some(m) => m, - None => { - return AdvancedAnomalyResult { - isolation_forest: None, - local_outlier_factor: None, - mahalanobis: None, - consensus: None, - }; - } - }; - - let isolation_forest = self.isolation_forest(&matrix); - let local_outlier_factor = self.local_outlier_factor(&matrix); - let mahalanobis = self.mahalanobis_distance(&matrix); - - let consensus = self.compute_consensus( - isolation_forest.as_ref(), - local_outlier_factor.as_ref(), - mahalanobis.as_ref(), - ); - - AdvancedAnomalyResult { - isolation_forest, - local_outlier_factor, - mahalanobis, - consensus, - } - } - - /// Isolation Forest: anomaly scores via random binary trees. 
- fn isolation_forest(&self, data: &Array2) -> Option { - let n = data.nrows(); - let d = data.ncols(); - if n < 10 || d == 0 { - return None; - } - - let n_trees = 100; - let sample_size = n.min(256); - let mut scores = vec![0.0f64; n]; - - // Build multiple isolation trees and average path lengths - for tree_idx in 0..n_trees { - // Subsample indices (deterministic based on tree_idx) - let step = (n as f64 / sample_size as f64).max(1.0); - let indices: Vec = (0..sample_size) - .map(|i| ((i as f64 * step + tree_idx as f64) as usize) % n) - .collect(); - - // For each point, compute isolation path length - for i in 0..n { - let path_len = self.isolation_path_length(data, i, &indices, d, 0, 10); - scores[i] += path_len; - } - } - - // Average and normalize - let c_n = Self::average_path_length(sample_size); - for s in scores.iter_mut() { - *s /= n_trees as f64; - // Anomaly score: s(x) = 2^(-E[h(x)] / c(n)) - *s = 2.0f64.powf(-(*s) / c_n); - } - - // Threshold: top `contamination` fraction - let mut sorted_scores = scores.clone(); - sorted_scores.sort_by(|a, b| b.partial_cmp(a).unwrap_or(std::cmp::Ordering::Equal)); - let threshold_idx = (n as f64 * self.contamination) as usize; - let threshold = sorted_scores - .get(threshold_idx.min(sorted_scores.len().saturating_sub(1))) - .copied() - .unwrap_or(0.5); - - let labels: Vec = scores.iter().map(|&s| s >= threshold).collect(); - let n_anomalies = labels.iter().filter(|&&l| l).count(); - - Some(AnomalyMethodResult { - method: "isolation_forest".into(), - n_anomalies, - anomaly_ratio: n_anomalies as f64 / n as f64, - scores, - labels, - }) - } - - fn isolation_path_length( - &self, - data: &Array2, - point_idx: usize, - subset: &[usize], - n_features: usize, - depth: usize, - max_depth: usize, - ) -> f64 { - if depth >= max_depth || subset.len() <= 1 { - return depth as f64 + Self::average_path_length(subset.len()); - } - - // Pick a random feature and split point - let feat = (point_idx + depth) % n_features; - let 
vals: Vec = subset.iter().map(|&i| data[[i, feat]]).collect(); - let min_val = vals.iter().cloned().fold(f64::INFINITY, f64::min); - let max_val = vals.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - - if (max_val - min_val).abs() < f64::EPSILON { - return depth as f64; - } - - let split = (min_val + max_val) / 2.0; - let point_val = data[[point_idx, feat]]; - - let left: Vec = subset - .iter() - .filter(|&&i| data[[i, feat]] < split) - .cloned() - .collect(); - let right: Vec = subset - .iter() - .filter(|&&i| data[[i, feat]] >= split) - .cloned() - .collect(); - - if point_val < split { - self.isolation_path_length(data, point_idx, &left, n_features, depth + 1, max_depth) - } else { - self.isolation_path_length(data, point_idx, &right, n_features, depth + 1, max_depth) - } - } - - fn average_path_length(n: usize) -> f64 { - if n <= 1 { - return 0.0; - } - let nf = n as f64; - 2.0 * ((nf - 1.0).ln() + 0.5772156649) - 2.0 * (nf - 1.0) / nf - } - - /// Local Outlier Factor (simplified). 
- fn local_outlier_factor(&self, data: &Array2) -> Option { - let n = data.nrows(); - if n < 20 { - return None; - } - - let k = ((n as f64).sqrt().ceil() as usize).min(20).max(5); - - // Compute k-nearest neighbor distances - let mut knn_dists = vec![Vec::new(); n]; - let mut k_dist = vec![0.0f64; n]; - - for i in 0..n { - let mut dists: Vec<(usize, f64)> = (0..n) - .filter(|&j| j != i) - .map(|j| { - let d: f64 = data - .row(i) - .iter() - .zip(data.row(j).iter()) - .map(|(a, b)| (a - b).powi(2)) - .sum::() - .sqrt(); - (j, d) - }) - .collect(); - dists.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal)); - let knn: Vec<(usize, f64)> = dists.into_iter().take(k).collect(); - k_dist[i] = knn.last().map(|x| x.1).unwrap_or(0.0); - knn_dists[i] = knn; - } - - // Local reachability density - let mut lrd = vec![0.0f64; n]; - for i in 0..n { - let reach_sum: f64 = knn_dists[i].iter().map(|(j, d)| d.max(k_dist[*j])).sum(); - lrd[i] = if reach_sum > f64::EPSILON { - k as f64 / reach_sum - } else { - 1.0 - }; - } - - // LOF scores - let scores: Vec = (0..n) - .map(|i| { - let lof: f64 = knn_dists[i] - .iter() - .map(|(j, _)| { - if lrd[i] > f64::EPSILON { - lrd[*j] / lrd[i] - } else { - 1.0 - } - }) - .sum::() - / k as f64; - lof - }) - .collect(); - - // Threshold - let mut sorted_scores = scores.clone(); - sorted_scores.sort_by(|a, b| b.partial_cmp(a).unwrap_or(std::cmp::Ordering::Equal)); - let threshold_idx = (n as f64 * self.contamination) as usize; - let threshold = sorted_scores - .get(threshold_idx.min(sorted_scores.len().saturating_sub(1))) - .copied() - .unwrap_or(1.5); - - let labels: Vec = scores.iter().map(|&s| s >= threshold).collect(); - let n_anomalies = labels.iter().filter(|&&l| l).count(); - - Some(AnomalyMethodResult { - method: "local_outlier_factor".into(), - n_anomalies, - anomaly_ratio: n_anomalies as f64 / n as f64, - scores, - labels, - }) - } - - /// Mahalanobis distance based anomaly detection. 
- fn mahalanobis_distance(&self, data: &Array2) -> Option { - let n = data.nrows(); - let d = data.ncols(); - if n <= d + 1 || d == 0 { - return None; - } - - // Compute mean and covariance - let means: Vec = (0..d) - .map(|j| data.column(j).mean().unwrap_or(0.0)) - .collect(); - - let mut cov = Array2::::zeros((d, d)); - for i in 0..d { - for j in i..d { - let val: f64 = (0..n) - .map(|k| (data[[k, i]] - means[i]) * (data[[k, j]] - means[j])) - .sum::() - / (n as f64 - 1.0); - cov[[i, j]] = val; - cov[[j, i]] = val; - } - } - - // Regularize covariance (add small diagonal) - for i in 0..d { - cov[[i, i]] += 1e-6; - } - - // Invert covariance using Gaussian elimination - let cov_inv = match Self::invert_matrix(&cov) { - Some(inv) => inv, - None => return None, - }; - - // Compute Mahalanobis distances - let scores: Vec = (0..n) - .map(|i| { - let diff: Vec = (0..d).map(|j| data[[i, j]] - means[j]).collect(); - let mut md = 0.0; - for j in 0..d { - for k in 0..d { - md += diff[j] * cov_inv[[j, k]] * diff[k]; - } - } - md.sqrt() - }) - .collect(); - - // Chi-square threshold at df=d, alpha=contamination - let threshold = (d as f64 * (1.0 + self.contamination * 3.0)).sqrt() * 2.0; - - let labels: Vec = scores.iter().map(|&s| s > threshold).collect(); - let n_anomalies = labels.iter().filter(|&&l| l).count(); - - Some(AnomalyMethodResult { - method: "mahalanobis".into(), - n_anomalies, - anomaly_ratio: n_anomalies as f64 / n as f64, - scores, - labels, - }) - } - - /// Consensus: anomaly if ≥2 of 3 methods agree. 
- fn compute_consensus( - &self, - if_result: Option<&AnomalyMethodResult>, - lof_result: Option<&AnomalyMethodResult>, - mah_result: Option<&AnomalyMethodResult>, - ) -> Option { - let _n = self.df.height().min(self.max_sample); - let methods: Vec<&AnomalyMethodResult> = [if_result, lof_result, mah_result] - .iter() - .filter_map(|r| *r) - .collect(); - - if methods.len() < 2 { - return None; - } - - let min_n = methods.iter().map(|m| m.labels.len()).min().unwrap_or(0); - - let vote_counts: Vec = (0..min_n) - .map(|i| { - methods - .iter() - .map(|m| { - if m.labels.get(i).copied().unwrap_or(false) { - 1u8 - } else { - 0u8 - } - }) - .sum() - }) - .collect(); - - let labels: Vec = vote_counts.iter().map(|&v| v >= 2).collect(); - let n_anomalies = labels.iter().filter(|&&l| l).count(); - - Some(ConsensusAnomaly { - n_anomalies, - anomaly_ratio: if min_n > 0 { - n_anomalies as f64 / min_n as f64 - } else { - 0.0 - }, - labels, - vote_counts, - }) - } - - // ── Helpers ───────────────────────────────────────────────── - - fn prepare_data(&self) -> Option> { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return None; - } - - let mut col_data: Vec> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_data.push(vals); - } - } - - if col_data.is_empty() { - return None; - } - - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let sample_len = min_len.min(self.max_sample); - let step = (min_len / sample_len).max(1); - let n_cols = col_data.len(); - - let mut matrix = Array2::::zeros((sample_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for (i_out, i_in) in (0..min_len).step_by(step).take(sample_len).enumerate() { - matrix[[i_out, j]] = data[i_in]; - } - } - - // Standardize - for j in 0..n_cols { - let col = matrix.column(j); - let mean = col.mean().unwrap_or(0.0); - let var: f64 = 
col.iter().map(|x| (x - mean).powi(2)).sum::() - / (sample_len as f64 - 1.0).max(1.0); - let std = var.sqrt(); - if std > f64::EPSILON { - for i in 0..sample_len { - matrix[[i, j]] = (matrix[[i, j]] - mean) / std; - } - } - } - - Some(matrix) - } - - pub(crate) fn invert_matrix(a: &Array2) -> Option> { - let n = a.nrows(); - if n != a.ncols() { - return None; - } - - let mut aug = Array2::::zeros((n, 2 * n)); - for i in 0..n { - for j in 0..n { - aug[[i, j]] = a[[i, j]]; - } - aug[[i, n + i]] = 1.0; - } - - for col in 0..n { - let mut max_row = col; - let mut max_val = aug[[col, col]].abs(); - for row in (col + 1)..n { - if aug[[row, col]].abs() > max_val { - max_val = aug[[row, col]].abs(); - max_row = row; - } - } - if max_val < 1e-10 { - return None; - } - - if max_row != col { - for j in 0..(2 * n) { - let tmp = aug[[col, j]]; - aug[[col, j]] = aug[[max_row, j]]; - aug[[max_row, j]] = tmp; - } - } - - let pivot = aug[[col, col]]; - for j in 0..(2 * n) { - aug[[col, j]] /= pivot; - } - - for row in 0..n { - if row == col { - continue; - } - let factor = aug[[row, col]]; - for j in 0..(2 * n) { - aug[[row, j]] -= factor * aug[[col, j]]; - } - } - } - - let mut inv = Array2::::zeros((n, n)); - for i in 0..n { - for j in 0..n { - inv[[i, j]] = aug[[i, n + j]]; - } - } - Some(inv) - } -} diff --git a/src/stats/advanced_correlation.rs b/src/stats/advanced_correlation.rs deleted file mode 100644 index 2246ead..0000000 --- a/src/stats/advanced_correlation.rs +++ /dev/null @@ -1,505 +0,0 @@ -use ndarray::Array2; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PartialCorrelationEntry { - pub col_a: String, - pub col_b: String, - pub partial_r: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MutualInfoEntry { - 
pub col_a: String, - pub col_b: String, - pub mi: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BootstrapCIEntry { - pub col_a: String, - pub col_b: String, - pub r: f64, - pub ci_lower: f64, - pub ci_upper: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CorrelationEdge { - pub source: String, - pub target: String, - pub weight: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AdvancedCorrelationResult { - pub partial_correlations: Vec, - pub mutual_information: Vec, - pub bootstrap_ci: Vec, - pub correlation_network: Vec, - pub distance_correlations: Vec<(String, String, f64)>, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct AdvancedCorrelationStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - bootstrap_iterations: usize, - max_sample: usize, -} - -impl<'a> AdvancedCorrelationStats<'a> { - pub fn new( - df: &'a DataFrame, - schema: &'a DataSchema, - bootstrap_iterations: usize, - max_sample: usize, - ) -> Self { - Self { - df, - schema, - bootstrap_iterations, - max_sample, - } - } - - pub fn compute(&self) -> AdvancedCorrelationResult { - let num_cols = self.schema.numeric_columns(); - let (names, matrix) = self.extract_matrix(&num_cols); - - let partial_correlations = if matrix.ncols() >= 3 { - self.partial_correlation_matrix(&names, &matrix) - } else { - vec![] - }; - - let mutual_information = self.compute_mutual_information(&names, &matrix); - let bootstrap_ci = self.bootstrap_correlation_ci(&names, &matrix); - let correlation_network = self.build_correlation_network(&names, &matrix, 0.5); - let distance_correlations = self.compute_distance_correlations(&names, &matrix); - - AdvancedCorrelationResult { - partial_correlations, - mutual_information, - bootstrap_ci, - correlation_network, - distance_correlations, - } - } - - /// Partial correlation via precision matrix (inverse of correlation matrix). 
- fn partial_correlation_matrix( - &self, - names: &[String], - matrix: &Array2, - ) -> Vec { - let p = matrix.ncols(); - let n = matrix.nrows(); - if p < 3 || n < p + 1 { - return vec![]; - } - - // Compute correlation matrix - let corr = self.pearson_matrix(matrix); - - // Regularize and invert - let mut reg = corr.clone(); - for i in 0..p { - reg[[i, i]] += 0.01; - } - - let precision = - match crate::stats::advanced_anomaly::AdvancedAnomalyStats::invert_matrix(®) { - Some(inv) => inv, - None => return vec![], - }; - - let mut results = Vec::new(); - for i in 0..p { - for j in (i + 1)..p { - let denom = (precision[[i, i]] * precision[[j, j]]).sqrt(); - let partial_r = if denom > f64::EPSILON { - -precision[[i, j]] / denom - } else { - 0.0 - }; - results.push(PartialCorrelationEntry { - col_a: names[i].clone(), - col_b: names[j].clone(), - partial_r: partial_r.clamp(-1.0, 1.0), - }); - } - } - results - } - - /// Mutual information estimation via histogram-based approach. - fn compute_mutual_information( - &self, - names: &[String], - matrix: &Array2, - ) -> Vec { - let p = matrix.ncols(); - let n = matrix.nrows(); - if p < 2 || n < 20 { - return vec![]; - } - - let n_bins = ((n as f64).sqrt().ceil() as usize).max(5).min(50); - let mut results = Vec::new(); - - for i in 0..p { - for j in (i + 1)..p { - let mi = Self::mutual_information_pair( - &matrix.column(i).to_vec(), - &matrix.column(j).to_vec(), - n_bins, - ); - results.push(MutualInfoEntry { - col_a: names[i].clone(), - col_b: names[j].clone(), - mi, - }); - } - } - results - } - - fn mutual_information_pair(x: &[f64], y: &[f64], n_bins: usize) -> f64 { - let n = x.len().min(y.len()); - if n == 0 { - return 0.0; - } - - let x_min = x.iter().cloned().fold(f64::INFINITY, f64::min); - let x_max = x.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - let y_min = y.iter().cloned().fold(f64::INFINITY, f64::min); - let y_max = y.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - - let x_range = (x_max - 
x_min).max(f64::EPSILON); - let y_range = (y_max - y_min).max(f64::EPSILON); - - // Joint and marginal histograms - let mut joint = vec![vec![0usize; n_bins]; n_bins]; - let mut x_hist = vec![0usize; n_bins]; - let mut y_hist = vec![0usize; n_bins]; - - for k in 0..n { - let xi = ((x[k] - x_min) / x_range * (n_bins - 1) as f64) as usize; - let yi = ((y[k] - y_min) / y_range * (n_bins - 1) as f64) as usize; - let xi = xi.min(n_bins - 1); - let yi = yi.min(n_bins - 1); - joint[xi][yi] += 1; - x_hist[xi] += 1; - y_hist[yi] += 1; - } - - let nf = n as f64; - let mut mi = 0.0f64; - for i in 0..n_bins { - for j in 0..n_bins { - if joint[i][j] > 0 && x_hist[i] > 0 && y_hist[j] > 0 { - let pxy = joint[i][j] as f64 / nf; - let px = x_hist[i] as f64 / nf; - let py = y_hist[j] as f64 / nf; - mi += pxy * (pxy / (px * py)).ln(); - } - } - } - - mi.max(0.0) - } - - /// Bootstrap confidence intervals for Pearson correlation. - fn bootstrap_correlation_ci( - &self, - names: &[String], - matrix: &Array2, - ) -> Vec { - let p = matrix.ncols(); - let n = matrix.nrows(); - if p < 2 || n < 20 { - return vec![]; - } - - let n_iter = self.bootstrap_iterations.min(500); - let mut results = Vec::new(); - - for i in 0..p { - for j in (i + 1)..p { - let x = matrix.column(i); - let y = matrix.column(j); - let r = Self::pearson_vec(&x.to_vec(), &y.to_vec()); - - // Bootstrap resampling - let mut boot_rs: Vec = Vec::with_capacity(n_iter); - for b in 0..n_iter { - // Deterministic pseudo-random indices - let boot_x: Vec = (0..n) - .map(|k| { - let idx = (k * 7 + b * 13 + 37) % n; - x[idx] - }) - .collect(); - let boot_y: Vec = (0..n) - .map(|k| { - let idx = (k * 7 + b * 13 + 37) % n; - y[idx] - }) - .collect(); - boot_rs.push(Self::pearson_vec(&boot_x, &boot_y)); - } - - boot_rs.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let ci_lower = boot_rs[(n_iter as f64 * 0.025) as usize]; - let ci_upper = boot_rs[(n_iter as f64 * 0.975) as usize]; - - 
results.push(BootstrapCIEntry { - col_a: names[i].clone(), - col_b: names[j].clone(), - r, - ci_lower, - ci_upper, - }); - } - } - results - } - - fn build_correlation_network( - &self, - names: &[String], - matrix: &Array2, - threshold: f64, - ) -> Vec { - let p = matrix.ncols(); - let mut edges = Vec::new(); - - for i in 0..p { - for j in (i + 1)..p { - let r = Self::pearson_vec(&matrix.column(i).to_vec(), &matrix.column(j).to_vec()); - if r.abs() >= threshold { - edges.push(CorrelationEdge { - source: names[i].clone(), - target: names[j].clone(), - weight: r, - }); - } - } - } - edges - } - - /// Distance correlation (Székely). - fn compute_distance_correlations( - &self, - names: &[String], - matrix: &Array2, - ) -> Vec<(String, String, f64)> { - let p = matrix.ncols(); - let n = matrix.nrows(); - if p < 2 || n < 10 { - return vec![]; - } - - // Limit for computational cost - let max_pairs = 50; - let mut results = Vec::new(); - let mut count = 0; - - for i in 0..p { - for j in (i + 1)..p { - if count >= max_pairs { - break; - } - let dcor = Self::distance_correlation( - &matrix.column(i).to_vec(), - &matrix.column(j).to_vec(), - ); - results.push((names[i].clone(), names[j].clone(), dcor)); - count += 1; - } - } - results - } - - fn distance_correlation(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()); - if n < 4 { - return 0.0; - } - - let a = Self::double_centered_distances(x); - let b = Self::double_centered_distances(y); - - let dcov_xy: f64 = { - let a = &a; - let b = &b; - (0..n) - .flat_map(|i| (0..n).map(move |j| (i, j))) - .map(|(i, j)| a[i * n + j] * b[i * n + j]) - .sum::() - / (n * n) as f64 - }; - - let dcov_xx: f64 = { - let a = &a; - (0..n) - .flat_map(|i| (0..n).map(move |j| (i, j))) - .map(|(i, j)| a[i * n + j] * a[i * n + j]) - .sum::() - / (n * n) as f64 - }; - - let dcov_yy: f64 = { - let b = &b; - (0..n) - .flat_map(|i| (0..n).map(move |j| (i, j))) - .map(|(i, j)| b[i * n + j] * b[i * n + j]) - .sum::() - / (n * n) as f64 - }; 
- - let denom = (dcov_xx * dcov_yy).sqrt(); - if denom > f64::EPSILON { - (dcov_xy / denom).sqrt().clamp(0.0, 1.0) - } else { - 0.0 - } - } - - fn double_centered_distances(x: &[f64]) -> Vec { - let n = x.len(); - let mut d = vec![0.0f64; n * n]; - - // Distance matrix - for i in 0..n { - for j in 0..n { - d[i * n + j] = (x[i] - x[j]).abs(); - } - } - - // Row means, column means, grand mean - let row_means: Vec = (0..n) - .map(|i| (0..n).map(|j| d[i * n + j]).sum::() / n as f64) - .collect(); - let col_means: Vec = (0..n) - .map(|j| (0..n).map(|i| d[i * n + j]).sum::() / n as f64) - .collect(); - let grand_mean: f64 = row_means.iter().sum::() / n as f64; - - // Double centering - for i in 0..n { - for j in 0..n { - d[i * n + j] = d[i * n + j] - row_means[i] - col_means[j] + grand_mean; - } - } - - d - } - - // ── Helpers ───────────────────────────────────────────────── - - fn pearson_matrix(&self, matrix: &Array2) -> Array2 { - let n = matrix.nrows(); - let p = matrix.ncols(); - let mut corr = Array2::::eye(p); - - let means: Vec = (0..p) - .map(|j| matrix.column(j).mean().unwrap_or(0.0)) - .collect(); - let stds: Vec = (0..p) - .map(|j| { - let m = means[j]; - let v: f64 = matrix - .column(j) - .iter() - .map(|x| (x - m).powi(2)) - .sum::() - / (n as f64 - 1.0).max(1.0); - v.sqrt() - }) - .collect(); - - for i in 0..p { - for j in (i + 1)..p { - if stds[i] < f64::EPSILON || stds[j] < f64::EPSILON { - continue; - } - let cov: f64 = (0..n) - .map(|k| (matrix[[k, i]] - means[i]) * (matrix[[k, j]] - means[j])) - .sum::() - / (n as f64 - 1.0).max(1.0); - let r = (cov / (stds[i] * stds[j])).clamp(-1.0, 1.0); - corr[[i, j]] = r; - corr[[j, i]] = r; - } - } - corr - } - - fn pearson_vec(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return f64::NAN; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 0..x.len().min(y.len()) { - let dx = x[i] 
- mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - (cov / (sx * sy).sqrt()).clamp(-1.0, 1.0) - } - - fn extract_matrix(&self, num_cols: &[&str]) -> (Vec, Array2) { - let mut names = Vec::new(); - let mut col_data = Vec::new(); - - for &col_name in num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - names.push(col_name.to_string()); - col_data.push(vals); - } - } - - if names.is_empty() { - return (vec![], Array2::zeros((0, 0))); - } - - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let sample_len = min_len.min(self.max_sample); - let step = (min_len / sample_len).max(1); - let n_cols = names.len(); - - let mut matrix = Array2::::zeros((sample_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for (i_out, i_in) in (0..min_len).step_by(step).take(sample_len).enumerate() { - matrix[[i_out, j]] = data[i_in]; - } - } - - (names, matrix) - } -} diff --git a/src/stats/advanced_dimreduction.rs b/src/stats/advanced_dimreduction.rs deleted file mode 100644 index 66357fe..0000000 --- a/src/stats/advanced_dimreduction.rs +++ /dev/null @@ -1,403 +0,0 @@ -use ndarray::Array2; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct TsneResult { - pub embedding: Vec<[f64; 2]>, - pub kl_divergence: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct FactorAnalysisResult { - pub n_factors: usize, - pub loadings: Vec>, // features × factors - pub variance_explained: Vec, - pub feature_names: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct FeatureContribution { - pub column: String, - pub 
contribution: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AdvancedDimReductionResult { - pub tsne: Option, - pub factor_analysis: Option, - pub feature_contributions: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct AdvancedDimReductionStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - tsne_perplexity: f64, - max_sample: usize, -} - -impl<'a> AdvancedDimReductionStats<'a> { - pub fn new( - df: &'a DataFrame, - schema: &'a DataSchema, - tsne_perplexity: f64, - max_sample: usize, - ) -> Self { - Self { - df, - schema, - tsne_perplexity, - max_sample, - } - } - - pub fn compute(&self) -> AdvancedDimReductionResult { - let (names, matrix) = self.prepare_data(); - - let tsne = if matrix.nrows() >= 10 && matrix.ncols() >= 2 { - self.tsne_2d(&matrix) - } else { - None - }; - - let factor_analysis = if matrix.nrows() >= 20 && matrix.ncols() >= 3 { - self.factor_analysis(&names, &matrix) - } else { - None - }; - - let feature_contributions = self.compute_contributions(&names, &matrix); - - AdvancedDimReductionResult { - tsne, - factor_analysis, - feature_contributions, - } - } - - /// Simplified t-SNE implementation (Barnes-Hut style). 
- fn tsne_2d(&self, data: &Array2) -> Option { - let n = data.nrows(); - let d = data.ncols(); - if n < 4 { - return None; - } - - let perplexity = self.tsne_perplexity.min((n as f64 - 1.0) / 3.0); - - // Compute pairwise distances - let mut dist_sq = Array2::::zeros((n, n)); - for i in 0..n { - for j in (i + 1)..n { - let d2: f64 = (0..d).map(|k| (data[[i, k]] - data[[j, k]]).powi(2)).sum(); - dist_sq[[i, j]] = d2; - dist_sq[[j, i]] = d2; - } - } - - // Compute joint probabilities P - let mut p_matrix = vec![vec![0.0f64; n]; n]; - for i in 0..n { - let sigma = Self::binary_search_sigma(&dist_sq, i, perplexity); - let sigma2 = 2.0 * sigma * sigma; - let mut sum = 0.0f64; - for j in 0..n { - if i != j { - let pij = (-dist_sq[[i, j]] / sigma2).exp(); - p_matrix[i][j] = pij; - sum += pij; - } - } - if sum > f64::EPSILON { - for j in 0..n { - p_matrix[i][j] /= sum; - } - } - } - - // Symmetrize - for i in 0..n { - for j in (i + 1)..n { - let pij = (p_matrix[i][j] + p_matrix[j][i]) / (2.0 * n as f64); - let pij = pij.max(1e-12); - p_matrix[i][j] = pij; - p_matrix[j][i] = pij; - } - } - - // Initialize embedding - let mut y = Array2::::zeros((n, 2)); - for i in 0..n { - y[[i, 0]] = (i as f64 * 0.1).sin() * 0.01; - y[[i, 1]] = (i as f64 * 0.1).cos() * 0.01; - } - - let learning_rate = 200.0; - let momentum = 0.8; - let mut gains = Array2::::ones((n, 2)); - let mut update = Array2::::zeros((n, 2)); - - let n_iter = 300; - - for _iter in 0..n_iter { - // Compute Q (Student-t kernel) - let mut q_matrix = vec![vec![0.0f64; n]; n]; - let mut q_sum = 0.0f64; - for i in 0..n { - for j in (i + 1)..n { - let d2: f64 = (0..2).map(|k| (y[[i, k]] - y[[j, k]]).powi(2)).sum(); - let qij = 1.0 / (1.0 + d2); - q_matrix[i][j] = qij; - q_matrix[j][i] = qij; - q_sum += 2.0 * qij; - } - } - - if q_sum > f64::EPSILON { - for i in 0..n { - for j in 0..n { - q_matrix[i][j] /= q_sum; - q_matrix[i][j] = q_matrix[i][j].max(1e-12); - } - } - } - - // Gradients - let mut grad = Array2::::zeros((n, 
2)); - for i in 0..n { - for j in 0..n { - if i == j { - continue; - } - let d2: f64 = (0..2).map(|k| (y[[i, k]] - y[[j, k]]).powi(2)).sum(); - let mult = 4.0 * (p_matrix[i][j] - q_matrix[i][j]) / (1.0 + d2); - for k in 0..2 { - grad[[i, k]] += mult * (y[[i, k]] - y[[j, k]]); - } - } - } - - // Update - for i in 0..n { - for k in 0..2 { - let sign_match = (grad[[i, k]] > 0.0) == (update[[i, k]] > 0.0); - gains[[i, k]] = if sign_match { - (gains[[i, k]] * 0.8).max(0.01) - } else { - gains[[i, k]] + 0.2 - }; - update[[i, k]] = - momentum * update[[i, k]] - learning_rate * gains[[i, k]] * grad[[i, k]]; - y[[i, k]] += update[[i, k]]; - } - } - - // Center - let mean0 = y.column(0).mean().unwrap_or(0.0); - let mean1 = y.column(1).mean().unwrap_or(0.0); - for i in 0..n { - y[[i, 0]] -= mean0; - y[[i, 1]] -= mean1; - } - } - - // KL divergence - let mut kl = 0.0f64; - for i in 0..n { - for j in 0..n { - if i != j && p_matrix[i][j] > 1e-12 { - let d2: f64 = (0..2).map(|k| (y[[i, k]] - y[[j, k]]).powi(2)).sum(); - let qij = (1.0 / (1.0 + d2)).max(1e-12); - kl += p_matrix[i][j] * (p_matrix[i][j] / qij).ln(); - } - } - } - - let embedding: Vec<[f64; 2]> = (0..n).map(|i| [y[[i, 0]], y[[i, 1]]]).collect(); - - Some(TsneResult { - embedding, - kl_divergence: kl, - }) - } - - fn binary_search_sigma(dist_sq: &Array2, i: usize, target_perp: f64) -> f64 { - let n = dist_sq.nrows(); - let log_perp = target_perp.ln(); - let mut lo = 1e-10f64; - let mut hi = 1e4f64; - let mut sigma = 1.0; - - for _ in 0..50 { - sigma = (lo + hi) / 2.0; - let sigma2 = 2.0 * sigma * sigma; - let mut sum = 0.0f64; - let mut h = 0.0f64; - for j in 0..n { - if j != i { - let pij = (-dist_sq[[i, j]] / sigma2).exp(); - sum += pij; - h -= pij * (-dist_sq[[i, j]] / sigma2); - } - } - if sum > f64::EPSILON { - h = h / sum + sum.ln(); - } - - if (h - log_perp).abs() < 1e-5 { - break; - } - if h > log_perp { - hi = sigma; - } else { - lo = sigma; - } - } - sigma - } - - /// Factor Analysis (simplified: PCA 
loadings rotated). - fn factor_analysis( - &self, - names: &[String], - matrix: &Array2, - ) -> Option { - let p = matrix.ncols(); - if p < 3 { - return None; - } - - // Use PCA as initial factor extraction - let standardized = Self::standardize(matrix); - let cov = Self::covariance_matrix(&standardized); - - // Eigendecomposition - let n_factors = ((p as f64).sqrt().ceil() as usize).min(p / 2).max(1); - let (eigenvalues, eigenvectors) = - crate::stats::pca::PcaStats::eigen_decomposition(&cov, n_factors); - - let total: f64 = eigenvalues.iter().sum(); - let variance_explained: Vec = eigenvalues - .iter() - .map(|&ev| { - if total > f64::EPSILON { - ev / total - } else { - 0.0 - } - }) - .collect(); - - // Loadings: eigenvectors * sqrt(eigenvalues) - let loadings: Vec> = (0..p) - .map(|feat| { - (0..n_factors) - .map(|f| eigenvectors[[feat, f]] * eigenvalues.get(f).unwrap_or(&0.0).sqrt()) - .collect() - }) - .collect(); - - Some(FactorAnalysisResult { - n_factors, - loadings, - variance_explained, - feature_names: names.to_vec(), - }) - } - - fn compute_contributions( - &self, - names: &[String], - matrix: &Array2, - ) -> Vec { - if matrix.ncols() < 2 || matrix.nrows() < 3 { - return vec![]; - } - - let standardized = Self::standardize(matrix); - let cov = Self::covariance_matrix(&standardized); - let (eigenvalues, eigenvectors) = - crate::stats::pca::PcaStats::eigen_decomposition(&cov, 2.min(matrix.ncols())); - - let total_var: f64 = eigenvalues.iter().sum::().max(f64::EPSILON); - - let mut contributions: Vec = names - .iter() - .enumerate() - .map(|(i, name)| { - let contrib: f64 = eigenvalues - .iter() - .enumerate() - .map(|(k, &ev)| { - let weight = ev / total_var; - eigenvectors[[i, k]].powi(2) * weight - }) - .sum(); - FeatureContribution { - column: name.clone(), - contribution: contrib, - } - }) - .collect(); - - contributions.sort_by(|a, b| { - b.contribution - .partial_cmp(&a.contribution) - .unwrap_or(std::cmp::Ordering::Equal) - }); - contributions - } 
- - // ── Helpers ───────────────────────────────────────────────── - - fn prepare_data(&self) -> (Vec, Array2) { - let num_cols = self.schema.numeric_columns(); - let mut names = Vec::new(); - let mut col_data = Vec::new(); - - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - names.push(col_name.to_string()); - col_data.push(vals); - } - } - - if names.is_empty() { - return (vec![], Array2::zeros((0, 0))); - } - - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let sample_len = min_len.min(self.max_sample); - let step = (min_len / sample_len).max(1); - let n_cols = names.len(); - - let mut matrix = Array2::::zeros((sample_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for (i_out, i_in) in (0..min_len).step_by(step).take(sample_len).enumerate() { - matrix[[i_out, j]] = data[i_in]; - } - } - - (names, matrix) - } - - fn standardize(matrix: &Array2) -> Array2 { - crate::stats::pca::PcaStats::standardize(matrix) - } - - fn covariance_matrix(matrix: &Array2) -> Array2 { - crate::stats::pca::PcaStats::covariance_matrix(matrix) - } -} diff --git a/src/stats/advanced_distribution.rs b/src/stats/advanced_distribution.rs deleted file mode 100644 index 6239613..0000000 --- a/src/stats/advanced_distribution.rs +++ /dev/null @@ -1,320 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; -use statrs::distribution::ContinuousCDF; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BestFitEntry { - pub column: String, - pub distribution: String, - pub aic: f64, - pub bic: f64, - pub ks_statistic: f64, - pub ks_p_value: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct JarqueBeraEntry { - pub column: String, - pub statistic: f64, 
- pub p_value: f64, - pub is_normal: bool, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PowerTransformEntry { - pub column: String, - pub method: String, - pub skewness_before: f64, - pub skewness_after: f64, - pub improvement: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct KdeBandwidthEntry { - pub column: String, - pub silverman: f64, - pub scott: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AdvancedDistributionResult { - pub best_fits: Vec, - pub jarque_bera: Vec, - pub power_transforms: Vec, - pub kde_bandwidths: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct AdvancedDistributionStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - n_fits: usize, - max_sample: usize, -} - -impl<'a> AdvancedDistributionStats<'a> { - pub fn new( - df: &'a DataFrame, - schema: &'a DataSchema, - n_fits: usize, - max_sample: usize, - ) -> Self { - Self { - df, - schema, - n_fits, - max_sample, - } - } - - pub fn compute(&self) -> AdvancedDistributionResult { - let num_cols = self.schema.numeric_columns(); - - let mut best_fits = Vec::new(); - let mut jarque_bera = Vec::new(); - let mut power_transforms = Vec::new(); - let mut kde_bandwidths = Vec::new(); - - for &col_name in &num_cols { - let col = match self.df.column(col_name) { - Ok(c) => c, - Err(_) => continue, - }; - let values = match DescriptiveStats::column_to_f64_vec(col) { - Some(v) if v.len() >= 10 => v, - _ => continue, - }; - - // Sample if needed - let vals = if values.len() > self.max_sample { - let step = values.len() / self.max_sample; - values - .iter() - .step_by(step) - .copied() - .take(self.max_sample) - .collect::>() - } else { - values.clone() - }; - - // Best fit distribution - if let Some(bf) = self.fit_distributions(col_name, &vals) { - best_fits.push(bf); - } - - // Jarque-Bera test - jarque_bera.push(self.jarque_bera_test(col_name, &vals)); - - // Power transform recommendation 
- power_transforms.push(self.power_transform_rec(col_name, &vals)); - - // KDE bandwidths - kde_bandwidths.push(self.kde_bandwidth(col_name, &vals)); - } - - AdvancedDistributionResult { - best_fits, - jarque_bera, - power_transforms, - kde_bandwidths, - } - } - - /// Fit candidate distributions and rank by AIC. - fn fit_distributions(&self, name: &str, values: &[f64]) -> Option { - let n = values.len() as f64; - let mean = values.iter().sum::() / n; - let std = (values.iter().map(|x| (x - mean).powi(2)).sum::() / (n - 1.0)).sqrt(); - - if std < f64::EPSILON { - return None; - } - - // Test normal distribution fit - let normal = statrs::distribution::Normal::new(mean, std).ok()?; - - // Log-likelihood for normal - let ll_normal: f64 = values - .iter() - .map(|&x| { - let z = (x - mean) / std; - -0.5 * z * z - 0.5 * (2.0 * std::f64::consts::PI).ln() - std.ln() - }) - .sum(); - - let k_normal = 2.0; // Two parameters (mean, std) - let aic_normal = 2.0 * k_normal - 2.0 * ll_normal; - let bic_normal = k_normal * n.ln() - 2.0 * ll_normal; - - // KS test for normal - let mut sorted = values.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - - let ks_stat: f64 = sorted - .iter() - .enumerate() - .map(|(i, &x)| { - let ecdf = (i + 1) as f64 / n; - let cdf = normal.cdf(x); - (ecdf - cdf).abs().max((i as f64 / n - cdf).abs()) - }) - .fold(0.0f64, f64::max); - - let sqrt_n = n.sqrt(); - let lambda = (sqrt_n + 0.12 + 0.11 / sqrt_n) * ks_stat; - let ks_p = if lambda < 0.001 { - 1.0 - } else { - let mut p = 0.0; - for k in 1..=100 { - let sign = if k % 2 == 0 { -1.0 } else { 1.0 }; - p += sign * (-2.0 * (k as f64).powi(2) * lambda.powi(2)).exp(); - } - (2.0 * p).clamp(0.0, 1.0) - }; - - Some(BestFitEntry { - column: name.to_string(), - distribution: "normal".to_string(), - aic: aic_normal, - bic: bic_normal, - ks_statistic: ks_stat, - ks_p_value: ks_p, - }) - } - - /// Jarque-Bera test for normality. 
- fn jarque_bera_test(&self, name: &str, values: &[f64]) -> JarqueBeraEntry { - let n = values.len() as f64; - let mean = values.iter().sum::() / n; - let m2: f64 = values.iter().map(|x| (x - mean).powi(2)).sum::() / n; - let m3: f64 = values.iter().map(|x| (x - mean).powi(3)).sum::() / n; - let m4: f64 = values.iter().map(|x| (x - mean).powi(4)).sum::() / n; - - let skewness = if m2 > f64::EPSILON { - m3 / m2.powf(1.5) - } else { - 0.0 - }; - let kurtosis = if m2 > f64::EPSILON { - m4 / m2.powi(2) - 3.0 - } else { - 0.0 - }; - - let jb = n / 6.0 * (skewness.powi(2) + kurtosis.powi(2) / 4.0); - let p_value = (-jb / 2.0).exp(); // Chi2(df=2) approximation - - JarqueBeraEntry { - column: name.to_string(), - statistic: jb, - p_value, - is_normal: p_value > 0.05, - } - } - - /// Power transform recommendation (Yeo-Johnson). - fn power_transform_rec(&self, name: &str, values: &[f64]) -> PowerTransformEntry { - let n = values.len() as f64; - let mean = values.iter().sum::() / n; - let std = (values.iter().map(|x| (x - mean).powi(2)).sum::() / (n - 1.0)).sqrt(); - - let skewness_before = if std > f64::EPSILON { - let m3: f64 = values - .iter() - .map(|x| ((x - mean) / std).powi(3)) - .sum::() - / n; - m3 - } else { - 0.0 - }; - - // Try log(1+x) transform for positive-skewed data - let (method, skewness_after) = if skewness_before > 0.5 && values.iter().all(|&x| x > 0.0) { - let transformed: Vec = values.iter().map(|x| (1.0 + x).ln()).collect(); - let t_mean = transformed.iter().sum::() / n; - let t_std = (transformed - .iter() - .map(|x| (x - t_mean).powi(2)) - .sum::() - / (n - 1.0)) - .sqrt(); - let t_skew = if t_std > f64::EPSILON { - transformed - .iter() - .map(|x| ((x - t_mean) / t_std).powi(3)) - .sum::() - / n - } else { - 0.0 - }; - ("log1p".to_string(), t_skew) - } else if skewness_before < -0.5 { - // Try square transform for left-skewed - let transformed: Vec = values.iter().map(|x| x.powi(2)).collect(); - let t_mean = transformed.iter().sum::() / n; - let 
t_std = (transformed - .iter() - .map(|x| (x - t_mean).powi(2)) - .sum::() - / (n - 1.0)) - .sqrt(); - let t_skew = if t_std > f64::EPSILON { - transformed - .iter() - .map(|x| ((x - t_mean) / t_std).powi(3)) - .sum::() - / n - } else { - 0.0 - }; - ("square".to_string(), t_skew) - } else { - ("none".to_string(), skewness_before) - }; - - let improvement = (skewness_before.abs() - skewness_after.abs()).max(0.0); - - PowerTransformEntry { - column: name.to_string(), - method, - skewness_before, - skewness_after, - improvement, - } - } - - /// KDE bandwidth estimation (Silverman & Scott rules). - fn kde_bandwidth(&self, name: &str, values: &[f64]) -> KdeBandwidthEntry { - let n = values.len() as f64; - let mean = values.iter().sum::() / n; - let std = (values.iter().map(|x| (x - mean).powi(2)).sum::() / (n - 1.0)).sqrt(); - - // IQR - let mut sorted = values.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let q1_idx = (n * 0.25) as usize; - let q3_idx = (n * 0.75) as usize; - let iqr = sorted.get(q3_idx).unwrap_or(&0.0) - sorted.get(q1_idx).unwrap_or(&0.0); - - // Silverman's rule - let silverman = 0.9 * std.min(iqr / 1.34) * n.powf(-1.0 / 5.0); - - // Scott's rule - let scott = 3.49 * std * n.powf(-1.0 / 3.0); - - KdeBandwidthEntry { - column: name.to_string(), - silverman: silverman.max(f64::EPSILON), - scott: scott.max(f64::EPSILON), - } - } -} diff --git a/src/stats/categorical.rs b/src/stats/categorical.rs deleted file mode 100644 index 59f4170..0000000 --- a/src/stats/categorical.rs +++ /dev/null @@ -1,253 +0,0 @@ -use indexmap::IndexMap; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CategoryFrequency { - pub value: String, - pub count: usize, - pub ratio: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct 
CategoricalColumnResult { - pub column: String, - pub n_unique: usize, - pub entropy: f64, - pub normalized_entropy: f64, - pub frequencies: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ChiSquareEntry { - pub col_a: String, - pub col_b: String, - pub chi2: f64, - pub p_value: f64, - pub cramers_v: f64, - pub dof: usize, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CategoricalResult { - pub columns: Vec, - pub chi_square_tests: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct CategoricalStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_categories: usize, -} - -impl<'a> CategoricalStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, max_categories: usize) -> Self { - Self { - df, - schema, - max_categories, - } - } - - pub fn compute(&self) -> CategoricalResult { - let cat_cols = self.schema.categorical_columns(); - let n_rows = self.df.height(); - - // ── Per-column analysis ───────────────────────────────── - let columns: Vec = cat_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let str_col = col.cast(&DataType::String).ok()?; - let ca = str_col.str().ok()?; - Some(self.analyse_column(col_name, ca, n_rows)) - }) - .collect(); - - // ── Chi-square independence tests ─────────────────────── - let chi_square_tests = self.chi_square_independence(&cat_cols, n_rows); - - CategoricalResult { - columns, - chi_square_tests, - } - } - - fn analyse_column( - &self, - name: &str, - ca: &StringChunked, - n_rows: usize, - ) -> CategoricalColumnResult { - // Count frequencies - let mut freq_map: IndexMap = IndexMap::new(); - for opt_val in ca.into_iter() { - let key = opt_val.unwrap_or("(missing)").to_string(); - *freq_map.entry(key).or_insert(0) += 1; - } - - let n_unique = freq_map.len(); - - // Sort by frequency descending - let mut sorted: Vec<(String, usize)> = freq_map.into_iter().collect(); - 
sorted.sort_by(|a, b| b.1.cmp(&a.1)); - - // Group low-frequency categories into "Other" if exceeding max_categories - let frequencies = if sorted.len() > self.max_categories { - let mut result: Vec = sorted[..self.max_categories] - .iter() - .map(|(val, count)| CategoryFrequency { - value: val.clone(), - count: *count, - ratio: *count as f64 / n_rows as f64, - }) - .collect(); - - let other_count: usize = sorted[self.max_categories..].iter().map(|(_, c)| c).sum(); - result.push(CategoryFrequency { - value: "(Other)".to_string(), - count: other_count, - ratio: other_count as f64 / n_rows as f64, - }); - result - } else { - sorted - .iter() - .map(|(val, count)| CategoryFrequency { - value: val.clone(), - count: *count, - ratio: *count as f64 / n_rows as f64, - }) - .collect() - }; - - // Shannon entropy - let total = n_rows as f64; - let entropy: f64 = frequencies - .iter() - .filter(|f| f.count > 0) - .map(|f| { - let p = f.count as f64 / total; - if p > 0.0 { - -p * p.ln() - } else { - 0.0 - } - }) - .sum(); - - let max_entropy = if n_unique > 1 { - (n_unique as f64).ln() - } else { - 1.0 - }; - let normalized_entropy = if max_entropy > f64::EPSILON { - entropy / max_entropy - } else { - 0.0 - }; - - CategoricalColumnResult { - column: name.to_string(), - n_unique, - entropy, - normalized_entropy, - frequencies, - } - } - - /// Chi-square test of independence between all pairs of categorical columns. 
- fn chi_square_independence(&self, cat_cols: &[&str], n_rows: usize) -> Vec { - let n = n_rows as f64; - if n < 1.0 || cat_cols.len() < 2 { - return vec![]; - } - - let mut results = Vec::new(); - - for i in 0..cat_cols.len() { - for j in (i + 1)..cat_cols.len() { - if let Some(entry) = self.chi_square_pair(cat_cols[i], cat_cols[j], n) { - results.push(entry); - } - } - } - - results - } - - fn chi_square_pair(&self, col_a: &str, col_b: &str, n: f64) -> Option { - let a = self.df.column(col_a).ok()?.cast(&DataType::String).ok()?; - let b = self.df.column(col_b).ok()?.cast(&DataType::String).ok()?; - let a_str = a.str().ok()?; - let b_str = b.str().ok()?; - - let mut contingency: IndexMap<(String, String), usize> = IndexMap::new(); - let mut a_counts: IndexMap = IndexMap::new(); - let mut b_counts: IndexMap = IndexMap::new(); - - for (va, vb) in a_str.into_iter().zip(b_str.into_iter()) { - let ka = va.unwrap_or("(NA)").to_string(); - let kb = vb.unwrap_or("(NA)").to_string(); - *contingency.entry((ka.clone(), kb.clone())).or_insert(0) += 1; - *a_counts.entry(ka).or_insert(0) += 1; - *b_counts.entry(kb).or_insert(0) += 1; - } - - let r = a_counts.len(); - let k = b_counts.len(); - if r < 2 || k < 2 { - return None; - } - - let dof = (r - 1) * (k - 1); - - // Chi-square statistic - let mut chi2 = 0.0f64; - for ((ka, kb), &observed) in &contingency { - let ea = *a_counts.get(ka).unwrap_or(&0) as f64; - let eb = *b_counts.get(kb).unwrap_or(&0) as f64; - let expected = ea * eb / n; - if expected > 0.0 { - chi2 += (observed as f64 - expected).powi(2) / expected; - } - } - - // P-value approximation using chi-square survival function - let p_value = Self::chi_square_sf(chi2, dof); - - let min_dim = (r - 1).min(k - 1) as f64; - let cramers_v = if min_dim >= 1.0 && n > 0.0 { - (chi2 / (n * min_dim)).sqrt().clamp(0.0, 1.0) - } else { - 0.0 - }; - - Some(ChiSquareEntry { - col_a: col_a.to_string(), - col_b: col_b.to_string(), - chi2, - p_value, - cramers_v, - dof, - }) - } - 
- /// Chi-square survival function approximation using Wilson-Hilferty. - pub(crate) fn chi_square_sf(x: f64, dof: usize) -> f64 { - if dof == 0 { - return 1.0; - } - let k = dof as f64; - // Wilson-Hilferty approximation - let z = ((x / k).powf(1.0 / 3.0) - (1.0 - 2.0 / (9.0 * k))) / (2.0 / (9.0 * k)).sqrt(); - // Standard normal survival - 0.5 * statrs::function::erf::erfc(z / std::f64::consts::SQRT_2) - } -} diff --git a/src/stats/clustering.rs b/src/stats/clustering.rs deleted file mode 100644 index 0aa4711..0000000 --- a/src/stats/clustering.rs +++ /dev/null @@ -1,445 +0,0 @@ -use ndarray::Array2; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ElbowPoint { - pub k: usize, - pub inertia: f64, - pub silhouette: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct KMeansResult { - pub elbow_data: Vec, - pub optimal_k: usize, - pub silhouette_score: f64, - pub cluster_sizes: Vec, - pub labels: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DbscanResult { - pub eps: f64, - pub min_samples: usize, - pub n_clusters: usize, - pub n_noise: usize, - pub cluster_sizes: Vec, - pub labels: Vec, // -1 = noise -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ClusteringResult { - pub kmeans: Option, - pub dbscan: Option, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct ClusteringStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_k: usize, - max_sample: usize, -} - -impl<'a> ClusteringStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, max_k: usize, max_sample: usize) -> Self { - Self { - df, - schema, - max_k, - max_sample, - } - } - - pub fn compute(&self) -> ClusteringResult { - let matrix = match 
self.prepare_data() { - Some(m) => m, - None => { - return ClusteringResult { - kmeans: None, - dbscan: None, - } - } - }; - - let kmeans = self.kmeans_analysis(&matrix); - let dbscan = self.dbscan_analysis(&matrix); - - ClusteringResult { kmeans, dbscan } - } - - // ── K-Means ───────────────────────────────────────────────── - - fn kmeans_analysis(&self, data: &Array2) -> Option { - let n = data.nrows(); - if n < 4 { - return None; - } - - let max_k = self.max_k.min(n / 2).max(2); - let mut elbow_data = Vec::new(); - let mut best_silhouette = -1.0f64; - let mut best_k = 2; - let mut best_labels = vec![0usize; n]; - - for k in 2..=max_k { - let (labels, inertia) = Self::kmeans_fit(data, k, 100); - let silhouette = Self::silhouette_score(data, &labels, k); - - elbow_data.push(ElbowPoint { - k, - inertia, - silhouette, - }); - - if silhouette > best_silhouette { - best_silhouette = silhouette; - best_k = k; - best_labels = labels; - } - } - - let mut cluster_sizes = vec![0usize; best_k]; - for &l in &best_labels { - if l < best_k { - cluster_sizes[l] += 1; - } - } - - Some(KMeansResult { - elbow_data, - optimal_k: best_k, - silhouette_score: best_silhouette, - cluster_sizes, - labels: best_labels, - }) - } - - /// Simple K-Means implementation (Lloyd's algorithm). 
- fn kmeans_fit(data: &Array2, k: usize, max_iter: usize) -> (Vec, f64) { - let n = data.nrows(); - let d = data.ncols(); - - // Initialize centroids using K-Means++ style - let mut centroids = Array2::::zeros((k, d)); - - // First centroid: random (just pick first row for determinism) - centroids.row_mut(0).assign(&data.row(0)); - - for c in 1..k { - // Pick the point furthest from existing centroids - let mut max_dist = 0.0f64; - let mut max_idx = 0; - for i in 0..n { - let min_dist: f64 = (0..c) - .map(|j| Self::euclidean_sq(&data.row(i), ¢roids.row(j))) - .fold(f64::INFINITY, f64::min); - if min_dist > max_dist { - max_dist = min_dist; - max_idx = i; - } - } - centroids.row_mut(c).assign(&data.row(max_idx)); - } - - let mut labels = vec![0usize; n]; - - for _iter in 0..max_iter { - // Assign step - let mut changed = false; - for i in 0..n { - let mut best_c = 0; - let mut best_d = f64::INFINITY; - for c in 0..k { - let dist = Self::euclidean_sq(&data.row(i), ¢roids.row(c)); - if dist < best_d { - best_d = dist; - best_c = c; - } - } - if labels[i] != best_c { - labels[i] = best_c; - changed = true; - } - } - - if !changed { - break; - } - - // Update step - let mut sums = Array2::::zeros((k, d)); - let mut counts = vec![0usize; k]; - for i in 0..n { - let c = labels[i]; - for j in 0..d { - sums[[c, j]] += data[[i, j]]; - } - counts[c] += 1; - } - for c in 0..k { - if counts[c] > 0 { - for j in 0..d { - centroids[[c, j]] = sums[[c, j]] / counts[c] as f64; - } - } - } - } - - // Inertia (within-cluster sum of squares) - let inertia: f64 = (0..n) - .map(|i| Self::euclidean_sq(&data.row(i), ¢roids.row(labels[i]))) - .sum(); - - (labels, inertia) - } - - /// Simplified silhouette score. 
- fn silhouette_score(data: &Array2, labels: &[usize], k: usize) -> f64 { - let n = data.nrows(); - if n < 2 || k < 2 { - return 0.0; - } - - // Sample for large datasets - let sample_size = n.min(1000); - let step = n / sample_size; - - let mut total = 0.0f64; - let mut count = 0; - - for i in (0..n).step_by(step.max(1)) { - let mut intra_sum = 0.0; - let mut intra_count = 0; - - // Mean distance to same-cluster points (a) - for j in 0..n { - if i != j && labels[i] == labels[j] { - intra_sum += Self::euclidean(&data.row(i), &data.row(j)); - intra_count += 1; - } - } - let a = if intra_count > 0 { - intra_sum / intra_count as f64 - } else { - 0.0 - }; - - // Mean distance to nearest other cluster (b) - let mut b = f64::INFINITY; - for c in 0..k { - if c == labels[i] { - continue; - } - let mut inter_sum = 0.0; - let mut inter_count = 0; - for j in 0..n { - if labels[j] == c { - inter_sum += Self::euclidean(&data.row(i), &data.row(j)); - inter_count += 1; - } - } - if inter_count > 0 { - b = b.min(inter_sum / inter_count as f64); - } - } - - let s = if a.max(b) > f64::EPSILON { - (b - a) / a.max(b) - } else { - 0.0 - }; - total += s; - count += 1; - } - - if count > 0 { - total / count as f64 - } else { - 0.0 - } - } - - // ── DBSCAN ────────────────────────────────────────────────── - - fn dbscan_analysis(&self, data: &Array2) -> Option { - let n = data.nrows(); - if n < 5 { - return None; - } - - let min_samples = (n as f64).ln().ceil() as usize; - let eps = self.estimate_eps(data, min_samples); - - // DBSCAN implementation - let mut labels = vec![-2i32; n]; // -2 = unvisited - let mut cluster_id = 0i32; - - for i in 0..n { - if labels[i] != -2 { - continue; - } - - let neighbors = self.range_query(data, i, eps); - if neighbors.len() < min_samples { - labels[i] = -1; // noise - continue; - } - - labels[i] = cluster_id; - let mut seed_set: Vec = neighbors; - let mut idx = 0; - - while idx < seed_set.len() { - let q = seed_set[idx]; - if labels[q] == -1 { - 
labels[q] = cluster_id; - } - if labels[q] != -2 { - idx += 1; - continue; - } - - labels[q] = cluster_id; - let q_neighbors = self.range_query(data, q, eps); - if q_neighbors.len() >= min_samples { - for nn in q_neighbors { - if !seed_set.contains(&nn) { - seed_set.push(nn); - } - } - } - idx += 1; - } - - cluster_id += 1; - } - - let n_clusters = cluster_id as usize; - let n_noise = labels.iter().filter(|&&l| l == -1).count(); - let mut cluster_sizes = vec![0usize; n_clusters]; - for &l in &labels { - if l >= 0 { - cluster_sizes[l as usize] += 1; - } - } - - Some(DbscanResult { - eps, - min_samples, - n_clusters, - n_noise, - cluster_sizes, - labels, - }) - } - - /// Estimate eps using k-nearest neighbor distance heuristic. - fn estimate_eps(&self, data: &Array2, k: usize) -> f64 { - let n = data.nrows(); - let sample_size = n.min(500); - let step = (n / sample_size).max(1); - - let mut k_distances: Vec = Vec::new(); - - for i in (0..n).step_by(step) { - let mut dists: Vec = (0..n) - .filter(|&j| j != i) - .map(|j| Self::euclidean(&data.row(i), &data.row(j))) - .collect(); - dists.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - if dists.len() >= k { - k_distances.push(dists[k - 1]); - } - } - - k_distances.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - - // Use the "knee" of the sorted k-distance plot - // Approximate: use the value at 90th percentile - let idx = (k_distances.len() as f64 * 0.9) as usize; - k_distances - .get(idx.min(k_distances.len().saturating_sub(1))) - .copied() - .unwrap_or(1.0) - } - - fn range_query(&self, data: &Array2, idx: usize, eps: f64) -> Vec { - let n = data.nrows(); - (0..n) - .filter(|&j| Self::euclidean(&data.row(idx), &data.row(j)) <= eps) - .collect() - } - - // ── Data preparation ──────────────────────────────────────── - - fn prepare_data(&self) -> Option> { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return None; - } - - let mut col_data: 
Vec> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_data.push(vals); - } - } - - if col_data.is_empty() { - return None; - } - - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let sample_len = min_len.min(self.max_sample); - let step = (min_len / sample_len).max(1); - let n_cols = col_data.len(); - - let mut matrix = Array2::::zeros((sample_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for (i_out, i_in) in (0..min_len).step_by(step).take(sample_len).enumerate() { - matrix[[i_out, j]] = data[i_in]; - } - } - - // Standardize - for j in 0..n_cols { - let col = matrix.column(j); - let mean = col.mean().unwrap_or(0.0); - let std = { - let var: f64 = col.iter().map(|x| (x - mean).powi(2)).sum::() - / (sample_len as f64 - 1.0).max(1.0); - var.sqrt() - }; - if std > f64::EPSILON { - for i in 0..sample_len { - matrix[[i, j]] = (matrix[[i, j]] - mean) / std; - } - } - } - - Some(matrix) - } - - // ── Distance helpers ──────────────────────────────────────── - - fn euclidean_sq(a: &ndarray::ArrayView1, b: &ndarray::ArrayView1) -> f64 { - a.iter().zip(b.iter()).map(|(x, y)| (x - y).powi(2)).sum() - } - - fn euclidean(a: &ndarray::ArrayView1, b: &ndarray::ArrayView1) -> f64 { - Self::euclidean_sq(a, b).sqrt() - } -} diff --git a/src/stats/column_role.rs b/src/stats/column_role.rs deleted file mode 100644 index a9d9aac..0000000 --- a/src/stats/column_role.rs +++ /dev/null @@ -1,203 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::utils::types::ColumnType; - -// ─── Types ────────────────────────────────────────────────────────── - -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -pub enum ColumnRole { - Target, - Feature, - Id, - Datetime, - Text, - Constant, - HighMissing, - Unknown, -} - -#[derive(Debug, Clone, 
Serialize, Deserialize)] -pub struct ColumnRoleEntry { - pub column: String, - pub role: ColumnRole, - pub confidence: f64, - pub reason: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnRoleResult { - pub roles: Vec, - pub target_candidates: Vec, - pub id_columns: Vec, - pub droppable_columns: Vec, -} - -// ─── Classifier ───────────────────────────────────────────────────── - -pub struct ColumnRoleClassifier<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - target_hint: Option, -} - -impl<'a> ColumnRoleClassifier<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, target_hint: Option) -> Self { - Self { - df, - schema, - target_hint, - } - } - - pub fn compute(&self) -> ColumnRoleResult { - let n = self.schema.n_rows.max(1); - let mut roles: Vec = Vec::new(); - - for col_info in &self.schema.columns { - let (role, confidence, reason) = self.classify_column(col_info, n); - roles.push(ColumnRoleEntry { - column: col_info.name.clone(), - role, - confidence, - reason, - }); - } - - let target_candidates: Vec = roles - .iter() - .filter(|r| r.role == ColumnRole::Target) - .map(|r| r.column.clone()) - .collect(); - - let id_columns: Vec = roles - .iter() - .filter(|r| r.role == ColumnRole::Id) - .map(|r| r.column.clone()) - .collect(); - - let droppable_columns: Vec = roles - .iter() - .filter(|r| { - matches!( - r.role, - ColumnRole::Id | ColumnRole::Constant | ColumnRole::HighMissing - ) - }) - .map(|r| r.column.clone()) - .collect(); - - ColumnRoleResult { - roles, - target_candidates, - id_columns, - droppable_columns, - } - } - - fn classify_column( - &self, - col_info: &crate::core::schema::ColumnInfo, - n_rows: usize, - ) -> (ColumnRole, f64, String) { - let name_lower = col_info.name.to_lowercase(); - - // ── Explicit target_hint ──────────────────────────── - if let Some(ref hint) = self.target_hint { - if col_info.name == *hint { - return ( - ColumnRole::Target, - 1.0, - "Explicitly specified as target".into(), - 
); - } - } - - // ── High-missing column (>70%) ────────────────────── - if col_info.missing_ratio > 0.70 { - return ( - ColumnRole::HighMissing, - 0.95, - format!( - "{:.1}% missing — likely unusable", - col_info.missing_ratio * 100.0 - ), - ); - } - - // ── Constant column ───────────────────────────────── - if col_info.n_unique <= 1 { - return (ColumnRole::Constant, 1.0, "Only 1 unique value".into()); - } - - // ── Datetime column ───────────────────────────────── - if col_info.inferred_type == ColumnType::DateTime { - return (ColumnRole::Datetime, 0.95, "Datetime type detected".into()); - } - - // ── ID-like column heuristics ─────────────────────── - let uniqueness_ratio = col_info.n_unique as f64 / n_rows as f64; - let is_name_id = name_lower.contains("id") - || name_lower == "index" - || name_lower.ends_with("_key") - || name_lower.ends_with("_no") - || name_lower.ends_with("_num") - || name_lower == "pk" - || name_lower == "uid"; - - if is_name_id && uniqueness_ratio > 0.95 && col_info.n_missing == 0 { - return ( - ColumnRole::Id, - 0.90, - "Name pattern + high uniqueness".into(), - ); - } - if uniqueness_ratio > 0.999 && col_info.n_missing == 0 { - return ( - ColumnRole::Id, - 0.80, - "Near-unique without missing values".into(), - ); - } - - // ── Text column ───────────────────────────────────── - if col_info.inferred_type == ColumnType::Text { - return (ColumnRole::Text, 0.85, "Inferred as free text".into()); - } - - // ── Target candidate heuristics (common column names) ── - let target_names = [ - "target", "label", "class", "y", "output", "response", "is_fraud", "churn", "survived", - "income", "price", "sale", "revenue", "default", - ]; - for t in &target_names { - if name_lower == *t || name_lower.ends_with(&format!("_{}", t)) { - return ( - ColumnRole::Target, - 0.60, - format!("Column name matches common target pattern '{}'", t), - ); - } - } - - // ── Binary column with target-like name ───────────── - if col_info.n_unique == 2 { - // Binary 
columns are often targets in classification - if name_lower.starts_with("is_") || name_lower.starts_with("has_") { - return ( - ColumnRole::Target, - 0.45, - "Binary column with boolean-like name".into(), - ); - } - } - - // ── Default: Feature ──────────────────────────────── - ( - ColumnRole::Feature, - 0.50, - "Default classification as feature".into(), - ) - } -} diff --git a/src/stats/correlation.rs b/src/stats/correlation.rs deleted file mode 100644 index 49e5e1d..0000000 --- a/src/stats/correlation.rs +++ /dev/null @@ -1,456 +0,0 @@ -use indexmap::IndexMap; -use ndarray::{Array1, Array2}; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CorrelationResult { - /// Column names (axis labels). - pub columns: Vec, - /// Pearson correlation matrix (row-major). - pub pearson: Vec>, - /// Spearman correlation matrix (row-major). - pub spearman: Vec>, - /// Cramér's V matrix for categorical columns. - pub cramers_v: Option, - /// Variance Inflation Factors per numeric column. - pub vif: Vec, - /// Pairs with |r| ≥ threshold. 
- pub high_correlation_pairs: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CramersVResult { - pub columns: Vec, - pub matrix: Vec>, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct VifEntry { - pub column: String, - pub vif: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct HighCorrPair { - pub col_a: String, - pub col_b: String, - pub pearson: f64, - pub spearman: f64, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct CorrelationStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - threshold: f64, -} - -impl<'a> CorrelationStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, threshold: f64) -> Self { - Self { - df, - schema, - threshold, - } - } - - pub fn compute(&self) -> CorrelationResult { - let num_cols = self.schema.numeric_columns(); - let (columns, matrix) = self.extract_numeric_matrix(&num_cols); - - let pearson = if matrix.ncols() >= 2 { - Self::correlation_matrix(&matrix) - } else { - vec![vec![1.0]; columns.len().max(1)] - }; - - let spearman = if matrix.ncols() >= 2 { - let ranked = Self::rank_matrix(&matrix); - Self::correlation_matrix(&ranked) - } else { - vec![vec![1.0]; columns.len().max(1)] - }; - - let high_correlation_pairs = - Self::find_high_correlations(&columns, &pearson, &spearman, self.threshold); - - let vif = self.compute_vif(&columns, &matrix); - - let cramers_v = self.compute_cramers_v(); - - CorrelationResult { - columns, - pearson, - spearman, - cramers_v, - vif, - high_correlation_pairs, - } - } - - // ── Pearson correlation matrix ────────────────────────────── - - fn correlation_matrix(matrix: &Array2) -> Vec> { - let n_rows = matrix.nrows(); - let n_cols = matrix.ncols(); - if n_rows == 0 || n_cols == 0 { - return vec![]; - } - - // Column means - let means: Vec = (0..n_cols) - .map(|j| matrix.column(j).mean().unwrap_or(0.0)) - .collect(); - - // Column standard deviations - let stds: Vec = (0..n_cols) 
- .map(|j| { - let col = matrix.column(j); - let mean = means[j]; - let var = col.iter().map(|x| (x - mean).powi(2)).sum::() - / (n_rows as f64 - 1.0).max(1.0); - var.sqrt() - }) - .collect(); - - // Correlation - let mut result = vec![vec![0.0f64; n_cols]; n_cols]; - for i in 0..n_cols { - result[i][i] = 1.0; - for j in (i + 1)..n_cols { - if stds[i] < f64::EPSILON || stds[j] < f64::EPSILON { - result[i][j] = 0.0; - result[j][i] = 0.0; - continue; - } - let cov: f64 = (0..n_rows) - .map(|k| (matrix[[k, i]] - means[i]) * (matrix[[k, j]] - means[j])) - .sum::() - / (n_rows as f64 - 1.0).max(1.0); - let r = cov / (stds[i] * stds[j]); - let r = r.clamp(-1.0, 1.0); - result[i][j] = r; - result[j][i] = r; - } - } - result - } - - // ── Rank transform for Spearman ───────────────────────────── - - fn rank_matrix(matrix: &Array2) -> Array2 { - let n_rows = matrix.nrows(); - let n_cols = matrix.ncols(); - let mut ranked = Array2::::zeros((n_rows, n_cols)); - - for j in 0..n_cols { - let col = matrix.column(j); - let mut indexed: Vec<(usize, f64)> = col.iter().copied().enumerate().collect(); - indexed.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal)); - - // Average ranks for ties - let mut i = 0; - while i < indexed.len() { - let mut end = i + 1; - while end < indexed.len() && (indexed[end].1 - indexed[i].1).abs() < f64::EPSILON { - end += 1; - } - let avg_rank = (i..end).map(|k| k + 1).sum::() as f64 / (end - i) as f64; - for k in i..end { - ranked[[indexed[k].0, j]] = avg_rank; - } - i = end; - } - } - ranked - } - - // ── VIF (Variance Inflation Factor) ───────────────────────── - - fn compute_vif(&self, columns: &[String], matrix: &Array2) -> Vec { - let n_cols = matrix.ncols(); - if n_cols < 2 { - return vec![]; - } - - columns - .iter() - .enumerate() - .map(|(target_idx, col_name)| { - // Regress column target_idx on all other columns - // VIF = 1 / (1 - R²) - let r_squared = Self::r_squared_from_others(matrix, target_idx); - let vif = if 
(1.0 - r_squared).abs() < f64::EPSILON { - f64::INFINITY - } else { - 1.0 / (1.0 - r_squared) - }; - VifEntry { - column: col_name.clone(), - vif, - } - }) - .collect() - } - - /// Compute R² of column `target` regressed on all other columns via OLS. - fn r_squared_from_others(matrix: &Array2, target: usize) -> f64 { - let n = matrix.nrows(); - let p = matrix.ncols(); - if p < 2 || n <= p { - return 0.0; - } - - let y = matrix.column(target).to_owned(); - let y_mean = y.mean().unwrap_or(0.0); - - // Build X matrix with intercept (all other columns + 1s column) - let other_cols: Vec = (0..p).filter(|&i| i != target).collect(); - let x_cols = other_cols.len() + 1; // +1 for intercept - - let mut x = Array2::::ones((n, x_cols)); - for (new_j, &old_j) in other_cols.iter().enumerate() { - for i in 0..n { - x[[i, new_j + 1]] = matrix[[i, old_j]]; - } - } - - // OLS: beta = (X'X)^(-1) X'y - let xt = x.t(); - let xtx = xt.dot(&x); - let xty = xt.dot(&y); - - // Simple matrix inversion for small matrices using Gauss-Jordan - let beta = match Self::solve_linear(&xtx, &xty) { - Some(b) => b, - None => return 0.0, - }; - - let y_hat = x.dot(&beta); - let ss_res: f64 = y - .iter() - .zip(y_hat.iter()) - .map(|(a, b)| (a - b).powi(2)) - .sum(); - let ss_tot: f64 = y.iter().map(|a| (a - y_mean).powi(2)).sum(); - - if ss_tot < f64::EPSILON { - 0.0 - } else { - 1.0 - ss_res / ss_tot - } - } - - /// Solve Ax = b via LU-style Gaussian elimination. 
- fn solve_linear(a: &Array2, b: &Array1) -> Option> { - let n = a.nrows(); - if n != a.ncols() || n != b.len() { - return None; - } - - // Augmented matrix [A|b] - let mut aug = Array2::::zeros((n, n + 1)); - for i in 0..n { - for j in 0..n { - aug[[i, j]] = a[[i, j]]; - } - aug[[i, n]] = b[i]; - } - - // Forward elimination with partial pivoting - for col in 0..n { - // Find pivot - let mut max_row = col; - let mut max_val = aug[[col, col]].abs(); - for row in (col + 1)..n { - if aug[[row, col]].abs() > max_val { - max_val = aug[[row, col]].abs(); - max_row = row; - } - } - if max_val < 1e-12 { - return None; // Singular - } - - // Swap rows - if max_row != col { - for j in 0..=n { - let tmp = aug[[col, j]]; - aug[[col, j]] = aug[[max_row, j]]; - aug[[max_row, j]] = tmp; - } - } - - // Eliminate below - let pivot = aug[[col, col]]; - for row in (col + 1)..n { - let factor = aug[[row, col]] / pivot; - for j in col..=n { - aug[[row, j]] -= factor * aug[[col, j]]; - } - } - } - - // Back substitution - let mut x = Array1::::zeros(n); - for i in (0..n).rev() { - let mut sum = aug[[i, n]]; - for j in (i + 1)..n { - sum -= aug[[i, j]] * x[j]; - } - x[i] = sum / aug[[i, i]]; - } - - Some(x) - } - - // ── Cramér's V ────────────────────────────────────────────── - - fn compute_cramers_v(&self) -> Option { - let cat_cols = self.schema.categorical_columns(); - if cat_cols.len() < 2 { - return None; - } - - let n = self.df.height() as f64; - let n_cats = cat_cols.len(); - let mut matrix = vec![vec![0.0f64; n_cats]; n_cats]; - let columns: Vec = cat_cols.iter().map(|s| s.to_string()).collect(); - - for i in 0..n_cats { - matrix[i][i] = 1.0; - for j in (i + 1)..n_cats { - let v = self.cramers_v_pair(cat_cols[i], cat_cols[j], n); - matrix[i][j] = v; - matrix[j][i] = v; - } - } - - Some(CramersVResult { columns, matrix }) - } - - /// Compute Cramér's V between two categorical columns. 
- fn cramers_v_pair(&self, col_a: &str, col_b: &str, n: f64) -> f64 { - let a = match self.df.column(col_a) { - Ok(c) => c.cast(&DataType::String).unwrap_or_default(), - Err(_) => return 0.0, - }; - let b = match self.df.column(col_b) { - Ok(c) => c.cast(&DataType::String).unwrap_or_default(), - Err(_) => return 0.0, - }; - - let a_str = a.str().unwrap(); - let b_str = b.str().unwrap(); - - // Build contingency table - let mut contingency: IndexMap<(String, String), usize> = IndexMap::new(); - let mut a_counts: IndexMap = IndexMap::new(); - let mut b_counts: IndexMap = IndexMap::new(); - - for (va, vb) in a_str.into_iter().zip(b_str.into_iter()) { - let ka = va.unwrap_or("(NA)").to_string(); - let kb = vb.unwrap_or("(NA)").to_string(); - *contingency.entry((ka.clone(), kb.clone())).or_insert(0) += 1; - *a_counts.entry(ka).or_insert(0) += 1; - *b_counts.entry(kb).or_insert(0) += 1; - } - - // Chi-square statistic - let mut chi2 = 0.0f64; - for ((ka, kb), &observed) in &contingency { - let ea = *a_counts.get(ka).unwrap_or(&0) as f64; - let eb = *b_counts.get(kb).unwrap_or(&0) as f64; - let expected = ea * eb / n; - if expected > 0.0 { - chi2 += (observed as f64 - expected).powi(2) / expected; - } - } - - let r = a_counts.len() as f64; - let k = b_counts.len() as f64; - let min_dim = (r - 1.0).min(k - 1.0); - - if min_dim < 1.0 || n < 1.0 { - return 0.0; - } - - (chi2 / (n * min_dim)).sqrt().clamp(0.0, 1.0) - } - - // ── High-correlation pair detection ───────────────────────── - - fn find_high_correlations( - columns: &[String], - pearson: &[Vec], - spearman: &[Vec], - threshold: f64, - ) -> Vec { - let mut pairs = Vec::new(); - let n = columns.len(); - for i in 0..n { - for j in (i + 1)..n { - let p = pearson[i][j]; - let s = spearman[i][j]; - if p.abs() >= threshold || s.abs() >= threshold { - pairs.push(HighCorrPair { - col_a: columns[i].clone(), - col_b: columns[j].clone(), - pearson: p, - spearman: s, - }); - } - } - } - pairs.sort_by(|a, b| { - b.pearson - 
.abs() - .partial_cmp(&a.pearson.abs()) - .unwrap_or(std::cmp::Ordering::Equal) - }); - pairs - } - - // ── Utility: extract numeric columns into ndarray matrix ──── - - fn extract_numeric_matrix(&self, num_cols: &[&str]) -> (Vec, Array2) { - let mut valid_cols: Vec = Vec::new(); - let mut col_data: Vec> = Vec::new(); - - for &col_name in num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - valid_cols.push(col_name.to_string()); - col_data.push(vals); - } - } - - if valid_cols.is_empty() { - return (vec![], Array2::zeros((0, 0))); - } - - // Use the minimum row count across all columns (after dropping nulls) - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let n_cols = valid_cols.len(); - - let mut matrix = Array2::::zeros((min_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for i in 0..min_len { - matrix[[i, j]] = data[i]; - } - } - - (valid_cols, matrix) - } -} diff --git a/src/stats/cross_analysis.rs b/src/stats/cross_analysis.rs deleted file mode 100644 index 2556c2b..0000000 --- a/src/stats/cross_analysis.rs +++ /dev/null @@ -1,402 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OutlierClusterEntry { - pub column: String, - pub outlier_ratio_in_cluster: Vec, - pub cluster_with_most_outliers: usize, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MissingCorrelationEntry { - pub col_a: String, - pub col_b: String, - pub jaccard_similarity: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SimpsonEntry { - pub feature: String, - pub group: String, - pub overall_direction: f64, - pub subgroup_directions: Vec, - pub is_paradox: bool, -} - -#[derive(Debug, Clone, 
Serialize, Deserialize)] -pub struct ImportanceMissingEntry { - pub column: String, - pub variance_rank: f64, - pub missing_ratio: f64, - pub risk: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CrossAnalysisResult { - pub outlier_cluster: Vec, - pub missing_correlation: Vec, - pub simpson_candidates: Vec, - pub importance_vs_missing: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct CrossAnalysisStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_pairs: usize, -} - -impl<'a> CrossAnalysisStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, max_pairs: usize) -> Self { - Self { - df, - schema, - max_pairs, - } - } - - pub fn compute(&self) -> CrossAnalysisResult { - CrossAnalysisResult { - outlier_cluster: self.outlier_cluster_cross(), - missing_correlation: self.missing_correlation(), - simpson_candidates: self.simpson_paradox_scan(), - importance_vs_missing: self.importance_vs_missing(), - } - } - - // ── Outlier × Cluster interplay ───────────────────────────── - - fn outlier_cluster_cross(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - let n = self.df.height(); - if num_cols.len() < 2 || n < 20 { - return vec![]; - } - - // Simplified: split data into 3 equal bins per column and check outlier proportions - let mut results = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - let mut sorted = vals.clone(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - - let q1 = sorted[n / 4]; - let q3 = sorted[3 * n / 4]; - let iqr = q3 - q1; - let lower = q1 - 1.5 * iqr; - let upper = q3 + 1.5 * iqr; - - // Split into 3 clusters by tertiles - let t1 = sorted[n / 3]; - let t2 = sorted[2 * n / 3]; - - let mut cluster_outlier_counts = [0usize; 3]; - let mut cluster_sizes = [0usize; 3]; - - for &v in &vals { - let 
cluster = if v <= t1 { - 0 - } else if v <= t2 { - 1 - } else { - 2 - }; - cluster_sizes[cluster] += 1; - if v < lower || v > upper { - cluster_outlier_counts[cluster] += 1; - } - } - - let ratios: Vec = (0..3) - .map(|c| { - if cluster_sizes[c] > 0 { - cluster_outlier_counts[c] as f64 / cluster_sizes[c] as f64 - } else { - 0.0 - } - }) - .collect(); - - let max_idx = ratios - .iter() - .enumerate() - .max_by(|a, b| a.1.partial_cmp(b.1).unwrap_or(std::cmp::Ordering::Equal)) - .map(|(i, _)| i) - .unwrap_or(0); - - results.push(OutlierClusterEntry { - column: col_name.to_string(), - outlier_ratio_in_cluster: ratios, - cluster_with_most_outliers: max_idx, - }); - } - } - results.truncate(self.max_pairs); - results - } - - // ── Missing-value correlation (Jaccard similarity on NaN masks) ── - - fn missing_correlation(&self) -> Vec { - // Columns with at least some missing values - let cols_with_missing: Vec<(&str, Vec)> = self - .schema - .columns - .iter() - .filter(|c| c.n_missing > 0) - .filter_map(|c| { - let col = self.df.column(&c.name).ok()?; - let mask: Vec = (0..col.len()) - .map(|i| col.get(i).ok().map(|v| v == AnyValue::Null).unwrap_or(true)) - .collect(); - Some((c.name.as_str(), mask)) - }) - .collect(); - - let mut results = Vec::new(); - for i in 0..cols_with_missing.len() { - for j in (i + 1)..cols_with_missing.len() { - let (na, ma) = &cols_with_missing[i]; - let (nb, mb) = &cols_with_missing[j]; - - let jaccard = Self::jaccard_bool(ma, mb); - if jaccard > 0.1 { - results.push(MissingCorrelationEntry { - col_a: na.to_string(), - col_b: nb.to_string(), - jaccard_similarity: jaccard, - }); - } - } - } - - results.sort_by(|a, b| { - b.jaccard_similarity - .partial_cmp(&a.jaccard_similarity) - .unwrap_or(std::cmp::Ordering::Equal) - }); - results.truncate(self.max_pairs); - results - } - - // ── Simpson's paradox scan ────────────────────────────────── - - fn simpson_paradox_scan(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - let 
cat_cols = self.schema.categorical_columns(); - if num_cols.len() < 2 || cat_cols.is_empty() { - return vec![]; - } - - let mut results = Vec::new(); - - // For each pair (numeric_x, numeric_y), grouped by each categorical - for ci in 0..num_cols.len().min(5) { - for cj in (ci + 1)..num_cols.len().min(5) { - let x_name = num_cols[ci]; - let y_name = num_cols[cj]; - - let x_vals = match self - .df - .column(x_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - Some(v) => v, - None => continue, - }; - let y_vals = match self - .df - .column(y_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - Some(v) => v, - None => continue, - }; - - let min_len = x_vals.len().min(y_vals.len()); - let overall_corr = Self::pearson(&x_vals[..min_len], &y_vals[..min_len]); - - for &cat_name in &cat_cols { - if let Ok(cat_col) = self.df.column(cat_name) { - // Group by category - let groups = Self::group_indices(cat_col); - if groups.len() < 2 || groups.len() > 10 { - continue; - } - - let sub_corrs: Vec = groups - .iter() - .map(|(_, indices)| { - let sx: Vec = indices - .iter() - .filter(|&&i| i < min_len) - .map(|&i| x_vals[i]) - .collect(); - let sy: Vec = indices - .iter() - .filter(|&&i| i < min_len) - .map(|&i| y_vals[i]) - .collect(); - if sx.len() > 5 { - Self::pearson(&sx, &sy) - } else { - 0.0 - } - }) - .collect(); - - // Check for paradox: overall direction differs from subgroup directions - let paradox = sub_corrs.iter().all(|&r| r > 0.0 && overall_corr < -0.1) - || sub_corrs.iter().all(|&r| r < 0.0 && overall_corr > 0.1); - - if paradox { - results.push(SimpsonEntry { - feature: format!("{} vs {}", x_name, y_name), - group: cat_name.to_string(), - overall_direction: overall_corr, - subgroup_directions: sub_corrs, - is_paradox: true, - }); - } - } - } - } - } - - results - } - - // ── Feature importance vs missing ─────────────────────────── - - fn importance_vs_missing(&self) -> Vec { - let num_cols = 
self.schema.numeric_columns(); - if num_cols.is_empty() { - return vec![]; - } - - // Compute variance as a proxy for importance - let mut var_entries: Vec<(String, f64, f64)> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - if vals.is_empty() { - continue; - } - let mean = vals.iter().sum::() / vals.len() as f64; - let var = vals.iter().map(|v| (v - mean).powi(2)).sum::() / vals.len() as f64; - - let missing_ratio = self - .schema - .columns - .iter() - .find(|c| c.name == col_name) - .map(|c| c.missing_ratio) - .unwrap_or(0.0); - - var_entries.push((col_name.to_string(), var, missing_ratio)); - } - } - - if var_entries.is_empty() { - return vec![]; - } - - // Rank by variance (higher = more important) - var_entries.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal)); - - var_entries - .iter() - .enumerate() - .map(|(rank, (name, _var, missing_ratio))| { - let variance_rank = (rank + 1) as f64 / var_entries.len() as f64; - let risk = if variance_rank < 0.3 && *missing_ratio > 0.1 { - "high" - } else if variance_rank < 0.5 && *missing_ratio > 0.05 { - "medium" - } else { - "low" - }; - ImportanceMissingEntry { - column: name.clone(), - variance_rank, - missing_ratio: *missing_ratio, - risk: risk.to_string(), - } - }) - .collect() - } - - // ── Helpers ───────────────────────────────────────────────── - - fn jaccard_bool(a: &[bool], b: &[bool]) -> f64 { - let min_len = a.len().min(b.len()); - let mut inter = 0usize; - let mut union = 0usize; - for i in 0..min_len { - if a[i] || b[i] { - union += 1; - } - if a[i] && b[i] { - inter += 1; - } - } - if union == 0 { - 0.0 - } else { - inter as f64 / union as f64 - } - } - - fn group_indices(col: &Column) -> Vec<(String, Vec)> { - let mut groups: std::collections::HashMap> = - std::collections::HashMap::new(); - for i in 0..col.len() { - if let Ok(val) = col.get(i) { - let key = 
format!("{}", val); - groups.entry(key).or_default().push(i); - } - } - groups.into_iter().collect() - } - - fn pearson(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return 0.0; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 0..x.len().min(y.len()) { - let dx = x[i] - mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - cov / (sx * sy).sqrt() - } -} diff --git a/src/stats/descriptive.rs b/src/stats/descriptive.rs deleted file mode 100644 index 5cdac03..0000000 --- a/src/stats/descriptive.rs +++ /dev/null @@ -1,278 +0,0 @@ -use indexmap::IndexMap; -use polars::prelude::*; -use serde::{Deserialize, Serialize}; -use statrs::statistics::{Data, Distribution, Max, Min, OrderStatistics}; - -use crate::core::schema::DataSchema; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct NumericColumnStats { - pub column: String, - pub count: usize, - pub mean: f64, - pub std: f64, - pub se: f64, // standard error of the mean - pub cv: f64, // coefficient of variation - pub mad: f64, // median absolute deviation - pub min: f64, - pub p5: f64, - pub q1: f64, - pub median: f64, - pub q3: f64, - pub p95: f64, - pub max: f64, - pub range: f64, - pub iqr: f64, - pub skewness: f64, - pub kurtosis: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CategoricalColumnStats { - pub column: String, - pub count: usize, - pub unique: usize, - pub top: String, - pub freq: usize, - pub frequencies: Vec<(String, usize)>, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DescriptiveResult { - pub numeric: Vec, - pub categorical: Vec, - pub summary: IndexMap, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct 
DescriptiveStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> DescriptiveStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - /// Compute descriptive statistics for all columns. - pub fn compute(&self) -> DescriptiveResult { - let numeric = self.compute_numeric(); - let categorical = self.compute_categorical(); - let summary = self.build_summary(&numeric, &categorical); - - DescriptiveResult { - numeric, - categorical, - summary, - } - } - - /// Numeric column statistics. - fn compute_numeric(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - - num_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let values = Self::column_to_f64_vec(col)?; - - if values.is_empty() { - return None; - } - - Some(Self::compute_numeric_stats(col_name, &values)) - }) - .collect() - } - - /// Compute stats for a single numeric column. - fn compute_numeric_stats(name: &str, values: &[f64]) -> NumericColumnStats { - let n = values.len(); - let mut data = Data::new(values.to_vec()); - - let mean = data.mean().unwrap_or(f64::NAN); - let std = data.std_dev().unwrap_or(f64::NAN); - let variance = data.variance().unwrap_or(f64::NAN); - let median = data.median(); - let min = data.min(); - let max = data.max(); - - // Percentiles - let p5 = data.percentile(5); - let q1 = data.percentile(25); - let q3 = data.percentile(75); - let p95 = data.percentile(95); - let iqr = q3 - q1; - - // Standard error of the mean - let se = if n > 0 { - std / (n as f64).sqrt() - } else { - f64::NAN - }; - - // Coefficient of variation - let cv = if mean.abs() > f64::EPSILON { - std / mean.abs() - } else { - f64::NAN - }; - - // Median absolute deviation - let mad = { - let mut deviations: Vec = values.iter().map(|x| (x - median).abs()).collect(); - deviations.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - if deviations.is_empty() { - f64::NAN - } else { - let 
mid = deviations.len() / 2; - if deviations.len() % 2 == 0 { - (deviations[mid - 1] + deviations[mid]) / 2.0 - } else { - deviations[mid] - } - } - }; - - // Skewness (Fisher's definition) - let skewness = if n >= 3 && variance > f64::EPSILON { - let m3: f64 = values - .iter() - .map(|x| ((x - mean) / std).powi(3)) - .sum::() - / n as f64; - let adjustment = ((n * (n - 1)) as f64).sqrt() / (n - 2) as f64; - m3 * adjustment - } else { - f64::NAN - }; - - // Excess kurtosis (Fisher's definition) - let kurtosis = if n >= 4 && variance > f64::EPSILON { - let m4: f64 = values - .iter() - .map(|x| ((x - mean) / std).powi(4)) - .sum::() - / n as f64; - // Adjusted Fisher kurtosis - let nf = n as f64; - let excess = - (nf - 1.0) / ((nf - 2.0) * (nf - 3.0)) * ((nf + 1.0) * m4 - 3.0 * (nf - 1.0)); - excess - } else { - f64::NAN - }; - - NumericColumnStats { - column: name.to_string(), - count: n, - mean, - std, - se, - cv, - mad, - min, - p5, - q1, - median, - q3, - p95, - max, - range: max - min, - iqr, - skewness, - kurtosis, - } - } - - /// Categorical column statistics. 
- fn compute_categorical(&self) -> Vec { - let cat_cols = self.schema.categorical_columns(); - - cat_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let str_col = col.cast(&DataType::String).ok()?; - let ca = str_col.str().ok()?; - - let mut freq_map: IndexMap = IndexMap::new(); - for opt_val in ca.into_iter() { - let key = opt_val.unwrap_or("(missing)").to_string(); - *freq_map.entry(key).or_insert(0) += 1; - } - - // Sort by frequency descending - let mut frequencies: Vec<(String, usize)> = freq_map.into_iter().collect(); - frequencies.sort_by(|a, b| b.1.cmp(&a.1)); - - let (top, freq) = frequencies.first().cloned().unwrap_or(("".to_string(), 0)); - - let count = ca.len(); - let unique = ca.n_unique().unwrap_or(0); - - Some(CategoricalColumnStats { - column: col_name.to_string(), - count, - unique, - top, - freq, - frequencies, - }) - }) - .collect() - } - - /// Build an overall summary. - fn build_summary( - &self, - numeric: &[NumericColumnStats], - categorical: &[CategoricalColumnStats], - ) -> IndexMap { - let mut summary = IndexMap::new(); - summary.insert( - "n_rows".into(), - serde_json::Value::Number(self.schema.n_rows.into()), - ); - summary.insert( - "n_cols".into(), - serde_json::Value::Number(self.schema.n_cols.into()), - ); - summary.insert( - "n_numeric".into(), - serde_json::Value::Number(numeric.len().into()), - ); - summary.insert( - "n_categorical".into(), - serde_json::Value::Number(categorical.len().into()), - ); - summary.insert( - "memory_mb".into(), - serde_json::json!(self.schema.memory_usage_bytes as f64 / 1_048_576.0), - ); - summary - } - - // ── Helpers ───────────────────────────────────────────────── - - /// Extract non-null f64 values from a column. 
- pub(crate) fn column_to_f64_vec(col: &Column) -> Option> { - let casted = col.cast(&DataType::Float64).ok()?; - let ca = casted.f64().ok()?; - let values: Vec = ca - .into_iter() - .filter_map(|v| v) - .filter(|v| v.is_finite()) - .collect(); - if values.is_empty() { - None - } else { - Some(values) - } - } -} diff --git a/src/stats/distribution.rs b/src/stats/distribution.rs deleted file mode 100644 index 5d80705..0000000 --- a/src/stats/distribution.rs +++ /dev/null @@ -1,294 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; -use statrs::distribution::{ContinuousCDF, Normal}; -use statrs::statistics::{Data, Distribution}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DistributionColumnResult { - pub column: String, - pub skewness: f64, - pub kurtosis: f64, - pub skewness_interpretation: String, - pub kurtosis_interpretation: String, - /// Shapiro-Wilk test (only for n ≤ 5000) - pub shapiro_p: Option, - /// Kolmogorov-Smirnov test against normal - pub ks_statistic: f64, - pub ks_p_value: f64, - /// D'Agostino-Pearson omnibus test - pub dagostino_statistic: Option, - pub dagostino_p_value: Option, - /// Anderson-Darling statistic - pub anderson_statistic: f64, - /// Whether column appears normally distributed (all tests agree at α=0.05) - pub is_normal: bool, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DistributionResult { - pub columns: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct DistributionStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> DistributionStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> DistributionResult { - let num_cols = self.schema.numeric_columns(); - - let 
columns: Vec = num_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let values = DescriptiveStats::column_to_f64_vec(col)?; - if values.len() < 8 { - return None; - } - Some(self.analyse_column(col_name, &values)) - }) - .collect(); - - DistributionResult { columns } - } - - fn analyse_column(&self, name: &str, values: &[f64]) -> DistributionColumnResult { - let n = values.len(); - let data = Data::new(values.to_vec()); - let mean = data.mean().unwrap_or(0.0); - let std = data.std_dev().unwrap_or(1.0); - - // ── Skewness & kurtosis ───────────────────────────────── - let skewness = Self::skewness(values, mean, std); - let kurtosis = Self::excess_kurtosis(values, mean, std); - - let skewness_interpretation = match skewness.abs() { - s if s < 0.5 => "approximately symmetric".to_string(), - s if s < 1.0 => { - if skewness > 0.0 { - "moderately right-skewed".to_string() - } else { - "moderately left-skewed".to_string() - } - } - _ => { - if skewness > 0.0 { - "highly right-skewed".to_string() - } else { - "highly left-skewed".to_string() - } - } - }; - - let kurtosis_interpretation = match kurtosis { - k if k < -1.0 => "platykurtic (light-tailed)".to_string(), - k if k > 1.0 => "leptokurtic (heavy-tailed)".to_string(), - _ => "mesokurtic (normal-like tails)".to_string(), - }; - - // ── Kolmogorov-Smirnov test against Normal ────────────── - let (ks_statistic, ks_p_value) = Self::ks_test_normal(values, mean, std); - - // ── Anderson-Darling test ─────────────────────────────── - let anderson_statistic = Self::anderson_darling(values, mean, std); - - // ── D'Agostino-Pearson omnibus test ───────────────────── - let (dagostino_statistic, dagostino_p_value) = if n >= 20 { - let (stat, p) = Self::dagostino_pearson(values, mean, std); - (Some(stat), Some(p)) - } else { - (None, None) - }; - - // ── Normality consensus ───────────────────────────────── - let alpha = 0.05; - let mut normal_votes = 0u32; - let mut total_tests = 0u32; - - 
total_tests += 1; - if ks_p_value > alpha { - normal_votes += 1; - } - - // Anderson-Darling: compare with critical value for normal at 5% - // Approximate critical value ≈ 0.752 - total_tests += 1; - if anderson_statistic < 0.752 { - normal_votes += 1; - } - - if let Some(p) = dagostino_p_value { - total_tests += 1; - if p > alpha { - normal_votes += 1; - } - } - - let is_normal = normal_votes > total_tests / 2; - - DistributionColumnResult { - column: name.to_string(), - skewness, - kurtosis, - skewness_interpretation, - kurtosis_interpretation, - shapiro_p: None, // Shapiro-Wilk is complex; delegate to Python/scipy - ks_statistic, - ks_p_value, - dagostino_statistic, - dagostino_p_value, - anderson_statistic, - is_normal, - } - } - - // ── Statistical computation helpers ───────────────────────── - - fn skewness(values: &[f64], mean: f64, std: f64) -> f64 { - let n = values.len() as f64; - if n < 3.0 || std < f64::EPSILON { - return f64::NAN; - } - let m3: f64 = values - .iter() - .map(|x| ((x - mean) / std).powi(3)) - .sum::() - / n; - let adjustment = (n * (n - 1.0)).sqrt() / (n - 2.0); - m3 * adjustment - } - - fn excess_kurtosis(values: &[f64], mean: f64, std: f64) -> f64 { - let n = values.len() as f64; - if n < 4.0 || std < f64::EPSILON { - return f64::NAN; - } - let m4: f64 = values - .iter() - .map(|x| ((x - mean) / std).powi(4)) - .sum::() - / n; - let excess = (n - 1.0) / ((n - 2.0) * (n - 3.0)) * ((n + 1.0) * m4 - 3.0 * (n - 1.0)); - excess - } - - /// One-sample Kolmogorov-Smirnov test against Normal(mean, std). 
- fn ks_test_normal(values: &[f64], mean: f64, std: f64) -> (f64, f64) { - let n = values.len(); - if n == 0 || std < f64::EPSILON { - return (f64::NAN, f64::NAN); - } - - let normal = Normal::new(mean, std).unwrap_or(Normal::new(0.0, 1.0).unwrap()); - - let mut sorted = values.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - - let mut d_max = 0.0f64; - for (i, &x) in sorted.iter().enumerate() { - let ecdf = (i + 1) as f64 / n as f64; - let cdf = normal.cdf(x); - let ecdf_prev = i as f64 / n as f64; - d_max = d_max.max((ecdf - cdf).abs()).max((ecdf_prev - cdf).abs()); - } - - // P-value approximation (Kolmogorov distribution) - let sqrt_n = (n as f64).sqrt(); - let p_value = Self::kolmogorov_p_value(d_max, sqrt_n); - - (d_max, p_value) - } - - /// Kolmogorov distribution p-value approximation. - fn kolmogorov_p_value(d: f64, sqrt_n: f64) -> f64 { - let lambda = (sqrt_n + 0.12 + 0.11 / sqrt_n) * d; - if lambda < 0.001 { - return 1.0; - } - // Series approximation - let mut p = 0.0f64; - for k in 1..=100 { - let sign = if k % 2 == 0 { -1.0 } else { 1.0 }; - let term = sign * (-2.0 * (k as f64).powi(2) * lambda.powi(2)).exp(); - p += term; - } - let p = 2.0 * p; - p.clamp(0.0, 1.0) - } - - /// Anderson-Darling statistic against Normal(mean, std). 
- fn anderson_darling(values: &[f64], mean: f64, std: f64) -> f64 { - let n = values.len(); - if n == 0 || std < f64::EPSILON { - return f64::NAN; - } - - let normal = Normal::new(mean, std).unwrap_or(Normal::new(0.0, 1.0).unwrap()); - - let mut sorted = values.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - - let nf = n as f64; - let mut s = 0.0f64; - for (i, &x) in sorted.iter().enumerate() { - let f = normal.cdf(x).clamp(1e-10, 1.0 - 1e-10); - let f_rev = normal.cdf(sorted[n - 1 - i]).clamp(1e-10, 1.0 - 1e-10); - let w = (2.0 * (i as f64) + 1.0) / nf; - s += w * (f.ln() + (1.0 - f_rev).ln()); - } - - let a2 = -nf - s; - // Apply correction factor for estimated parameters - a2 * (1.0 + 0.75 / nf + 2.25 / (nf * nf)) - } - - /// D'Agostino-Pearson omnibus test for normality. - fn dagostino_pearson(values: &[f64], mean: f64, std: f64) -> (f64, f64) { - let n = values.len() as f64; - if n < 20.0 { - return (f64::NAN, f64::NAN); - } - - let skew = Self::skewness(values, mean, std); - let kurt = Self::excess_kurtosis(values, mean, std); - - // Z-score for skewness (D'Agostino 1970) - let y = skew * ((n + 1.0) * (n + 3.0) / (6.0 * (n - 2.0))).sqrt(); - let beta2 = 3.0 * (n * n + 27.0 * n - 70.0) * (n + 1.0) * (n + 3.0) - / ((n - 2.0) * (n + 5.0) * (n + 7.0) * (n + 9.0)); - let w2 = (2.0 * (beta2 - 1.0)).sqrt() - 1.0; - let delta = 1.0 / (0.5 * w2.ln()).sqrt(); - let alpha_s = (2.0 / (w2 - 1.0)).sqrt(); - let z_s = delta * (y / alpha_s + ((y / alpha_s).powi(2) + 1.0).sqrt()).ln(); - - // Z-score for kurtosis (Anscombe & Glynn 1983) - let e_kurt = 3.0 * (n - 1.0) / (n + 1.0) - 3.0; // Expected kurtosis - let var_kurt = - 24.0 * n * (n - 2.0) * (n - 3.0) / ((n + 1.0).powi(2) * (n + 3.0) * (n + 5.0)); - let z_k = if var_kurt > 0.0 { - (kurt - e_kurt) / var_kurt.sqrt() - } else { - 0.0 - }; - - // Omnibus K² = Z_s² + Z_k² - let k2 = z_s.powi(2) + z_k.powi(2); - - // P-value from chi-square distribution with df=2 - let p_value = 
(-k2 / 2.0).exp(); // Quick approximation for chi2(df=2) - - (k2, p_value) - } -} diff --git a/src/stats/duplicates.rs b/src/stats/duplicates.rs deleted file mode 100644 index 3cf1bb2..0000000 --- a/src/stats/duplicates.rs +++ /dev/null @@ -1,104 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DuplicateResult { - pub n_exact_duplicates: usize, - pub duplicate_ratio: f64, - /// Per-column uniqueness info. - pub column_uniqueness: Vec, - /// Columns that could serve as unique keys (100% unique, no nulls). - pub unique_key_candidates: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnUniqueness { - pub column: String, - pub n_unique: usize, - pub uniqueness_ratio: f64, - pub n_duplicated_values: usize, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct DuplicateStats<'a> { - df: &'a DataFrame, - #[allow(dead_code)] - schema: &'a DataSchema, -} - -impl<'a> DuplicateStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> DuplicateResult { - let n_rows = self.df.height(); - - // ── Exact row duplicates ──────────────────────────────── - let n_exact_duplicates = self - .df - .is_duplicated() - .map(|mask| mask.sum().unwrap_or(0) as usize) - .unwrap_or(0); - - let duplicate_ratio = if n_rows > 0 { - n_exact_duplicates as f64 / n_rows as f64 - } else { - 0.0 - }; - - // ── Per-column uniqueness ─────────────────────────────── - let column_uniqueness: Vec = self - .df - .get_columns() - .iter() - .map(|col| { - let n_unique = col.n_unique().unwrap_or(0); - let uniqueness_ratio = if n_rows > 0 { - n_unique as f64 / n_rows as f64 - } else { - 0.0 - }; - // Count of values that appear more than once - let n_duplicated_values = if n_rows > 
n_unique { - n_rows - n_unique - } else { - 0 - }; - - ColumnUniqueness { - column: col.name().to_string(), - n_unique, - uniqueness_ratio, - n_duplicated_values, - } - }) - .collect(); - - // ── Unique key candidates ─────────────────────────────── - let unique_key_candidates: Vec = column_uniqueness - .iter() - .filter(|cu| { - cu.uniqueness_ratio >= 1.0 - f64::EPSILON - && self - .df - .column(&cu.column) - .map(|c| c.null_count() == 0) - .unwrap_or(false) - }) - .map(|cu| cu.column.clone()) - .collect(); - - DuplicateResult { - n_exact_duplicates, - duplicate_ratio, - column_uniqueness, - unique_key_candidates, - } - } -} diff --git a/src/stats/feature_importance.rs b/src/stats/feature_importance.rs deleted file mode 100644 index 3621f6b..0000000 --- a/src/stats/feature_importance.rs +++ /dev/null @@ -1,170 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct FeatureRanking { - pub column: String, - pub variance: f64, - pub std: f64, - pub cv: f64, // coefficient of variation - pub range: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MeanAbsCorrelation { - pub column: String, - pub mean_abs_corr: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct FeatureImportanceResult { - pub variance_ranking: Vec, - pub mean_abs_correlations: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct FeatureImportanceStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> FeatureImportanceStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> FeatureImportanceResult { - let variance_ranking = self.variance_ranking(); - let mean_abs_correlations = 
self.mean_abs_correlation(); - - FeatureImportanceResult { - variance_ranking, - mean_abs_correlations, - } - } - - fn variance_ranking(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - let mut rankings: Vec = num_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let values = DescriptiveStats::column_to_f64_vec(col)?; - let n = values.len() as f64; - if n < 2.0 { - return None; - } - - let mean = values.iter().sum::() / n; - let variance = values.iter().map(|x| (x - mean).powi(2)).sum::() / (n - 1.0); - let std = variance.sqrt(); - let cv = if mean.abs() > f64::EPSILON { - std / mean.abs() - } else { - f64::NAN - }; - let min = values.iter().cloned().fold(f64::INFINITY, f64::min); - let max = values.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - - Some(FeatureRanking { - column: col_name.to_string(), - variance, - std, - cv, - range: max - min, - }) - }) - .collect(); - - rankings.sort_by(|a, b| { - b.variance - .partial_cmp(&a.variance) - .unwrap_or(std::cmp::Ordering::Equal) - }); - rankings - } - - fn mean_abs_correlation(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return vec![]; - } - - // Compute pairwise Pearson correlations - let mut col_vals: Vec<(&str, Vec)> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_vals.push((col_name, vals)); - } - } - - let min_len = col_vals.iter().map(|(_, v)| v.len()).min().unwrap_or(0); - let p = col_vals.len(); - - let mut results: Vec = Vec::new(); - - for i in 0..p { - let mut sum_abs_r = 0.0; - let mut count = 0; - for j in 0..p { - if i == j { - continue; - } - let r = Self::pearson(&col_vals[i].1[..min_len], &col_vals[j].1[..min_len]); - if r.is_finite() { - sum_abs_r += r.abs(); - count += 1; - } - } - results.push(MeanAbsCorrelation { - column: col_vals[i].0.to_string(), - mean_abs_corr: if 
count > 0 { - sum_abs_r / count as f64 - } else { - 0.0 - }, - }); - } - - results.sort_by(|a, b| { - b.mean_abs_corr - .partial_cmp(&a.mean_abs_corr) - .unwrap_or(std::cmp::Ordering::Equal) - }); - results - } - - fn pearson(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return f64::NAN; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 0..x.len().min(y.len()) { - let dx = x[i] - mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - cov / (sx * sy).sqrt() - } -} diff --git a/src/stats/feature_insights.rs b/src/stats/feature_insights.rs deleted file mode 100644 index 7ec2025..0000000 --- a/src/stats/feature_insights.rs +++ /dev/null @@ -1,378 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct InteractionEntry { - pub col_a: String, - pub col_b: String, - pub interaction_strength: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MonotonicEntry { - pub col_a: String, - pub col_b: String, - pub pearson: f64, - pub spearman: f64, - pub gap: f64, - pub is_nonlinear_monotonic: bool, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BinningEntry { - pub column: String, - pub equal_width_entropy: f64, - pub equal_freq_entropy: f64, - pub recommended: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CardinalityEntry { - pub column: String, - pub cardinality: usize, - pub cardinality_ratio: f64, - pub recommended_encoding: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct LeakageEntry { - pub column: String, - pub risk: 
String, // "low", "medium", "high" - pub reason: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct FeatureInsightsResult { - pub interactions: Vec, - pub monotonic: Vec, - pub binning: Vec, - pub cardinality: Vec, - pub leakage: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct FeatureInsightsStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_sample: usize, -} - -impl<'a> FeatureInsightsStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, max_sample: usize) -> Self { - Self { - df, - schema, - max_sample, - } - } - - pub fn compute(&self) -> FeatureInsightsResult { - let interactions = self.interaction_detection(); - let monotonic = self.monotonic_detection(); - let binning = self.binning_analysis(10); - let cardinality = self.cardinality_analysis(); - let leakage = self.leakage_detection(); - - FeatureInsightsResult { - interactions, - monotonic, - binning, - cardinality, - leakage, - } - } - - fn interaction_detection(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 3 { - return vec![]; - } - - let mut col_vals: Vec<(&str, Vec)> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_vals.push((col_name, vals)); - } - } - - let min_len = col_vals.iter().map(|(_, v)| v.len()).min().unwrap_or(0); - let sample_len = min_len.min(self.max_sample); - let mut results = Vec::new(); - - for i in 0..col_vals.len() { - for j in (i + 1)..col_vals.len() { - // Compute product interaction correlated with other features - let product: Vec = (0..sample_len) - .map(|k| col_vals[i].1[k] * col_vals[j].1[k]) - .collect(); - - let mut max_corr = 0.0f64; - for k in 0..col_vals.len() { - if k == i || k == j { - continue; - } - let r = Self::pearson(&product, &col_vals[k].1[..sample_len]); - if r.abs() > max_corr.abs() { - max_corr = r; - 
} - } - - if max_corr.abs() > 0.3 { - results.push(InteractionEntry { - col_a: col_vals[i].0.to_string(), - col_b: col_vals[j].0.to_string(), - interaction_strength: max_corr.abs(), - }); - } - } - } - - results.sort_by(|a, b| { - b.interaction_strength - .partial_cmp(&a.interaction_strength) - .unwrap_or(std::cmp::Ordering::Equal) - }); - results.truncate(20); - results - } - - fn monotonic_detection(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return vec![]; - } - - let mut col_vals: Vec<(&str, Vec)> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_vals.push((col_name, vals)); - } - } - - let min_len = col_vals.iter().map(|(_, v)| v.len()).min().unwrap_or(0); - let mut results = Vec::new(); - - for i in 0..col_vals.len() { - for j in (i + 1)..col_vals.len() { - let pearson = Self::pearson(&col_vals[i].1[..min_len], &col_vals[j].1[..min_len]); - let spearman = Self::spearman(&col_vals[i].1[..min_len], &col_vals[j].1[..min_len]); - let gap = (spearman.abs() - pearson.abs()).abs(); - - results.push(MonotonicEntry { - col_a: col_vals[i].0.to_string(), - col_b: col_vals[j].0.to_string(), - pearson, - spearman, - gap, - is_nonlinear_monotonic: gap > 0.1 && spearman.abs() > 0.5, - }); - } - } - results - } - - fn binning_analysis(&self, n_bins: usize) -> Vec { - let num_cols = self.schema.numeric_columns(); - - num_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let values = DescriptiveStats::column_to_f64_vec(col)?; - if values.len() < n_bins * 2 { - return None; - } - - let ew_entropy = Self::equal_width_entropy(&values, n_bins); - let ef_entropy = Self::equal_freq_entropy(&values, n_bins); - - let recommended = if ef_entropy < ew_entropy { - "equal_frequency" - } else { - "equal_width" - }; - - Some(BinningEntry { - column: col_name.to_string(), - 
equal_width_entropy: ew_entropy, - equal_freq_entropy: ef_entropy, - recommended: recommended.to_string(), - }) - }) - .collect() - } - - fn cardinality_analysis(&self) -> Vec { - let n_rows = self.df.height(); - - self.schema - .categorical_columns() - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let cardinality = col.n_unique().unwrap_or(0); - let cardinality_ratio = if n_rows > 0 { - cardinality as f64 / n_rows as f64 - } else { - 0.0 - }; - - let recommended_encoding = if cardinality <= 2 { - "binary" - } else if cardinality <= 10 { - "one_hot" - } else if cardinality <= 50 { - "target_encoding" - } else { - "hash_encoding" - }; - - Some(CardinalityEntry { - column: col_name.to_string(), - cardinality, - cardinality_ratio, - recommended_encoding: recommended_encoding.to_string(), - }) - }) - .collect() - } - - fn leakage_detection(&self) -> Vec { - let n_rows = self.df.height(); - let mut entries = Vec::new(); - - for info in &self.schema.columns { - let mut risk = "low".to_string(); - let mut reason = String::new(); - - // High cardinality + unique → possible ID leakage - if info.n_unique as f64 / n_rows.max(1) as f64 > 0.99 && info.n_missing == 0 { - risk = "high".to_string(); - reason = "Near-unique column (possible ID or future info leakage)".into(); - } - // Perfect or near-zero entropy in target-like columns - else if info.missing_ratio > 0.5 { - risk = "medium".to_string(); - reason = - "High missing ratio may indicate data not available at prediction time".into(); - } - - if risk != "low" { - entries.push(LeakageEntry { - column: info.name.clone(), - risk, - reason, - }); - } - } - - entries - } - - // ── Helpers ───────────────────────────────────────────────── - - fn pearson(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return 0.0; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 
0..x.len().min(y.len()) { - let dx = x[i] - mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - cov / (sx * sy).sqrt() - } - - fn spearman(x: &[f64], y: &[f64]) -> f64 { - let rx = Self::rank(x); - let ry = Self::rank(y); - Self::pearson(&rx, &ry) - } - - fn rank(values: &[f64]) -> Vec { - let mut indexed: Vec<(usize, f64)> = values.iter().copied().enumerate().collect(); - indexed.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal)); - let mut ranks = vec![0.0f64; values.len()]; - let mut i = 0; - while i < indexed.len() { - let mut j = i + 1; - while j < indexed.len() && (indexed[j].1 - indexed[i].1).abs() < f64::EPSILON { - j += 1; - } - let avg = (i..j).map(|k| k + 1).sum::() as f64 / (j - i) as f64; - for k in i..j { - ranks[indexed[k].0] = avg; - } - i = j; - } - ranks - } - - fn equal_width_entropy(values: &[f64], n_bins: usize) -> f64 { - let min = values.iter().cloned().fold(f64::INFINITY, f64::min); - let max = values.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - let range = (max - min).max(f64::EPSILON); - - let mut bins = vec![0usize; n_bins]; - for &v in values { - let idx = ((v - min) / range * (n_bins - 1) as f64) as usize; - bins[idx.min(n_bins - 1)] += 1; - } - - let n = values.len() as f64; - bins.iter() - .filter(|&&c| c > 0) - .map(|&c| { - let p = c as f64 / n; - -p * p.log2() - }) - .sum() - } - - fn equal_freq_entropy(values: &[f64], n_bins: usize) -> f64 { - let mut sorted = values.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let bin_size = (sorted.len() / n_bins).max(1); - - let mut bins = Vec::new(); - for chunk in sorted.chunks(bin_size) { - bins.push(chunk.len()); - } - - let n = values.len() as f64; - bins.iter() - .filter(|&&c| c > 0) - .map(|&c| { - let p = c as f64 / n; - -p * p.log2() - }) - .sum() - } -} diff --git a/src/stats/insight_engine.rs 
b/src/stats/insight_engine.rs deleted file mode 100644 index da509f6..0000000 --- a/src/stats/insight_engine.rs +++ /dev/null @@ -1,618 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Types ────────────────────────────────────────────────────────── - -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -pub enum InsightSeverity { - Info, - Warning, - Critical, -} - -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -pub enum InsightType { - HighMissing, - ConstantColumn, - HighCardinality, - DuplicateRows, - SkewedDistribution, - HighCorrelation, - OutlierProportion, - ClassImbalance, - InfiniteValues, - LowVariance, - IdLikeColumn, - MixedTypes, - DatetimePattern, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Insight { - pub insight_type: InsightType, - pub severity: InsightSeverity, - pub column: Option, - pub message: String, - pub detail: String, - pub recommendation: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct InsightEngineResult { - pub insights: Vec, - pub summary: InsightSummary, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct InsightSummary { - pub total: usize, - pub critical: usize, - pub warning: usize, - pub info: usize, -} - -// ─── Engine ───────────────────────────────────────────────────────── - -pub struct InsightEngine<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - missing_threshold: f64, - high_card_threshold: f64, - corr_threshold: f64, - outlier_threshold: f64, - skew_threshold: f64, - variance_threshold: f64, -} - -impl<'a> InsightEngine<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { - df, - schema, - missing_threshold: 0.3, - high_card_threshold: 0.9, - corr_threshold: 0.95, - outlier_threshold: 0.05, - skew_threshold: 2.0, - variance_threshold: 0.01, - } - } - - pub fn 
with_thresholds( - mut self, - missing: f64, - high_card: f64, - corr: f64, - outlier: f64, - skew: f64, - variance: f64, - ) -> Self { - self.missing_threshold = missing; - self.high_card_threshold = high_card; - self.corr_threshold = corr; - self.outlier_threshold = outlier; - self.skew_threshold = skew; - self.variance_threshold = variance; - self - } - - pub fn compute(&self) -> InsightEngineResult { - let mut insights = Vec::new(); - - self.check_high_missing(&mut insights); - self.check_constant_columns(&mut insights); - self.check_high_cardinality(&mut insights); - self.check_duplicate_rows(&mut insights); - self.check_skewed_distributions(&mut insights); - self.check_high_correlations(&mut insights); - self.check_outlier_proportions(&mut insights); - self.check_infinite_values(&mut insights); - self.check_low_variance(&mut insights); - self.check_id_like_columns(&mut insights); - self.check_class_imbalance(&mut insights); - self.check_mixed_types(&mut insights); - self.check_datetime_patterns(&mut insights); - - // Build summary - let critical = insights - .iter() - .filter(|i| i.severity == InsightSeverity::Critical) - .count(); - let warning = insights - .iter() - .filter(|i| i.severity == InsightSeverity::Warning) - .count(); - let info = insights - .iter() - .filter(|i| i.severity == InsightSeverity::Info) - .count(); - - let summary = InsightSummary { - total: insights.len(), - critical, - warning, - info, - }; - - InsightEngineResult { insights, summary } - } - - // ── Rule 1: High Missing ──────────────────────────────────── - - fn check_high_missing(&self, out: &mut Vec) { - for col_info in &self.schema.columns { - if col_info.missing_ratio >= self.missing_threshold { - let severity = if col_info.missing_ratio > 0.7 { - InsightSeverity::Critical - } else { - InsightSeverity::Warning - }; - out.push(Insight { - insight_type: InsightType::HighMissing, - severity, - column: Some(col_info.name.clone()), - message: format!( - "Column '{}' has {:.1}% 
missing values", - col_info.name, - col_info.missing_ratio * 100.0 - ), - detail: format!( - "{} out of {} values are missing", - col_info.n_missing, self.schema.n_rows - ), - recommendation: if col_info.missing_ratio > 0.7 { - "Consider dropping this column".into() - } else { - "Consider imputation or investigate the cause".into() - }, - }); - } - } - } - - // ── Rule 2: Constant Columns ──────────────────────────────── - - fn check_constant_columns(&self, out: &mut Vec) { - for col_info in &self.schema.columns { - if col_info.n_unique <= 1 { - out.push(Insight { - insight_type: InsightType::ConstantColumn, - severity: InsightSeverity::Warning, - column: Some(col_info.name.clone()), - message: format!( - "Column '{}' is constant (only {} unique value)", - col_info.name, col_info.n_unique - ), - detail: "Constant columns carry no information".into(), - recommendation: "Drop this column as it has no predictive power".into(), - }); - } - } - } - - // ── Rule 3: High Cardinality ───────────────────────────────── - - fn check_high_cardinality(&self, out: &mut Vec) { - let n = self.schema.n_rows.max(1); - for col_name in self.schema.categorical_columns() { - if let Some(info) = self.schema.columns.iter().find(|c| c.name == col_name) { - let ratio = info.n_unique as f64 / n as f64; - if ratio > self.high_card_threshold { - out.push(Insight { - insight_type: InsightType::HighCardinality, - severity: InsightSeverity::Warning, - column: Some(col_name.to_string()), - message: format!( - "Categorical column '{}' has very high cardinality ({} unique / {} rows = {:.1}%)", - col_name, info.n_unique, n, ratio * 100.0 - ), - detail: "High cardinality may cause one-hot encoding explosion".into(), - recommendation: "Use target encoding, hashing, or grouping rare categories".into(), - }); - } - } - } - } - - // ── Rule 4: Duplicate Rows ────────────────────────────────── - - fn check_duplicate_rows(&self, out: &mut Vec) { - let n = self.df.height(); - if n == 0 { - return; - } - if 
let Ok(unique) = self.df.unique_stable(None, UniqueKeepStrategy::First, None) { - let n_dup = n - unique.height(); - if n_dup > 0 { - let ratio = n_dup as f64 / n as f64; - let severity = if ratio > 0.1 { - InsightSeverity::Warning - } else { - InsightSeverity::Info - }; - out.push(Insight { - insight_type: InsightType::DuplicateRows, - severity, - column: None, - message: format!( - "Dataset has {} duplicate rows ({:.1}%)", - n_dup, - ratio * 100.0 - ), - detail: format!("{} rows are exact duplicates", n_dup), - recommendation: "Investigate and remove duplicates if not intentional".into(), - }); - } - } - } - - // ── Rule 5: Skewed Distributions ──────────────────────────── - - fn check_skewed_distributions(&self, out: &mut Vec) { - for col_name in self.schema.numeric_columns() { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - let skew = Self::compute_skewness(&vals); - if skew.abs() > self.skew_threshold { - out.push(Insight { - insight_type: InsightType::SkewedDistribution, - severity: InsightSeverity::Info, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' is heavily skewed (skewness = {:.3})", - col_name, skew - ), - detail: if skew > 0.0 { - "Right-skewed distribution".into() - } else { - "Left-skewed distribution".into() - }, - recommendation: "Consider log, Box-Cox, or Yeo-Johnson transform".into(), - }); - } - } - } - } - - // ── Rule 6: High Correlations ─────────────────────────────── - - fn check_high_correlations(&self, out: &mut Vec) { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return; - } - - let mut col_vals: Vec<(&str, Vec)> = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_vals.push((col_name, vals)); - } - } - - let min_len = col_vals.iter().map(|(_, v)| v.len()).min().unwrap_or(0); - for i in 
0..col_vals.len() { - for j in (i + 1)..col_vals.len() { - let r = Self::pearson(&col_vals[i].1[..min_len], &col_vals[j].1[..min_len]); - if r.abs() > self.corr_threshold { - out.push(Insight { - insight_type: InsightType::HighCorrelation, - severity: InsightSeverity::Warning, - column: Some(format!("{}, {}", col_vals[i].0, col_vals[j].0)), - message: format!( - "Columns '{}' and '{}' are highly correlated (r = {:.3})", - col_vals[i].0, col_vals[j].0, r - ), - detail: "Near-perfect correlation may indicate redundancy".into(), - recommendation: "Consider removing one to reduce multicollinearity".into(), - }); - } - } - } - } - - // ── Rule 7: Outlier Proportions ───────────────────────────── - - fn check_outlier_proportions(&self, out: &mut Vec) { - for col_name in self.schema.numeric_columns() { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - let n = vals.len(); - if n < 10 { - continue; - } - let mut sorted = vals.clone(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let q1 = sorted[n / 4]; - let q3 = sorted[3 * n / 4]; - let iqr = q3 - q1; - let lower = q1 - 1.5 * iqr; - let upper = q3 + 1.5 * iqr; - let outlier_count = vals.iter().filter(|&&v| v < lower || v > upper).count(); - let ratio = outlier_count as f64 / n as f64; - - if ratio > self.outlier_threshold { - out.push(Insight { - insight_type: InsightType::OutlierProportion, - severity: if ratio > 0.15 { - InsightSeverity::Warning - } else { - InsightSeverity::Info - }, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' has {:.1}% outliers (IQR method)", - col_name, - ratio * 100.0 - ), - detail: format!("{} outlier values detected out of {}", outlier_count, n), - recommendation: - "Investigate outliers — clip, transform, or treat separately".into(), - }); - } - } - } - } - - // ── Rule 8: Infinite Values ───────────────────────────────── - - fn check_infinite_values(&self, 
out: &mut Vec) { - for col_name in self.schema.numeric_columns() { - if let Ok(col) = self.df.column(col_name) { - if let Ok(f) = col.cast(&DataType::Float64) { - let ca = f.f64().unwrap(); - let inf_count = ca - .into_iter() - .filter(|v| matches!(v, Some(x) if x.is_infinite())) - .count(); - if inf_count > 0 { - out.push(Insight { - insight_type: InsightType::InfiniteValues, - severity: InsightSeverity::Critical, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' contains {} infinite values", - col_name, inf_count - ), - detail: "Infinite values will cause issues with most algorithms".into(), - recommendation: - "Replace infinite values with NaN or clip to a max value".into(), - }); - } - } - } - } - } - - // ── Rule 9: Low Variance ──────────────────────────────────── - - fn check_low_variance(&self, out: &mut Vec) { - for col_name in self.schema.numeric_columns() { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - if vals.is_empty() { - continue; - } - let mean = vals.iter().sum::() / vals.len() as f64; - let var = vals.iter().map(|v| (v - mean).powi(2)).sum::() / vals.len() as f64; - if var < self.variance_threshold && var > 0.0 { - out.push(Insight { - insight_type: InsightType::LowVariance, - severity: InsightSeverity::Info, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' has very low variance ({:.6})", - col_name, var - ), - detail: "Low variance features may not contribute to model learning".into(), - recommendation: "Consider removing or investigating this feature".into(), - }); - } - } - } - } - - // ── Rule 10: ID-like Columns ──────────────────────────────── - - fn check_id_like_columns(&self, out: &mut Vec) { - let n = self.schema.n_rows.max(1); - for col_info in &self.schema.columns { - let ratio = col_info.n_unique as f64 / n as f64; - if ratio > 0.99 && col_info.n_missing == 0 { - let name_lower = col_info.name.to_lowercase(); - 
if name_lower.contains("id") - || name_lower.contains("index") - || name_lower.contains("key") - || name_lower.ends_with("_no") - || name_lower.ends_with("_num") - { - out.push(Insight { - insight_type: InsightType::IdLikeColumn, - severity: InsightSeverity::Warning, - column: Some(col_info.name.clone()), - message: format!( - "Column '{}' appears to be an ID column ({} unique values)", - col_info.name, col_info.n_unique - ), - detail: "ID columns have no predictive value".into(), - recommendation: "Drop ID columns before modeling".into(), - }); - } - } - } - } - - // ── Rule 11: Class Imbalance ──────────────────────────────── - - fn check_class_imbalance(&self, out: &mut Vec) { - let n = self.df.height(); - for col_name in self.schema.categorical_columns() { - if let Some(info) = self.schema.columns.iter().find(|c| c.name == col_name) { - if info.n_unique >= 2 && info.n_unique <= 10 { - if let Ok(col) = self.df.column(col_name) { - let counts: Vec = col - .unique() - .ok() - .map(|u| { - (0..u.len()) - .filter_map(|i| { - u.get(i).ok().map(|val| { - let val_str = format!("{:?}", val); - (0..col.len()) - .filter(|&j| { - col.get(j) - .ok() - .map(|v| format!("{:?}", v) == val_str) - .unwrap_or(false) - }) - .count() - }) - }) - .collect() - }) - .unwrap_or_default(); - - if let (Some(&min_c), Some(&max_c)) = - (counts.iter().min(), counts.iter().max()) - { - if max_c > 0 && n > 0 { - let ratio = min_c as f64 / max_c as f64; - if ratio < 0.1 { - out.push(Insight { - insight_type: InsightType::ClassImbalance, - severity: InsightSeverity::Warning, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' shows class imbalance (min/max ratio = {:.3})", - col_name, ratio - ), - detail: format!( - "Smallest class: {} samples, largest: {} samples", - min_c, max_c - ), - recommendation: - "Consider oversampling, undersampling, or class weights" - .into(), - }); - } - } - } - } - } - } - } - } - - // ── Rule 12: Mixed Types 
──────────────────────────────────── - - fn check_mixed_types(&self, out: &mut Vec) { - // Detect columns where dtype is string but many values look numeric - for col_name in self.schema.categorical_columns() { - if let Ok(col) = self.df.column(col_name) { - let total = col.len(); - if total == 0 { - continue; - } - let str_col = col.cast(&DataType::String).ok(); - if let Some(s) = str_col { - if let Ok(ca) = s.str() { - let numeric_count = ca - .into_iter() - .filter(|v| v.map(|s| s.parse::().is_ok()).unwrap_or(false)) - .count(); - let ratio = numeric_count as f64 / total as f64; - if ratio > 0.5 && ratio < 0.99 { - out.push(Insight { - insight_type: InsightType::MixedTypes, - severity: InsightSeverity::Warning, - column: Some(col_name.to_string()), - message: format!( - "Column '{}' appears to have mixed types ({:.0}% numeric in string column)", - col_name, - ratio * 100.0 - ), - detail: format!("{} of {} values are parseable as numbers", numeric_count, total), - recommendation: "Clean and convert to the appropriate type".into(), - }); - } - } - } - } - } - } - - // ── Rule 13: DateTime Patterns ────────────────────────────── - - fn check_datetime_patterns(&self, out: &mut Vec) { - for col_name in self.schema.datetime_columns() { - // Just note datetime columns exist — useful for time series - out.push(Insight { - insight_type: InsightType::DatetimePattern, - severity: InsightSeverity::Info, - column: Some(col_name.to_string()), - message: format!("Column '{}' contains datetime data", col_name), - detail: "Datetime columns can be decomposed into year, month, day, etc.".into(), - recommendation: "Extract temporal features for improved modeling".into(), - }); - } - } - - // ── Stat helpers ──────────────────────────────────────────── - - fn compute_skewness(vals: &[f64]) -> f64 { - let n = vals.len() as f64; - if n < 3.0 { - return 0.0; - } - let mean = vals.iter().sum::() / n; - let m2: f64 = vals.iter().map(|v| (v - mean).powi(2)).sum::() / n; - let m3: f64 = 
vals.iter().map(|v| (v - mean).powi(3)).sum::() / n; - let std = m2.sqrt(); - if std < f64::EPSILON { - return 0.0; - } - m3 / std.powi(3) - } - - fn pearson(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return 0.0; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 0..x.len().min(y.len()) { - let dx = x[i] - mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - cov / (sx * sy).sqrt() - } -} diff --git a/src/stats/missing.rs b/src/stats/missing.rs deleted file mode 100644 index 37115a4..0000000 --- a/src/stats/missing.rs +++ /dev/null @@ -1,126 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MissingColumnInfo { - pub column: String, - pub missing_count: usize, - pub missing_ratio: f64, - pub dtype: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MissingResult { - /// Per-column missing information. - pub columns: Vec, - /// Number of rows with at least one missing value. - pub rows_with_missing: usize, - /// Overall missing ratio (total missing / total cells). - pub overall_missing_ratio: f64, - /// Distribution of missing count per row: (n_missing, count_of_rows). - pub row_missing_distribution: Vec<(usize, usize)>, - /// Boolean missing matrix (columns × is_missing) for pattern analysis. - /// Stored as column_name → Vec for serialization. 
- pub missing_matrix: Vec<(String, Vec)>, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct MissingStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> MissingStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> MissingResult { - let n_rows = self.df.height(); - let n_cols = self.df.width(); - let total_cells = n_rows * n_cols; - - // ── Per-column info ───────────────────────────────────── - let columns: Vec = self - .df - .get_columns() - .iter() - .map(|col| { - let missing_count = col.null_count(); - let missing_ratio = if n_rows > 0 { - missing_count as f64 / n_rows as f64 - } else { - 0.0 - }; - MissingColumnInfo { - column: col.name().to_string(), - missing_count, - missing_ratio, - dtype: format!("{:?}", col.dtype()), - } - }) - .collect(); - - let total_missing: usize = columns.iter().map(|c| c.missing_count).sum(); - let overall_missing_ratio = if total_cells > 0 { - total_missing as f64 / total_cells as f64 - } else { - 0.0 - }; - - // ── Per-row missing count ─────────────────────────────── - let mut row_missing_counts = vec![0usize; n_rows]; - for col in self.df.get_columns() { - let is_null = col.is_null(); - for (i, val) in is_null.into_iter().enumerate() { - if val.unwrap_or(false) { - row_missing_counts[i] += 1; - } - } - } - - let rows_with_missing = row_missing_counts.iter().filter(|&&c| c > 0).count(); - - // Distribution: how many rows have 0 missing, 1 missing, etc. 
- let max_missing = *row_missing_counts.iter().max().unwrap_or(&0); - let mut distribution = vec![0usize; max_missing + 1]; - for &c in &row_missing_counts { - distribution[c] += 1; - } - let row_missing_distribution: Vec<(usize, usize)> = distribution - .into_iter() - .enumerate() - .filter(|(_, count)| *count > 0) - .collect(); - - // ── Missing matrix ────────────────────────────────────── - // Only generate for columns that have any missing (to save memory) - let missing_matrix: Vec<(String, Vec)> = self - .df - .get_columns() - .iter() - .filter(|col| col.null_count() > 0) - .map(|col| { - let mask: Vec = col - .is_null() - .into_iter() - .map(|v| v.unwrap_or(false)) - .collect(); - (col.name().to_string(), mask) - }) - .collect(); - - MissingResult { - columns, - rows_with_missing, - overall_missing_ratio, - row_missing_distribution, - missing_matrix, - } - } -} diff --git a/src/stats/ml_readiness.rs b/src/stats/ml_readiness.rs deleted file mode 100644 index 205173c..0000000 --- a/src/stats/ml_readiness.rs +++ /dev/null @@ -1,571 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; -use crate::utils::types::ColumnType; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DimensionScore { - pub name: String, - pub score: f64, - pub detail: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnReadiness { - pub column: String, - pub data_type: String, - pub needs_encoding: bool, - pub needs_imputation: bool, - pub needs_scaling: bool, - pub needs_transform: bool, - pub warnings: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MlReadinessResult { - pub overall_score: f64, - pub grade: String, - pub dimensions: Vec, - pub column_readiness: Vec, - pub recommendations: Vec, -} - -// ─── Implementation 
───────────────────────────────────────────────── - -pub struct MlReadinessStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> MlReadinessStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> MlReadinessResult { - let completeness = self.score_completeness(); - let uniqueness = self.score_uniqueness(); - let consistency = self.score_consistency(); - let balance = self.score_balance(); - let informativeness = self.score_informativeness(); - let independence = self.score_independence(); - let scale_uniformity = self.score_scale(); - - let dimensions = vec![ - DimensionScore { - name: "completeness".into(), - score: completeness.0, - detail: completeness.1, - }, - DimensionScore { - name: "uniqueness".into(), - score: uniqueness.0, - detail: uniqueness.1, - }, - DimensionScore { - name: "consistency".into(), - score: consistency.0, - detail: consistency.1, - }, - DimensionScore { - name: "balance".into(), - score: balance.0, - detail: balance.1, - }, - DimensionScore { - name: "informativeness".into(), - score: informativeness.0, - detail: informativeness.1, - }, - DimensionScore { - name: "independence".into(), - score: independence.0, - detail: independence.1, - }, - DimensionScore { - name: "scale_uniformity".into(), - score: scale_uniformity.0, - detail: scale_uniformity.1, - }, - ]; - - // Weighted overall score - let weights = [0.20, 0.10, 0.15, 0.10, 0.15, 0.15, 0.15]; - let scores: Vec = dimensions.iter().map(|d| d.score).collect(); - let overall_score: f64 = scores.iter().zip(weights.iter()).map(|(s, w)| s * w).sum(); - - let grade = Self::grade_from_score(overall_score); - let column_readiness = self.column_readiness(); - let recommendations = self.generate_recommendations(&dimensions, &column_readiness); - - MlReadinessResult { - overall_score, - grade, - dimensions, - column_readiness, - recommendations, - } - } - - // ── Dimension 1: Completeness 
─────────────────────────────── - - fn score_completeness(&self) -> (f64, String) { - if self.schema.columns.is_empty() { - return (1.0, "No columns to evaluate".into()); - } - let avg_complete: f64 = self - .schema - .columns - .iter() - .map(|c| 1.0 - c.missing_ratio) - .sum::() - / self.schema.columns.len() as f64; - - let n_high_missing = self - .schema - .columns - .iter() - .filter(|c| c.missing_ratio > 0.3) - .count(); - - let penalty = (n_high_missing as f64 * 0.05).min(0.3); - let score = (avg_complete - penalty).clamp(0.0, 1.0); - - ( - score, - format!( - "Avg completeness {:.1}%, {} columns with >30% missing", - avg_complete * 100.0, - n_high_missing - ), - ) - } - - // ── Dimension 2: Uniqueness (no excessive duplicates) ─────── - - fn score_uniqueness(&self) -> (f64, String) { - let n = self.df.height(); - if n == 0 { - return (1.0, "Empty dataset".into()); - } - - let n_unique = self - .df - .unique_stable(None, UniqueKeepStrategy::First, None) - .map(|u| u.height()) - .unwrap_or(n); - - let dup_ratio = 1.0 - (n_unique as f64 / n as f64); - let score = (1.0 - dup_ratio * 2.0).clamp(0.0, 1.0); - - (score, format!("{:.1}% duplicate rows", dup_ratio * 100.0)) - } - - // ── Dimension 3: Consistency (no mixed types, no outlier excess) - - fn score_consistency(&self) -> (f64, String) { - let mut issues = 0; - - // Check numeric columns for extreme outliers - for col_name in self.schema.numeric_columns() { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - if vals.len() < 10 { - continue; - } - let mut sorted = vals.clone(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let q1 = sorted[vals.len() / 4]; - let q3 = sorted[3 * vals.len() / 4]; - let iqr = q3 - q1; - let outlier_count = vals - .iter() - .filter(|&&v| v < q1 - 3.0 * iqr || v > q3 + 3.0 * iqr) - .count(); - if outlier_count as f64 / vals.len() as f64 > 0.05 { - issues += 1; - } - } - } - - 
// Check for infinite values - for col_name in self.schema.numeric_columns() { - if let Ok(col) = self.df.column(col_name) { - if let Ok(f) = col.cast(&DataType::Float64) { - let ca = f.f64().unwrap(); - let inf_count = ca - .into_iter() - .filter(|v| matches!(v, Some(x) if x.is_infinite())) - .count(); - if inf_count > 0 { - issues += 1; - } - } - } - } - - let score = (1.0 - issues as f64 * 0.1).clamp(0.0, 1.0); - (score, format!("{} consistency issues found", issues)) - } - - // ── Dimension 4: Balance (class distribution) ─────────────── - - fn score_balance(&self) -> (f64, String) { - let cat_cols = self.schema.categorical_columns(); - if cat_cols.is_empty() { - return (0.8, "No categorical columns to evaluate balance".into()); - } - - let mut worst_gini = 0.0f64; - for &col_name in &cat_cols { - if let Some(info) = self.schema.columns.iter().find(|c| c.name == col_name) { - if info.n_unique >= 2 && info.n_unique <= 20 { - if let Ok(col) = self.df.column(col_name) { - let n = col.len(); - let groups = Self::count_groups(col); - if groups.len() >= 2 { - let gini = Self::gini_impurity(&groups, n); - if gini > worst_gini { - worst_gini = gini; - } - } - } - } - } - } - - // Higher gini = more balanced → better - let score = worst_gini.clamp(0.0, 1.0); - (score, format!("Gini impurity: {:.3}", worst_gini)) - } - - // ── Dimension 5: Informativeness ──────────────────────────── - - fn score_informativeness(&self) -> (f64, String) { - let n_cols = self.schema.columns.len().max(1); - let mut low_info = 0usize; - - // Constant or near-constant columns - for col_info in &self.schema.columns { - if col_info.n_unique <= 1 { - low_info += 1; - } - } - - // Low-variance numeric columns - for col_name in self.schema.numeric_columns() { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - if vals.is_empty() { - continue; - } - let mean = vals.iter().sum::() / vals.len() as f64; - let var = 
vals.iter().map(|v| (v - mean).powi(2)).sum::() / vals.len() as f64; - let cv = if mean.abs() > f64::EPSILON { - var.sqrt() / mean.abs() - } else { - var.sqrt() - }; - if cv < 0.01 { - low_info += 1; - } - } - } - - let ratio = low_info as f64 / n_cols as f64; - let score = (1.0 - ratio * 2.0).clamp(0.0, 1.0); - ( - score, - format!("{} of {} columns have low information", low_info, n_cols), - ) - } - - // ── Dimension 6: Independence (low multicollinearity) ─────── - - fn score_independence(&self) -> (f64, String) { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return (1.0, "Fewer than 2 numeric columns".into()); - } - - let mut col_vals: Vec> = Vec::new(); - for &name in &num_cols { - if let Some(vals) = self - .df - .column(name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - col_vals.push(vals); - } - } - - let min_len = col_vals.iter().map(|v| v.len()).min().unwrap_or(0); - let mut high_corr_pairs = 0usize; - let total_pairs = col_vals.len() * (col_vals.len() - 1) / 2; - - for i in 0..col_vals.len() { - for j in (i + 1)..col_vals.len() { - let r = Self::pearson(&col_vals[i][..min_len], &col_vals[j][..min_len]); - if r.abs() > 0.9 { - high_corr_pairs += 1; - } - } - } - - let ratio = if total_pairs > 0 { - high_corr_pairs as f64 / total_pairs as f64 - } else { - 0.0 - }; - let score = (1.0 - ratio * 3.0).clamp(0.0, 1.0); - ( - score, - format!( - "{} of {} column pairs have |r| > 0.9", - high_corr_pairs, total_pairs - ), - ) - } - - // ── Dimension 7: Scale uniformity ─────────────────────────── - - fn score_scale(&self) -> (f64, String) { - let num_cols = self.schema.numeric_columns(); - if num_cols.is_empty() { - return (1.0, "No numeric columns".into()); - } - - let mut ranges: Vec = Vec::new(); - for &col_name in &num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - if vals.is_empty() { - continue; - } - let min = 
vals.iter().cloned().fold(f64::INFINITY, f64::min); - let max = vals.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - ranges.push((max - min).abs()); - } - } - - if ranges.len() < 2 { - return (1.0, "Single numeric column".into()); - } - - let min_range = ranges.iter().cloned().fold(f64::INFINITY, f64::min); - let max_range = ranges.iter().cloned().fold(f64::NEG_INFINITY, f64::max); - - let ratio = if max_range > f64::EPSILON { - min_range / max_range - } else { - 1.0 - }; - - // If ranges differ by > 1000x, scaling is needed - let score = if ratio < 0.001 { - 0.3 - } else if ratio < 0.01 { - 0.5 - } else if ratio < 0.1 { - 0.7 - } else { - 0.9 - }; - - ( - score, - format!( - "Feature range ratio: {:.4} (min range: {:.2}, max range: {:.2})", - ratio, min_range, max_range - ), - ) - } - - // ── Per-column readiness ──────────────────────────────────── - - fn column_readiness(&self) -> Vec { - self.schema - .columns - .iter() - .map(|col_info| { - let mut warnings = Vec::new(); - let needs_imputation = col_info.n_missing > 0; - let needs_encoding = col_info.inferred_type == ColumnType::Categorical - || col_info.inferred_type == ColumnType::Text; - let needs_scaling = col_info.inferred_type == ColumnType::Numeric; - - // Check for skew → transform - let needs_transform = if col_info.inferred_type == ColumnType::Numeric { - if let Some(vals) = self - .df - .column(&col_info.name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - let skew = Self::compute_skewness(&vals); - if skew.abs() > 2.0 { - warnings.push(format!("High skewness ({:.2})", skew)); - true - } else { - false - } - } else { - false - } - } else { - false - }; - - if needs_imputation { - warnings.push(format!("{:.1}% missing", col_info.missing_ratio * 100.0)); - } - if col_info.n_unique <= 1 { - warnings.push("Constant column".into()); - } - - ColumnReadiness { - column: col_info.name.clone(), - data_type: format!("{:?}", col_info.inferred_type), - needs_encoding, - 
needs_imputation, - needs_scaling, - needs_transform, - warnings, - } - }) - .collect() - } - - // ── Recommendation generator ──────────────────────────────── - - fn generate_recommendations( - &self, - dimensions: &[DimensionScore], - columns: &[ColumnReadiness], - ) -> Vec { - let mut recs = Vec::new(); - - for dim in dimensions { - if dim.score < 0.6 { - match dim.name.as_str() { - "completeness" => recs.push("해결: 결측치가 많습니다. 결측 컬럼 제거 또는 대체 전략을 적용하세요.".into()), - "uniqueness" => recs.push("해결: 중복 행이 많습니다. 중복 제거를 고려하세요.".into()), - "consistency" => recs.push("해결: 이상치나 비정상 값이 많습니다. 클리핑 또는 변환을 고려하세요.".into()), - "balance" => recs.push("해결: 클래스 불균형이 발견되었습니다. 오버/언더 샘플링을 고려하세요.".into()), - "informativeness" => recs.push("해결: 정보량이 낮은 컬럼이 있습니다. 상수 컬럼 제거를 고려하세요.".into()), - "independence" => recs.push("해결: 다중공선성이 높습니다. 상관성 높은 컬럼 중 하나를 제거하세요.".into()), - "scale_uniformity" => recs.push("해결: 피처 스케일이 불균일합니다. StandardScaler 또는 MinMaxScaler를 적용하세요.".into()), - _ => {} - } - } - } - - let n_needs_encoding = columns.iter().filter(|c| c.needs_encoding).count(); - if n_needs_encoding > 0 { - recs.push(format!( - "{}개 범주형 컬럼에 인코딩이 필요합니다 (OneHot, Target, 또는 Label Encoding).", - n_needs_encoding - )); - } - - let n_needs_imputation = columns.iter().filter(|c| c.needs_imputation).count(); - if n_needs_imputation > 0 { - recs.push(format!( - "{}개 컬럼에 결측치 대체가 필요합니다 (mean, median, 또는 KNN imputation).", - n_needs_imputation - )); - } - - recs - } - - // ── Helpers ───────────────────────────────────────────────── - - fn grade_from_score(score: f64) -> String { - if score >= 0.9 { - "A".into() - } else if score >= 0.8 { - "B".into() - } else if score >= 0.7 { - "C".into() - } else if score >= 0.6 { - "D".into() - } else { - "F".into() - } - } - - fn count_groups(col: &Column) -> Vec { - let mut map: std::collections::HashMap = std::collections::HashMap::new(); - for i in 0..col.len() { - if let Ok(val) = col.get(i) { - let key = format!("{}", val); - *map.entry(key).or_default() += 1; - } 
- } - map.values().copied().collect() - } - - fn gini_impurity(counts: &[usize], total: usize) -> f64 { - if total == 0 { - return 0.0; - } - let n = total as f64; - 1.0 - counts.iter().map(|&c| (c as f64 / n).powi(2)).sum::() - } - - fn compute_skewness(vals: &[f64]) -> f64 { - let n = vals.len() as f64; - if n < 3.0 { - return 0.0; - } - let mean = vals.iter().sum::() / n; - let m2 = vals.iter().map(|v| (v - mean).powi(2)).sum::() / n; - let m3 = vals.iter().map(|v| (v - mean).powi(3)).sum::() / n; - let std = m2.sqrt(); - if std < f64::EPSILON { - return 0.0; - } - m3 / std.powi(3) - } - - fn pearson(x: &[f64], y: &[f64]) -> f64 { - let n = x.len().min(y.len()) as f64; - if n < 2.0 { - return 0.0; - } - let mx = x.iter().sum::() / n; - let my = y.iter().sum::() / n; - let mut cov = 0.0; - let mut sx = 0.0; - let mut sy = 0.0; - for i in 0..x.len().min(y.len()) { - let dx = x[i] - mx; - let dy = y[i] - my; - cov += dx * dy; - sx += dx * dx; - sy += dy * dy; - } - if sx < f64::EPSILON || sy < f64::EPSILON { - return 0.0; - } - cov / (sx * sy).sqrt() - } -} diff --git a/src/stats/mod.rs b/src/stats/mod.rs deleted file mode 100644 index 6fab4a1..0000000 --- a/src/stats/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -pub mod advanced_anomaly; -pub mod advanced_correlation; -pub mod advanced_dimreduction; -pub mod advanced_distribution; -pub mod categorical; -pub mod clustering; -pub mod column_role; -pub mod correlation; -pub mod cross_analysis; -pub mod descriptive; -pub mod distribution; -pub mod duplicates; -pub mod feature_importance; -pub mod feature_insights; -pub mod insight_engine; -pub mod missing; -pub mod ml_readiness; -pub mod outlier; -pub mod pca; -pub mod quality; -pub mod statistical_tests; diff --git a/src/stats/outlier.rs b/src/stats/outlier.rs deleted file mode 100644 index 0579a05..0000000 --- a/src/stats/outlier.rs +++ /dev/null @@ -1,256 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; -use statrs::statistics::{Data, 
OrderStatistics}; - -pub use crate::core::config::OutlierMethod; -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OutlierColumnResult { - pub column: String, - pub method: String, - pub n_outliers: usize, - pub outlier_ratio: f64, - pub lower_bound: f64, - pub upper_bound: f64, - pub n_below: usize, - pub n_above: usize, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OutlierResult { - pub columns: Vec, - /// Per-column boolean mask: true = outlier row. - pub masks: Vec<(String, Vec)>, - pub total_outlier_cells: usize, - pub total_cells: usize, - pub overall_outlier_ratio: f64, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct OutlierStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - method: OutlierMethod, - threshold: f64, -} - -impl<'a> OutlierStats<'a> { - pub fn new( - df: &'a DataFrame, - schema: &'a DataSchema, - method: OutlierMethod, - threshold: f64, - ) -> Self { - Self { - df, - schema, - method, - threshold, - } - } - - pub fn compute(&self) -> OutlierResult { - let num_cols = self.schema.numeric_columns(); - let n_rows = self.df.height(); - let mut columns = Vec::new(); - let mut masks = Vec::new(); - let mut total_outlier_cells = 0usize; - - for &col_name in &num_cols { - let col = match self.df.column(col_name) { - Ok(c) => c, - Err(_) => continue, - }; - - // Get all values including NaN positions - let (result, mask) = match self.method { - OutlierMethod::Iqr => self.iqr_method(col_name, col), - OutlierMethod::Zscore => self.zscore_method(col_name, col), - }; - - total_outlier_cells += result.n_outliers; - columns.push(result); - masks.push((col_name.to_string(), mask)); - } - - let total_cells = n_rows * num_cols.len(); - let overall_outlier_ratio = if total_cells > 0 { - total_outlier_cells as f64 / 
total_cells as f64 - } else { - 0.0 - }; - - OutlierResult { - columns, - masks, - total_outlier_cells, - total_cells, - overall_outlier_ratio, - } - } - - /// IQR (Interquartile Range) method. - fn iqr_method(&self, name: &str, col: &Column) -> (OutlierColumnResult, Vec) { - let n_rows = self.df.height(); - let mut mask = vec![false; n_rows]; - - // Get non-null values for percentile computation - let values = match DescriptiveStats::column_to_f64_vec(col) { - Some(v) => v, - None => { - return ( - OutlierColumnResult { - column: name.to_string(), - method: "iqr".to_string(), - n_outliers: 0, - outlier_ratio: 0.0, - lower_bound: f64::NEG_INFINITY, - upper_bound: f64::INFINITY, - n_below: 0, - n_above: 0, - }, - mask, - ); - } - }; - - let mut data = Data::new(values.clone()); - let q1 = data.percentile(25); - let q3 = data.percentile(75); - let iqr = q3 - q1; - let lower = q1 - self.threshold * iqr; - let upper = q3 + self.threshold * iqr; - - // Apply to all rows (including original null positions) - let casted = col.cast(&DataType::Float64).unwrap_or_default(); - let ca = casted.f64().unwrap(); - let mut n_below = 0usize; - let mut n_above = 0usize; - - for (i, opt_val) in ca.into_iter().enumerate() { - if let Some(v) = opt_val { - if v < lower { - mask[i] = true; - n_below += 1; - } else if v > upper { - mask[i] = true; - n_above += 1; - } - } - } - - let n_outliers = n_below + n_above; - let outlier_ratio = if n_rows > 0 { - n_outliers as f64 / n_rows as f64 - } else { - 0.0 - }; - - ( - OutlierColumnResult { - column: name.to_string(), - method: "iqr".to_string(), - n_outliers, - outlier_ratio, - lower_bound: lower, - upper_bound: upper, - n_below, - n_above, - }, - mask, - ) - } - - /// Z-score method. 
- fn zscore_method(&self, name: &str, col: &Column) -> (OutlierColumnResult, Vec) { - let n_rows = self.df.height(); - let mut mask = vec![false; n_rows]; - - let values = match DescriptiveStats::column_to_f64_vec(col) { - Some(v) => v, - None => { - return ( - OutlierColumnResult { - column: name.to_string(), - method: "zscore".to_string(), - n_outliers: 0, - outlier_ratio: 0.0, - lower_bound: f64::NEG_INFINITY, - upper_bound: f64::INFINITY, - n_below: 0, - n_above: 0, - }, - mask, - ); - } - }; - - let n = values.len() as f64; - let mean = values.iter().sum::() / n; - let std = - (values.iter().map(|x| (x - mean).powi(2)).sum::() / (n - 1.0).max(1.0)).sqrt(); - - if std < f64::EPSILON { - return ( - OutlierColumnResult { - column: name.to_string(), - method: "zscore".to_string(), - n_outliers: 0, - outlier_ratio: 0.0, - lower_bound: mean, - upper_bound: mean, - n_below: 0, - n_above: 0, - }, - mask, - ); - } - - let lower = mean - self.threshold * std; - let upper = mean + self.threshold * std; - - let casted = col.cast(&DataType::Float64).unwrap_or_default(); - let ca = casted.f64().unwrap(); - let mut n_below = 0usize; - let mut n_above = 0usize; - - for (i, opt_val) in ca.into_iter().enumerate() { - if let Some(v) = opt_val { - let z = (v - mean) / std; - if z < -self.threshold { - mask[i] = true; - n_below += 1; - } else if z > self.threshold { - mask[i] = true; - n_above += 1; - } - } - } - - let n_outliers = n_below + n_above; - let outlier_ratio = if n_rows > 0 { - n_outliers as f64 / n_rows as f64 - } else { - 0.0 - }; - - ( - OutlierColumnResult { - column: name.to_string(), - method: "zscore".to_string(), - n_outliers, - outlier_ratio, - lower_bound: lower, - upper_bound: upper, - n_below, - n_above, - }, - mask, - ) - } -} diff --git a/src/stats/pca.rs b/src/stats/pca.rs deleted file mode 100644 index 1f4340d..0000000 --- a/src/stats/pca.rs +++ /dev/null @@ -1,246 +0,0 @@ -use ndarray::{Array1, Array2}; -use polars::prelude::*; -use 
serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PcaResult { - /// Column names used for PCA. - pub feature_names: Vec, - /// Variance explained ratio per component. - pub variance_ratio: Vec, - /// Cumulative variance ratio. - pub cumulative_ratio: Vec, - /// Eigenvalues. - pub eigenvalues: Vec, - /// Loadings matrix: features × components. - pub loadings: Vec>, - /// Number of components needed for 90% variance. - pub components_for_90pct: usize, - /// Total variance explained by all computed components. - pub total_variance_explained: f64, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct PcaStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, - max_components: usize, -} - -impl<'a> PcaStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema, max_components: usize) -> Self { - Self { - df, - schema, - max_components, - } - } - - pub fn compute(&self) -> Option { - let num_cols = self.schema.numeric_columns(); - if num_cols.len() < 2 { - return None; - } - - // Extract and standardize numeric matrix - let (feature_names, raw_matrix) = self.extract_matrix(&num_cols); - let n = raw_matrix.nrows(); - let p = raw_matrix.ncols(); - if n < 2 || p < 2 { - return None; - } - - let standardized = Self::standardize(&raw_matrix); - - // Covariance matrix (on standardized data = correlation matrix) - let cov = Self::covariance_matrix(&standardized); - - // Eigendecomposition via power iteration (for top-k eigenvalues) - let n_components = self.max_components.min(p).min(n - 1); - let (eigenvalues, eigenvectors) = Self::eigen_decomposition(&cov, n_components); - - let total_eigenvalue_sum: f64 = { - // Trace of covariance = sum of all eigenvalues - (0..p).map(|i| cov[[i, i]]).sum() - }; - - let variance_ratio: 
Vec = eigenvalues - .iter() - .map(|&ev| { - if total_eigenvalue_sum > f64::EPSILON { - ev / total_eigenvalue_sum - } else { - 0.0 - } - }) - .collect(); - - let mut cumulative_ratio = Vec::with_capacity(variance_ratio.len()); - let mut cum = 0.0; - for &vr in &variance_ratio { - cum += vr; - cumulative_ratio.push(cum); - } - - let components_for_90pct = cumulative_ratio - .iter() - .position(|&c| c >= 0.9) - .map(|i| i + 1) - .unwrap_or(n_components); - - let total_variance_explained = cum; - - // Loadings: eigenvectors as rows of features - let loadings: Vec> = (0..p) - .map(|feat| { - (0..eigenvectors.ncols()) - .map(|comp| eigenvectors[[feat, comp]]) - .collect() - }) - .collect(); - - Some(PcaResult { - feature_names, - variance_ratio, - cumulative_ratio, - eigenvalues, - loadings, - components_for_90pct, - total_variance_explained, - }) - } - - // ── Helpers ───────────────────────────────────────────────── - - fn extract_matrix(&self, num_cols: &[&str]) -> (Vec, Array2) { - let mut names = Vec::new(); - let mut col_data = Vec::new(); - - for &col_name in num_cols { - if let Some(vals) = self - .df - .column(col_name) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)) - { - names.push(col_name.to_string()); - col_data.push(vals); - } - } - - if names.is_empty() { - return (vec![], Array2::zeros((0, 0))); - } - - let min_len = col_data.iter().map(|v| v.len()).min().unwrap_or(0); - let n_cols = names.len(); - let mut matrix = Array2::::zeros((min_len, n_cols)); - for (j, data) in col_data.iter().enumerate() { - for i in 0..min_len { - matrix[[i, j]] = data[i]; - } - } - - (names, matrix) - } - - pub(crate) fn standardize(matrix: &Array2) -> Array2 { - let n = matrix.nrows(); - let p = matrix.ncols(); - let mut result = Array2::::zeros((n, p)); - - for j in 0..p { - let col = matrix.column(j); - let mean = col.mean().unwrap_or(0.0); - let std = { - let var: f64 = - col.iter().map(|x| (x - mean).powi(2)).sum::() / (n as f64 - 1.0).max(1.0); - 
var.sqrt() - }; - for i in 0..n { - result[[i, j]] = if std > f64::EPSILON { - (matrix[[i, j]] - mean) / std - } else { - 0.0 - }; - } - } - result - } - - pub(crate) fn covariance_matrix(matrix: &Array2) -> Array2 { - let n = matrix.nrows(); - let p = matrix.ncols(); - - let means: Vec = (0..p) - .map(|j| matrix.column(j).mean().unwrap_or(0.0)) - .collect(); - - let mut cov = Array2::::zeros((p, p)); - for i in 0..p { - for j in i..p { - let val: f64 = (0..n) - .map(|k| (matrix[[k, i]] - means[i]) * (matrix[[k, j]] - means[j])) - .sum::() - / (n as f64 - 1.0).max(1.0); - cov[[i, j]] = val; - cov[[j, i]] = val; - } - } - cov - } - - /// Simple eigendecomposition via power iteration with deflation. - pub(crate) fn eigen_decomposition( - cov: &Array2, - n_components: usize, - ) -> (Vec, Array2) { - let p = cov.nrows(); - let mut eigenvalues = Vec::with_capacity(n_components); - let mut eigenvectors = Array2::::zeros((p, n_components)); - let mut matrix = cov.clone(); - - for k in 0..n_components { - let (eigenvalue, eigenvector) = Self::power_iteration(&matrix, 200); - eigenvalues.push(eigenvalue); - for i in 0..p { - eigenvectors[[i, k]] = eigenvector[i]; - } - - // Deflate: A = A - λ * v * v^T - for i in 0..p { - for j in 0..p { - matrix[[i, j]] -= eigenvalue * eigenvector[i] * eigenvector[j]; - } - } - } - - (eigenvalues, eigenvectors) - } - - /// Power iteration to find the dominant eigenvalue/eigenvector. 
- fn power_iteration(matrix: &Array2, max_iter: usize) -> (f64, Array1) { - let p = matrix.nrows(); - let mut v = Array1::::from_elem(p, 1.0 / (p as f64).sqrt()); - - let mut eigenvalue = 0.0; - - for _ in 0..max_iter { - let new_v = matrix.dot(&v); - eigenvalue = new_v.dot(&v); - let norm: f64 = new_v.iter().map(|x| x.powi(2)).sum::().sqrt(); - if norm < f64::EPSILON { - break; - } - v = new_v / norm; - } - - (eigenvalue.max(0.0), v) - } -} diff --git a/src/stats/quality.rs b/src/stats/quality.rs deleted file mode 100644 index 79acdfd..0000000 --- a/src/stats/quality.rs +++ /dev/null @@ -1,372 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; - -use crate::core::schema::DataSchema; -use crate::utils::types::ColumnType; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct QualityDimension { - pub name: String, - pub score: f64, // 0.0 – 1.0 - pub details: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ColumnQuality { - pub column: String, - pub completeness: f64, - pub uniqueness: f64, - pub validity: f64, - pub overall: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct QualityResult { - pub overall_score: f64, - pub dimensions: Vec, - pub by_column: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct QualityStats<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> QualityStats<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> QualityResult { - let dimensions = vec![ - self.completeness(), - self.uniqueness(), - self.consistency(), - self.validity(), - self.timeliness(), - self.conformity(), - ]; - - let by_column = self.per_column_quality(); - - // Weighted overall score - let weights = [0.25, 0.15, 0.15, 0.20, 0.10, 0.15]; - let overall_score: f64 = dimensions - .iter() - 
.zip(weights.iter()) - .map(|(d, w)| d.score * w) - .sum(); - - QualityResult { - overall_score, - dimensions, - by_column, - } - } - - // ── Completeness (1 - missing ratio) ──────────────────────── - - fn completeness(&self) -> QualityDimension { - let n_rows = self.df.height(); - let n_cols = self.df.width(); - let total = n_rows * n_cols; - let missing: usize = self.df.get_columns().iter().map(|c| c.null_count()).sum(); - - let score = if total > 0 { - 1.0 - (missing as f64 / total as f64) - } else { - 1.0 - }; - - QualityDimension { - name: "completeness".into(), - score, - details: format!( - "{} missing values out of {} cells ({:.1}% complete)", - missing, - total, - score * 100.0 - ), - } - } - - // ── Uniqueness (ratio of distinct rows) ───────────────────── - - fn uniqueness(&self) -> QualityDimension { - let n_rows = self.df.height(); - let n_unique = self - .df - .unique::<&str, PlSmallStr>(None, UniqueKeepStrategy::First, None) - .map(|u| u.height()) - .unwrap_or(n_rows); - - let score = if n_rows > 0 { - n_unique as f64 / n_rows as f64 - } else { - 1.0 - }; - - QualityDimension { - name: "uniqueness".into(), - score, - details: format!( - "{} unique rows out of {} ({:.1}% unique)", - n_unique, - n_rows, - score * 100.0 - ), - } - } - - // ── Consistency (type uniformity within columns) ──────────── - - fn consistency(&self) -> QualityDimension { - let mut consistent_cols = 0; - let total_cols = self.df.width(); - - for col in self.df.get_columns() { - // A column is "consistent" if its physical dtype is clean - // (i.e. 
not Object / mixed-type) - match col.dtype() { - DataType::Unknown(_) | DataType::Null => {} - _ => consistent_cols += 1, - } - } - - let score = if total_cols > 0 { - consistent_cols as f64 / total_cols as f64 - } else { - 1.0 - }; - - QualityDimension { - name: "consistency".into(), - score, - details: format!( - "{} of {} columns have consistent types ({:.1}%)", - consistent_cols, - total_cols, - score * 100.0 - ), - } - } - - // ── Validity (values within ±4σ for numeric) ──────────────── - - fn validity(&self) -> QualityDimension { - let num_cols = self.schema.numeric_columns(); - if num_cols.is_empty() { - return QualityDimension { - name: "validity".into(), - score: 1.0, - details: "No numeric columns to validate".into(), - }; - } - - let mut total_values = 0usize; - let mut valid_values = 0usize; - - for &col_name in &num_cols { - if let Ok(col) = self.df.column(col_name) { - if let Ok(casted) = col.cast(&DataType::Float64) { - if let Ok(ca) = casted.f64() { - let vals: Vec = ca.into_iter().filter_map(|v| v).collect(); - let n = vals.len(); - if n == 0 { - continue; - } - let mean = vals.iter().sum::() / n as f64; - let std = (vals.iter().map(|x| (x - mean).powi(2)).sum::() - / (n as f64 - 1.0).max(1.0)) - .sqrt(); - - total_values += n; - if std > f64::EPSILON { - valid_values += vals - .iter() - .filter(|&&x| { - let z = (x - mean).abs() / std; - z <= 4.0 - }) - .count(); - } else { - valid_values += n; - } - } - } - } - } - - let score = if total_values > 0 { - valid_values as f64 / total_values as f64 - } else { - 1.0 - }; - - QualityDimension { - name: "validity".into(), - score, - details: format!( - "{} of {} numeric values within ±4σ ({:.1}%)", - valid_values, - total_values, - score * 100.0 - ), - } - } - - // ── Timeliness (datetime column recency / range) ──────────── - - fn timeliness(&self) -> QualityDimension { - let dt_cols = self.schema.datetime_columns(); - if dt_cols.is_empty() { - return QualityDimension { - name: "timeliness".into(), - 
score: 1.0, - details: "No datetime columns – timeliness check skipped".into(), - }; - } - - // For now, score based on whether datetime columns parse correctly - // Full implementation would check recency, gaps, etc. - QualityDimension { - name: "timeliness".into(), - score: 0.8, - details: format!("{} datetime columns detected", dt_cols.len()), - } - } - - // ── Conformity (no control characters, valid ranges) ──────── - - fn conformity(&self) -> QualityDimension { - let str_cols: Vec<&str> = self - .schema - .columns - .iter() - .filter(|c| { - c.inferred_type == ColumnType::Categorical || c.inferred_type == ColumnType::Text - }) - .map(|c| c.name.as_str()) - .collect(); - - if str_cols.is_empty() { - return QualityDimension { - name: "conformity".into(), - score: 1.0, - details: "No string columns to check".into(), - }; - } - - let mut total_strings = 0usize; - let mut conforming_strings = 0usize; - - for &col_name in &str_cols { - if let Ok(col) = self.df.column(col_name) { - if let Ok(str_col) = col.str() { - for opt_val in str_col.into_iter() { - if let Some(s) = opt_val { - total_strings += 1; - // Check for control characters (except \n, \r, \t) - let has_control = s - .chars() - .any(|c| c.is_control() && c != '\n' && c != '\r' && c != '\t'); - if !has_control { - conforming_strings += 1; - } - } - } - } - } - } - - let score = if total_strings > 0 { - conforming_strings as f64 / total_strings as f64 - } else { - 1.0 - }; - - QualityDimension { - name: "conformity".into(), - score, - details: format!( - "{} of {} strings conform to standards ({:.1}%)", - conforming_strings, - total_strings, - score * 100.0 - ), - } - } - - // ── Per-column quality ────────────────────────────────────── - - fn per_column_quality(&self) -> Vec { - let n_rows = self.df.height(); - - self.df - .get_columns() - .iter() - .map(|col| { - let name = col.name().to_string(); - let completeness = if n_rows > 0 { - 1.0 - (col.null_count() as f64 / n_rows as f64) - } else { - 1.0 - }; 
- let n_unique = col.n_unique().unwrap_or(0); - let uniqueness = if n_rows > 0 { - n_unique as f64 / n_rows as f64 - } else { - 1.0 - }; - - // Validity: for numeric, ratio within ±4σ; for others, non-null = valid - let validity = if col.dtype().is_float() || col.dtype().is_integer() { - Self::column_validity_numeric(col) - } else { - completeness - }; - - let overall = (completeness + uniqueness.min(1.0) + validity) / 3.0; - - ColumnQuality { - column: name, - completeness, - uniqueness: uniqueness.min(1.0), - validity, - overall, - } - }) - .collect() - } - - fn column_validity_numeric(col: &Column) -> f64 { - if let Ok(casted) = col.cast(&DataType::Float64) { - if let Ok(ca) = casted.f64() { - let vals: Vec = ca.into_iter().filter_map(|v| v).collect(); - let n = vals.len(); - if n == 0 { - return 1.0; - } - let mean = vals.iter().sum::() / n as f64; - let std = (vals.iter().map(|x| (x - mean).powi(2)).sum::() - / (n as f64 - 1.0).max(1.0)) - .sqrt(); - - if std < f64::EPSILON { - return 1.0; - } - - let valid = vals - .iter() - .filter(|&&x| ((x - mean) / std).abs() <= 4.0) - .count(); - return valid as f64 / n as f64; - } - } - 1.0 - } -} diff --git a/src/stats/statistical_tests.rs b/src/stats/statistical_tests.rs deleted file mode 100644 index 0da1350..0000000 --- a/src/stats/statistical_tests.rs +++ /dev/null @@ -1,356 +0,0 @@ -use polars::prelude::*; -use serde::{Deserialize, Serialize}; -use statrs::distribution::{ContinuousCDF, StudentsT}; - -use crate::core::schema::DataSchema; -use crate::stats::descriptive::DescriptiveStats; - -// ─── Result types ─────────────────────────────────────────────────── - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct LeveneTestResult { - pub col_a: String, - pub col_b: String, - pub statistic: f64, - pub p_value: f64, - pub log_var_ratio: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct KruskalWallisResult { - pub numeric_col: String, - pub group_col: String, - pub h_statistic: f64, - pub 
p_value: f64, - pub eta_squared: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MannWhitneyResult { - pub col_a: String, - pub col_b: String, - pub u_statistic: f64, - pub p_value: f64, - pub rank_biserial_r: f64, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct GrubbsTestResult { - pub column: String, - pub statistic: f64, - pub critical_value: f64, - pub outlier_value: f64, - pub is_outlier: bool, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct StatisticalTestsResult { - pub levene: Vec, - pub kruskal_wallis: Vec, - pub grubbs: Vec, -} - -// ─── Implementation ───────────────────────────────────────────────── - -pub struct StatisticalTests<'a> { - df: &'a DataFrame, - schema: &'a DataSchema, -} - -impl<'a> StatisticalTests<'a> { - pub fn new(df: &'a DataFrame, schema: &'a DataSchema) -> Self { - Self { df, schema } - } - - pub fn compute(&self) -> StatisticalTestsResult { - let levene = self.levene_tests(); - let kruskal_wallis = self.kruskal_wallis_tests(); - let grubbs = self.grubbs_tests(); - - StatisticalTestsResult { - levene, - kruskal_wallis, - grubbs, - } - } - - /// Pairwise Levene's test for equality of variances between numeric columns. 
- fn levene_tests(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - let mut results = Vec::new(); - - for i in 0..num_cols.len() { - for j in (i + 1)..num_cols.len() { - let a_vals = self - .df - .column(num_cols[i]) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)); - let b_vals = self - .df - .column(num_cols[j]) - .ok() - .and_then(|c| DescriptiveStats::column_to_f64_vec(c)); - - if let (Some(a), Some(b)) = (a_vals, b_vals) { - if a.len() < 2 || b.len() < 2 { - continue; - } - - let med_a = Self::median_of(&a); - let med_b = Self::median_of(&b); - let za: Vec = a.iter().map(|x| (x - med_a).abs()).collect(); - let zb: Vec = b.iter().map(|x| (x - med_b).abs()).collect(); - - let n1 = za.len() as f64; - let n2 = zb.len() as f64; - let mean_za = za.iter().sum::() / n1; - let mean_zb = zb.iter().sum::() / n2; - let grand_mean = (za.iter().sum::() + zb.iter().sum::()) / (n1 + n2); - - let between = - n1 * (mean_za - grand_mean).powi(2) + n2 * (mean_zb - grand_mean).powi(2); - let within: f64 = za.iter().map(|z| (z - mean_za).powi(2)).sum::() - + zb.iter().map(|z| (z - mean_zb).powi(2)).sum::(); - - let dof_between = 1.0; - let dof_within = n1 + n2 - 2.0; - let f_stat = if within > f64::EPSILON { - (between / dof_between) / (within / dof_within) - } else { - 0.0 - }; - - // F-distribution p-value approximation - let p_value = Self::f_distribution_sf(f_stat, dof_between, dof_within); - - let var_a = a.iter().map(|x| (x - med_a).powi(2)).sum::() / (n1 - 1.0); - let var_b = b.iter().map(|x| (x - med_b).powi(2)).sum::() / (n2 - 1.0); - let log_var_ratio = if var_b > f64::EPSILON { - (var_a / var_b).ln() - } else { - f64::NAN - }; - - results.push(LeveneTestResult { - col_a: num_cols[i].to_string(), - col_b: num_cols[j].to_string(), - statistic: f_stat, - p_value, - log_var_ratio, - }); - } - } - } - results - } - - /// Kruskal-Wallis H test: numeric column grouped by categorical. 
- fn kruskal_wallis_tests(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - let cat_cols = self.schema.categorical_columns(); - let mut results = Vec::new(); - - for &num_col in &num_cols { - for &cat_col in &cat_cols { - if let Some(r) = self.kruskal_wallis_pair(num_col, cat_col) { - results.push(r); - } - } - } - results - } - - fn kruskal_wallis_pair(&self, num_col: &str, cat_col: &str) -> Option { - let num_series = self.df.column(num_col).ok()?; - let cat_series = self.df.column(cat_col).ok()?; - let cat_str = cat_series.cast(&DataType::String).ok()?; - let cat_ca = cat_str.str().ok()?; - - // Group numeric values by category - let mut groups: indexmap::IndexMap> = indexmap::IndexMap::new(); - let num_f64 = num_series.cast(&DataType::Float64).ok()?; - let num_ca = num_f64.f64().ok()?; - - for (nv, cv) in num_ca.into_iter().zip(cat_ca.into_iter()) { - if let (Some(n), Some(c)) = (nv, cv) { - if n.is_finite() { - groups.entry(c.to_string()).or_default().push(n); - } - } - } - - if groups.len() < 2 { - return None; - } - - // Rank all values together - let mut all_vals: Vec<(f64, usize)> = Vec::new(); // (value, group_idx) - let group_keys: Vec = groups.keys().cloned().collect(); - for (g_idx, key) in group_keys.iter().enumerate() { - for &val in &groups[key] { - all_vals.push((val, g_idx)); - } - } - all_vals.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal)); - - let n_total = all_vals.len() as f64; - let mut ranks = vec![0.0f64; all_vals.len()]; - - // Assign average ranks for ties - let mut i = 0; - while i < all_vals.len() { - let mut j = i + 1; - while j < all_vals.len() && (all_vals[j].0 - all_vals[i].0).abs() < f64::EPSILON { - j += 1; - } - let avg_rank = (i + 1..=j).sum::() as f64 / (j - i) as f64; - for k in i..j { - ranks[k] = avg_rank; - } - i = j; - } - - // Compute H statistic - let mut group_rank_sums = vec![0.0f64; groups.len()]; - let mut group_ns = vec![0usize; groups.len()]; - for (idx, (_, g_idx)) in 
all_vals.iter().enumerate() { - group_rank_sums[*g_idx] += ranks[idx]; - group_ns[*g_idx] += 1; - } - - let h: f64 = group_rank_sums - .iter() - .zip(group_ns.iter()) - .map( - |(&sum, &ni)| { - if ni > 0 { - sum.powi(2) / ni as f64 - } else { - 0.0 - } - }, - ) - .sum::(); - - let h = (12.0 / (n_total * (n_total + 1.0))) * h - 3.0 * (n_total + 1.0); - - let dof = groups.len() - 1; - let p_value = crate::stats::categorical::CategoricalStats::chi_square_sf(h.max(0.0), dof); - - let eta_squared = if n_total > 1.0 { - (h - dof as f64 + 1.0) / (n_total - dof as f64) - } else { - 0.0 - }; - - Some(KruskalWallisResult { - numeric_col: num_col.to_string(), - group_col: cat_col.to_string(), - h_statistic: h.max(0.0), - p_value, - eta_squared: eta_squared.clamp(0.0, 1.0), - }) - } - - /// Grubbs' test for a single outlier per numeric column. - fn grubbs_tests(&self) -> Vec { - let num_cols = self.schema.numeric_columns(); - - num_cols - .iter() - .filter_map(|&col_name| { - let col = self.df.column(col_name).ok()?; - let values = DescriptiveStats::column_to_f64_vec(col)?; - let n = values.len(); - if n < 3 { - return None; - } - - let nf = n as f64; - let mean = values.iter().sum::() / nf; - let std = - (values.iter().map(|x| (x - mean).powi(2)).sum::() / (nf - 1.0)).sqrt(); - - if std < f64::EPSILON { - return None; - } - - // Find the value furthest from the mean - let (outlier_idx, _) = values.iter().enumerate().max_by(|(_, a), (_, b)| { - (*a - mean) - .abs() - .partial_cmp(&(*b - mean).abs()) - .unwrap_or(std::cmp::Ordering::Equal) - })?; - - let outlier_value = values[outlier_idx]; - let g = (outlier_value - mean).abs() / std; - - // Critical value using t-distribution - let alpha = 0.05; - let t_crit = StudentsT::new(0.0, 1.0, nf - 2.0) - .ok() - .map(|t| t.inverse_cdf(1.0 - alpha / (2.0 * nf))) - .unwrap_or(2.0); - - let critical_value = ((nf - 1.0) / nf.sqrt()) - * (t_crit.powi(2) / (nf - 2.0 + t_crit.powi(2))).sqrt(); - - Some(GrubbsTestResult { - column: 
col_name.to_string(), - statistic: g, - critical_value, - outlier_value, - is_outlier: g > critical_value, - }) - }) - .collect() - } - - // ── Helpers ───────────────────────────────────────────────── - - fn median_of(vals: &[f64]) -> f64 { - let mut sorted = vals.to_vec(); - sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let mid = sorted.len() / 2; - if sorted.len() % 2 == 0 { - (sorted[mid - 1] + sorted[mid]) / 2.0 - } else { - sorted[mid] - } - } - - /// F-distribution survival function approximation. - fn f_distribution_sf(f: f64, d1: f64, d2: f64) -> f64 { - if f <= 0.0 || d1 <= 0.0 || d2 <= 0.0 { - return 1.0; - } - let x = d2 / (d2 + d1 * f); - Self::incomplete_beta(d2 / 2.0, d1 / 2.0, x) - } - - /// Regularized incomplete beta function approximation (simple continued fraction). - fn incomplete_beta(a: f64, b: f64, x: f64) -> f64 { - if x <= 0.0 { - return 0.0; - } - if x >= 1.0 { - return 1.0; - } - // Use series expansion for small x - let mut sum = 1.0f64; - let mut term = 1.0f64; - for n in 1..200 { - let nf = n as f64; - term *= x * (a + b + nf - 1.0) * (a + nf - 1.0) - / ((a + 2.0 * nf - 1.0) * (a + 2.0 * nf) * nf); - term *= (a + 2.0 * nf) * nf / ((a + nf) * (b - nf).max(0.001)); - if term.abs() < 1e-12 { - break; - } - sum += term; - } - (x.powf(a) * (1.0 - x).powf(b) * sum / a).clamp(0.0, 1.0) - } -} diff --git a/src/utils/errors.rs b/src/utils/errors.rs deleted file mode 100644 index be28a19..0000000 --- a/src/utils/errors.rs +++ /dev/null @@ -1,48 +0,0 @@ -use thiserror::Error; - -/// Top-level error type for the f2a library. 
-#[derive(Error, Debug)] -pub enum F2aError { - #[error("Unsupported file format: {0}")] - UnsupportedFormat(String), - - #[error("Failed to load data: {0}")] - DataLoadError(String), - - #[error("Empty dataset – no rows or no columns")] - EmptyData, - - #[error("Column not found: {0}")] - ColumnNotFound(String), - - #[error("Computation error: {0}")] - ComputationError(String), - - #[error("Configuration error: {0}")] - ConfigError(String), - - #[error("Polars error: {0}")] - PolarsError(#[from] polars::prelude::PolarsError), - - #[error("IO error: {0}")] - IoError(#[from] std::io::Error), - - #[error("JSON error: {0}")] - JsonError(#[from] serde_json::Error), -} - -/// Convenience alias. -pub type F2aResult = Result; - -impl F2aError { - pub fn computation(msg: impl Into) -> Self { - F2aError::ComputationError(msg.into()) - } -} - -// Convert to PyO3 error -impl From for pyo3::PyErr { - fn from(err: F2aError) -> pyo3::PyErr { - pyo3::exceptions::PyRuntimeError::new_err(err.to_string()) - } -} diff --git a/src/utils/mod.rs b/src/utils/mod.rs deleted file mode 100644 index 422d669..0000000 --- a/src/utils/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod errors; -pub mod types; diff --git a/src/utils/types.rs b/src/utils/types.rs deleted file mode 100644 index d317e86..0000000 --- a/src/utils/types.rs +++ /dev/null @@ -1,92 +0,0 @@ -use serde::{Deserialize, Serialize}; - -/// Semantic column type – mirrors the Python `ColumnType` enum. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ColumnType { - Numeric, - Categorical, - Text, - DateTime, - Boolean, -} - -impl ColumnType { - pub fn as_str(&self) -> &'static str { - match self { - ColumnType::Numeric => "numeric", - ColumnType::Categorical => "categorical", - ColumnType::Text => "text", - ColumnType::DateTime => "datetime", - ColumnType::Boolean => "boolean", - } - } -} - -impl std::fmt::Display for ColumnType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(self.as_str()) - } -} - -/// Infer the semantic `ColumnType` from a Polars dtype and column statistics. -/// -/// Heuristics: -/// - Boolean → `Boolean` -/// - Numeric with ≤10 unique values (when n_rows > 100) → `Categorical` -/// - String with high unique ratio and long avg length → `Text` -/// - String / low-cardinality string → `Categorical` -/// - Date/Time/Datetime/Duration → `DateTime` -/// - Everything else numeric → `Numeric` -pub fn infer_column_type( - dtype: &polars::prelude::DataType, - n_unique: usize, - n_rows: usize, - avg_str_len: Option, -) -> ColumnType { - use polars::prelude::DataType; - - match dtype { - DataType::Boolean => ColumnType::Boolean, - - DataType::Date | DataType::Time | DataType::Datetime(_, _) | DataType::Duration(_) => { - ColumnType::DateTime - } - - DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::UInt8 - | DataType::UInt16 - | DataType::UInt32 - | DataType::UInt64 - | DataType::Float32 - | DataType::Float64 => { - // Numeric with very low cardinality on a large dataset → treat as categorical - if n_rows > 100 && n_unique <= 10 { - ColumnType::Categorical - } else { - ColumnType::Numeric - } - } - - DataType::String => { - let unique_ratio = if n_rows > 0 { - n_unique as f64 / n_rows as f64 - } else { - 0.0 - }; - let avg_len = avg_str_len.unwrap_or(0.0); - - // High-cardinality + long strings → text - 
if unique_ratio > 0.5 && avg_len > 50.0 { - ColumnType::Text - } else { - ColumnType::Categorical - } - } - - _ => ColumnType::Categorical, - } -} diff --git a/test_e2e.py b/test_e2e.py deleted file mode 100644 index e27e7ee..0000000 --- a/test_e2e.py +++ /dev/null @@ -1,408 +0,0 @@ -""" -Comprehensive end-to-end test for f2a (Rust-powered). -Mirrors the original lerobot_test_local.py pattern and verifies -all key API surfaces work correctly. -""" - -import json -import sys -import time -import traceback -import warnings -from pathlib import Path - -import numpy as np -import pandas as pd - -warnings.filterwarnings("ignore") - -# Ensure the local f2a package is used -sys.path.insert(0, str(Path(__file__).resolve().parent / "python")) - -PASS = 0 -FAIL = 0 -RESULTS: list[tuple[str, bool, str]] = [] - - -def check(name: str, fn): - """Run a test function, record pass/fail.""" - global PASS, FAIL - try: - msg = fn() - PASS += 1 - RESULTS.append((name, True, msg or "OK")) - except Exception as e: - FAIL += 1 - RESULTS.append((name, False, f"{e.__class__.__name__}: {e}")) - traceback.print_exc() - - -# ═══════════════════════════════════════════════════════════════════════ -# 0. 
Create test datasets -# ═══════════════════════════════════════════════════════════════════════ - -DATA_DIR = Path(__file__).parent / "test_data_e2e" -DATA_DIR.mkdir(exist_ok=True) - -np.random.seed(42) -N = 300 - -# Mixed-type dataset with missing values -mixed_df = pd.DataFrame({ - "id": range(N), - "age": np.random.randint(18, 80, N), - "income": np.random.lognormal(10, 1, N), - "score": np.random.normal(75, 15, N), - "category": np.random.choice(["A", "B", "C", "D"], N), - "city": np.random.choice(["Seoul", "Busan", "Daegu", "Incheon"], N), - "passed": np.random.choice([True, False], N), -}) -mixed_df.loc[np.random.choice(N, 20, replace=False), "income"] = np.nan -mixed_df.loc[np.random.choice(N, 15, replace=False), "score"] = np.nan -mixed_df.loc[np.random.choice(N, 10, replace=False), "city"] = np.nan - -CSV_PATH = DATA_DIR / "mixed_data.csv" -mixed_df.to_csv(CSV_PATH, index=False) - -TSV_PATH = DATA_DIR / "mixed_data.tsv" -mixed_df.to_csv(TSV_PATH, index=False, sep="\t") - -JSON_PATH = DATA_DIR / "mixed_data.json" -mixed_df.to_json(JSON_PATH, orient="records", force_ascii=False) - -JSONL_PATH = DATA_DIR / "mixed_data.jsonl" -mixed_df.to_json(JSONL_PATH, orient="records", lines=True, force_ascii=False) - -PARQUET_PATH = DATA_DIR / "mixed_data.parquet" -mixed_df.to_parquet(PARQUET_PATH, index=False) - -# Numeric-only dataset -numeric_df = pd.DataFrame({ - "x1": np.random.normal(0, 1, N), - "x2": np.random.normal(5, 2, N), - "x3": np.random.exponential(2, N), - "x4": np.random.uniform(-10, 10, N), - "x5": np.random.poisson(3, N).astype(float), -}) -NUMERIC_CSV = DATA_DIR / "numeric_only.csv" -numeric_df.to_csv(NUMERIC_CSV, index=False) - - -print("=" * 70) -print(" f2a 1.0 (Rust) - End-to-End Validation") -print("=" * 70) - - -# ═══════════════════════════════════════════════════════════════════════ -# 1. 
Import test -# ═══════════════════════════════════════════════════════════════════════ - -def test_import(): - import f2a - assert hasattr(f2a, "__version__") - assert hasattr(f2a, "analyze") - assert hasattr(f2a, "AnalysisConfig") - return f"version={f2a.__version__}" - -check("1. Import f2a", test_import) - - -# ═══════════════════════════════════════════════════════════════════════ -# 2. Rust core direct access -# ═══════════════════════════════════════════════════════════════════════ - -def test_rust_core(): - from f2a._core import version, default_config, minimal_config, fast_config - assert version() == "1.0.0" - cfg = json.loads(default_config()) - assert cfg["descriptive"] is True - assert cfg["correlation"] is True - mcfg = json.loads(minimal_config()) - assert mcfg["correlation"] is False - fcfg = json.loads(fast_config()) - assert fcfg["pca"] is False - return "version/config OK" - -check("2. Rust core functions", test_rust_core) - - -# ═══════════════════════════════════════════════════════════════════════ -# 3. AnalysisConfig -# ═══════════════════════════════════════════════════════════════════════ - -def test_config(): - from f2a import AnalysisConfig - cfg = AnalysisConfig() - assert cfg.descriptive is True - assert cfg.advanced is True - - cfg_min = AnalysisConfig.minimal() - assert cfg_min.correlation is False - - cfg_fast = AnalysisConfig.fast() - assert cfg_fast.pca is False - - cfg_basic = AnalysisConfig.basic_only() - assert cfg_basic.advanced is False - - cfg_json = cfg.to_json() - parsed = json.loads(cfg_json) - assert isinstance(parsed, dict) - return "AnalysisConfig OK" - -check("3. AnalysisConfig", test_config) - - -# ═══════════════════════════════════════════════════════════════════════ -# 4. 
CSV analysis (full pipeline) -# ═══════════════════════════════════════════════════════════════════════ - -def test_csv_analysis(): - from f2a import analyze - t0 = time.perf_counter() - report = analyze(str(CSV_PATH)) - dt = time.perf_counter() - t0 - - # Structure checks - assert report.source is not None - assert isinstance(report.schema, dict) - assert isinstance(report.results, dict) - assert len(report.sections) > 0 - - # Key sections present - expected = {"descriptive", "correlation", "distribution", "missing", - "outlier", "quality", "categorical", "pca"} - present = set(report.sections) - missing = expected - present - assert not missing, f"Missing sections: {missing}" - - # Descriptive results - desc = report.get("descriptive") - assert desc is not None - assert len(desc.get("numeric", [])) >= 3 # age, income, score - nc = desc["numeric"][0] - assert "mean" in nc and "std" in nc and "min" in nc and "max" in nc - - # Categorical - cat = report.get("categorical") - assert cat is not None - - # Quality - qual = report.get("quality") - assert qual is not None - score = qual.get("overall_score", 0) - assert 0.0 < score <= 1.0 - - return f"{len(report.sections)} sections, {dt:.2f}s" - -check("4. CSV full analysis", test_csv_analysis) - - -# ═══════════════════════════════════════════════════════════════════════ -# 5. Multi-format loading -# ═══════════════════════════════════════════════════════════════════════ - -def test_tsv(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig.minimal() - r = analyze(str(TSV_PATH), config=cfg) - assert len(r.sections) > 0 - return f"TSV: {len(r.sections)} sections" - -check("5a. TSV loading", test_tsv) - - -def test_json(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig.minimal() - r = analyze(str(JSON_PATH), config=cfg) - assert len(r.sections) > 0 - return f"JSON: {len(r.sections)} sections" - -check("5b. 
JSON loading", test_json) - - -def test_jsonl(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig.minimal() - r = analyze(str(JSONL_PATH), config=cfg) - assert len(r.sections) > 0 - return f"JSONL: {len(r.sections)} sections" - -check("5c. JSONL loading", test_jsonl) - - -def test_parquet(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig.minimal() - r = analyze(str(PARQUET_PATH), config=cfg) - assert len(r.sections) > 0 - return f"Parquet: {len(r.sections)} sections" - -check("5d. Parquet loading", test_parquet) - - -# ═══════════════════════════════════════════════════════════════════════ -# 6. Advanced analysis (all 21 modules) -# ═══════════════════════════════════════════════════════════════════════ - -def test_advanced(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig(advanced=True) - r = analyze(str(CSV_PATH), config=cfg) - - all_expected = { - "descriptive", "correlation", "distribution", "missing", - "outlier", "categorical", "feature_importance", "pca", - "duplicates", "quality", "statistical_tests", - # Advanced - "clustering", "advanced_anomaly", "advanced_correlation", - "advanced_distribution", "advanced_dimreduction", - "feature_insights", "insight_engine", "column_role", - "cross_analysis", "ml_readiness", - } - present = set(r.sections) - missing = all_expected - present - assert not missing, f"Missing advanced sections: {missing}" - - # ML readiness details - ml = r.get("ml_readiness") - assert ml is not None - assert "overall_score" in ml - assert "grade" in ml - assert 0.0 <= ml["overall_score"] <= 1.0 - - # Insight engine - ie = r.get("insight_engine") - assert ie is not None - assert "insights" in ie - - # Clustering - cl = r.get("clustering") - assert cl is not None - - return f"All {len(all_expected)} modules present, ML={ml['grade']}" - -check("6. Advanced analysis (21 modules)", test_advanced) - - -# ═══════════════════════════════════════════════════════════════════════ -# 7. 
Numeric-only dataset -# ═══════════════════════════════════════════════════════════════════════ - -def test_numeric_only(): - from f2a import analyze - r = analyze(str(NUMERIC_CSV)) - desc = r.get("descriptive") - assert desc is not None - # x5 (Poisson, low cardinality integers) may be classified as categorical - assert len(desc.get("numeric", [])) >= 4 - return f"{len(desc['numeric'])} numeric, {len(desc.get('categorical', []))} categorical" - -check("7. Numeric-only dataset", test_numeric_only) - - -# ═══════════════════════════════════════════════════════════════════════ -# 8. HTML report generation -# ═══════════════════════════════════════════════════════════════════════ - -def test_html_report(): - from f2a import analyze - r = analyze(str(CSV_PATH)) - out_dir = DATA_DIR / "html_output" - path = r.to_html(output_dir=str(out_dir)) - assert path.exists(), f"HTML file not found: {path}" - content = path.read_text(encoding="utf-8") - assert len(content) > 1000 - assert "= 0xAC00 for c in content), "No Korean characters in report" - return f"Korean HTML {len(content):,} bytes" - -check("9. HTML report (Korean)", test_html_korean) - - -# ═══════════════════════════════════════════════════════════════════════ -# 10. Console show() -# ═══════════════════════════════════════════════════════════════════════ - -def test_show(): - from f2a import analyze - r = analyze(str(CSV_PATH)) - # show() should not raise - r.show() - return "show() OK" - -check("10. Console show()", test_show) - - -# ═══════════════════════════════════════════════════════════════════════ -# 11. 
Performance benchmark -# ═══════════════════════════════════════════════════════════════════════ - -def test_performance(): - from f2a import analyze, AnalysisConfig - cfg = AnalysisConfig(advanced=True) - times = [] - for _ in range(3): - t0 = time.perf_counter() - analyze(str(CSV_PATH), config=cfg) - times.append(time.perf_counter() - t0) - avg = sum(times) / len(times) - return f"Avg {avg:.3f}s over 3 runs (min={min(times):.3f}s)" - -check("11. Performance benchmark", test_performance) - - -# ═══════════════════════════════════════════════════════════════════════ -# 12. Error handling -# ═══════════════════════════════════════════════════════════════════════ - -def test_error_nonexistent(): - from f2a import analyze - try: - analyze("nonexistent_file.csv") - raise AssertionError("Should have raised an error") - except Exception as e: - assert "nonexistent" in str(e).lower() or "not found" in str(e).lower() or "error" in str(e).lower() - return "Error raised for missing file" - -check("12. Error handling", test_error_nonexistent) - - -# ═══════════════════════════════════════════════════════════════════════ -# SUMMARY -# ═══════════════════════════════════════════════════════════════════════ - -print("\n" + "=" * 70) -print(" RESULTS") -print("=" * 70) -for name, ok, msg in RESULTS: - status = "[PASS]" if ok else "[FAIL]" - print(f" {status} {name}: {msg}") - -print(f"\n Total: {PASS} passed, {FAIL} failed out of {PASS + FAIL}") -print("=" * 70) - -if FAIL > 0: - sys.exit(1) -else: - print("\n All tests passed! 
f2a 1.0 (Rust) is ready.\n") - sys.exit(0) diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index e249dfc..0000000 --- a/tests/conftest.py +++ /dev/null @@ -1,95 +0,0 @@ -"""pytest fixtures for f2a tests.""" - -from __future__ import annotations - -from pathlib import Path - -import numpy as np -import pandas as pd -import pytest - - -@pytest.fixture(scope="session") -def mixed_df() -> pd.DataFrame: - """Session-scoped mixed-type DataFrame with missing values.""" - np.random.seed(42) - n = 300 - df = pd.DataFrame( - { - "id": range(n), - "age": np.random.randint(18, 80, n), - "income": np.random.lognormal(10, 1, n), - "score": np.random.normal(75, 15, n), - "category": np.random.choice(["A", "B", "C", "D"], n), - "city": np.random.choice(["Seoul", "Busan", "Daegu", "Incheon"], n), - "passed": np.random.choice([True, False], n), - } - ) - df.loc[np.random.choice(n, 20, replace=False), "income"] = np.nan - df.loc[np.random.choice(n, 15, replace=False), "score"] = np.nan - df.loc[np.random.choice(n, 10, replace=False), "city"] = np.nan - return df - - -@pytest.fixture(scope="session") -def numeric_df() -> pd.DataFrame: - """Session-scoped numeric-only DataFrame.""" - np.random.seed(42) - n = 300 - return pd.DataFrame( - { - "x1": np.random.normal(0, 1, n), - "x2": np.random.normal(5, 2, n), - "x3": np.random.exponential(2, n), - "x4": np.random.uniform(-10, 10, n), - "x5": np.random.poisson(3, n).astype(float), - } - ) - - -@pytest.fixture(scope="session") -def data_dir(tmp_path_factory: pytest.TempPathFactory) -> Path: - """Session-scoped temp directory for test data files.""" - return tmp_path_factory.mktemp("f2a_test_data") - - -@pytest.fixture(scope="session") -def csv_path(data_dir: Path, mixed_df: pd.DataFrame) -> Path: - p = data_dir / "mixed_data.csv" - mixed_df.to_csv(p, index=False) - return p - - -@pytest.fixture(scope="session") -def tsv_path(data_dir: Path, mixed_df: pd.DataFrame) -> Path: - p = data_dir / "mixed_data.tsv" - 
mixed_df.to_csv(p, index=False, sep="\t") - return p - - -@pytest.fixture(scope="session") -def json_path(data_dir: Path, mixed_df: pd.DataFrame) -> Path: - p = data_dir / "mixed_data.json" - mixed_df.to_json(p, orient="records", force_ascii=False) - return p - - -@pytest.fixture(scope="session") -def jsonl_path(data_dir: Path, mixed_df: pd.DataFrame) -> Path: - p = data_dir / "mixed_data.jsonl" - mixed_df.to_json(p, orient="records", lines=True, force_ascii=False) - return p - - -@pytest.fixture(scope="session") -def parquet_path(data_dir: Path, mixed_df: pd.DataFrame) -> Path: - p = data_dir / "mixed_data.parquet" - mixed_df.to_parquet(p, index=False) - return p - - -@pytest.fixture(scope="session") -def numeric_csv_path(data_dir: Path, numeric_df: pd.DataFrame) -> Path: - p = data_dir / "numeric_only.csv" - numeric_df.to_csv(p, index=False) - return p diff --git a/tests/test_analysis.py b/tests/test_analysis.py deleted file mode 100644 index 556ec53..0000000 --- a/tests/test_analysis.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Tests: file loading, multi-format support, analysis pipeline.""" - -from __future__ import annotations - -from pathlib import Path - -import pytest - -from f2a import AnalysisConfig, analyze - - -class TestCSVAnalysis: - """Full CSV analysis pipeline.""" - - def test_sections_present(self, csv_path: Path): - report = analyze(str(csv_path)) - expected = { - "descriptive", - "correlation", - "distribution", - "missing", - "outlier", - "quality", - "categorical", - "pca", - } - present = set(report.sections) - missing = expected - present - assert not missing, f"Missing sections: {missing}" - - def test_descriptive_stats(self, csv_path: Path): - report = analyze(str(csv_path)) - desc = report.get("descriptive") - assert desc is not None - assert len(desc.get("numeric", [])) >= 3 - nc = desc["numeric"][0] - for key in ("mean", "std", "min", "max"): - assert key in nc, f"Missing key '{key}' in numeric stats" - - def test_categorical_stats(self, 
csv_path: Path): - report = analyze(str(csv_path)) - cat = report.get("categorical") - assert cat is not None - - def test_quality_score(self, csv_path: Path): - report = analyze(str(csv_path)) - qual = report.get("quality") - assert qual is not None - score = qual.get("overall_score", 0) - assert 0.0 < score <= 1.0 - - def test_source_and_schema(self, csv_path: Path): - report = analyze(str(csv_path)) - assert report.source is not None - assert isinstance(report.schema, dict) - assert isinstance(report.results, dict) - - -class TestMultiFormat: - """Multi-format loading.""" - - @pytest.fixture(params=["csv_path", "tsv_path", "json_path", "jsonl_path", "parquet_path"]) - def data_path(self, request) -> Path: - return request.getfixturevalue(request.param) - - def test_load_and_analyze(self, data_path: Path): - cfg = AnalysisConfig.minimal() - report = analyze(str(data_path), config=cfg) - assert len(report.sections) > 0 - - -class TestNumericOnly: - """Numeric-only dataset analysis.""" - - def test_numeric_detection(self, numeric_csv_path: Path): - report = analyze(str(numeric_csv_path)) - desc = report.get("descriptive") - assert desc is not None - # x5 (Poisson with low cardinality) may be classified categorical - assert len(desc.get("numeric", [])) >= 4 - - def test_all_21_sections(self, csv_path: Path): - cfg = AnalysisConfig(advanced=True) - report = analyze(str(csv_path), config=cfg) - - all_expected = { - "descriptive", - "correlation", - "distribution", - "missing", - "outlier", - "categorical", - "feature_importance", - "pca", - "duplicates", - "quality", - "statistical_tests", - "clustering", - "advanced_anomaly", - "advanced_correlation", - "advanced_distribution", - "advanced_dimreduction", - "feature_insights", - "insight_engine", - "column_role", - "cross_analysis", - "ml_readiness", - } - present = set(report.sections) - missing = all_expected - present - assert not missing, f"Missing advanced sections: {missing}" - - -class TestMLReadiness: - """ML 
readiness scoring.""" - - def test_ml_readiness_fields(self, csv_path: Path): - cfg = AnalysisConfig(advanced=True) - report = analyze(str(csv_path), config=cfg) - ml = report.get("ml_readiness") - assert ml is not None - assert "overall_score" in ml - assert "grade" in ml - assert 0.0 <= ml["overall_score"] <= 1.0 - - def test_insight_engine(self, csv_path: Path): - cfg = AnalysisConfig(advanced=True) - report = analyze(str(csv_path), config=cfg) - ie = report.get("insight_engine") - assert ie is not None - assert "insights" in ie diff --git a/tests/test_core.py b/tests/test_core.py deleted file mode 100644 index 7ebb44b..0000000 --- a/tests/test_core.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Tests: import, version, Rust core, AnalysisConfig.""" - -from __future__ import annotations - -import json - - -def test_import(): - import f2a - - assert hasattr(f2a, "__version__") - assert hasattr(f2a, "analyze") - assert hasattr(f2a, "AnalysisConfig") - - -def test_version(): - import f2a - - # Version must be a valid semver-like string, not the dev fallback - assert f2a.__version__ != "0.0.0-dev" - parts = f2a.__version__.split(".") - assert len(parts) >= 2, f"Unexpected version format: {f2a.__version__}" - - -def test_rust_core_version(): - from f2a._core import version - import f2a - - # Rust core version (from Cargo.toml) must match Python package version (from pyproject.toml) - assert version() == f2a.__version__, ( - f"Version mismatch: Rust core={version()}, Python package={f2a.__version__}" - ) - - -def test_rust_core_configs(): - from f2a._core import default_config, fast_config, minimal_config - - cfg = json.loads(default_config()) - assert cfg["descriptive"] is True - assert cfg["correlation"] is True - - mcfg = json.loads(minimal_config()) - assert mcfg["correlation"] is False - - fcfg = json.loads(fast_config()) - assert fcfg["pca"] is False - - -def test_analysis_config_defaults(): - from f2a import AnalysisConfig - - cfg = AnalysisConfig() - assert 
cfg.descriptive is True - assert cfg.advanced is True - - -def test_analysis_config_presets(): - from f2a import AnalysisConfig - - assert AnalysisConfig.minimal().correlation is False - assert AnalysisConfig.fast().pca is False - assert AnalysisConfig.basic_only().advanced is False - - -def test_analysis_config_to_json(): - from f2a import AnalysisConfig - - parsed = json.loads(AnalysisConfig().to_json()) - assert isinstance(parsed, dict) - assert "descriptive" in parsed diff --git a/tests/test_report.py b/tests/test_report.py deleted file mode 100644 index f2f0c8f..0000000 --- a/tests/test_report.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Tests: HTML report generation, i18n, console show().""" - -from __future__ import annotations - -from pathlib import Path - -from f2a import analyze - - -class TestHTMLReport: - """HTML report generation.""" - - def test_generates_html_file(self, csv_path: Path, data_dir: Path): - report = analyze(str(csv_path)) - out_dir = data_dir / "html_output" - path = report.to_html(output_dir=str(out_dir)) - assert path.exists(), f"HTML file not found: {path}" - content = path.read_text(encoding="utf-8") - assert len(content) > 1000 - assert "= 0xAC00 for c in content), "No Korean characters found" - - -class TestConsoleOutput: - """Console show() method.""" - - def test_show_does_not_raise(self, csv_path: Path): - report = analyze(str(csv_path)) - report.show() # must not raise - - -class TestErrorHandling: - """Error handling for invalid inputs.""" - - def test_nonexistent_file(self): - try: - analyze("nonexistent_file_12345.csv") - raise AssertionError("Should have raised an error") - except AssertionError: - raise - except Exception as e: - err = str(e).lower() - assert any(w in err for w in ("not found", "error", "nonexistent", "no such")) diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..b67fb7c --- /dev/null +++ b/uv.lock @@ -0,0 +1,3939 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" +resolution-markers = [ + 
"python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.11'", +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/36/d6/5aec9313ee6ea9c7cde8b891b69f4ff4001416867104580670a31daeba5b/aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7", size = 738950, upload-time = "2026-01-03T17:29:13.002Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/8fa90a7e6d11ff20a18837a8e2b5dd23db01aabc475aa9271c8ad33299f5/aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821", size = 496099, upload-time = "2026-01-03T17:29:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/d2/23/b81f744d402510a8366b74eb420fc0cc1170d0c43daca12d10814df85f10/aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845", size = 491072, upload-time = "2026-01-03T17:29:16.922Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e1/56d1d1c0dd334cd203dd97706ce004c1aa24b34a813b0b8daf3383039706/aiohttp-3.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af", size = 1671588, upload-time = "2026-01-03T17:29:18.539Z" }, + { url = "https://files.pythonhosted.org/packages/5f/34/8d7f962604f4bc2b4e39eb1220dac7d4e4cba91fb9ba0474b4ecd67db165/aiohttp-3.13.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940", size = 1640334, upload-time = "2026-01-03T17:29:21.028Z" }, + { url = "https://files.pythonhosted.org/packages/94/1d/fcccf2c668d87337ddeef9881537baee13c58d8f01f12ba8a24215f2b804/aiohttp-3.13.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160", size = 1722656, upload-time = 
"2026-01-03T17:29:22.531Z" }, + { url = "https://files.pythonhosted.org/packages/aa/98/c6f3b081c4c606bc1e5f2ec102e87d6411c73a9ef3616fea6f2d5c98c062/aiohttp-3.13.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7", size = 1817625, upload-time = "2026-01-03T17:29:24.276Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c0/cfcc3d2e11b477f86e1af2863f3858c8850d751ce8dc39c4058a072c9e54/aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455", size = 1672604, upload-time = "2026-01-03T17:29:26.099Z" }, + { url = "https://files.pythonhosted.org/packages/1e/77/6b4ffcbcac4c6a5d041343a756f34a6dd26174ae07f977a64fe028dda5b0/aiohttp-3.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279", size = 1554370, upload-time = "2026-01-03T17:29:28.121Z" }, + { url = "https://files.pythonhosted.org/packages/f2/f0/e3ddfa93f17d689dbe014ba048f18e0c9f9b456033b70e94349a2e9048be/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e", size = 1642023, upload-time = "2026-01-03T17:29:30.002Z" }, + { url = "https://files.pythonhosted.org/packages/eb/45/c14019c9ec60a8e243d06d601b33dcc4fd92379424bde3021725859d7f99/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d", size = 1649680, upload-time = "2026-01-03T17:29:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fd/09c9451dae5aa5c5ed756df95ff9ef549d45d4be663bafd1e4954fd836f0/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808", size = 1692407, upload-time = 
"2026-01-03T17:29:33.392Z" }, + { url = "https://files.pythonhosted.org/packages/a6/81/938bc2ec33c10efd6637ccb3d22f9f3160d08e8f3aa2587a2c2d5ab578eb/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40", size = 1543047, upload-time = "2026-01-03T17:29:34.855Z" }, + { url = "https://files.pythonhosted.org/packages/f7/23/80488ee21c8d567c83045e412e1d9b7077d27171591a4eb7822586e8c06a/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29", size = 1715264, upload-time = "2026-01-03T17:29:36.389Z" }, + { url = "https://files.pythonhosted.org/packages/e2/83/259a8da6683182768200b368120ab3deff5370bed93880fb9a3a86299f34/aiohttp-3.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11", size = 1657275, upload-time = "2026-01-03T17:29:38.162Z" }, + { url = "https://files.pythonhosted.org/packages/3f/4f/2c41f800a0b560785c10fb316216ac058c105f9be50bdc6a285de88db625/aiohttp-3.13.3-cp310-cp310-win32.whl", hash = "sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd", size = 434053, upload-time = "2026-01-03T17:29:40.074Z" }, + { url = "https://files.pythonhosted.org/packages/80/df/29cd63c7ecfdb65ccc12f7d808cac4fa2a19544660c06c61a4a48462de0c/aiohttp-3.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c", size = 456687, upload-time = "2026-01-03T17:29:41.819Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = "2026-01-03T17:29:44.822Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" }, + { url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" }, + { url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 
1899449, upload-time = "2026-01-03T17:29:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" }, + { url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" }, + { url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = 
"2026-01-03T17:30:05.132Z" }, + { url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" }, + { url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, + { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, + { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" }, + { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, + { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, + { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" }, + { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 
1767852, upload-time = "2026-01-03T17:30:39.433Z" }, + { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" }, + { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, + { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, + { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, + { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" }, + { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = 
"2026-01-03T17:31:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, + { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, + { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, + { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, + { url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, + { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, + { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" }, + { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" 
}, + { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, + { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, + { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, + { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, + { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, + { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, + { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, + { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, 
upload-time = "2026-01-03T17:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, 
upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "black" +version = "26.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = 
"platformdirs" }, + { name = "pytokens" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/c5/61175d618685d42b005847464b8fb4743a67b1b8fdb75e50e5a96c31a27a/black-26.3.1.tar.gz", hash = "sha256:2c50f5063a9641c7eed7795014ba37b0f5fa227f3d408b968936e24bc0566b07", size = 666155, upload-time = "2026-03-12T03:36:03.593Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/a8/11170031095655d36ebc6664fe0897866f6023892396900eec0e8fdc4299/black-26.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:86a8b5035fce64f5dcd1b794cf8ec4d31fe458cf6ce3986a30deb434df82a1d2", size = 1866562, upload-time = "2026-03-12T03:39:58.639Z" }, + { url = "https://files.pythonhosted.org/packages/69/ce/9e7548d719c3248c6c2abfd555d11169457cbd584d98d179111338423790/black-26.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5602bdb96d52d2d0672f24f6ffe5218795736dd34807fd0fd55ccd6bf206168b", size = 1703623, upload-time = "2026-03-12T03:40:00.347Z" }, + { url = "https://files.pythonhosted.org/packages/7f/0a/8d17d1a9c06f88d3d030d0b1d4373c1551146e252afe4547ed601c0e697f/black-26.3.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c54a4a82e291a1fee5137371ab488866b7c86a3305af4026bdd4dc78642e1ac", size = 1768388, upload-time = "2026-03-12T03:40:01.765Z" }, + { url = "https://files.pythonhosted.org/packages/52/79/c1ee726e221c863cde5164f925bacf183dfdf0397d4e3f94889439b947b4/black-26.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e131579c243c98f35bce64a7e08e87fb2d610544754675d4a0e73a070a5aa3a", size = 1412969, upload-time = "2026-03-12T03:40:03.252Z" }, + { url = "https://files.pythonhosted.org/packages/73/a5/15c01d613f5756f68ed8f6d4ec0a1e24b82b18889fa71affd3d1f7fad058/black-26.3.1-cp310-cp310-win_arm64.whl", hash = "sha256:5ed0ca58586c8d9a487352a96b15272b7fa55d139fc8496b519e78023a8dab0a", 
size = 1220345, upload-time = "2026-03-12T03:40:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/17/57/5f11c92861f9c92eb9dddf515530bc2d06db843e44bdcf1c83c1427824bc/black-26.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:28ef38aee69e4b12fda8dba75e21f9b4f979b490c8ac0baa7cb505369ac9e1ff", size = 1851987, upload-time = "2026-03-12T03:40:06.248Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/340a1463660bf6831f9e39646bf774086dbd8ca7fc3cded9d59bbdf4ad0a/black-26.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bf162ed91a26f1adba8efda0b573bc6924ec1408a52cc6f82cb73ec2b142c", size = 1689499, upload-time = "2026-03-12T03:40:07.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/01/b726c93d717d72733da031d2de10b92c9fa4c8d0c67e8a8a372076579279/black-26.3.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:474c27574d6d7037c1bc875a81d9be0a9a4f9ee95e62800dab3cfaadbf75acd5", size = 1754369, upload-time = "2026-03-12T03:40:09.279Z" }, + { url = "https://files.pythonhosted.org/packages/e3/09/61e91881ca291f150cfc9eb7ba19473c2e59df28859a11a88248b5cbbc4d/black-26.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e9d0d86df21f2e1677cc4bd090cd0e446278bcbbe49bf3659c308c3e402843e", size = 1413613, upload-time = "2026-03-12T03:40:10.943Z" }, + { url = "https://files.pythonhosted.org/packages/16/73/544f23891b22e7efe4d8f812371ab85b57f6a01b2fc45e3ba2e52ba985b8/black-26.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:9a5e9f45e5d5e1c5b5c29b3bd4265dcc90e8b92cf4534520896ed77f791f4da5", size = 1219719, upload-time = "2026-03-12T03:40:12.597Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f8/da5eae4fc75e78e6dceb60624e1b9662ab00d6b452996046dfa9b8a6025b/black-26.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e6f89631eb88a7302d416594a32faeee9fb8fb848290da9d0a5f2903519fc1", size = 1895920, upload-time = "2026-03-12T03:40:13.921Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/9f/04e6f26534da2e1629b2b48255c264cabf5eedc5141d04516d9d68a24111/black-26.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cd2012d35b47d589cb8a16faf8a32ef7a336f56356babd9fcf70939ad1897f", size = 1718499, upload-time = "2026-03-12T03:40:15.239Z" }, + { url = "https://files.pythonhosted.org/packages/04/91/a5935b2a63e31b331060c4a9fdb5a6c725840858c599032a6f3aac94055f/black-26.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f76ff19ec5297dd8e66eb64deda23631e642c9393ab592826fd4bdc97a4bce7", size = 1794994, upload-time = "2026-03-12T03:40:17.124Z" }, + { url = "https://files.pythonhosted.org/packages/e7/0a/86e462cdd311a3c2a8ece708d22aba17d0b2a0d5348ca34b40cdcbea512e/black-26.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ddb113db38838eb9f043623ba274cfaf7d51d5b0c22ecb30afe58b1bb8322983", size = 1420867, upload-time = "2026-03-12T03:40:18.83Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e5/22515a19cb7eaee3440325a6b0d95d2c0e88dd180cb011b12ae488e031d1/black-26.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:dfdd51fc3e64ea4f35873d1b3fb25326773d55d2329ff8449139ebaad7357efb", size = 1230124, upload-time = "2026-03-12T03:40:20.425Z" }, + { url = "https://files.pythonhosted.org/packages/f5/77/5728052a3c0450c53d9bb3945c4c46b91baa62b2cafab6801411b6271e45/black-26.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:855822d90f884905362f602880ed8b5df1b7e3ee7d0db2502d4388a954cc8c54", size = 1895034, upload-time = "2026-03-12T03:40:21.813Z" }, + { url = "https://files.pythonhosted.org/packages/52/73/7cae55fdfdfbe9d19e9a8d25d145018965fe2079fa908101c3733b0c55a0/black-26.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8a33d657f3276328ce00e4d37fe70361e1ec7614da5d7b6e78de5426cb56332f", size = 1718503, upload-time = "2026-03-12T03:40:23.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/87/af89ad449e8254fdbc74654e6467e3c9381b61472cc532ee350d28cfdafb/black-26.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1cd08e99d2f9317292a311dfe578fd2a24b15dbce97792f9c4d752275c1fa56", size = 1793557, upload-time = "2026-03-12T03:40:25.497Z" }, + { url = "https://files.pythonhosted.org/packages/43/10/d6c06a791d8124b843bf325ab4ac7d2f5b98731dff84d6064eafd687ded1/black-26.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:c7e72339f841b5a237ff14f7d3880ddd0fc7f98a1199e8c4327f9a4f478c1839", size = 1422766, upload-time = "2026-03-12T03:40:27.14Z" }, + { url = "https://files.pythonhosted.org/packages/59/4f/40a582c015f2d841ac24fed6390bd68f0fc896069ff3a886317959c9daf8/black-26.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:afc622538b430aa4c8c853f7f63bc582b3b8030fd8c80b70fb5fa5b834e575c2", size = 1232140, upload-time = "2026-03-12T03:40:28.882Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/e36e27c9cebc1311b7579210df6f1c86e50f2d7143ae4fcf8a5017dc8809/black-26.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2d6bfaf7fd0993b420bed691f20f9492d53ce9a2bcccea4b797d34e947318a78", size = 1889234, upload-time = "2026-03-12T03:40:30.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7b/9871acf393f64a5fa33668c19350ca87177b181f44bb3d0c33b2d534f22c/black-26.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f89f2ab047c76a9c03f78d0d66ca519e389519902fa27e7a91117ef7611c0568", size = 1720522, upload-time = "2026-03-12T03:40:32.346Z" }, + { url = "https://files.pythonhosted.org/packages/03/87/e766c7f2e90c07fb7586cc787c9ae6462b1eedab390191f2b7fc7f6170a9/black-26.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b07fc0dab849d24a80a29cfab8d8a19187d1c4685d8a5e6385a5ce323c1f015f", size = 1787824, upload-time = "2026-03-12T03:40:33.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/94/2424338fb2d1875e9e83eed4c8e9c67f6905ec25afd826a911aea2b02535/black-26.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:0126ae5b7c09957da2bdbd91a9ba1207453feada9e9fe51992848658c6c8e01c", size = 1445855, upload-time = "2026-03-12T03:40:35.442Z" }, + { url = "https://files.pythonhosted.org/packages/86/43/0c3338bd928afb8ee7471f1a4eec3bdbe2245ccb4a646092a222e8669840/black-26.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:92c0ec1f2cc149551a2b7b47efc32c866406b6891b0ee4625e95967c8f4acfb1", size = 1258109, upload-time = "2026-03-12T03:40:36.832Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0d/52d98722666d6fc6c3dd4c76df339501d6efd40e0ff95e6186a7b7f0befd/black-26.3.1-py3-none-any.whl", hash = "sha256:2bd5aa94fc267d38bb21a70d7410a89f1a1d318841855f698746f8e7f51acd1b", size = 207542, upload-time = "2026-03-12T03:36:01.668Z" }, +] + +[[package]] +name = "blosc2" +version = "4.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msgpack" }, + { name = "ndindex" }, + { name = "numexpr", marker = "platform_machine != 'wasm32'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/fa/d72f624903dad1f2e95cb97d4e3777284f7eb398792f0d3380fdd73c1fc4/blosc2-4.1.2.tar.gz", hash = "sha256:c127342d976de44fee242137e83660097e0b072779f4164a34e149ac9f693c8a", size = 4341120, upload-time = "2026-03-03T11:05:14.496Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/ec/dbe9d64258d68b3251c362425347e0477e2ccd769dd14dd17baa6011487b/blosc2-4.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb5f383a1b83a96e4b8ea31b976efdfeb4c7df0016b6ce6c7e735eeac92c3874", size = 4644710, upload-time = 
"2026-03-03T11:04:28.449Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/e162c4dfdc5971ee0159c06b26da3b46b0279e558209e422163fb16fc0e2/blosc2-4.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e69c2f90d5691a439ab6f6f873246ae77b26f5ed46794d5693b5cdba5b60f3e8", size = 4118209, upload-time = "2026-03-03T11:04:30.475Z" }, + { url = "https://files.pythonhosted.org/packages/17/c7/45255f959c81379674b041d85bae1e8edabd80173a1fb1ee3fb1c28b6d5e/blosc2-4.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10612397b4829120ac25cd50c2f923edf9cbb830c2d0e467d272013f06a7ffbc", size = 5094511, upload-time = "2026-03-03T11:04:31.763Z" }, + { url = "https://files.pythonhosted.org/packages/63/83/df6cf0828eb76ef0a12bc4a576f70c3850b45a2ab950445e4cb475ef1cd7/blosc2-4.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb93d560e6a0b7f4642795311659a4ab7b2e12b6c79b2077fe06ec7a99784339", size = 5230560, upload-time = "2026-03-03T11:04:33.291Z" }, + { url = "https://files.pythonhosted.org/packages/c5/07/4afc4612a6e323e5d5394267b9b36c50250d075bf4b3bebfb54f2527e639/blosc2-4.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:4c8740e61f97ce96d7a46569a9b51b9b6fdac7b4792a8a486250a00ef6f7fd8b", size = 3146745, upload-time = "2026-03-03T11:04:34.846Z" }, + { url = "https://files.pythonhosted.org/packages/81/85/1240460e27c82897608df1c3f4b9c9243019a2e2345215d5f04e1a36fb15/blosc2-4.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f8cd9d95563fbd6a76eed81ba85669d11c53385fb01ee8a91bed3b8070fa661", size = 4641818, upload-time = "2026-03-03T11:04:36.099Z" }, + { url = "https://files.pythonhosted.org/packages/f1/51/a988a96be0cc8a7c187d26579c21770c0f679a1d12b6268629f554640e33/blosc2-4.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0dcddab7db4398c11190f21b48c2fe468d99b4a003bbacb30011c00be11d9c75", size = 4116600, upload-time = "2026-03-03T11:04:37.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/b6/1c8b6b703d6f000df557ca594451883d18dcbe89b881fe1be75df82d76e7/blosc2-4.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09127cb9d32e2b0d04333ef9e50f7bcf00765f1a20216cede40a04f36d786069", size = 5094195, upload-time = "2026-03-03T11:04:39.035Z" }, + { url = "https://files.pythonhosted.org/packages/2d/2a/d71c52fae9ae60337ad34f872a8537d1329be028b7449951c8f61421298f/blosc2-4.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ca2a8cdb85d6c3dc2ed19b550ee57041c9ea9230f9fa9829cfd0fe467fa6ad1d", size = 5229746, upload-time = "2026-03-03T11:04:40.669Z" }, + { url = "https://files.pythonhosted.org/packages/50/3d/e29f14df0053c510ed75a3f8a7a1429b6e5e72f8e3928911d15f6b63fe36/blosc2-4.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab8e878763b8f19c284cca8854f312988cb8039181eda508a01a1174f97938c5", size = 3145620, upload-time = "2026-03-03T11:04:42.262Z" }, + { url = "https://files.pythonhosted.org/packages/5a/74/ef2f1cea5239062be872fe7db384fcb5f7532257efcec11c960a15a5134f/blosc2-4.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2f39bc24bfde0ba2938f23b3ecd6a69f7788c9e775c88e0be37a3b4680bc84c8", size = 4686887, upload-time = "2026-03-03T11:04:43.857Z" }, + { url = "https://files.pythonhosted.org/packages/d2/87/834a234879ae8bcb61be4bf88855e29f62d06da0b5b45a01f6e7898e9d5f/blosc2-4.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e0bd7e752f636cde649f92acb735d58e23d0813ed9b24fb02f65eaaa7a415cdd", size = 4117160, upload-time = "2026-03-03T11:04:45.11Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d0/84d10472414a605bac9e794e03ff53ce464e22fe83edc365dc88b6833c14/blosc2-4.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ceb440269004619a416813b7c36abef94b028fd702dd8209b5d41311b6ce39c4", size = 5071905, upload-time = "2026-03-03T11:04:47.327Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/c6/6c98cb75da1ef26cb27fedb3edb4b3cdd1b3aa2f1056bdd9de0823effed9/blosc2-4.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:830addc8d8722348421e5d99d719c53a36ff34a468980a7af05938ddb336cf4f", size = 5208010, upload-time = "2026-03-03T11:04:48.542Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d7/d4988cd88c070b2a24b446bf780fc43a7cb73a4af1e092b11edc832f616c/blosc2-4.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:9abc9432f7aa9335c87eb7b3cec72ac7bf3b764518e775b4f60159617e0817bf", size = 3147758, upload-time = "2026-03-03T11:04:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b2/3d0a6711f9376ed2e84e420c3c74656e51803420ed2d0df997b027b6fd2d/blosc2-4.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:572fda198a250ee5e2c6b52d0067805ffa0d46d7e22213fcc23917164c33b8e5", size = 4686973, upload-time = "2026-03-03T11:04:51.321Z" }, + { url = "https://files.pythonhosted.org/packages/f7/5d/caa4c7eeac59664dcce968c69823e2416bf4f184af0b89507f52c085a98e/blosc2-4.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:192f3508522ce8867cd9aee70782450eeb89eb2de882f16d563320362ddf145a", size = 4116819, upload-time = "2026-03-03T11:04:52.66Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ba/e038eec32caaf498f8d95e276c9a294895bf18419ba2504cee77bfec0008/blosc2-4.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:45075f00eb92e8d1abed1ea89038c9827ebd846d47e53c5c9988e22f7044f01f", size = 5071700, upload-time = "2026-03-03T11:04:53.856Z" }, + { url = "https://files.pythonhosted.org/packages/59/74/394d53ac3b3583163f7cc5b43d59d457e6398d8f1b51b85bc9f7bd7cf430/blosc2-4.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8f453b76764753c7c0ba3ce13ffcf0cefa191b0668adb28979f88cb9093ad7ae", size = 5208120, upload-time = "2026-03-03T11:04:55.413Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/e2/d5b09cec0383381026c41fd071ae6a9342dfd70d0584aeae672e77dda82f/blosc2-4.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a72cc1fdc74744723092ccb63d03cf49c64f911450d2c9296182ce7bcda45d04", size = 3147727, upload-time = "2026-03-03T11:04:57.506Z" }, + { url = "https://files.pythonhosted.org/packages/02/bf/20bc86e3eef536cf077be84c2b52583620ac877852962cf2d6c0281052ed/blosc2-4.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1d8b7c45d537bfeb4b4c6d93c042ae4c07fe5aa6ce47d1acccb028802b2091d7", size = 4689092, upload-time = "2026-03-03T11:04:59.094Z" }, + { url = "https://files.pythonhosted.org/packages/04/f6/c0e9a30bdd151294203c933a2d612559548bdbd21e3ebfc4671982117f3d/blosc2-4.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9303b3e4a503a15cb4c42eb9c194a75a41603b879d89945967d72b5606857395", size = 4119002, upload-time = "2026-03-03T11:05:00.573Z" }, + { url = "https://files.pythonhosted.org/packages/37/75/59a2b35ae875198528b2bd89015fc4f143e40f859749735395877d7fdf96/blosc2-4.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0dcd142b6ec74b69f9ccfc006a98ea3e91617b245c0455f894a41a03cd88bd73", size = 5076726, upload-time = "2026-03-03T11:05:02.189Z" }, + { url = "https://files.pythonhosted.org/packages/24/98/c8c1e711d65e45c7109cd1ea90dd98d30dd2bc5d1c8d670fa91a5c563137/blosc2-4.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:05551c7111e96095b88f7070ec36dacb892a7f8c52c7550c019c93f892c511a9", size = 5209021, upload-time = "2026-03-03T11:05:03.813Z" }, + { url = "https://files.pythonhosted.org/packages/a9/85/4457050893f21c0b3237ce2c279a63f7e6cbf9b86126a42f17f5b83cafe6/blosc2-4.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:68d04c8ea0ed1798baf0921b34434b564197c8a11569f5c64d9bea195329987c", size = 3220427, upload-time = "2026-03-03T11:05:05.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/1c/18c47a98ba38a618f0cd3a1872d71b3db8553ce5466e7b5fd74b03dbe377/blosc2-4.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:52f69fd854cf2d9ce83cb0f6f214c6c9fb7f9149c24bd9af929482cbe95d3ff1", size = 4705783, upload-time = "2026-03-03T11:05:07.2Z" }, + { url = "https://files.pythonhosted.org/packages/8a/97/72ddd8146f8bd77026c1c28813e113c6b8a40b4f9bd4fe064f3618cebcd8/blosc2-4.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cdfb208850c082e629dbed2aa8ff0328b64bfca691fcfdd89141af20f5fcc908", size = 4141025, upload-time = "2026-03-03T11:05:08.781Z" }, + { url = "https://files.pythonhosted.org/packages/cc/43/537635bf12f258db17a1a80e56c39bfefce218e1baab5459c05a4ff9739f/blosc2-4.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:df3e78642af359f3bdc46f4446f0517f2deca2b3d4c9c92caf49d4abf6ce2a9c", size = 5061103, upload-time = "2026-03-03T11:05:10.475Z" }, + { url = "https://files.pythonhosted.org/packages/36/e3/ad7dff6eaf0e36a0959865ebd5a16026929f5a919cf0158858c307d6971d/blosc2-4.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:482e2f1447d47241af1952a563573cf12f67fcb86a2d87227dc28e427b29f865", size = 5195395, upload-time = "2026-03-03T11:05:11.768Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9e/b028eed46dfa45def2ca9c3e66aa3b8a3188a8a4998d017c699caf2bf0d9/blosc2-4.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:9ee2217b03ecca4e823ff22701f423b7630f2b0a44773e0486ddbaa953ed39e9", size = 3243706, upload-time = "2026-03-03T11:05:13.294Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/21/a2b1505639008ba2e6ef03733a81fc6cfd6a07ea6139a2b76421230b8dad/charset_normalizer-3.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4167a621a9a1a986c73777dbc15d4b5eac8ac5c10393374109a343d4013ec765", size = 283319, upload-time = "2026-03-06T06:00:26.433Z" }, + { url = "https://files.pythonhosted.org/packages/70/67/df234c29b68f4e1e095885c9db1cb4b69b8aba49cf94fac041db4aaf1267/charset_normalizer-3.4.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f64c6bf8f32f9133b668c7f7a7cbdbc453412bc95ecdbd157f3b1e377a92990", size = 189974, upload-time = "2026-03-06T06:00:28.222Z" }, + { url = "https://files.pythonhosted.org/packages/df/7f/fc66af802961c6be42e2c7b69c58f95cbd1f39b0e81b3365d8efe2a02a04/charset_normalizer-3.4.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:568e3c34b58422075a1b49575a6abc616d9751b4d61b23f712e12ebb78fe47b2", size = 207866, upload-time = "2026-03-06T06:00:29.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/23/404eb36fac4e95b833c50e305bba9a241086d427bb2167a42eac7c4f7da4/charset_normalizer-3.4.5-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:036c079aa08a6a592b82487f97c60b439428320ed1b2ea0b3912e99d30c77765", size = 203239, upload-time = "2026-03-06T06:00:31.086Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2f/8a1d989bfadd120c90114ab33e0d2a0cbde05278c1fc15e83e62d570f50a/charset_normalizer-3.4.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:340810d34ef83af92148e96e3e44cb2d3f910d2bf95e5618a5c467d9f102231d", size = 196529, upload-time = "2026-03-06T06:00:32.608Z" }, + { url = "https://files.pythonhosted.org/packages/a5/0c/c75f85ff7ca1f051958bb518cd43922d86f576c03947a050fbedfdfb4f15/charset_normalizer-3.4.5-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:cd2d0f0ec9aa977a27731a3209ebbcacebebaf41f902bd453a928bfd281cf7f8", size = 184152, upload-time = "2026-03-06T06:00:33.93Z" }, + { url = "https://files.pythonhosted.org/packages/f9/20/4ed37f6199af5dde94d4aeaf577f3813a5ec6635834cda1d957013a09c76/charset_normalizer-3.4.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b362bcd27819f9c07cbf23db4e0e8cd4b44c5ecd900c2ff907b2b92274a7412", size = 195226, upload-time = "2026-03-06T06:00:35.469Z" }, + { url = "https://files.pythonhosted.org/packages/28/31/7ba1102178cba7c34dcc050f43d427172f389729e356038f0726253dd914/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77be992288f720306ab4108fe5c74797de327f3248368dfc7e1a916d6ed9e5a2", size = 192933, upload-time = "2026-03-06T06:00:36.83Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/f86443ab3921e6a60b33b93f4a1161222231f6c69bc24fb18f3bee7b8518/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:8b78d8a609a4b82c273257ee9d631ded7fac0d875bdcdccc109f3ee8328cfcb1", size = 185647, 
upload-time = "2026-03-06T06:00:38.367Z" }, + { url = "https://files.pythonhosted.org/packages/82/44/08b8be891760f1f5a6d23ce11d6d50c92981603e6eb740b4f72eea9424e2/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ba20bdf69bd127f66d0174d6f2a93e69045e0b4036dc1ca78e091bcc765830c4", size = 209533, upload-time = "2026-03-06T06:00:41.931Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5f/df114f23406199f8af711ddccfbf409ffbc5b7cdc18fa19644997ff0c9bb/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:76a9d0de4d0eab387822e7b35d8f89367dd237c72e82ab42b9f7bf5e15ada00f", size = 195901, upload-time = "2026-03-06T06:00:43.978Z" }, + { url = "https://files.pythonhosted.org/packages/07/83/71ef34a76fe8aa05ff8f840244bda2d61e043c2ef6f30d200450b9f6a1be/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8fff79bf5978c693c9b1a4d71e4a94fddfb5fe744eb062a318e15f4a2f63a550", size = 204950, upload-time = "2026-03-06T06:00:45.202Z" }, + { url = "https://files.pythonhosted.org/packages/58/40/0253be623995365137d7dc68e45245036207ab2227251e69a3d93ce43183/charset_normalizer-3.4.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c7e84e0c0005e3bdc1a9211cd4e62c78ba80bc37b2365ef4410cd2007a9047f2", size = 198546, upload-time = "2026-03-06T06:00:46.481Z" }, + { url = "https://files.pythonhosted.org/packages/ed/5c/5f3cb5b259a130895ef5ae16b38eaf141430fa3f7af50cd06c5d67e4f7b2/charset_normalizer-3.4.5-cp310-cp310-win32.whl", hash = "sha256:58ad8270cfa5d4bef1bc85bd387217e14ff154d6630e976c6f56f9a040757475", size = 132516, upload-time = "2026-03-06T06:00:47.924Z" }, + { url = "https://files.pythonhosted.org/packages/a5/c3/84fb174e7770f2df2e1a2115090771bfbc2227fb39a765c6d00568d1aab4/charset_normalizer-3.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:02a9d1b01c1e12c27883b0c9349e0bcd9ae92e727ff1a277207e1a262b1cbf05", size = 142906, upload-time = "2026-03-06T06:00:49.389Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/b2/6f852f8b969f2cbd0d4092d2e60139ab1af95af9bb651337cae89ec0f684/charset_normalizer-3.4.5-cp310-cp310-win_arm64.whl", hash = "sha256:039215608ac7b358c4da0191d10fc76868567fbf276d54c14721bdedeb6de064", size = 133258, upload-time = "2026-03-06T06:00:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/8f/9e/bcec3b22c64ecec47d39bf5167c2613efd41898c019dccd4183f6aa5d6a7/charset_normalizer-3.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:610f72c0ee565dfb8ae1241b666119582fdbfe7c0975c175be719f940e110694", size = 279531, upload-time = "2026-03-06T06:00:52.252Z" }, + { url = "https://files.pythonhosted.org/packages/58/12/81fd25f7e7078ab5d1eedbb0fac44be4904ae3370a3bf4533c8f2d159acd/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60d68e820af339df4ae8358c7a2e7596badeb61e544438e489035f9fbf3246a5", size = 188006, upload-time = "2026-03-06T06:00:53.8Z" }, + { url = "https://files.pythonhosted.org/packages/ae/6e/f2d30e8c27c1b0736a6520311982cf5286cfc7f6cac77d7bc1325e3a23f2/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b473fc8dca1c3ad8559985794815f06ca3fc71942c969129070f2c3cdf7281", size = 205085, upload-time = "2026-03-06T06:00:55.311Z" }, + { url = "https://files.pythonhosted.org/packages/d0/90/d12cefcb53b5931e2cf792a33718d7126efb116a320eaa0742c7059a95e4/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d4eb8ac7469b2a5d64b5b8c04f84d8bf3ad340f4514b98523805cbf46e3b3923", size = 200545, upload-time = "2026-03-06T06:00:56.532Z" }, + { url = "https://files.pythonhosted.org/packages/03/f4/44d3b830a20e89ff82a3134912d9a1cf6084d64f3b95dcad40f74449a654/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5bcb3227c3d9aaf73eaaab1db7ccd80a8995c509ee9941e2aae060ca6e4e5d81", size = 193863, upload-time = "2026-03-06T06:00:57.823Z" }, + { url = "https://files.pythonhosted.org/packages/25/4b/f212119c18a6320a9d4a730d1b4057875cdeabf21b3614f76549042ef8a8/charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:75ee9c1cce2911581a70a3c0919d8bccf5b1cbc9b0e5171400ec736b4b569497", size = 181827, upload-time = "2026-03-06T06:00:59.323Z" }, + { url = "https://files.pythonhosted.org/packages/74/00/b26158e48b425a202a92965f8069e8a63d9af1481dfa206825d7f74d2a3c/charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d1401945cb77787dbd3af2446ff2d75912327c4c3a1526ab7955ecf8600687c", size = 191085, upload-time = "2026-03-06T06:01:00.546Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c2/1c1737bf6fd40335fe53d28fe49afd99ee4143cc57a845e99635ce0b9b6d/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a45e504f5e1be0bd385935a8e1507c442349ca36f511a47057a71c9d1d6ea9e", size = 190688, upload-time = "2026-03-06T06:01:02.479Z" }, + { url = "https://files.pythonhosted.org/packages/5a/3d/abb5c22dc2ef493cd56522f811246a63c5427c08f3e3e50ab663de27fcf4/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e09f671a54ce70b79a1fc1dc6da3072b7ef7251fadb894ed92d9aa8218465a5f", size = 183077, upload-time = "2026-03-06T06:01:04.231Z" }, + { url = "https://files.pythonhosted.org/packages/44/33/5298ad4d419a58e25b3508e87f2758d1442ff00c2471f8e0403dab8edad5/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d01de5e768328646e6a3fa9e562706f8f6641708c115c62588aef2b941a4f88e", size = 206706, upload-time = "2026-03-06T06:01:05.773Z" }, + { url = "https://files.pythonhosted.org/packages/7b/17/51e7895ac0f87c3b91d276a449ef09f5532a7529818f59646d7a55089432/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = 
"sha256:131716d6786ad5e3dc542f5cc6f397ba3339dc0fb87f87ac30e550e8987756af", size = 191665, upload-time = "2026-03-06T06:01:07.473Z" }, + { url = "https://files.pythonhosted.org/packages/90/8f/cce9adf1883e98906dbae380d769b4852bb0fa0004bc7d7a2243418d3ea8/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a374cc0b88aa710e8865dc1bd6edb3743c59f27830f0293ab101e4cf3ce9f85", size = 201950, upload-time = "2026-03-06T06:01:08.973Z" }, + { url = "https://files.pythonhosted.org/packages/08/ca/bce99cd5c397a52919e2769d126723f27a4c037130374c051c00470bcd38/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d31f0d1671e1534e395f9eb84a68e0fb670e1edb1fe819a9d7f564ae3bc4e53f", size = 195830, upload-time = "2026-03-06T06:01:10.155Z" }, + { url = "https://files.pythonhosted.org/packages/87/4f/2e3d023a06911f1281f97b8f036edc9872167036ca6f55cc874a0be6c12c/charset_normalizer-3.4.5-cp311-cp311-win32.whl", hash = "sha256:cace89841c0599d736d3d74a27bc5821288bb47c5441923277afc6059d7fbcb4", size = 132029, upload-time = "2026-03-06T06:01:11.706Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1f/a853b73d386521fd44b7f67ded6b17b7b2367067d9106a5c4b44f9a34274/charset_normalizer-3.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:f8102ae93c0bc863b1d41ea0f4499c20a83229f52ed870850892df555187154a", size = 142404, upload-time = "2026-03-06T06:01:12.865Z" }, + { url = "https://files.pythonhosted.org/packages/b4/10/dba36f76b71c38e9d391abe0fd8a5b818790e053c431adecfc98c35cd2a9/charset_normalizer-3.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:ed98364e1c262cf5f9363c3eca8c2df37024f52a8fa1180a3610014f26eac51c", size = 132796, upload-time = "2026-03-06T06:01:14.106Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b6/9ee9c1a608916ca5feae81a344dffbaa53b26b90be58cc2159e3332d44ec/charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade", size = 280976, 
upload-time = "2026-03-06T06:01:15.276Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d8/a54f7c0b96f1df3563e9190f04daf981e365a9b397eedfdfb5dbef7e5c6c/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54", size = 189356, upload-time = "2026-03-06T06:01:16.511Z" }, + { url = "https://files.pythonhosted.org/packages/42/69/2bf7f76ce1446759a5787cb87d38f6a61eb47dbbdf035cfebf6347292a65/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467", size = 206369, upload-time = "2026-03-06T06:01:17.853Z" }, + { url = "https://files.pythonhosted.org/packages/10/9c/949d1a46dab56b959d9a87272482195f1840b515a3380e39986989a893ae/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60", size = 203285, upload-time = "2026-03-06T06:01:19.473Z" }, + { url = "https://files.pythonhosted.org/packages/67/5c/ae30362a88b4da237d71ea214a8c7eb915db3eec941adda511729ac25fa2/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d", size = 196274, upload-time = "2026-03-06T06:01:20.728Z" }, + { url = "https://files.pythonhosted.org/packages/b2/07/c9f2cb0e46cb6d64fdcc4f95953747b843bb2181bda678dc4e699b8f0f9a/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e", size = 184715, upload-time = "2026-03-06T06:01:22.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/64/6b0ca95c44fddf692cd06d642b28f63009d0ce325fad6e9b2b4d0ef86a52/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f", size = 193426, upload-time = "2026-03-06T06:01:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/50/bc/a730690d726403743795ca3f5bb2baf67838c5fea78236098f324b965e40/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc", size = 191780, upload-time = "2026-03-06T06:01:25.053Z" }, + { url = "https://files.pythonhosted.org/packages/97/4f/6c0bc9af68222b22951552d73df4532b5be6447cee32d58e7e8c74ecbb7b/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95", size = 185805, upload-time = "2026-03-06T06:01:26.294Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b9/a523fb9b0ee90814b503452b2600e4cbc118cd68714d57041564886e7325/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a", size = 208342, upload-time = "2026-03-06T06:01:27.55Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/c59e761dee4464050713e50e27b58266cc8e209e518c0b378c1580c959ba/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac", size = 193661, upload-time = "2026-03-06T06:01:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/1c/43/729fa30aad69783f755c5ad8649da17ee095311ca42024742701e202dc59/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1", size = 204819, upload-time = "2026-03-06T06:01:30.298Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/33/d9b442ce5a91b96fc0840455a9e49a611bbadae6122778d0a6a79683dd31/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98", size = 198080, upload-time = "2026-03-06T06:01:31.478Z" }, + { url = "https://files.pythonhosted.org/packages/56/5a/b8b5a23134978ee9885cee2d6995f4c27cc41f9baded0a9685eabc5338f0/charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262", size = 132630, upload-time = "2026-03-06T06:01:33.056Z" }, + { url = "https://files.pythonhosted.org/packages/70/53/e44a4c07e8904500aec95865dc3f6464dc3586a039ef0df606eb3ac38e35/charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636", size = 142856, upload-time = "2026-03-06T06:01:34.489Z" }, + { url = "https://files.pythonhosted.org/packages/ea/aa/c5628f7cad591b1cf45790b7a61483c3e36cf41349c98af7813c483fd6e8/charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02", size = 132982, upload-time = "2026-03-06T06:01:35.641Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, + { url = "https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, + { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, + { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, + { url = "https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", 
size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, + { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, upload-time = "2026-03-06T06:02:01.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, + { url = "https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, upload-time = "2026-03-06T06:02:14.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, + { url = "https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, + { url = "https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "contourpy" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/54/eb9bfc647b19f2009dd5c7f5ec51c4e6ca831725f1aea7a993034f483147/contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54", size = 13466130, upload-time = "2025-04-15T17:47:53.79Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/a3/da4153ec8fe25d263aa48c1a4cbde7f49b59af86f0b6f7862788c60da737/contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934", size = 268551, upload-time = "2025-04-15T17:34:46.581Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6c/330de89ae1087eb622bfca0177d32a7ece50c3ef07b28002de4757d9d875/contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989", size = 253399, upload-time = "2025-04-15T17:34:51.427Z" }, + { url = "https://files.pythonhosted.org/packages/c1/bd/20c6726b1b7f81a8bee5271bed5c165f0a8e1f572578a9d27e2ccb763cb2/contourpy-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9be002b31c558d1ddf1b9b415b162c603405414bacd6932d031c5b5a8b757f0d", size = 312061, upload-time = "2025-04-15T17:34:55.961Z" }, + { url = "https://files.pythonhosted.org/packages/22/fc/a9665c88f8a2473f823cf1ec601de9e5375050f1958cbb356cdf06ef1ab6/contourpy-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d2e74acbcba3bfdb6d9d8384cdc4f9260cae86ed9beee8bd5f54fee49a430b9", 
size = 351956, upload-time = "2025-04-15T17:35:00.992Z" }, + { url = "https://files.pythonhosted.org/packages/25/eb/9f0a0238f305ad8fb7ef42481020d6e20cf15e46be99a1fcf939546a177e/contourpy-1.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e259bced5549ac64410162adc973c5e2fb77f04df4a439d00b478e57a0e65512", size = 320872, upload-time = "2025-04-15T17:35:06.177Z" }, + { url = "https://files.pythonhosted.org/packages/32/5c/1ee32d1c7956923202f00cf8d2a14a62ed7517bdc0ee1e55301227fc273c/contourpy-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad687a04bc802cbe8b9c399c07162a3c35e227e2daccf1668eb1f278cb698631", size = 325027, upload-time = "2025-04-15T17:35:11.244Z" }, + { url = "https://files.pythonhosted.org/packages/83/bf/9baed89785ba743ef329c2b07fd0611d12bfecbedbdd3eeecf929d8d3b52/contourpy-1.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cdd22595308f53ef2f891040ab2b93d79192513ffccbd7fe19be7aa773a5e09f", size = 1306641, upload-time = "2025-04-15T17:35:26.701Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cc/74e5e83d1e35de2d28bd97033426b450bc4fd96e092a1f7a63dc7369b55d/contourpy-1.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4f54d6a2defe9f257327b0f243612dd051cc43825587520b1bf74a31e2f6ef2", size = 1374075, upload-time = "2025-04-15T17:35:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/0c/42/17f3b798fd5e033b46a16f8d9fcb39f1aba051307f5ebf441bad1ecf78f8/contourpy-1.3.2-cp310-cp310-win32.whl", hash = "sha256:f939a054192ddc596e031e50bb13b657ce318cf13d264f095ce9db7dc6ae81c0", size = 177534, upload-time = "2025-04-15T17:35:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/54/ec/5162b8582f2c994721018d0c9ece9dc6ff769d298a8ac6b6a652c307e7df/contourpy-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c440093bbc8fc21c637c03bafcbef95ccd963bc6e0514ad887932c18ca2a759a", size = 221188, upload-time = "2025-04-15T17:35:50.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/b9/ede788a0b56fc5b071639d06c33cb893f68b1178938f3425debebe2dab78/contourpy-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a37a2fb93d4df3fc4c0e363ea4d16f83195fc09c891bc8ce072b9d084853445", size = 269636, upload-time = "2025-04-15T17:35:54.473Z" }, + { url = "https://files.pythonhosted.org/packages/e6/75/3469f011d64b8bbfa04f709bfc23e1dd71be54d05b1b083be9f5b22750d1/contourpy-1.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7cd50c38f500bbcc9b6a46643a40e0913673f869315d8e70de0438817cb7773", size = 254636, upload-time = "2025-04-15T17:35:58.283Z" }, + { url = "https://files.pythonhosted.org/packages/8d/2f/95adb8dae08ce0ebca4fd8e7ad653159565d9739128b2d5977806656fcd2/contourpy-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6658ccc7251a4433eebd89ed2672c2ed96fba367fd25ca9512aa92a4b46c4f1", size = 313053, upload-time = "2025-04-15T17:36:03.235Z" }, + { url = "https://files.pythonhosted.org/packages/c3/a6/8ccf97a50f31adfa36917707fe39c9a0cbc24b3bbb58185577f119736cc9/contourpy-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:70771a461aaeb335df14deb6c97439973d253ae70660ca085eec25241137ef43", size = 352985, upload-time = "2025-04-15T17:36:08.275Z" }, + { url = "https://files.pythonhosted.org/packages/1d/b6/7925ab9b77386143f39d9c3243fdd101621b4532eb126743201160ffa7e6/contourpy-1.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a887a6e8c4cd0897507d814b14c54a8c2e2aa4ac9f7686292f9769fcf9a6ab", size = 323750, upload-time = "2025-04-15T17:36:13.29Z" }, + { url = "https://files.pythonhosted.org/packages/c2/f3/20c5d1ef4f4748e52d60771b8560cf00b69d5c6368b5c2e9311bcfa2a08b/contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3859783aefa2b8355697f16642695a5b9792e7a46ab86da1118a4a23a51a33d7", size = 326246, upload-time = "2025-04-15T17:36:18.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/e5/9dae809e7e0b2d9d70c52b3d24cba134dd3dad979eb3e5e71f5df22ed1f5/contourpy-1.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eab0f6db315fa4d70f1d8ab514e527f0366ec021ff853d7ed6a2d33605cf4b83", size = 1308728, upload-time = "2025-04-15T17:36:33.878Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4a/0058ba34aeea35c0b442ae61a4f4d4ca84d6df8f91309bc2d43bb8dd248f/contourpy-1.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d91a3ccc7fea94ca0acab82ceb77f396d50a1f67412efe4c526f5d20264e6ecd", size = 1375762, upload-time = "2025-04-15T17:36:51.295Z" }, + { url = "https://files.pythonhosted.org/packages/09/33/7174bdfc8b7767ef2c08ed81244762d93d5c579336fc0b51ca57b33d1b80/contourpy-1.3.2-cp311-cp311-win32.whl", hash = "sha256:1c48188778d4d2f3d48e4643fb15d8608b1d01e4b4d6b0548d9b336c28fc9b6f", size = 178196, upload-time = "2025-04-15T17:36:55.002Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fe/4029038b4e1c4485cef18e480b0e2cd2d755448bb071eb9977caac80b77b/contourpy-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:5ebac872ba09cb8f2131c46b8739a7ff71de28a24c869bcad554477eb089a878", size = 222017, upload-time = "2025-04-15T17:36:58.576Z" }, + { url = "https://files.pythonhosted.org/packages/34/f7/44785876384eff370c251d58fd65f6ad7f39adce4a093c934d4a67a7c6b6/contourpy-1.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4caf2bcd2969402bf77edc4cb6034c7dd7c0803213b3523f111eb7460a51b8d2", size = 271580, upload-time = "2025-04-15T17:37:03.105Z" }, + { url = "https://files.pythonhosted.org/packages/93/3b/0004767622a9826ea3d95f0e9d98cd8729015768075d61f9fea8eeca42a8/contourpy-1.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82199cb78276249796419fe36b7386bd8d2cc3f28b3bc19fe2454fe2e26c4c15", size = 255530, upload-time = "2025-04-15T17:37:07.026Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/bb/7bd49e1f4fa805772d9fd130e0d375554ebc771ed7172f48dfcd4ca61549/contourpy-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106fab697af11456fcba3e352ad50effe493a90f893fca6c2ca5c033820cea92", size = 307688, upload-time = "2025-04-15T17:37:11.481Z" }, + { url = "https://files.pythonhosted.org/packages/fc/97/e1d5dbbfa170725ef78357a9a0edc996b09ae4af170927ba8ce977e60a5f/contourpy-1.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d14f12932a8d620e307f715857107b1d1845cc44fdb5da2bc8e850f5ceba9f87", size = 347331, upload-time = "2025-04-15T17:37:18.212Z" }, + { url = "https://files.pythonhosted.org/packages/6f/66/e69e6e904f5ecf6901be3dd16e7e54d41b6ec6ae3405a535286d4418ffb4/contourpy-1.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:532fd26e715560721bb0d5fc7610fce279b3699b018600ab999d1be895b09415", size = 318963, upload-time = "2025-04-15T17:37:22.76Z" }, + { url = "https://files.pythonhosted.org/packages/a8/32/b8a1c8965e4f72482ff2d1ac2cd670ce0b542f203c8e1d34e7c3e6925da7/contourpy-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b383144cf2d2c29f01a1e8170f50dacf0eac02d64139dcd709a8ac4eb3cfe", size = 323681, upload-time = "2025-04-15T17:37:33.001Z" }, + { url = "https://files.pythonhosted.org/packages/30/c6/12a7e6811d08757c7162a541ca4c5c6a34c0f4e98ef2b338791093518e40/contourpy-1.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c49f73e61f1f774650a55d221803b101d966ca0c5a2d6d5e4320ec3997489441", size = 1308674, upload-time = "2025-04-15T17:37:48.64Z" }, + { url = "https://files.pythonhosted.org/packages/2a/8a/bebe5a3f68b484d3a2b8ffaf84704b3e343ef1addea528132ef148e22b3b/contourpy-1.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d80b2c0300583228ac98d0a927a1ba6a2ba6b8a742463c564f1d419ee5b211e", size = 1380480, upload-time = "2025-04-15T17:38:06.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/db/fcd325f19b5978fb509a7d55e06d99f5f856294c1991097534360b307cf1/contourpy-1.3.2-cp312-cp312-win32.whl", hash = "sha256:90df94c89a91b7362e1142cbee7568f86514412ab8a2c0d0fca72d7e91b62912", size = 178489, upload-time = "2025-04-15T17:38:10.338Z" }, + { url = "https://files.pythonhosted.org/packages/01/c8/fadd0b92ffa7b5eb5949bf340a63a4a496a6930a6c37a7ba0f12acb076d6/contourpy-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c942a01d9163e2e5cfb05cb66110121b8d07ad438a17f9e766317bcb62abf73", size = 223042, upload-time = "2025-04-15T17:38:14.239Z" }, + { url = "https://files.pythonhosted.org/packages/2e/61/5673f7e364b31e4e7ef6f61a4b5121c5f170f941895912f773d95270f3a2/contourpy-1.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:de39db2604ae755316cb5967728f4bea92685884b1e767b7c24e983ef5f771cb", size = 271630, upload-time = "2025-04-15T17:38:19.142Z" }, + { url = "https://files.pythonhosted.org/packages/ff/66/a40badddd1223822c95798c55292844b7e871e50f6bfd9f158cb25e0bd39/contourpy-1.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f9e896f447c5c8618f1edb2bafa9a4030f22a575ec418ad70611450720b5b08", size = 255670, upload-time = "2025-04-15T17:38:23.688Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c7/cf9fdee8200805c9bc3b148f49cb9482a4e3ea2719e772602a425c9b09f8/contourpy-1.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71e2bd4a1c4188f5c2b8d274da78faab884b59df20df63c34f74aa1813c4427c", size = 306694, upload-time = "2025-04-15T17:38:28.238Z" }, + { url = "https://files.pythonhosted.org/packages/dd/e7/ccb9bec80e1ba121efbffad7f38021021cda5be87532ec16fd96533bb2e0/contourpy-1.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de425af81b6cea33101ae95ece1f696af39446db9682a0b56daaa48cfc29f38f", size = 345986, upload-time = "2025-04-15T17:38:33.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/49/ca13bb2da90391fa4219fdb23b078d6065ada886658ac7818e5441448b78/contourpy-1.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:977e98a0e0480d3fe292246417239d2d45435904afd6d7332d8455981c408b85", size = 318060, upload-time = "2025-04-15T17:38:38.672Z" }, + { url = "https://files.pythonhosted.org/packages/c8/65/5245ce8c548a8422236c13ffcdcdada6a2a812c361e9e0c70548bb40b661/contourpy-1.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:434f0adf84911c924519d2b08fc10491dd282b20bdd3fa8f60fd816ea0b48841", size = 322747, upload-time = "2025-04-15T17:38:43.712Z" }, + { url = "https://files.pythonhosted.org/packages/72/30/669b8eb48e0a01c660ead3752a25b44fdb2e5ebc13a55782f639170772f9/contourpy-1.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c66c4906cdbc50e9cba65978823e6e00b45682eb09adbb78c9775b74eb222422", size = 1308895, upload-time = "2025-04-15T17:39:00.224Z" }, + { url = "https://files.pythonhosted.org/packages/05/5a/b569f4250decee6e8d54498be7bdf29021a4c256e77fe8138c8319ef8eb3/contourpy-1.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8b7fc0cd78ba2f4695fd0a6ad81a19e7e3ab825c31b577f384aa9d7817dc3bef", size = 1379098, upload-time = "2025-04-15T17:43:29.649Z" }, + { url = "https://files.pythonhosted.org/packages/19/ba/b227c3886d120e60e41b28740ac3617b2f2b971b9f601c835661194579f1/contourpy-1.3.2-cp313-cp313-win32.whl", hash = "sha256:15ce6ab60957ca74cff444fe66d9045c1fd3e92c8936894ebd1f3eef2fff075f", size = 178535, upload-time = "2025-04-15T17:44:44.532Z" }, + { url = "https://files.pythonhosted.org/packages/12/6e/2fed56cd47ca739b43e892707ae9a13790a486a3173be063681ca67d2262/contourpy-1.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e1578f7eafce927b168752ed7e22646dad6cd9bca673c60bff55889fa236ebf9", size = 223096, upload-time = "2025-04-15T17:44:48.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/4c/e76fe2a03014a7c767d79ea35c86a747e9325537a8b7627e0e5b3ba266b4/contourpy-1.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0475b1f6604896bc7c53bb070e355e9321e1bc0d381735421a2d2068ec56531f", size = 285090, upload-time = "2025-04-15T17:43:34.084Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e2/5aba47debd55d668e00baf9651b721e7733975dc9fc27264a62b0dd26eb8/contourpy-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c85bb486e9be652314bb5b9e2e3b0d1b2e643d5eec4992c0fbe8ac71775da739", size = 268643, upload-time = "2025-04-15T17:43:38.626Z" }, + { url = "https://files.pythonhosted.org/packages/a1/37/cd45f1f051fe6230f751cc5cdd2728bb3a203f5619510ef11e732109593c/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:745b57db7758f3ffc05a10254edd3182a2a83402a89c00957a8e8a22f5582823", size = 310443, upload-time = "2025-04-15T17:43:44.522Z" }, + { url = "https://files.pythonhosted.org/packages/8b/a2/36ea6140c306c9ff6dd38e3bcec80b3b018474ef4d17eb68ceecd26675f4/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:970e9173dbd7eba9b4e01aab19215a48ee5dd3f43cef736eebde064a171f89a5", size = 349865, upload-time = "2025-04-15T17:43:49.545Z" }, + { url = "https://files.pythonhosted.org/packages/95/b7/2fc76bc539693180488f7b6cc518da7acbbb9e3b931fd9280504128bf956/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6c4639a9c22230276b7bffb6a850dfc8258a2521305e1faefe804d006b2e532", size = 321162, upload-time = "2025-04-15T17:43:54.203Z" }, + { url = "https://files.pythonhosted.org/packages/f4/10/76d4f778458b0aa83f96e59d65ece72a060bacb20cfbee46cf6cd5ceba41/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc829960f34ba36aad4302e78eabf3ef16a3a100863f0d4eeddf30e8a485a03b", size = 327355, upload-time = "2025-04-15T17:44:01.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/a3/10cf483ea683f9f8ab096c24bad3cce20e0d1dd9a4baa0e2093c1c962d9d/contourpy-1.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d32530b534e986374fc19eaa77fcb87e8a99e5431499949b828312bdcd20ac52", size = 1307935, upload-time = "2025-04-15T17:44:17.322Z" }, + { url = "https://files.pythonhosted.org/packages/78/73/69dd9a024444489e22d86108e7b913f3528f56cfc312b5c5727a44188471/contourpy-1.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e298e7e70cf4eb179cc1077be1c725b5fd131ebc81181bf0c03525c8abc297fd", size = 1372168, upload-time = "2025-04-15T17:44:33.43Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1b/96d586ccf1b1a9d2004dd519b25fbf104a11589abfd05484ff12199cca21/contourpy-1.3.2-cp313-cp313t-win32.whl", hash = "sha256:d0e589ae0d55204991450bb5c23f571c64fe43adaa53f93fc902a84c96f52fe1", size = 189550, upload-time = "2025-04-15T17:44:37.092Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e6/6000d0094e8a5e32ad62591c8609e269febb6e4db83a1c75ff8868b42731/contourpy-1.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:78e9253c3de756b3f6a5174d024c4835acd59eb3f8e2ca13e775dbffe1558f69", size = 238214, upload-time = "2025-04-15T17:44:40.827Z" }, + { url = "https://files.pythonhosted.org/packages/33/05/b26e3c6ecc05f349ee0013f0bb850a761016d89cec528a98193a48c34033/contourpy-1.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fd93cc7f3139b6dd7aab2f26a90dde0aa9fc264dbf70f6740d498a70b860b82c", size = 265681, upload-time = "2025-04-15T17:44:59.314Z" }, + { url = "https://files.pythonhosted.org/packages/2b/25/ac07d6ad12affa7d1ffed11b77417d0a6308170f44ff20fa1d5aa6333f03/contourpy-1.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107ba8a6a7eec58bb475329e6d3b95deba9440667c4d62b9b6063942b61d7f16", size = 315101, upload-time = "2025-04-15T17:45:04.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/4d/5bb3192bbe9d3f27e3061a6a8e7733c9120e203cb8515767d30973f71030/contourpy-1.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ded1706ed0c1049224531b81128efbd5084598f18d8a2d9efae833edbd2b40ad", size = 220599, upload-time = "2025-04-15T17:45:08.456Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c0/91f1215d0d9f9f343e4773ba6c9b89e8c0cc7a64a6263f21139da639d848/contourpy-1.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f5964cdad279256c084b69c3f412b7801e15356b16efa9d78aa974041903da0", size = 266807, upload-time = "2025-04-15T17:45:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/6be7e90c955c0487e7712660d6cead01fa17bff98e0ea275737cc2bc8e71/contourpy-1.3.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b65a95d642d4efa8f64ba12558fcb83407e58a2dfba9d796d77b63ccfcaff5", size = 318729, upload-time = "2025-04-15T17:45:20.166Z" }, + { url = "https://files.pythonhosted.org/packages/87/68/7f46fb537958e87427d98a4074bcde4b67a70b04900cfc5ce29bc2f556c1/contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5", size = 221791, upload-time = "2025-04-15T17:45:24.794Z" }, +] + +[[package]] +name = "contourpy" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "numpy", 
version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, + { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", 
size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, + { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, + { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" }, + { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, + { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, + { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, + { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, + { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, + { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, + { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, + { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, + { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, + { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, + { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, + { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, + { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, + { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, + { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, + { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, + { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, + { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, + { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, + { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, + { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, + { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.4" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/d4/7827d9ffa34d5d4d752eec907022aa417120936282fc488306f5da08c292/coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415", size = 219152, upload-time = "2026-02-09T12:56:11.974Z" }, + { url = "https://files.pythonhosted.org/packages/35/b0/d69df26607c64043292644dbb9dc54b0856fabaa2cbb1eeee3331cc9e280/coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b", size = 219667, upload-time = "2026-02-09T12:56:13.33Z" }, + { url = "https://files.pythonhosted.org/packages/82/a4/c1523f7c9e47b2271dbf8c2a097e7a1f89ef0d66f5840bb59b7e8814157b/coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a", size = 246425, upload-time = "2026-02-09T12:56:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/aa7ec01d1a5023c4b680ab7257f9bfde9defe8fdddfe40be096ac19e8177/coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f", size = 248229, upload-time = "2026-02-09T12:56:16.31Z" }, + { url = "https://files.pythonhosted.org/packages/35/98/85aba0aed5126d896162087ef3f0e789a225697245256fc6181b95f47207/coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012", size = 250106, upload-time = 
"2026-02-09T12:56:18.024Z" }, + { url = "https://files.pythonhosted.org/packages/96/72/1db59bd67494bc162e3e4cd5fbc7edba2c7026b22f7c8ef1496d58c2b94c/coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def", size = 252021, upload-time = "2026-02-09T12:56:19.272Z" }, + { url = "https://files.pythonhosted.org/packages/9d/97/72899c59c7066961de6e3daa142d459d47d104956db43e057e034f015c8a/coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256", size = 247114, upload-time = "2026-02-09T12:56:21.051Z" }, + { url = "https://files.pythonhosted.org/packages/39/1f/f1885573b5970235e908da4389176936c8933e86cb316b9620aab1585fa2/coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda", size = 248143, upload-time = "2026-02-09T12:56:22.585Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cf/e80390c5b7480b722fa3e994f8202807799b85bc562aa4f1dde209fbb7be/coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92", size = 246152, upload-time = "2026-02-09T12:56:23.748Z" }, + { url = "https://files.pythonhosted.org/packages/44/bf/f89a8350d85572f95412debb0fb9bb4795b1d5b5232bd652923c759e787b/coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c", size = 249959, upload-time = "2026-02-09T12:56:25.209Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/612a02aece8178c818df273e8d1642190c4875402ca2ba74514394b27aba/coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58", size = 246416, upload-time = "2026-02-09T12:56:26.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/98/b5afc39af67c2fa6786b03c3a7091fc300947387ce8914b096db8a73d67a/coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9", size = 247025, upload-time = "2026-02-09T12:56:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/51/30/2bba8ef0682d5bd210c38fe497e12a06c9f8d663f7025e9f5c2c31ce847d/coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf", size = 221758, upload-time = "2026-02-09T12:56:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/78/13/331f94934cf6c092b8ea59ff868eb587bc8fe0893f02c55bc6c0183a192e/coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95", size = 222693, upload-time = "2026-02-09T12:56:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, + { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, + { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, + { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, + { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time 
= "2026-02-09T12:56:43.323Z" }, + { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" }, + { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, + { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, + { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, 
upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time 
= "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" }, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, 
upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time 
= "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, 
upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, + { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cycler" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, 
+] + +[[package]] +name = "datasets" +version = "4.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, + { name = "filelock" }, + { name = "fsspec", extra = ["http"] }, + { name = "httpx" }, + { name = "huggingface-hub" }, + { name = "multiprocess" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "packaging" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pyarrow" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/9c/ba18de0b70858533e422ed6cfe0e46789473cef7fc7fc3653e23fa494730/datasets-4.7.0.tar.gz", hash = "sha256:4984cdfc65d04464da7f95205a55cb50515fd94ae3176caacb50a1b7273792e2", size = 602008, upload-time = "2026-03-09T19:01:49.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/03/c6d9c3119cf712f638fe763e887ecaac6acbb62bf1e2acc3cbde0df340fd/datasets-4.7.0-py3-none-any.whl", hash = "sha256:d5fe3025ec6acc3b5649f10d5576dff5e054134927604e6913c1467a04adc3c2", size = 527530, upload-time = "2026-03-09T19:01:47.443Z" }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, +] + +[[package]] +name = "dill" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976, upload-time = "2025-04-16T00:41:48.867Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = "2025-04-16T00:41:47.671Z" }, +] + +[[package]] +name = "duckdb" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/11/e05a7eb73a373d523e45d83c261025e02bc31ebf868e6282c30c4d02cc59/duckdb-1.5.0.tar.gz", hash = "sha256:f974b61b1c375888ee62bc3125c60ac11c4e45e4457dd1bb31a8f8d3cf277edd", size = 17981141, upload-time = "2026-03-09T12:50:26.372Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/5d/8fa129bbd604d0e91aa9a0a407e7d2acc559b6024c3f887868fd7a13871d/duckdb-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47fbb1c053a627a91fa71ec883951561317f14a82df891c00dcace435e8fea78", size = 30012348, upload-time = "2026-03-09T12:48:39.133Z" }, + { url = "https://files.pythonhosted.org/packages/0c/31/db320641a262a897755e634d16838c98d5ca7dc91f4e096e104e244a3a01/duckdb-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2b546a30a6ac020165a86ab3abac553255a6e8244d5437d17859a6aa338611aa", size = 15940515, upload-time = 
"2026-03-09T12:48:41.905Z" }, + { url = "https://files.pythonhosted.org/packages/0b/45/5725684794fbabf54d8dbae5247685799a6bf8e1e930ebff3a76a726772c/duckdb-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:122396041c0acb78e66d7dc7d36c55f03f67fe6ad012155c132d82739722e381", size = 14193724, upload-time = "2026-03-09T12:48:44.105Z" }, + { url = "https://files.pythonhosted.org/packages/27/68/f110c66b43e27191d7e53d3587e118568b73d66f23cb9bd6c7e0a560fd6d/duckdb-1.5.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a2cd73d50ea2c2bf618a4b7d22fe7c4115a1c9083d35654a0d5d421620ed999", size = 19218777, upload-time = "2026-03-09T12:48:46.399Z" }, + { url = "https://files.pythonhosted.org/packages/ec/9d/46affc9257377cbc865e494650312a7a08a56e85aa8d702eb297bec430b7/duckdb-1.5.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63a8ea3b060a881c90d1c1b9454abed3daf95b6160c39bbb9506fee3a9711730", size = 21311205, upload-time = "2026-03-09T12:48:48.895Z" }, + { url = "https://files.pythonhosted.org/packages/3b/34/dac03ab7340989cda258655387959c88342ea3b44949751391267bcbc830/duckdb-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:238d576ae1dda441f8c79ed1370c5ccf863e4a5d59ca2563f9c96cd26b2188ac", size = 13043217, upload-time = "2026-03-09T12:48:51.262Z" }, + { url = "https://files.pythonhosted.org/packages/01/0c/0282b10a1c96810606b916b8d58a03f2131bd3ede14d2851f58b0b860e7c/duckdb-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3298bd17cf0bb5f342fb51a4edc9aadacae882feb2b04161a03eb93271c70c86", size = 30014615, upload-time = "2026-03-09T12:48:54.061Z" }, + { url = "https://files.pythonhosted.org/packages/71/e8/cbbc920078a794f24f63017fc55c9cbdb17d6fb94d3973f479b2d9f2983d/duckdb-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:13f94c49ca389731c439524248e05007fb1a86cd26f1e38f706abc261069cd41", size = 15940493, upload-time = "2026-03-09T12:48:57.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/b6/6cae794d5856259b0060f79d5db71c7fdba043950eaa6a9d72b0bad16095/duckdb-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab9d597b1e8668466f1c164d0ea07eaf0ebb516950f5a2e794b0f52c81ff3b16", size = 14194663, upload-time = "2026-03-09T12:49:00.416Z" }, + { url = "https://files.pythonhosted.org/packages/82/07/aba3887658b93a36ce702dd00ca6a6422de3d14c7ee3a4b4c03ea20a99c0/duckdb-1.5.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a43f8289b11c0b50d13f96ab03210489d37652f3fd7911dc8eab04d61b049da2", size = 19220501, upload-time = "2026-03-09T12:49:03.431Z" }, + { url = "https://files.pythonhosted.org/packages/fc/a2/723e6df48754e468fa50d7878eb860906c975eafe317c4134a8482ca220e/duckdb-1.5.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f514e796a116c5de070e99974e42d0b8c2e6c303386790e58408c481150d417", size = 21316142, upload-time = "2026-03-09T12:49:06.223Z" }, + { url = "https://files.pythonhosted.org/packages/03/af/4dcbdf8f2349ed0b054c254ec59bc362ce6ddf603af35f770124c0984686/duckdb-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cf503ba2c753d97c76beb111e74572fef8803265b974af2dca67bba1de4176d2", size = 13043445, upload-time = "2026-03-09T12:49:08.892Z" }, + { url = "https://files.pythonhosted.org/packages/60/5e/1bb7e75a63bf3dc49bc5a2cd27a65ffeef151f52a32db980983516f2d9f6/duckdb-1.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:a1156e91e4e47f0e7d9c9404e559a1d71b372cd61790a407d65eb26948ae8298", size = 13883145, upload-time = "2026-03-09T12:49:11.566Z" }, + { url = "https://files.pythonhosted.org/packages/43/73/120e673e48ae25aaf689044c25ef51b0ea1d088563c9a2532612aea18e0a/duckdb-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9ea988d1d5c8737720d1b2852fd70e4d9e83b1601b8896a1d6d31df5e6afc7dd", size = 30057869, upload-time = "2026-03-09T12:49:14.65Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/e9/61143471958d36d3f3e764cb4cd43330be208ddbff1c78d3310b9ee67fe8/duckdb-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb786d5472afc16cc3c7355eb2007172538311d6f0cc6f6a0859e84a60220375", size = 15963092, upload-time = "2026-03-09T12:49:17.478Z" }, + { url = "https://files.pythonhosted.org/packages/4f/71/76e37c9a599ad89dd944e6cbb3e6a8ad196944a421758e83adea507637b6/duckdb-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc92b238f4122800a7592e99134124cc9048c50f766c37a0778dd2637f5cbe59", size = 14220562, upload-time = "2026-03-09T12:49:23.518Z" }, + { url = "https://files.pythonhosted.org/packages/db/b8/de1831656d5d13173e27c79c7259c8b9a7bdc314fdc8920604838ea4c46d/duckdb-1.5.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b74cb205c21d3696d8f8b88adca401e1063d6e6f57c1c4f56a243610b086e30", size = 19245329, upload-time = "2026-03-09T12:49:26.307Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8d/33d349a3bcbd3e9b7b4e904c19d5b97f058c4c20791b89a8d6323bb93dce/duckdb-1.5.0-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e56c19ffd1ffe3642fa89639e71e2e00ab0cf107b62fe16e88030acaebcbde6", size = 21348041, upload-time = "2026-03-09T12:49:30.283Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ec/591a4cad582fae04bc8f8b4a435eceaaaf3838cf0ca771daae16a3c2995b/duckdb-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:86525e565ec0c43420106fd34ba2c739a54c01814d476c7fed3007c9ed6efd86", size = 13053781, upload-time = "2026-03-09T12:49:33.574Z" }, + { url = "https://files.pythonhosted.org/packages/db/62/42e0a13f9919173bec121c0ff702406e1cdd91d8084c3e0b3412508c3891/duckdb-1.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:5faeebc178c986a7bfa68868a023001137a95a1110bf09b7356442a4eae0f7e7", size = 13862906, upload-time = "2026-03-09T12:49:36.598Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/5d/af5501221f42e4e3662c047ecec4dcd0761229fceeba3c67ad4d9d8741df/duckdb-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11dd05b827846c87f0ae2f67b9ae1d60985882a7c08ce855379e4a08d5be0e1d", size = 30057396, upload-time = "2026-03-09T12:49:39.95Z" }, + { url = "https://files.pythonhosted.org/packages/43/bd/a278d73fedbd3783bf9aedb09cad4171fe8e55bd522952a84f6849522eb6/duckdb-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ad8d9c91b7c280ab6811f59deff554b845706c20baa28c4e8f80a95690b252b", size = 15962700, upload-time = "2026-03-09T12:49:43.504Z" }, + { url = "https://files.pythonhosted.org/packages/76/fc/c916e928606946209c20fb50898dabf120241fb528a244e2bd8cde1bd9e2/duckdb-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0ee4dabe03ed810d64d93927e0fd18cd137060b81ee75dcaeaaff32cbc816656", size = 14220272, upload-time = "2026-03-09T12:49:46.867Z" }, + { url = "https://files.pythonhosted.org/packages/53/07/1390e69db922423b2e111e32ed342b3e8fad0a31c144db70681ea1ba4d56/duckdb-1.5.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9409ed1184b363ddea239609c5926f5148ee412b8d9e5ffa617718d755d942f6", size = 19244401, upload-time = "2026-03-09T12:49:49.865Z" }, + { url = "https://files.pythonhosted.org/packages/54/13/b58d718415cde993823a54952ea511d2612302f1d2bc220549d0cef752a4/duckdb-1.5.0-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1df8c4f9c853a45f3ec1e79ed7fe1957a203e5ec893bbbb853e727eb93e0090f", size = 21345827, upload-time = "2026-03-09T12:49:52.977Z" }, + { url = "https://files.pythonhosted.org/packages/e0/96/4460429651e371eb5ff745a4790e7fa0509c7a58c71fc4f0f893404c9646/duckdb-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:9a3d3dfa2d8bc74008ce3ad9564761ae23505a9e4282f6a36df29bd87249620b", size = 13053101, upload-time = "2026-03-09T12:49:56.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/54/6d5b805113214b830fa3c267bb3383fb8febaa30760d0162ef59aadb110a/duckdb-1.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:2deebcbafd9d39c04f31ec968f4dd7cee832c021e10d96b32ab0752453e247c8", size = 13865071, upload-time = "2026-03-09T12:49:59.282Z" }, + { url = "https://files.pythonhosted.org/packages/66/9f/dd806d4e8ecd99006eb240068f34e1054533da1857ad06ac726305cd102d/duckdb-1.5.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:d4b618de670cd2271dd7b3397508c7b3c62d8ea70c592c755643211a6f9154fa", size = 30065704, upload-time = "2026-03-09T12:50:02.671Z" }, + { url = "https://files.pythonhosted.org/packages/79/c2/7b7b8a5c65d5535c88a513e267b5e6d7a55ab3e9b67e4ddd474454653268/duckdb-1.5.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:065ae50cb185bac4b904287df72e6b4801b3bee2ad85679576dd712b8ba07021", size = 15964883, upload-time = "2026-03-09T12:50:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/23/c5/9a52a2cdb228b8d8d191a603254364d929274d9cc7d285beada8f7daa712/duckdb-1.5.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6be5e48e287a24d98306ce9dd55093c3b105a8fbd8a2e7a45e13df34bf081985", size = 14221498, upload-time = "2026-03-09T12:50:10.567Z" }, + { url = "https://files.pythonhosted.org/packages/b8/68/646045cb97982702a8a143dc2e45f3bdcb79fbe2d559a98d74b8c160e5e2/duckdb-1.5.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5ee41a0bf793882f02192ce105b9a113c3e8c505a27c7ef9437d7b756317113", size = 19249787, upload-time = "2026-03-09T12:50:13.524Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/5abf0c7f38febb3b4a231c784223fceccfd3f2bfd957699d786f46e41ce6/duckdb-1.5.0-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f8e42aaf3cd217417c5dc9ff522dc3939d18b25a6fe5f846348277e831e6f59c", size = 21351583, upload-time = "2026-03-09T12:50:16.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/a4/a90f2901cc0a1ce7ca4f0564b8492b9dbfe048a6395b27933d46ae9be473/duckdb-1.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:11ae50aaeda2145b50294ee0247e4f11fb9448b3cc3d2aea1cfc456637dfb977", size = 13575130, upload-time = "2026-03-09T12:50:19.716Z" }, + { url = "https://files.pythonhosted.org/packages/64/aa/f14dd5e241ec80d9f9d82196ca65e0c53badfc8a7a619d5497c5626657ad/duckdb-1.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:d6d2858c734d1a7e7a1b6e9b8403b3fce26dfefb4e0a2479c420fba6cd36db36", size = 14341879, upload-time = "2026-03-09T12:50:22.347Z" }, +] + +[[package]] +name = "et-xmlfile" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = 
"sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "f2a" +version = "0.1.4" +source = { editable = "." } +dependencies = [ + { name = "datasets" }, + { name = "duckdb" }, + { name = "jinja2" }, + { name = "lxml" }, + { name = "matplotlib" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "odfpy" }, + { name = "openpyxl" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pyarrow" }, + { name = "pyreadstat" }, + { name = "rich" }, + { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "seaborn" }, + { name = "tables", version = "3.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "tables", version = "3.11.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] + +[package.optional-dependencies] +advanced = [ + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version < '3.11'" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "statsmodels" }, + { name = "umap-learn" }, +] +dev = [ + { name = "black" }, + { name = "isort" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", marker = "extra == 'dev'", specifier = ">=23.0" }, + { name = "datasets", specifier = ">=2.14" }, + { name = "duckdb", specifier = ">=0.9" }, + { name = "isort", marker = "extra == 'dev'", specifier = ">=5.12" }, + { name = "jinja2", specifier = ">=3.1" }, + { name = "lxml", specifier = ">=4.9" }, + { name = "matplotlib", specifier = ">=3.7" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.5" }, + { name = "networkx", marker = "extra == 'advanced'", specifier = ">=3.0" }, + { name = "numpy", specifier = ">=1.24" }, + { name = "odfpy", specifier = ">=1.4" }, + { name = "openpyxl", specifier = ">=3.1" }, + { name = "pandas", specifier = ">=2.0" }, + { name = "pyarrow", specifier = ">=12.0" }, + { name = "pyreadstat", specifier = ">=1.2" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0" }, + { name = "rich", specifier = ">=13.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1" }, + { name = "scikit-learn", specifier = ">=1.3" }, + { name = "scipy", specifier = ">=1.11" }, + { name = "seaborn", specifier = ">=0.13" }, + { name = "statsmodels", marker = "extra == 'advanced'", specifier = ">=0.14" }, + { name = "tables", specifier = ">=3.8" }, + { name = "umap-learn", marker = "extra == 'advanced'", specifier = ">=0.5" }, +] +provides-extras = ["advanced", "dev"] + +[[package]] +name = "filelock" +version = "3.25.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" }, +] + +[[package]] +name = "fonttools" +version = "4.62.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/96/686339e0fda8142b7ebed39af53f4a5694602a729662f42a6209e3be91d0/fonttools-4.62.0.tar.gz", hash = "sha256:0dc477c12b8076b4eb9af2e440421b0433ffa9e1dcb39e0640a6c94665ed1098", size = 3579521, upload-time = "2026-03-09T16:50:06.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/e0/9db48ec7f6b95bae7b20667ded54f18dba8e759ef66232c8683822ae26fc/fonttools-4.62.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62b6a3d0028e458e9b59501cf7124a84cd69681c433570e4861aff4fb54a236c", size = 2873527, upload-time = "2026-03-09T16:48:12.416Z" }, + { url = "https://files.pythonhosted.org/packages/dd/45/86eccfdc922cb9fafc63189a9793fa9f6dd60e68a07be42e454ef2c0deae/fonttools-4.62.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:966557078b55e697f65300b18025c54e872d7908d1899b7314d7c16e64868cb2", size = 2417427, upload-time = "2026-03-09T16:48:15.122Z" }, + { url = "https://files.pythonhosted.org/packages/d3/98/f547a1fceeae81a9a5c6461bde2badac8bf50bda7122a8012b32b1e65396/fonttools-4.62.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf34861145b516cddd19b07ae6f4a61ea1c6326031b960ec9ddce8ee815e888", size = 4934993, upload-time = "2026-03-09T16:48:18.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/57/a23a051fcff998fdfabdd33c6721b5bad499da08b586d3676993410071f0/fonttools-4.62.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e2ff573de2775508c8a366351fb901c4ced5dc6cf2d87dd15c973bedcdd5216", size = 4892154, upload-time = "2026-03-09T16:48:20.736Z" }, + { url = "https://files.pythonhosted.org/packages/e2/62/e27644b433dc6db1d47bc6028a27d772eec5cc8338e24a9a1fce5d7120aa/fonttools-4.62.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:55b189a1b3033860a38e4e5bd0626c5aa25c7ce9caee7bc784a8caec7a675401", size = 4911635, upload-time = "2026-03-09T16:48:23.174Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e2/1bf141911a5616bacfe9cf237c80ccd69d0d92482c38c0f7f6a55d063ad9/fonttools-4.62.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:825f98cd14907c74a4d0a3f7db8570886ffce9c6369fed1385020febf919abf6", size = 5031492, upload-time = "2026-03-09T16:48:25.095Z" }, + { url = "https://files.pythonhosted.org/packages/2f/59/790c292f4347ecfa77d9c7e0d1d91e04ab227f6e4a337ed4fe37ca388048/fonttools-4.62.0-cp310-cp310-win32.whl", hash = "sha256:c858030560f92a054444c6e46745227bfd3bb4e55383c80d79462cd47289e4b5", size = 1507656, upload-time = "2026-03-09T16:48:26.973Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ee/08c0b7f8bac6e44638de6fe9a3e710a623932f60eccd58912c4d4743516d/fonttools-4.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bf75eb69330e34ad2a096fac67887102c8537991eb6cac1507fc835bbb70e0a", size = 1556540, upload-time = "2026-03-09T16:48:30.359Z" }, + { url = "https://files.pythonhosted.org/packages/e4/33/63d79ca41020dd460b51f1e0f58ad1ff0a36b7bcbdf8f3971d52836581e9/fonttools-4.62.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:196cafef9aeec5258425bd31a4e9a414b2ee0d1557bca184d7923d3d3bcd90f9", size = 2870816, upload-time = "2026-03-09T16:48:32.39Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/7a/9aeec114bc9fc00d757a41f092f7107863d372e684a5b5724c043654477c/fonttools-4.62.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:153afc3012ff8761b1733e8fbe5d98623409774c44ffd88fbcb780e240c11d13", size = 2416127, upload-time = "2026-03-09T16:48:34.627Z" }, + { url = "https://files.pythonhosted.org/packages/5a/71/12cfd8ae0478b7158ffa8850786781f67e73c00fd897ef9d053415c5f88b/fonttools-4.62.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13b663fb197334de84db790353d59da2a7288fd14e9be329f5debc63ec0500a5", size = 5100678, upload-time = "2026-03-09T16:48:36.454Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d7/8e4845993ee233c2023d11babe9b3dae7d30333da1d792eeccebcb77baab/fonttools-4.62.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:591220d5333264b1df0d3285adbdfe2af4f6a45bbf9ca2b485f97c9f577c49ff", size = 5070859, upload-time = "2026-03-09T16:48:38.786Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a0/287ae04cd883a52e7bb1d92dfc4997dcffb54173761c751106845fa9e316/fonttools-4.62.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:579f35c121528a50c96bf6fcb6a393e81e7f896d4326bf40e379f1c971603db9", size = 5076689, upload-time = "2026-03-09T16:48:41.886Z" }, + { url = "https://files.pythonhosted.org/packages/6d/4e/a2377ad26c36fcd3e671a1c316ea5ed83107de1588e2d897a98349363bc7/fonttools-4.62.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:44956b003151d5a289eba6c71fe590d63509267c37e26de1766ba15d9c589582", size = 5202053, upload-time = "2026-03-09T16:48:43.867Z" }, + { url = "https://files.pythonhosted.org/packages/44/2e/ad0472e69b02f83dc88983a9910d122178461606404be5b4838af6d1744a/fonttools-4.62.0-cp311-cp311-win32.whl", hash = "sha256:42c7848fa8836ab92c23b1617c407a905642521ff2d7897fe2bf8381530172f1", size = 2292852, upload-time = "2026-03-09T16:48:46.962Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/ce/f5a4c42c117f8113ce04048053c128d17426751a508f26398110c993a074/fonttools-4.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:4da779e8f342a32856075ddb193b2a024ad900bc04ecb744014c32409ae871ed", size = 2344367, upload-time = "2026-03-09T16:48:48.818Z" }, + { url = "https://files.pythonhosted.org/packages/ab/9d/7ad1ffc080619f67d0b1e0fa6a0578f0be077404f13fd8e448d1616a94a3/fonttools-4.62.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:22bde4dc12a9e09b5ced77f3b5053d96cf10c4976c6ac0dee293418ef289d221", size = 2870004, upload-time = "2026-03-09T16:48:50.837Z" }, + { url = "https://files.pythonhosted.org/packages/4d/8b/ba59069a490f61b737e064c3129453dbd28ee38e81d56af0d04d7e6b4de4/fonttools-4.62.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7199c73b326bad892f1cb53ffdd002128bfd58a89b8f662204fbf1daf8d62e85", size = 2414662, upload-time = "2026-03-09T16:48:53.295Z" }, + { url = "https://files.pythonhosted.org/packages/8c/8c/c52a4310de58deeac7e9ea800892aec09b00bb3eb0c53265b31ec02be115/fonttools-4.62.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d732938633681d6e2324e601b79e93f7f72395ec8681f9cdae5a8c08bc167e72", size = 5032975, upload-time = "2026-03-09T16:48:55.718Z" }, + { url = "https://files.pythonhosted.org/packages/0b/a1/d16318232964d786907b9b3613b8409f74cf0be2da400854509d3a864e43/fonttools-4.62.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:31a804c16d76038cc4e3826e07678efb0a02dc4f15396ea8e07088adbfb2578e", size = 4988544, upload-time = "2026-03-09T16:48:57.715Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8d/7e745ca3e65852adc5e52a83dc213fe1b07d61cb5b394970fcd4b1199d1e/fonttools-4.62.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:090e74ac86e68c20150e665ef8e7e0c20cb9f8b395302c9419fa2e4d332c3b51", size = 4971296, upload-time = "2026-03-09T16:48:59.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/d4/b717a4874175146029ca1517e85474b1af80c9d9a306fc3161e71485eea5/fonttools-4.62.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8f086120e8be9e99ca1288aa5ce519833f93fe0ec6ebad2380c1dee18781f0b5", size = 5122503, upload-time = "2026-03-09T16:49:02.464Z" }, + { url = "https://files.pythonhosted.org/packages/cb/4b/92cfcba4bf8373f51c49c5ae4b512ead6fbda7d61a0e8c35a369d0db40a0/fonttools-4.62.0-cp312-cp312-win32.whl", hash = "sha256:37a73e5e38fd05c637daede6ffed5f3496096be7df6e4a3198d32af038f87527", size = 2281060, upload-time = "2026-03-09T16:49:04.385Z" }, + { url = "https://files.pythonhosted.org/packages/cd/06/cc96468781a4dc8ae2f14f16f32b32f69bde18cb9384aad27ccc7adf76f7/fonttools-4.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:658ab837c878c4d2a652fcbb319547ea41693890e6434cf619e66f79387af3b8", size = 2331193, upload-time = "2026-03-09T16:49:06.598Z" }, + { url = "https://files.pythonhosted.org/packages/82/c7/985c1670aa6d82ef270f04cde11394c168f2002700353bd2bde405e59b8f/fonttools-4.62.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:274c8b8a87e439faf565d3bcd3f9f9e31bca7740755776a4a90a4bfeaa722efa", size = 2864929, upload-time = "2026-03-09T16:49:09.331Z" }, + { url = "https://files.pythonhosted.org/packages/c1/dc/c409c8ceec0d3119e9ab0b7b1a2e3c76d1f4d66e4a9db5c59e6b7652e7df/fonttools-4.62.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93e27131a5a0ae82aaadcffe309b1bae195f6711689722af026862bede05c07c", size = 2412586, upload-time = "2026-03-09T16:49:11.378Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ac/8e300dbf7b4d135287c261ffd92ede02d9f48f0d2db14665fbc8b059588a/fonttools-4.62.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83c6524c5b93bad9c2939d88e619fedc62e913c19e673f25d5ab74e7a5d074e5", size = 5013708, upload-time = "2026-03-09T16:49:14.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/bc/60d93477b653eeb1ddf5f9ec34be689b79234d82dbdded269ac0252715b8/fonttools-4.62.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:106aec9226f9498fc5345125ff7200842c01eda273ae038f5049b0916907acee", size = 4964355, upload-time = "2026-03-09T16:49:16.515Z" }, + { url = "https://files.pythonhosted.org/packages/cb/eb/6dc62bcc3c3598c28a3ecb77e69018869c3e109bd83031d4973c059d318b/fonttools-4.62.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15d86b96c79013320f13bc1b15f94789edb376c0a2d22fb6088f33637e8dfcbc", size = 4953472, upload-time = "2026-03-09T16:49:18.494Z" }, + { url = "https://files.pythonhosted.org/packages/82/b3/3af7592d9b254b7b7fec018135f8776bfa0d1ad335476c2791b1334dc5e4/fonttools-4.62.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f16c07e5250d5d71d0f990a59460bc5620c3cc456121f2cfb5b60475699905f", size = 5094701, upload-time = "2026-03-09T16:49:21.67Z" }, + { url = "https://files.pythonhosted.org/packages/31/3d/976645583ab567d3ee75ff87b33aa1330fa2baeeeae5fc46210b4274dd45/fonttools-4.62.0-cp313-cp313-win32.whl", hash = "sha256:d31558890f3fa00d4f937d12708f90c7c142c803c23eaeb395a71f987a77ebe3", size = 2279710, upload-time = "2026-03-09T16:49:23.812Z" }, + { url = "https://files.pythonhosted.org/packages/f5/7a/e25245a30457595740041dba9d0ea8ec1b2517f2f1a6a741f15eba1a4edc/fonttools-4.62.0-cp313-cp313-win_amd64.whl", hash = "sha256:6826a5aa53fb6def8a66bf423939745f415546c4e92478a7c531b8b6282b6c3b", size = 2330291, upload-time = "2026-03-09T16:49:26.237Z" }, + { url = "https://files.pythonhosted.org/packages/1a/64/61f69298aa6e7c363dcf00dd6371a654676900abe27d1effd1a74b43e5d0/fonttools-4.62.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:4fa5a9c716e2f75ef34b5a5c2ca0ee4848d795daa7e6792bf30fd4abf8993449", size = 2864222, upload-time = "2026-03-09T16:49:28.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/57/6b08756fe4455336b1fe160ab3c11fccc90768ccb6ee03fb0b45851aace4/fonttools-4.62.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:625f5cbeb0b8f4e42343eaeb4bc2786718ddd84760a2f5e55fdd3db049047c00", size = 2410674, upload-time = "2026-03-09T16:49:30.504Z" }, + { url = "https://files.pythonhosted.org/packages/6f/86/db65b63bb1b824b63e602e9be21b18741ddc99bcf5a7850f9181159ae107/fonttools-4.62.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6247e58b96b982709cd569a91a2ba935d406dccf17b6aa615afaed37ac3856aa", size = 4999387, upload-time = "2026-03-09T16:49:32.593Z" }, + { url = "https://files.pythonhosted.org/packages/86/c8/c6669e42d2f4efd60d38a3252cebbb28851f968890efb2b9b15f9d1092b0/fonttools-4.62.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:840632ea9c1eab7b7f01c369e408c0721c287dfd7500ab937398430689852fd1", size = 4912506, upload-time = "2026-03-09T16:49:34.927Z" }, + { url = "https://files.pythonhosted.org/packages/2e/49/0ae552aa098edd0ec548413fbf818f52ceb70535016215094a5ce9bf8f70/fonttools-4.62.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:28a9ea2a7467a816d1bec22658b0cce4443ac60abac3e293bdee78beb74588f3", size = 4951202, upload-time = "2026-03-09T16:49:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/71/65/ae38fc8a4cea6f162d74cf11f58e9aeef1baa7d0e3d1376dabd336c129e5/fonttools-4.62.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5ae611294f768d413949fd12693a8cba0e6332fbc1e07aba60121be35eac68d0", size = 5060758, upload-time = "2026-03-09T16:49:39.464Z" }, + { url = "https://files.pythonhosted.org/packages/db/3d/bb797496f35c60544cd5af71ffa5aad62df14ef7286908d204cb5c5096fe/fonttools-4.62.0-cp314-cp314-win32.whl", hash = "sha256:273acb61f316d07570a80ed5ff0a14a23700eedbec0ad968b949abaa4d3f6bb5", size = 2283496, upload-time = "2026-03-09T16:49:42.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/9f/91081ffe5881253177c175749cce5841f5ec6e931f5d52f4a817207b7429/fonttools-4.62.0-cp314-cp314-win_amd64.whl", hash = "sha256:a5f974006d14f735c6c878fc4b117ad031dc93638ddcc450ca69f8fd64d5e104", size = 2335426, upload-time = "2026-03-09T16:49:44.228Z" }, + { url = "https://files.pythonhosted.org/packages/f8/65/f47f9b3db1ec156a1f222f1089ba076b2cc9ee1d024a8b0a60c54258517e/fonttools-4.62.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0361a7d41d86937f1f752717c19f719d0fde064d3011038f9f19bdf5fc2f5c95", size = 2947079, upload-time = "2026-03-09T16:49:46.471Z" }, + { url = "https://files.pythonhosted.org/packages/52/73/bc62e5058a0c22cf02b1e0169ef0c3ca6c3247216d719f95bead3c05a991/fonttools-4.62.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4108c12773b3c97aa592311557c405d5b4fc03db2b969ed928fcf68e7b3c887", size = 2448802, upload-time = "2026-03-09T16:49:48.328Z" }, + { url = "https://files.pythonhosted.org/packages/2b/df/bfaa0e845884935355670e6e68f137185ab87295f8bc838db575e4a66064/fonttools-4.62.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b448075f32708e8fb377fe7687f769a5f51a027172c591ba9a58693631b077a8", size = 5137378, upload-time = "2026-03-09T16:49:50.223Z" }, + { url = "https://files.pythonhosted.org/packages/32/32/04f616979a18b48b52e634988b93d847b6346260faf85ecccaf7e2e9057f/fonttools-4.62.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5f1fa8cc9f1a56a3e33ee6b954d6d9235e6b9d11eb7a6c9dfe2c2f829dc24db", size = 4920714, upload-time = "2026-03-09T16:49:53.172Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2e/274e16689c1dfee5c68302cd7c444213cfddd23cf4620374419625037ec6/fonttools-4.62.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f8c8ea812f82db1e884b9cdb663080453e28f0f9a1f5027a5adb59c4cc8d38d1", size = 5016012, upload-time = "2026-03-09T16:49:55.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/0c/b08117270626e7117ac2f89d732fdd4386ec37d2ab3a944462d29e6f89a1/fonttools-4.62.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:03c6068adfdc67c565d217e92386b1cdd951abd4240d65180cec62fa74ba31b2", size = 5042766, upload-time = "2026-03-09T16:49:57.726Z" }, + { url = "https://files.pythonhosted.org/packages/11/83/a48b73e54efa272ee65315a6331b30a9b3a98733310bc11402606809c50e/fonttools-4.62.0-cp314-cp314t-win32.whl", hash = "sha256:d28d5baacb0017d384df14722a63abe6e0230d8ce642b1615a27d78ffe3bc983", size = 2347785, upload-time = "2026-03-09T16:49:59.698Z" }, + { url = "https://files.pythonhosted.org/packages/f8/27/c67eab6dc3525bdc39586511b1b3d7161e972dacc0f17476dbaf932e708b/fonttools-4.62.0-cp314-cp314t-win_amd64.whl", hash = "sha256:3f9e20c4618f1e04190c802acae6dc337cb6db9fa61e492fd97cd5c5a9ff6d07", size = 2413914, upload-time = "2026-03-09T16:50:02.251Z" }, + { url = "https://files.pythonhosted.org/packages/9c/57/c2487c281dde03abb2dec244fd67059b8d118bd30a653cbf69e94084cb23/fonttools-4.62.0-py3-none-any.whl", hash = "sha256:75064f19a10c50c74b336aa5ebe7b1f89fd0fb5255807bfd4b0c6317098f4af3", size = 1152427, upload-time = "2026-03-09T16:50:04.074Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/4a/557715d5047da48d54e659203b9335be7bfaafda2c3f627b7c47e0b3aaf3/frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011", size = 86230, upload-time = "2025-10-06T05:35:23.699Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565", size = 49621, upload-time = "2025-10-06T05:35:25.341Z" }, + { url = "https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad", size = 49889, upload-time = "2025-10-06T05:35:26.797Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2", size = 219464, upload-time = "2025-10-06T05:35:28.254Z" }, + { url = "https://files.pythonhosted.org/packages/6b/83/4d587dccbfca74cb8b810472392ad62bfa100bf8108c7223eb4c4fa2f7b3/frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186", size = 221649, upload-time = "2025-10-06T05:35:29.454Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c6/fd3b9cd046ec5fff9dab66831083bc2077006a874a2d3d9247dea93ddf7e/frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e", size = 219188, upload-time = "2025-10-06T05:35:30.951Z" }, + { url = "https://files.pythonhosted.org/packages/ce/80/6693f55eb2e085fc8afb28cf611448fb5b90e98e068fa1d1b8d8e66e5c7d/frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450", size = 
231748, upload-time = "2025-10-06T05:35:32.101Z" }, + { url = "https://files.pythonhosted.org/packages/97/d6/e9459f7c5183854abd989ba384fe0cc1a0fb795a83c033f0571ec5933ca4/frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef", size = 236351, upload-time = "2025-10-06T05:35:33.834Z" }, + { url = "https://files.pythonhosted.org/packages/97/92/24e97474b65c0262e9ecd076e826bfd1d3074adcc165a256e42e7b8a7249/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4", size = 218767, upload-time = "2025-10-06T05:35:35.205Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bf/dc394a097508f15abff383c5108cb8ad880d1f64a725ed3b90d5c2fbf0bb/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff", size = 235887, upload-time = "2025-10-06T05:35:36.354Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c", size = 228785, upload-time = "2025-10-06T05:35:37.949Z" }, + { url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f", size = 230312, upload-time = "2025-10-06T05:35:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7", size = 217650, upload-time = "2025-10-06T05:35:40.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a", size = 39659, upload-time = "2025-10-06T05:35:41.863Z" }, + { url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6", size = 43837, upload-time = "2025-10-06T05:35:43.205Z" }, + { url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e", size = 39989, upload-time = "2025-10-06T05:35:44.596Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { 
url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = 
"2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 
239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" 
}, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = 
"2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", 
size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + 
{ url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "fsspec" +version = "2026.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/7c/f60c259dcbf4f0c47cc4ddb8f7720d2dcdc8888c8e5ad84c73ea4531cc5b/fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff", size = 313441, upload-time = "2026-02-05T21:50:53.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" }, +] + +[package.optional-dependencies] +http = [ + { name = "aiohttp" }, +] + +[[package]] +name 
= "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/3f/c4000b25702612ccbae00e941c53f3564067f1c6e94f2f423be1d0205d09/hf_xet-1.4.1.tar.gz", hash = "sha256:e16cf5b6dadf95f80b3138b3af778f5f1b4ae9c4738241f0da59c219b281d29f", size = 672345, upload-time = "2026-03-12T22:30:37.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/7d/da7b1fbed1e9951b52117122aca06f8871329fe9e6b3fcfdf7394dbbb85b/hf_xet-1.4.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:6859dfca607b852016aca9aa774da8f27f59663542eab008289300460c903c8e", size = 3796337, upload-time = "2026-03-12T22:30:16.734Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e3/435380282215214acd499995f36ccdb6e4794966e8968e625b33915a3481/hf_xet-1.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec3ccfc640974a8db27e9ddcb2e52aefa9229d7807b6f2b50e4415547c1ff64f", size = 3555870, upload-time = "2026-03-12T22:30:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/e8/98/1eab277d4307bcad51dfd50e5fc7bec97b929e7a9015d81987b16f2b7774/hf_xet-1.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ee3e60fe0c4071a84e4ddae2921518850302cc18965f6d8b770292083690384", size = 4211746, upload-time = 
"2026-03-12T22:30:04.983Z" }, + { url = "https://files.pythonhosted.org/packages/10/d0/435b837cbc1cb5b5e2ab2d88bc0d3487268fd7b900f71445a8c81714c411/hf_xet-1.4.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a2d968ba96a511d3d67a18a0340b2bd8c1c085c6464931cc576a05ccb78cf0", size = 3988671, upload-time = "2026-03-12T22:30:03.118Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ed/6a1de8bc161187355aebf87921f702592fe29992b4bbda81fe5376e6c3fe/hf_xet-1.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5739118c76d837d09ba246bfdb91bd076cf336fed9f49eb80bc7e4f1e323e9e9", size = 4188546, upload-time = "2026-03-12T22:30:25.914Z" }, + { url = "https://files.pythonhosted.org/packages/13/6e/723a2a04866697e047d60535b60c9a0f7186fb68466c1964f870fca1757e/hf_xet-1.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d89cbc44931201cb67ad55aeedb14b371bed00e26af84c979effd3b41e6576e2", size = 4428262, upload-time = "2026-03-12T22:30:27.772Z" }, + { url = "https://files.pythonhosted.org/packages/3b/99/263744ecf4caaea039338d8ab79e2db7ec0c4963c1ea2493ca4b6e0f8d3e/hf_xet-1.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:7275d117da0fbef825dba05b503ea7ab40e90bdaa76bdc644d7bbc94364dd9ec", size = 3665375, upload-time = "2026-03-12T22:30:41.314Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d1/e28271aa87058698f8e9377d905e619554738b894da98c149ca145790014/hf_xet-1.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:c16731cc4b8cc5e3b9e5e9103f178e3fbef6f1cc78d6d7f89c7e8192174e1af1", size = 3528871, upload-time = "2026-03-12T22:30:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/88/aa/666146eac9e4f1f2e3a293feb22d8596df1469e7c01d16839f16eb2fc8ed/hf_xet-1.4.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:f1e166b3eb3fb6c188245ccc21106833c305ffa56a538da82f63498269694810", size = 3796578, upload-time = "2026-03-12T22:30:24.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/ed/716ef4fdfc66e3d2c68efba1051d5971e81d1f70dbfa6ac2513d2fa28720/hf_xet-1.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54672d32a08e38c405c8e246fd756c7fe349b65649ecc3eb26c8a97c9d1e60b5", size = 3555602, upload-time = "2026-03-12T22:30:22.461Z" }, + { url = "https://files.pythonhosted.org/packages/ac/d9/2ab1648b854095a3cd09e2e0005b3f127ea376d6628a48a1bb5dfd8dda50/hf_xet-1.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7293e7763602d06f053f22fb88a98e0549d24c0eb3119da99e89965b78de1f62", size = 4211996, upload-time = "2026-03-12T22:30:12.862Z" }, + { url = "https://files.pythonhosted.org/packages/26/4c/e4ed15bfc1259dcaa240447d4f6f558c84770e158b5458f97fcb0fa87cf4/hf_xet-1.4.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:bf6ebb0a597344b71c572f04700054b59ceeb2937fb59a6fdd575462718b3897", size = 3987668, upload-time = "2026-03-12T22:30:10.861Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2f/62be2b80164f0d5de16e120e658600c96dcbb7818ee47cfe2cddb7de2904/hf_xet-1.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c58eb27344d7c0e2d4eecc66268779289bceb7916193463a86970796b4547ff", size = 4188488, upload-time = "2026-03-12T22:30:33.598Z" }, + { url = "https://files.pythonhosted.org/packages/d4/ea/048fd429b34cbf397e26a06e8720486cd615cde8e90055ab8d61c98a078a/hf_xet-1.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d25f98d13c8bc3aac0a1e79d86f0bbbb0f0d82697adab7dd3ebf12e5ce53f6ea", size = 4428144, upload-time = "2026-03-12T22:30:35.504Z" }, + { url = "https://files.pythonhosted.org/packages/33/19/1d3a0a7b5a55af7dcfdccc1e1bba1381d84f7ec7e81e22f701f95b71cec2/hf_xet-1.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:9154171d0aaedd82476bbf47efcc171ed473b65f9e11d71e99728921737becac", size = 3665953, upload-time = "2026-03-12T22:30:48.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/5f/257845b1fa1e783f2fdc02dc4e839a83fbeebff4428d09d306a166169855/hf_xet-1.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:dfc754b80edb3741e5675d36c3883f1c12461cb0d99d2fe37f1e211e21df1700", size = 3529088, upload-time = "2026-03-12T22:30:46.853Z" }, + { url = "https://files.pythonhosted.org/packages/5b/9d/57d7af8c30a01da69ec4e0f26b43937345f7f0ee258adecde1652094536e/hf_xet-1.4.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:01e1fa399a43b057d8a9117def7a30e04b12aa4563c81dc7343854b95734c8ab", size = 3800407, upload-time = "2026-03-12T22:30:20.37Z" }, + { url = "https://files.pythonhosted.org/packages/b1/be/c8b5891e8af1c28291ee817fec67a5bb7338571ec0670966f204c05c8ea6/hf_xet-1.4.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:1f0ee88aff43b78039a0661a2ea28d22c6f68fcce3dae44984ac7953e3930e25", size = 3559643, upload-time = "2026-03-12T22:30:18.469Z" }, + { url = "https://files.pythonhosted.org/packages/44/2a/0581e23eb6fad8f05cef25fc545d4185037d2b4b6a5ea218f034fe7a240d/hf_xet-1.4.1-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fb73af369d2fb4fbb67df8d56ac40c3f3cf09d2bd962294220a3038dd6359524", size = 4217475, upload-time = "2026-03-12T22:30:09.111Z" }, + { url = "https://files.pythonhosted.org/packages/93/db/8e16d9ba175cb49292b2eef668be26cf7bf547114ed0886f366c62ba4675/hf_xet-1.4.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ac4ab0c4eabe2e25b2ec5f807f3014496169b1b6dda761cdb52e339866ba8cca", size = 3993259, upload-time = "2026-03-12T22:30:06.95Z" }, + { url = "https://files.pythonhosted.org/packages/84/20/b0cf23af484a535d0cb22baa118844b59d2e7234cabadb59884aefeec8f6/hf_xet-1.4.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4afe580557e2bb264989cf954bb7c37398db4f1ec2512ca4804993be8031241f", size = 4194024, upload-time = "2026-03-12T22:30:30.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/35/7de82cc7a4ac987748781b9cb4ce8572e635f3350a24cec519b14a8855d7/hf_xet-1.4.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7d0eb889e9c36870b12a4fea9dc9d26782edf2fb0af030a63cdc9bf0f416157c", size = 4432709, upload-time = "2026-03-12T22:30:31.803Z" }, + { url = "https://files.pythonhosted.org/packages/5b/17/9d67808070f6b9884bb1f565224e6f9bc35cb9f4b2375cc70a9f0d171ff8/hf_xet-1.4.1-cp37-abi3-win_amd64.whl", hash = "sha256:850d920b4fde0fc0e6ed3e00e4dcb7a9d93cd8847f4d054c76cb98b3256db307", size = 3670976, upload-time = "2026-03-12T22:30:45.079Z" }, + { url = "https://files.pythonhosted.org/packages/67/7e/fc236b968a40598ff4f2e15a49912813b368f77e7a1920a9911da85eec52/hf_xet-1.4.1-cp37-abi3-win_arm64.whl", hash = "sha256:803bc82799412e3dfd595831faae443ce311e107fde8eedcc4a260011f040635", size = 3533708, upload-time = "2026-03-12T22:30:43.264Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, + { name = "httpx" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "tqdm" }, + { name = "typer" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/7a/304cec37112382c4fe29a43bcb0d5891f922785d18745883d2aa4eb74e4b/huggingface_hub-1.6.0.tar.gz", hash = "sha256:d931ddad8ba8dfc1e816bf254810eb6f38e5c32f60d4184b5885662a3b167325", size = 717071, upload-time = "2026-03-06T14:19:18.524Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/e3/e3a44f54c8e2f28983fcf07f13d4260b37bd6a0d3a081041bc60b91d230e/huggingface_hub-1.6.0-py3-none-any.whl", hash = "sha256:ef40e2d5cb85e48b2c067020fa5142168342d5108a1b267478ed384ecbf18961", size = 612874, upload-time = "2026-03-06T14:19:16.844Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = 
"sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "isort" +version = "8.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/7c/ec4ab396d31b3b395e2e999c8f46dec78c5e29209fac49d1f4dace04041d/isort-8.0.1.tar.gz", hash = "sha256:171ac4ff559cdc060bcfff550bc8404a486fee0caab245679c2abe7cb253c78d", size = 769592, upload-time = "2026-02-28T10:08:20.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = "sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/f2/d34e8b3a08a9cc79a50b2208a93dce981fe615b64d5a4d4abee421d898df/joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3", size = 331603, upload-time = "2025-12-15T08:41:46.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, +] + +[[package]] +name = "kiwisolver" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/67/9c61eccb13f0bdca9307614e782fec49ffdde0f7a2314935d489fa93cd9c/kiwisolver-1.5.0.tar.gz", hash = "sha256:d4193f3d9dc3f6f79aaed0e5637f45d98850ebf01f7ca20e69457f3e8946b66a", size = 103482, upload-time = "2026-03-09T13:15:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/f8/06549565caa026e540b7e7bab5c5a90eb7ca986015f4c48dace243cd24d9/kiwisolver-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32cc0a5365239a6ea0c6ed461e8838d053b57e397443c0ca894dcc8e388d4374", size = 122802, upload-time = "2026-03-09T13:12:37.515Z" }, + 
{ url = "https://files.pythonhosted.org/packages/84/eb/8476a0818850c563ff343ea7c9c05dcdcbd689a38e01aa31657df01f91fa/kiwisolver-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc0b66c1eec9021353a4b4483afb12dfd50e3669ffbb9152d6842eb34c7e29fd", size = 66216, upload-time = "2026-03-09T13:12:38.812Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/f9c8a6b4c21aed4198566e45923512986d6cef530e7263b3a5f823546561/kiwisolver-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86e0287879f75621ae85197b0877ed2f8b7aa57b511c7331dce2eb6f4de7d476", size = 63917, upload-time = "2026-03-09T13:12:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0e/ba4ae25d03722f64de8b2c13e80d82ab537a06b30fc7065183c6439357e3/kiwisolver-1.5.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:62f59da443c4f4849f73a51a193b1d9d258dcad0c41bc4d1b8fb2bcc04bfeb22", size = 1628776, upload-time = "2026-03-09T13:12:41.976Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e4/3f43a011bc8a0860d1c96f84d32fa87439d3feedf66e672fef03bf5e8bac/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9190426b7aa26c5229501fa297b8d0653cfd3f5a36f7990c264e157cbf886b3b", size = 1228164, upload-time = "2026-03-09T13:12:44.002Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/3a901559a1e0c218404f9a61a93be82d45cb8f44453ba43088644980f033/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c8277104ded0a51e699c8c3aff63ce2c56d4ed5519a5f73e0fd7057f959a2b9e", size = 1246656, upload-time = "2026-03-09T13:12:45.557Z" }, + { url = "https://files.pythonhosted.org/packages/87/9e/f78c466ea20527822b95ad38f141f2de1dcd7f23fb8716b002b0d91bbe59/kiwisolver-1.5.0-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f9baf6f0a6e7571c45c8863010b45e837c3ee1c2c77fcd6ef423be91b21fedb", size = 1295562, upload-time = "2026-03-09T13:12:47.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/66/fd0e4a612e3a286c24e6d6f3a5428d11258ed1909bc530ba3b59807fd980/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cff8e5383db4989311f99e814feeb90c4723eb4edca425b9d5d9c3fefcdd9537", size = 2178473, upload-time = "2026-03-09T13:12:50.254Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8e/6cac929e0049539e5ee25c1ee937556f379ba5204840d03008363ced662d/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ebae99ed6764f2b5771c522477b311be313e8841d2e0376db2b10922daebbba4", size = 2274035, upload-time = "2026-03-09T13:12:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d3/9d0c18f1b52ea8074b792452cf17f1f5a56bd0302a85191f405cfbf9da16/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d5cd5189fc2b6a538b75ae45433140c4823463918f7b1617c31e68b085c0022c", size = 2443217, upload-time = "2026-03-09T13:12:53.329Z" }, + { url = "https://files.pythonhosted.org/packages/45/2a/6e19368803a038b2a90857bf4ee9e3c7b667216d045866bf22d3439fd75e/kiwisolver-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f42c23db5d1521218a3276bb08666dcb662896a0be7347cba864eca45ff64ede", size = 2249196, upload-time = "2026-03-09T13:12:55.057Z" }, + { url = "https://files.pythonhosted.org/packages/75/2b/3f641dfcbe72e222175d626bacf2f72c3b34312afec949dd1c50afa400f5/kiwisolver-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:94eff26096eb5395136634622515b234ecb6c9979824c1f5004c6e3c3c85ccd2", size = 73389, upload-time = "2026-03-09T13:12:56.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/299b137b9e0025d8982e03d2d52c123b0a2b159e84b0ef1501ef446339cf/kiwisolver-1.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:dd952e03bfbb096cfe2dd35cd9e00f269969b67536cb4370994afc20ff2d0875", size = 64782, upload-time = "2026-03-09T13:12:57.609Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/dd/a495a9c104be1c476f0386e714252caf2b7eca883915422a64c50b88c6f5/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9eed0f7edbb274413b6ee781cca50541c8c0facd3d6fd289779e494340a2b85c", size = 122798, upload-time = "2026-03-09T13:12:58.963Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/37b4047a2af0cf5ef6d8b4b26e91829ae6fc6a2d1f74524bcb0e7cd28a32/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c4923e404d6bcd91b6779c009542e5647fef32e4a5d75e115e3bbac6f2335eb", size = 66216, upload-time = "2026-03-09T13:13:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/0a/aa/510dc933d87767584abfe03efa445889996c70c2990f6f87c3ebaa0a18c5/kiwisolver-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0df54df7e686afa55e6f21fb86195224a6d9beb71d637e8d7920c95cf0f89aac", size = 63911, upload-time = "2026-03-09T13:13:01.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/46/bddc13df6c2a40741e0cc7865bb1c9ed4796b6760bd04ce5fae3928ef917/kiwisolver-1.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2517e24d7315eb51c10664cdb865195df38ab74456c677df67bb47f12d088a27", size = 1438209, upload-time = "2026-03-09T13:13:03.385Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d6/76621246f5165e5372f02f5e6f3f48ea336a8f9e96e43997d45b240ed8cd/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff710414307fefa903e0d9bdf300972f892c23477829f49504e59834f4195398", size = 1248888, upload-time = "2026-03-09T13:13:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c1/31559ec6fb39a5b48035ce29bb63ade628f321785f38c384dee3e2c08bc1/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6176c1811d9d5a04fa391c490cc44f451e240697a16977f11c6f722efb9041db", size = 1266304, upload-time = "2026-03-09T13:13:06.743Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/ef/1cb8276f2d29cc6a41e0a042f27946ca347d3a4a75acf85d0a16aa6dcc82/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50847dca5d197fcbd389c805aa1a1cf32f25d2e7273dc47ab181a517666b68cc", size = 1319650, upload-time = "2026-03-09T13:13:08.607Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e4/5ba3cecd7ce6236ae4a80f67e5d5531287337d0e1f076ca87a5abe4cd5d0/kiwisolver-1.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:01808c6d15f4c3e8559595d6d1fe6411c68e4a3822b4b9972b44473b24f4e679", size = 970949, upload-time = "2026-03-09T13:13:10.299Z" }, + { url = "https://files.pythonhosted.org/packages/5a/69/dc61f7ae9a2f071f26004ced87f078235b5507ab6e5acd78f40365655034/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1f9f4121ec58628c96baa3de1a55a4e3a333c5102c8e94b64e23bf7b2083309", size = 2199125, upload-time = "2026-03-09T13:13:11.841Z" }, + { url = "https://files.pythonhosted.org/packages/e5/7b/abbe0f1b5afa85f8d084b73e90e5f801c0939eba16ac2e49af7c61a6c28d/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7d335370ae48a780c6e6a6bbfa97342f563744c39c35562f3f367665f5c1de2", size = 2293783, upload-time = "2026-03-09T13:13:14.399Z" }, + { url = "https://files.pythonhosted.org/packages/8a/80/5908ae149d96d81580d604c7f8aefd0e98f4fd728cf172f477e9f2a81744/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:800ee55980c18545af444d93fdd60c56b580db5cc54867d8cbf8a1dc0829938c", size = 1960726, upload-time = "2026-03-09T13:13:16.047Z" }, + { url = "https://files.pythonhosted.org/packages/84/08/a78cb776f8c085b7143142ce479859cfec086bd09ee638a317040b6ef420/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c438f6ca858697c9ab67eb28246c92508af972e114cac34e57a6d4ba17a3ac08", size = 2464738, upload-time = "2026-03-09T13:13:17.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/e1/65584da5356ed6cb12c63791a10b208860ac40a83de165cb6a6751a686e3/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c63c91f95173f9c2a67c7c526b2cea976828a0e7fced9cdcead2802dc10f8a4", size = 2270718, upload-time = "2026-03-09T13:13:19.421Z" }, + { url = "https://files.pythonhosted.org/packages/be/6c/28f17390b62b8f2f520e2915095b3c94d88681ecf0041e75389d9667f202/kiwisolver-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:beb7f344487cdcb9e1efe4b7a29681b74d34c08f0043a327a74da852a6749e7b", size = 73480, upload-time = "2026-03-09T13:13:20.818Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0e/2ee5debc4f77a625778fec5501ff3e8036fe361b7ee28ae402a485bb9694/kiwisolver-1.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad4ae4ffd1ee9cd11357b4c66b612da9888f4f4daf2f36995eda64bd45370cac", size = 64930, upload-time = "2026-03-09T13:13:21.997Z" }, + { url = "https://files.pythonhosted.org/packages/4d/b2/818b74ebea34dabe6d0c51cb1c572e046730e64844da6ed646d5298c40ce/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4e9750bc21b886308024f8a54ccb9a2cc38ac9fa813bf4348434e3d54f337ff9", size = 123158, upload-time = "2026-03-09T13:13:23.127Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d9/405320f8077e8e1c5c4bd6adc45e1e6edf6d727b6da7f2e2533cf58bff71/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72ec46b7eba5b395e0a7b63025490d3214c11013f4aacb4f5e8d6c3041829588", size = 66388, upload-time = "2026-03-09T13:13:24.765Z" }, + { url = "https://files.pythonhosted.org/packages/99/9f/795fedf35634f746151ca8839d05681ceb6287fbed6cc1c9bf235f7887c2/kiwisolver-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed3a984b31da7481b103f68776f7128a89ef26ed40f4dc41a2223cda7fb24819", size = 64068, upload-time = "2026-03-09T13:13:25.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/13/680c54afe3e65767bed7ec1a15571e1a2f1257128733851ade24abcefbcc/kiwisolver-1.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb5136fb5352d3f422df33f0c879a1b0c204004324150cc3b5e3c4f310c9049f", size = 1477934, upload-time = "2026-03-09T13:13:27.166Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2f/cebfcdb60fd6a9b0f6b47a9337198bcbad6fbe15e68189b7011fd914911f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2af221f268f5af85e776a73d62b0845fc8baf8ef0abfae79d29c77d0e776aaf", size = 1278537, upload-time = "2026-03-09T13:13:28.707Z" }, + { url = "https://files.pythonhosted.org/packages/f2/0d/9b782923aada3fafb1d6b84e13121954515c669b18af0c26e7d21f579855/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b0f172dc8ffaccb8522d7c5d899de00133f2f1ca7b0a49b7da98e901de87bf2d", size = 1296685, upload-time = "2026-03-09T13:13:30.528Z" }, + { url = "https://files.pythonhosted.org/packages/27/70/83241b6634b04fe44e892688d5208332bde130f38e610c0418f9ede47ded/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6ab8ba9152203feec73758dad83af9a0bbe05001eb4639e547207c40cfb52083", size = 1346024, upload-time = "2026-03-09T13:13:32.818Z" }, + { url = "https://files.pythonhosted.org/packages/e4/db/30ed226fb271ae1a6431fc0fe0edffb2efe23cadb01e798caeb9f2ceae8f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:cdee07c4d7f6d72008d3f73b9bf027f4e11550224c7c50d8df1ae4a37c1402a6", size = 987241, upload-time = "2026-03-09T13:13:34.435Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bd/c314595208e4c9587652d50959ead9e461995389664e490f4dce7ff0f782/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7c60d3c9b06fb23bd9c6139281ccbdc384297579ae037f08ae90c69f6845c0b1", size = 2227742, upload-time = "2026-03-09T13:13:36.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/43/0499cec932d935229b5543d073c2b87c9c22846aab48881e9d8d6e742a2d/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e315e5ec90d88e140f57696ff85b484ff68bb311e36f2c414aa4286293e6dee0", size = 2323966, upload-time = "2026-03-09T13:13:38.204Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6f/79b0d760907965acfd9d61826a3d41f8f093c538f55cd2633d3f0db269f6/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:1465387ac63576c3e125e5337a6892b9e99e0627d52317f3ca79e6930d889d15", size = 1977417, upload-time = "2026-03-09T13:13:39.966Z" }, + { url = "https://files.pythonhosted.org/packages/ab/31/01d0537c41cb75a551a438c3c7a80d0c60d60b81f694dac83dd436aec0d0/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:530a3fd64c87cffa844d4b6b9768774763d9caa299e9b75d8eca6a4423b31314", size = 2491238, upload-time = "2026-03-09T13:13:41.698Z" }, + { url = "https://files.pythonhosted.org/packages/e4/34/8aefdd0be9cfd00a44509251ba864f5caf2991e36772e61c408007e7f417/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d9daea4ea6b9be74fe2f01f7fbade8d6ffab263e781274cffca0dba9be9eec9", size = 2294947, upload-time = "2026-03-09T13:13:43.343Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/0348374369ca588f8fe9c338fae49fa4e16eeb10ffb3d012f23a54578a9e/kiwisolver-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f18c2d9782259a6dc132fdc7a63c168cbc74b35284b6d75c673958982a378384", size = 73569, upload-time = "2026-03-09T13:13:45.792Z" }, + { url = "https://files.pythonhosted.org/packages/28/26/192b26196e2316e2bd29deef67e37cdf9870d9af8e085e521afff0fed526/kiwisolver-1.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:f7c7553b13f69c1b29a5bde08ddc6d9d0c8bfb84f9ed01c30db25944aeb852a7", size = 64997, upload-time = "2026-03-09T13:13:46.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/69/024d6711d5ba575aa65d5538042e99964104e97fa153a9f10bc369182bc2/kiwisolver-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:fd40bb9cd0891c4c3cb1ddf83f8bbfa15731a248fdc8162669405451e2724b09", size = 123166, upload-time = "2026-03-09T13:13:48.032Z" }, + { url = "https://files.pythonhosted.org/packages/ce/48/adbb40df306f587054a348831220812b9b1d787aff714cfbc8556e38fccd/kiwisolver-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c0e1403fd7c26d77c1f03e096dc58a5c726503fa0db0456678b8668f76f521e3", size = 66395, upload-time = "2026-03-09T13:13:49.365Z" }, + { url = "https://files.pythonhosted.org/packages/a8/3a/d0a972b34e1c63e2409413104216cd1caa02c5a37cb668d1687d466c1c45/kiwisolver-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dda366d548e89a90d88a86c692377d18d8bd64b39c1fb2b92cb31370e2896bbd", size = 64065, upload-time = "2026-03-09T13:13:50.562Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0a/7b98e1e119878a27ba8618ca1e18b14f992ff1eda40f47bccccf4de44121/kiwisolver-1.5.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:332b4f0145c30b5f5ad9374881133e5aa64320428a57c2c2b61e9d891a51c2f3", size = 1477903, upload-time = "2026-03-09T13:13:52.084Z" }, + { url = "https://files.pythonhosted.org/packages/18/d8/55638d89ffd27799d5cc3d8aa28e12f4ce7a64d67b285114dbedc8ea4136/kiwisolver-1.5.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c50b89ffd3e1a911c69a1dd3de7173c0cd10b130f56222e57898683841e4f96", size = 1278751, upload-time = "2026-03-09T13:13:54.673Z" }, + { url = "https://files.pythonhosted.org/packages/b8/97/b4c8d0d18421ecceba20ad8701358453b88e32414e6f6950b5a4bad54e65/kiwisolver-1.5.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4db576bb8c3ef9365f8b40fe0f671644de6736ae2c27a2c62d7d8a1b4329f099", size = 1296793, upload-time = "2026-03-09T13:13:56.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/10/f862f94b6389d8957448ec9df59450b81bec4abb318805375c401a1e6892/kiwisolver-1.5.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0b85aad90cea8ac6797a53b5d5f2e967334fa4d1149f031c4537569972596cb8", size = 1346041, upload-time = "2026-03-09T13:13:58.269Z" }, + { url = "https://files.pythonhosted.org/packages/a3/6a/f1650af35821eaf09de398ec0bc2aefc8f211f0cda50204c9f1673741ba9/kiwisolver-1.5.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:d36ca54cb4c6c4686f7cbb7b817f66f5911c12ddb519450bbe86707155028f87", size = 987292, upload-time = "2026-03-09T13:13:59.871Z" }, + { url = "https://files.pythonhosted.org/packages/de/19/d7fb82984b9238115fe629c915007be608ebd23dc8629703d917dbfaffd4/kiwisolver-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:38f4a703656f493b0ad185211ccfca7f0386120f022066b018eb5296d8613e23", size = 2227865, upload-time = "2026-03-09T13:14:01.401Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b9/46b7f386589fd222dac9e9de9c956ce5bcefe2ee73b4e79891381dda8654/kiwisolver-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ac2360e93cb41be81121755c6462cff3beaa9967188c866e5fce5cf13170859", size = 2324369, upload-time = "2026-03-09T13:14:02.972Z" }, + { url = "https://files.pythonhosted.org/packages/92/8b/95e237cf3d9c642960153c769ddcbe278f182c8affb20cecc1cc983e7cc5/kiwisolver-1.5.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c95cab08d1965db3d84a121f1c7ce7479bdd4072c9b3dafd8fecce48a2e6b902", size = 1977989, upload-time = "2026-03-09T13:14:04.503Z" }, + { url = "https://files.pythonhosted.org/packages/1b/95/980c9df53501892784997820136c01f62bc1865e31b82b9560f980c0e649/kiwisolver-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc20894c3d21194d8041a28b65622d5b86db786da6e3cfe73f0c762951a61167", size = 2491645, upload-time = "2026-03-09T13:14:06.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/32/900647fd0840abebe1561792c6b31e6a7c0e278fc3973d30572a965ca14c/kiwisolver-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a32f72973f0f950c1920475d5c5ea3d971b81b6f0ec53b8d0a956cc965f22e0", size = 2295237, upload-time = "2026-03-09T13:14:08.891Z" }, + { url = "https://files.pythonhosted.org/packages/be/8a/be60e3bbcf513cc5a50f4a3e88e1dcecebb79c1ad607a7222877becaa101/kiwisolver-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bf3acf1419fa93064a4c2189ac0b58e3be7872bf6ee6177b0d4c63dc4cea276", size = 73573, upload-time = "2026-03-09T13:14:12.327Z" }, + { url = "https://files.pythonhosted.org/packages/4d/d2/64be2e429eb4fca7f7e1c52a91b12663aeaf25de3895e5cca0f47ef2a8d0/kiwisolver-1.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:fa8eb9ecdb7efb0b226acec134e0d709e87a909fa4971a54c0c4f6e88635484c", size = 64998, upload-time = "2026-03-09T13:14:13.469Z" }, + { url = "https://files.pythonhosted.org/packages/b0/69/ce68dd0c85755ae2de490bf015b62f2cea5f6b14ff00a463f9d0774449ff/kiwisolver-1.5.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:db485b3847d182b908b483b2ed133c66d88d49cacf98fd278fadafe11b4478d1", size = 125700, upload-time = "2026-03-09T13:14:14.636Z" }, + { url = "https://files.pythonhosted.org/packages/74/aa/937aac021cf9d4349990d47eb319309a51355ed1dbdc9c077cdc9224cb11/kiwisolver-1.5.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:be12f931839a3bdfe28b584db0e640a65a8bcbc24560ae3fdb025a449b3d754e", size = 67537, upload-time = "2026-03-09T13:14:15.808Z" }, + { url = "https://files.pythonhosted.org/packages/ee/20/3a87fbece2c40ad0f6f0aefa93542559159c5f99831d596050e8afae7a9f/kiwisolver-1.5.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:16b85d37c2cbb3253226d26e64663f755d88a03439a9c47df6246b35defbdfb7", size = 65514, upload-time = "2026-03-09T13:14:18.035Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/7f/f943879cda9007c45e1f7dba216d705c3a18d6b35830e488b6c6a4e7cdf0/kiwisolver-1.5.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4432b835675f0ea7414aab3d37d119f7226d24869b7a829caeab49ebda407b0c", size = 1584848, upload-time = "2026-03-09T13:14:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/37/f8/4d4f85cc1870c127c88d950913370dd76138482161cd07eabbc450deff01/kiwisolver-1.5.0-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b0feb50971481a2cc44d94e88bdb02cdd497618252ae226b8eb1201b957e368", size = 1391542, upload-time = "2026-03-09T13:14:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/04/0b/65dd2916c84d252b244bd405303220f729e7c17c9d7d33dca6feeff9ffc4/kiwisolver-1.5.0-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56fa888f10d0f367155e76ce849fa1166fc9730d13bd2d65a2aa13b6f5424489", size = 1404447, upload-time = "2026-03-09T13:14:23.205Z" }, + { url = "https://files.pythonhosted.org/packages/39/5c/2606a373247babce9b1d056c03a04b65f3cf5290a8eac5d7bdead0a17e21/kiwisolver-1.5.0-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:940dda65d5e764406b9fb92761cbf462e4e63f712ab60ed98f70552e496f3bf1", size = 1455918, upload-time = "2026-03-09T13:14:24.74Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d1/c6078b5756670658e9192a2ef11e939c92918833d2745f85cd14a6004bdf/kiwisolver-1.5.0-cp313-cp313t-manylinux_2_39_riscv64.whl", hash = "sha256:89fc958c702ee9a745e4700378f5d23fddbc46ff89e8fdbf5395c24d5c1452a3", size = 1072856, upload-time = "2026-03-09T13:14:26.597Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c8/7def6ddf16eb2b3741d8b172bdaa9af882b03c78e9b0772975408801fa63/kiwisolver-1.5.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9027d773c4ff81487181a925945743413f6069634d0b122d0b37684ccf4f1e18", size = 2333580, upload-time = "2026-03-09T13:14:28.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/87/2ac1fce0eb1e616fcd3c35caa23e665e9b1948bb984f4764790924594128/kiwisolver-1.5.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:5b233ea3e165e43e35dba1d2b8ecc21cf070b45b65ae17dd2747d2713d942021", size = 2423018, upload-time = "2026-03-09T13:14:30.018Z" }, + { url = "https://files.pythonhosted.org/packages/67/13/c6700ccc6cc218716bfcda4935e4b2997039869b4ad8a94f364c5a3b8e63/kiwisolver-1.5.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ce9bf03dad3b46408c08649c6fbd6ca28a9fce0eb32fdfffa6775a13103b5310", size = 2062804, upload-time = "2026-03-09T13:14:32.888Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bd/877056304626943ff0f1f44c08f584300c199b887cb3176cd7e34f1515f1/kiwisolver-1.5.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:fc4d3f1fb9ca0ae9f97b095963bc6326f1dbfd3779d6679a1e016b9baaa153d3", size = 2597482, upload-time = "2026-03-09T13:14:34.971Z" }, + { url = "https://files.pythonhosted.org/packages/75/19/c60626c47bf0f8ac5dcf72c6c98e266d714f2fbbfd50cf6dab5ede3aaa50/kiwisolver-1.5.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f443b4825c50a51ee68585522ab4a1d1257fac65896f282b4c6763337ac9f5d2", size = 2394328, upload-time = "2026-03-09T13:14:36.816Z" }, + { url = "https://files.pythonhosted.org/packages/47/84/6a6d5e5bb8273756c27b7d810d47f7ef2f1f9b9fd23c9ee9a3f8c75c9cef/kiwisolver-1.5.0-cp313-cp313t-win_arm64.whl", hash = "sha256:893ff3a711d1b515ba9da14ee090519bad4610ed1962fbe298a434e8c5f8db53", size = 68410, upload-time = "2026-03-09T13:14:38.695Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/060f45052f2a01ad5762c8fdecd6d7a752b43400dc29ff75cd47225a40fd/kiwisolver-1.5.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8df31fe574b8b3993cc61764f40941111b25c2d9fea13d3ce24a49907cd2d615", size = 123231, upload-time = "2026-03-09T13:14:41.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/a7/78da680eadd06ff35edef6ef68a1ad273bad3e2a0936c9a885103230aece/kiwisolver-1.5.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1d49a49ac4cbfb7c1375301cd1ec90169dfeae55ff84710d782260ce77a75a02", size = 66489, upload-time = "2026-03-09T13:14:42.534Z" }, + { url = "https://files.pythonhosted.org/packages/49/b2/97980f3ad4fae37dd7fe31626e2bf75fbf8bdf5d303950ec1fab39a12da8/kiwisolver-1.5.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0cbe94b69b819209a62cb27bdfa5dc2a8977d8de2f89dfd97ba4f53ed3af754e", size = 64063, upload-time = "2026-03-09T13:14:44.759Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f9/b06c934a6aa8bc91f566bd2a214fd04c30506c2d9e2b6b171953216a65b6/kiwisolver-1.5.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:80aa065ffd378ff784822a6d7c3212f2d5f5e9c3589614b5c228b311fd3063ac", size = 1475913, upload-time = "2026-03-09T13:14:46.247Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f0/f768ae564a710135630672981231320bc403cf9152b5596ec5289de0f106/kiwisolver-1.5.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e7f886f47ab881692f278ae901039a234e4025a68e6dfab514263a0b1c4ae05", size = 1282782, upload-time = "2026-03-09T13:14:48.458Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9f/1de7aad00697325f05238a5f2eafbd487fb637cc27a558b5367a5f37fb7f/kiwisolver-1.5.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5060731cc3ed12ca3a8b57acd4aeca5bbc2f49216dd0bec1650a1acd89486bcd", size = 1300815, upload-time = "2026-03-09T13:14:50.721Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c2/297f25141d2e468e0ce7f7a7b92e0cf8918143a0cbd3422c1ad627e85a06/kiwisolver-1.5.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a4aa69609f40fce3cbc3f87b2061f042eee32f94b8f11db707b66a26461591a", size = 1347925, upload-time = "2026-03-09T13:14:52.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/d3/f4c73a02eb41520c47610207b21afa8cdd18fdbf64ffd94674ae21c4812d/kiwisolver-1.5.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:d168fda2dbff7b9b5f38e693182d792a938c31db4dac3a80a4888de603c99554", size = 991322, upload-time = "2026-03-09T13:14:54.637Z" }, + { url = "https://files.pythonhosted.org/packages/7b/46/d3f2efef7732fcda98d22bf4ad5d3d71d545167a852ca710a494f4c15343/kiwisolver-1.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:413b820229730d358efd838ecbab79902fe97094565fdc80ddb6b0a18c18a581", size = 2232857, upload-time = "2026-03-09T13:14:56.471Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ec/2d9756bf2b6d26ae4349b8d3662fb3993f16d80c1f971c179ce862b9dbae/kiwisolver-1.5.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5124d1ea754509b09e53738ec185584cc609aae4a3b510aaf4ed6aa047ef9303", size = 2329376, upload-time = "2026-03-09T13:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/8f/9f/876a0a0f2260f1bde92e002b3019a5fabc35e0939c7d945e0fa66185eb20/kiwisolver-1.5.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e4415a8db000bf49a6dd1c478bf70062eaacff0f462b92b0ba68791a905861f9", size = 1982549, upload-time = "2026-03-09T13:14:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/ba3624dfac23a64d54ac4179832860cb537c1b0af06024936e82ca4154a0/kiwisolver-1.5.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d618fd27420381a4f6044faa71f46d8bfd911bd077c555f7138ed88729bfbe79", size = 2494680, upload-time = "2026-03-09T13:15:01.364Z" }, + { url = "https://files.pythonhosted.org/packages/39/b7/97716b190ab98911b20d10bf92eca469121ec483b8ce0edd314f51bc85af/kiwisolver-1.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5092eb5b1172947f57d6ea7d89b2f29650414e4293c47707eb499ec07a0ac796", size = 2297905, upload-time = "2026-03-09T13:15:03.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/36/4e551e8aa55c9188bca9abb5096805edbf7431072b76e2298e34fd3a3008/kiwisolver-1.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:d76e2d8c75051d58177e762164d2e9ab92886534e3a12e795f103524f221dd8e", size = 75086, upload-time = "2026-03-09T13:15:07.775Z" }, + { url = "https://files.pythonhosted.org/packages/70/15/9b90f7df0e31a003c71649cf66ef61c3c1b862f48c81007fa2383c8bd8d7/kiwisolver-1.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:fa6248cd194edff41d7ea9425ced8ca3a6f838bfb295f6f1d6e6bb694a8518df", size = 66577, upload-time = "2026-03-09T13:15:09.139Z" }, + { url = "https://files.pythonhosted.org/packages/17/01/7dc8c5443ff42b38e72731643ed7cf1ed9bf01691ae5cdca98501999ed83/kiwisolver-1.5.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:d1ffeb80b5676463d7a7d56acbe8e37a20ce725570e09549fe738e02ca6b7e1e", size = 125794, upload-time = "2026-03-09T13:15:10.525Z" }, + { url = "https://files.pythonhosted.org/packages/46/8a/b4ebe46ebaac6a303417fab10c2e165c557ddaff558f9699d302b256bc53/kiwisolver-1.5.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bc4d8e252f532ab46a1de9349e2d27b91fce46736a9eedaa37beaca66f574ed4", size = 67646, upload-time = "2026-03-09T13:15:12.016Z" }, + { url = "https://files.pythonhosted.org/packages/60/35/10a844afc5f19d6f567359bf4789e26661755a2f36200d5d1ed8ad0126e5/kiwisolver-1.5.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6783e069732715ad0c3ce96dbf21dbc2235ab0593f2baf6338101f70371f4028", size = 65511, upload-time = "2026-03-09T13:15:13.311Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8a/685b297052dd041dcebce8e8787b58923b6e78acc6115a0dc9189011c44b/kiwisolver-1.5.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e7c4c09a490dc4d4a7f8cbee56c606a320f9dc28cf92a7157a39d1ce7676a657", size = 1584858, upload-time = "2026-03-09T13:15:15.103Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/80/04865e3d4638ac5bddec28908916df4a3075b8c6cc101786a96803188b96/kiwisolver-1.5.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a075bd7bd19c70cf67c8badfa36cf7c5d8de3c9ddb8420c51e10d9c50e94920", size = 1392539, upload-time = "2026-03-09T13:15:16.661Z" }, + { url = "https://files.pythonhosted.org/packages/ba/01/77a19cacc0893fa13fafa46d1bba06fb4dc2360b3292baf4b56d8e067b24/kiwisolver-1.5.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bdd3e53429ff02aa319ba59dfe4ceeec345bf46cf180ec2cf6fd5b942e7975e9", size = 1405310, upload-time = "2026-03-09T13:15:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/53/39/bcaf5d0cca50e604cfa9b4e3ae1d64b50ca1ae5b754122396084599ef903/kiwisolver-1.5.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cdcb35dc9d807259c981a85531048ede628eabcffb3239adf3d17463518992d", size = 1456244, upload-time = "2026-03-09T13:15:20.444Z" }, + { url = "https://files.pythonhosted.org/packages/d0/7a/72c187abc6975f6978c3e39b7cf67aeb8b3c0a8f9790aa7fd412855e9e1f/kiwisolver-1.5.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:70d593af6a6ca332d1df73d519fddb5148edb15cd90d5f0155e3746a6d4fcc65", size = 1073154, upload-time = "2026-03-09T13:15:22.039Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ca/cf5b25783ebbd59143b4371ed0c8428a278abe68d6d0104b01865b1bbd0f/kiwisolver-1.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:377815a8616074cabbf3f53354e1d040c35815a134e01d7614b7692e4bf8acfa", size = 2334377, upload-time = "2026-03-09T13:15:23.741Z" }, + { url = "https://files.pythonhosted.org/packages/4a/e5/b1f492adc516796e88751282276745340e2a72dcd0d36cf7173e0daf3210/kiwisolver-1.5.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0255a027391d52944eae1dbb5d4cc5903f57092f3674e8e544cdd2622826b3f0", size = 2425288, upload-time = "2026-03-09T13:15:25.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/e5/9b21fbe91a61b8f409d74a26498706e97a48008bfcd1864373d32a6ba31c/kiwisolver-1.5.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:012b1eb16e28718fa782b5e61dc6f2da1f0792ca73bd05d54de6cb9561665fc9", size = 2063158, upload-time = "2026-03-09T13:15:27.63Z" }, + { url = "https://files.pythonhosted.org/packages/b1/02/83f47986138310f95ea95531f851b2a62227c11cbc3e690ae1374fe49f0f/kiwisolver-1.5.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0e3aafb33aed7479377e5e9a82e9d4bf87063741fc99fc7ae48b0f16e32bdd6f", size = 2597260, upload-time = "2026-03-09T13:15:29.421Z" }, + { url = "https://files.pythonhosted.org/packages/07/18/43a5f24608d8c313dd189cf838c8e68d75b115567c6279de7796197cfb6a/kiwisolver-1.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7a116ae737f0000343218c4edf5bd45893bfeaff0993c0b215d7124c9f77646", size = 2394403, upload-time = "2026-03-09T13:15:31.517Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b5/98222136d839b8afabcaa943b09bd05888c2d36355b7e448550211d1fca4/kiwisolver-1.5.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1dd9b0b119a350976a6d781e7278ec7aca0b201e1a9e2d23d9804afecb6ca681", size = 79687, upload-time = "2026-03-09T13:15:33.204Z" }, + { url = "https://files.pythonhosted.org/packages/99/a2/ca7dc962848040befed12732dff6acae7fb3c4f6fc4272b3f6c9a30b8713/kiwisolver-1.5.0-cp314-cp314t-win_arm64.whl", hash = "sha256:58f812017cd2985c21fbffb4864d59174d4903dd66fa23815e74bbc7a0e2dd57", size = 70032, upload-time = "2026-03-09T13:15:34.411Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/2910df836372d8761bb6eff7d8bdcb1613b5c2e03f260efe7abe34d388a7/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_10_13_x86_64.whl", hash = "sha256:5ae8e62c147495b01a0f4765c878e9bfdf843412446a247e28df59936e99e797", size = 130262, upload-time = "2026-03-09T13:15:35.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/41/c5f71f9f00aabcc71fee8b7475e3f64747282580c2fe748961ba29b18385/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f6764a4ccab3078db14a632420930f6186058750df066b8ea2a7106df91d3203", size = 138036, upload-time = "2026-03-09T13:15:36.894Z" }, + { url = "https://files.pythonhosted.org/packages/fa/06/7399a607f434119c6e1fdc8ec89a8d51ccccadf3341dee4ead6bd14caaf5/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c31c13da98624f957b0fb1b5bae5383b2333c2c3f6793d9825dd5ce79b525cb7", size = 194295, upload-time = "2026-03-09T13:15:38.22Z" }, + { url = "https://files.pythonhosted.org/packages/b5/91/53255615acd2a1eaca307ede3c90eb550bae9c94581f8c00081b6b1c8f44/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-win_amd64.whl", hash = "sha256:1f1489f769582498610e015a8ef2d36f28f505ab3096d0e16b4858a9ec214f57", size = 75987, upload-time = "2026-03-09T13:15:39.65Z" }, + { url = "https://files.pythonhosted.org/packages/17/6f/6fd4f690a40c2582fa34b97d2678f718acf3706b91d270c65ecb455d0a06/kiwisolver-1.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:295d9ffe712caa9f8a3081de8d32fc60191b4b51c76f02f951fd8407253528f4", size = 59606, upload-time = "2026-03-09T13:15:40.81Z" }, + { url = "https://files.pythonhosted.org/packages/82/a0/2355d5e3b338f13ce63f361abb181e3b6ea5fffdb73f739b3e80efa76159/kiwisolver-1.5.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:51e8c4084897de9f05898c2c2a39af6318044ae969d46ff7a34ed3f96274adca", size = 57537, upload-time = "2026-03-09T13:15:42.071Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b9/1d50e610ecadebe205b71d6728fd224ce0e0ca6aba7b9cbe1da049203ac5/kiwisolver-1.5.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b83af57bdddef03c01a9138034c6ff03181a3028d9a1003b301eb1a55e161a3f", size = 79888, upload-time = "2026-03-09T13:15:43.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/ee/b85ffcd75afed0357d74f0e6fc02a4507da441165de1ca4760b9f496390d/kiwisolver-1.5.0-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf4679a3d71012a7c2bf360e5cd878fbd5e4fcac0896b56393dec239d81529ed", size = 77584, upload-time = "2026-03-09T13:15:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/6b/dd/644d0dde6010a8583b4cd66dd41c5f83f5325464d15c4f490b3340ab73b4/kiwisolver-1.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:41024ed50e44ab1a60d3fe0a9d15a4ccc9f5f2b1d814ff283c8d01134d5b81bc", size = 73390, upload-time = "2026-03-09T13:15:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/e9/eb/5fcbbbf9a0e2c3a35effb88831a483345326bbc3a030a3b5b69aee647f84/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ec4c85dc4b687c7f7f15f553ff26a98bfe8c58f5f7f0ac8905f0ba4c7be60232", size = 59532, upload-time = "2026-03-09T13:15:47.047Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9b/e17104555bb4db148fd52327feea1e96be4b88e8e008b029002c281a21ab/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:12e91c215a96e39f57989c8912ae761286ac5a9584d04030ceb3368a357f017a", size = 57420, upload-time = "2026-03-09T13:15:48.199Z" }, + { url = "https://files.pythonhosted.org/packages/48/44/2b5b95b7aa39fb2d8d9d956e0f3d5d45aef2ae1d942d4c3ffac2f9cfed1a/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be4a51a55833dc29ab5d7503e7bcb3b3af3402d266018137127450005cdfe737", size = 79892, upload-time = "2026-03-09T13:15:49.694Z" }, + { url = "https://files.pythonhosted.org/packages/52/7d/7157f9bba6b455cfb4632ed411e199fc8b8977642c2b12082e1bd9e6d173/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daae526907e262de627d8f70058a0f64acc9e2641c164c99c8f594b34a799a16", size = 77603, upload-time = "2026-03-09T13:15:50.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/dd/8050c947d435c8d4bc94e3252f4d8bb8a76cfb424f043a8680be637a57f1/kiwisolver-1.5.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:59cd8683f575d96df5bb48f6add94afc055012c29e28124fcae2b63661b9efb1", size = 73558, upload-time = "2026-03-09T13:15:52.112Z" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/5f/63f5fa395c7a8a93558c0904ba8f1c8d1b997ca6a3de61bc7659970d66bf/librt-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81fd938344fecb9373ba1b155968c8a329491d2ce38e7ddb76f30ffb938f12dc", size = 65697, upload-time = "2026-02-17T16:11:06.903Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e0/0472cf37267b5920eff2f292ccfaede1886288ce35b7f3203d8de00abfe6/librt-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5db05697c82b3a2ec53f6e72b2ed373132b0c2e05135f0696784e97d7f5d48e7", size = 68376, upload-time = "2026-02-17T16:11:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8bd1359fdcd27ab897cd5963294fa4a7c83b20a8564678e4fd12157e56a5/librt-0.8.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d56bc4011975f7460bea7b33e1ff425d2f1adf419935ff6707273c77f8a4ada6", size = 197084, upload-time = "2026-02-17T16:11:09.774Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fe/163e33fdd091d0c2b102f8a60cc0a61fd730ad44e32617cd161e7cd67a01/librt-0.8.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cdc0f588ff4b663ea96c26d2a230c525c6fc62b28314edaaaca8ed5af931ad0", size = 207337, upload-time = 
"2026-02-17T16:11:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/f85130582f05dcf0c8902f3d629270231d2f4afdfc567f8305a952ac7f14/librt-0.8.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97c2b54ff6717a7a563b72627990bec60d8029df17df423f0ed37d56a17a176b", size = 219980, upload-time = "2026-02-17T16:11:12.499Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/cb5e4d03659e043a26c74e08206412ac9a3742f0477d96f9761a55313b5f/librt-0.8.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8f1125e6bbf2f1657d9a2f3ccc4a2c9b0c8b176965bb565dd4d86be67eddb4b6", size = 212921, upload-time = "2026-02-17T16:11:14.484Z" }, + { url = "https://files.pythonhosted.org/packages/b1/81/a3a01e4240579c30f3487f6fed01eb4bc8ef0616da5b4ebac27ca19775f3/librt-0.8.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8f4bb453f408137d7581be309b2fbc6868a80e7ef60c88e689078ee3a296ae71", size = 221381, upload-time = "2026-02-17T16:11:17.459Z" }, + { url = "https://files.pythonhosted.org/packages/08/b0/fc2d54b4b1c6fb81e77288ff31ff25a2c1e62eaef4424a984f228839717b/librt-0.8.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c336d61d2fe74a3195edc1646d53ff1cddd3a9600b09fa6ab75e5514ba4862a7", size = 216714, upload-time = "2026-02-17T16:11:19.197Z" }, + { url = "https://files.pythonhosted.org/packages/96/96/85daa73ffbd87e1fb287d7af6553ada66bf25a2a6b0de4764344a05469f6/librt-0.8.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:eb5656019db7c4deacf0c1a55a898c5bb8f989be904597fcb5232a2f4828fa05", size = 214777, upload-time = "2026-02-17T16:11:20.443Z" }, + { url = "https://files.pythonhosted.org/packages/12/9c/c3aa7a2360383f4bf4f04d98195f2739a579128720c603f4807f006a4225/librt-0.8.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c25d9e338d5bed46c1632f851babf3d13c78f49a225462017cf5e11e845c5891", size = 237398, upload-time = "2026-02-17T16:11:22.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/19/d350ea89e5274665185dabc4bbb9c3536c3411f862881d316c8b8e00eb66/librt-0.8.1-cp310-cp310-win32.whl", hash = "sha256:aaab0e307e344cb28d800957ef3ec16605146ef0e59e059a60a176d19543d1b7", size = 54285, upload-time = "2026-02-17T16:11:23.27Z" }, + { url = "https://files.pythonhosted.org/packages/4f/d6/45d587d3d41c112e9543a0093d883eb57a24a03e41561c127818aa2a6bcc/librt-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:56e04c14b696300d47b3bc5f1d10a00e86ae978886d0cee14e5714fafb5df5d2", size = 61352, upload-time = "2026-02-17T16:11:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/1d/01/0e748af5e4fee180cf7cd12bd12b0513ad23b045dccb2a83191bde82d168/librt-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:681dc2451d6d846794a828c16c22dc452d924e9f700a485b7ecb887a30aad1fd", size = 65315, upload-time = "2026-02-17T16:11:25.152Z" }, + { url = "https://files.pythonhosted.org/packages/9d/4d/7184806efda571887c798d573ca4134c80ac8642dcdd32f12c31b939c595/librt-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3b4350b13cc0e6f5bec8fa7caf29a8fb8cdc051a3bae45cfbfd7ce64f009965", size = 68021, upload-time = "2026-02-17T16:11:26.129Z" }, + { url = "https://files.pythonhosted.org/packages/ae/88/c3c52d2a5d5101f28d3dc89298444626e7874aa904eed498464c2af17627/librt-0.8.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ac1e7817fd0ed3d14fd7c5df91daed84c48e4c2a11ee99c0547f9f62fdae13da", size = 194500, upload-time = "2026-02-17T16:11:27.177Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5d/6fb0a25b6a8906e85b2c3b87bee1d6ed31510be7605b06772f9374ca5cb3/librt-0.8.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:747328be0c5b7075cde86a0e09d7a9196029800ba75a1689332348e998fb85c0", size = 205622, upload-time = "2026-02-17T16:11:28.242Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/a6/8006ae81227105476a45691f5831499e4d936b1c049b0c1feb17c11b02d1/librt-0.8.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0af2bd2bc204fa27f3d6711d0f360e6b8c684a035206257a81673ab924aa11e", size = 218304, upload-time = "2026-02-17T16:11:29.344Z" }, + { url = "https://files.pythonhosted.org/packages/ee/19/60e07886ad16670aae57ef44dada41912c90906a6fe9f2b9abac21374748/librt-0.8.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d480de377f5b687b6b1bc0c0407426da556e2a757633cc7e4d2e1a057aa688f3", size = 211493, upload-time = "2026-02-17T16:11:30.445Z" }, + { url = "https://files.pythonhosted.org/packages/9c/cf/f666c89d0e861d05600438213feeb818c7514d3315bae3648b1fc145d2b6/librt-0.8.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d0ee06b5b5291f609ddb37b9750985b27bc567791bc87c76a569b3feed8481ac", size = 219129, upload-time = "2026-02-17T16:11:32.021Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ef/f1bea01e40b4a879364c031476c82a0dc69ce068daad67ab96302fed2d45/librt-0.8.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e2c6f77b9ad48ce5603b83b7da9ee3e36b3ab425353f695cba13200c5d96596", size = 213113, upload-time = "2026-02-17T16:11:33.192Z" }, + { url = "https://files.pythonhosted.org/packages/9b/80/cdab544370cc6bc1b72ea369525f547a59e6938ef6863a11ab3cd24759af/librt-0.8.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:439352ba9373f11cb8e1933da194dcc6206daf779ff8df0ed69c5e39113e6a99", size = 212269, upload-time = "2026-02-17T16:11:34.373Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9c/48d6ed8dac595654f15eceab2035131c136d1ae9a1e3548e777bb6dbb95d/librt-0.8.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:82210adabbc331dbb65d7868b105185464ef13f56f7f76688565ad79f648b0fe", size = 234673, upload-time = "2026-02-17T16:11:36.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/01/35b68b1db517f27a01be4467593292eb5315def8900afad29fabf56304ba/librt-0.8.1-cp311-cp311-win32.whl", hash = "sha256:52c224e14614b750c0a6d97368e16804a98c684657c7518752c356834fff83bb", size = 54597, upload-time = "2026-02-17T16:11:37.544Z" }, + { url = "https://files.pythonhosted.org/packages/71/02/796fe8f02822235966693f257bf2c79f40e11337337a657a8cfebba5febc/librt-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:c00e5c884f528c9932d278d5c9cbbea38a6b81eb62c02e06ae53751a83a4d52b", size = 61733, upload-time = "2026-02-17T16:11:38.691Z" }, + { url = "https://files.pythonhosted.org/packages/28/ad/232e13d61f879a42a4e7117d65e4984bb28371a34bb6fb9ca54ec2c8f54e/librt-0.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:f7cdf7f26c2286ffb02e46d7bac56c94655540b26347673bea15fa52a6af17e9", size = 52273, upload-time = "2026-02-17T16:11:40.308Z" }, + { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" }, + { url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", size = 209991, upload-time = "2026-02-17T16:11:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" }, + { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" }, + { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = "https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, 
+ { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = "https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = "sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = 
"sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "llvmlite" +version = "0.46.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/cd/08ae687ba099c7e3d21fe2ea536500563ef1943c5105bf6ab4ee3829f68e/llvmlite-0.46.0.tar.gz", hash = "sha256:227c9fd6d09dce2783c18b754b7cd9d9b3b3515210c46acc2d3c5badd9870ceb", size = 193456, upload-time = "2025-12-08T18:15:36.295Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/a4/3959e1c61c5ca9db7921e5fd115b344c29b9d57a5dadd87bef97963ca1a5/llvmlite-0.46.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4323177e936d61ae0f73e653e2e614284d97d14d5dd12579adc92b6c2b0597b0", size = 37232766, upload-time = "2025-12-08T18:14:34.765Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a5/a4d916f1015106e1da876028606a8e87fd5d5c840f98c87bc2d5153b6a2f/llvmlite-0.46.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a2d461cb89537b7c20feb04c46c32e12d5ad4f0896c9dfc0f60336219ff248e", size = 56275176, upload-time = "2025-12-08T18:14:37.944Z" }, + { url = "https://files.pythonhosted.org/packages/79/7f/a7f2028805dac8c1a6fae7bda4e739b7ebbcd45b29e15bf6d21556fcd3d5/llvmlite-0.46.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b1f6595a35b7b39c3518b85a28bf18f45e075264e4b2dce3f0c2a4f232b4a910", size = 55128629, upload-time = "2025-12-08T18:14:41.674Z" }, + { url = "https://files.pythonhosted.org/packages/b2/bc/4689e1ba0c073c196b594471eb21be0aa51d9e64b911728aa13cd85ef0ae/llvmlite-0.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:e7a34d4aa6f9a97ee006b504be6d2b8cb7f755b80ab2f344dda1ef992f828559", size = 38138651, upload-time = "2025-12-08T18:14:45.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/a1/2ad4b2367915faeebe8447f0a057861f646dbf5fbbb3561db42c65659cf3/llvmlite-0.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82f3d39b16f19aa1a56d5fe625883a6ab600d5cc9ea8906cca70ce94cabba067", size = 37232766, upload-time = "2025-12-08T18:14:48.836Z" }, + { url = "https://files.pythonhosted.org/packages/12/b5/99cf8772fdd846c07da4fd70f07812a3c8fd17ea2409522c946bb0f2b277/llvmlite-0.46.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a3df43900119803bbc52720e758c76f316a9a0f34612a886862dfe0a5591a17e", size = 56275175, upload-time = "2025-12-08T18:14:51.604Z" }, + { url = "https://files.pythonhosted.org/packages/38/f2/ed806f9c003563732da156139c45d970ee435bd0bfa5ed8de87ba972b452/llvmlite-0.46.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de183fefc8022d21b0aa37fc3e90410bc3524aed8617f0ff76732fc6c3af5361", size = 55128630, upload-time = "2025-12-08T18:14:55.107Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/8f5a37a65fc9b7b17408508145edd5f86263ad69c19d3574e818f533a0eb/llvmlite-0.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8b10bc585c58bdffec9e0c309bb7d51be1f2f15e169a4b4d42f2389e431eb93", size = 38138652, upload-time = "2025-12-08T18:14:58.171Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f8/4db016a5e547d4e054ff2f3b99203d63a497465f81ab78ec8eb2ff7b2304/llvmlite-0.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b9588ad4c63b4f0175a3984b85494f0c927c6b001e3a246a3a7fb3920d9a137", size = 37232767, upload-time = "2025-12-08T18:15:00.737Z" }, + { url = "https://files.pythonhosted.org/packages/aa/85/4890a7c14b4fa54400945cb52ac3cd88545bbdb973c440f98ca41591cdc5/llvmlite-0.46.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3535bd2bb6a2d7ae4012681ac228e5132cdb75fefb1bcb24e33f2f3e0c865ed4", size = 56275176, upload-time = "2025-12-08T18:15:03.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/07/3d31d39c1a1a08cd5337e78299fca77e6aebc07c059fbd0033e3edfab45c/llvmlite-0.46.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cbfd366e60ff87ea6cc62f50bc4cd800ebb13ed4c149466f50cf2163a473d1e", size = 55128630, upload-time = "2025-12-08T18:15:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6b/d139535d7590a1bba1ceb68751bef22fadaa5b815bbdf0e858e3875726b2/llvmlite-0.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:398b39db462c39563a97b912d4f2866cd37cba60537975a09679b28fbbc0fb38", size = 38138940, upload-time = "2025-12-08T18:15:10.162Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ff/3eba7eb0aed4b6fca37125387cd417e8c458e750621fce56d2c541f67fa8/llvmlite-0.46.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:30b60892d034bc560e0ec6654737aaa74e5ca327bd8114d82136aa071d611172", size = 37232767, upload-time = "2025-12-08T18:15:13.22Z" }, + { url = "https://files.pythonhosted.org/packages/0e/54/737755c0a91558364b9200702c3c9c15d70ed63f9b98a2c32f1c2aa1f3ba/llvmlite-0.46.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6cc19b051753368a9c9f31dc041299059ee91aceec81bd57b0e385e5d5bf1a54", size = 56275176, upload-time = "2025-12-08T18:15:16.339Z" }, + { url = "https://files.pythonhosted.org/packages/e6/91/14f32e1d70905c1c0aa4e6609ab5d705c3183116ca02ac6df2091868413a/llvmlite-0.46.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bca185892908f9ede48c0acd547fe4dc1bafefb8a4967d47db6cf664f9332d12", size = 55128629, upload-time = "2025-12-08T18:15:19.493Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a7/d526ae86708cea531935ae777b6dbcabe7db52718e6401e0fb9c5edea80e/llvmlite-0.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:67438fd30e12349ebb054d86a5a1a57fd5e87d264d2451bcfafbbbaa25b82a35", size = 38138941, upload-time = "2025-12-08T18:15:22.536Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/ae/af0ffb724814cc2ea64445acad05f71cff5f799bb7efb22e47ee99340dbc/llvmlite-0.46.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:d252edfb9f4ac1fcf20652258e3f102b26b03eef738dc8a6ffdab7d7d341d547", size = 37232768, upload-time = "2025-12-08T18:15:25.055Z" }, + { url = "https://files.pythonhosted.org/packages/c9/19/5018e5352019be753b7b07f7759cdabb69ca5779fea2494be8839270df4c/llvmlite-0.46.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:379fdd1c59badeff8982cb47e4694a6143bec3bb49aa10a466e095410522064d", size = 56275173, upload-time = "2025-12-08T18:15:28.109Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c9/d57877759d707e84c082163c543853245f91b70c804115a5010532890f18/llvmlite-0.46.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e8cbfff7f6db0fa2c771ad24154e2a7e457c2444d7673e6de06b8b698c3b269", size = 55128628, upload-time = "2025-12-08T18:15:31.098Z" }, + { url = "https://files.pythonhosted.org/packages/30/a8/e61a8c2b3cc7a597073d9cde1fcbb567e9d827f1db30c93cf80422eac70d/llvmlite-0.46.0-cp314-cp314-win_amd64.whl", hash = "sha256:7821eda3ec1f18050f981819756631d60b6d7ab1a6cf806d9efefbe3f4082d61", size = 39153056, upload-time = "2025-12-08T18:15:33.938Z" }, +] + +[[package]] +name = "lxml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" }, 
+ { url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" }, + { url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" }, + { url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" }, + { url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" }, + { url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" }, + { url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" }, + { url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" }, + { url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" }, + { url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = 
"sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = 
"sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494, upload-time = "2025-09-22T04:01:54.242Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146, upload-time = "2025-09-22T04:01:56.282Z" }, + { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932, upload-time = "2025-09-22T04:01:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060, upload-time = "2025-09-22T04:02:00.812Z" }, + { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000, upload-time = "2025-09-22T04:02:02.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496, upload-time = "2025-09-22T04:02:04.904Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779, upload-time = "2025-09-22T04:02:06.689Z" }, + { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072, upload-time = "2025-09-22T04:02:08.587Z" }, + { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675, upload-time = "2025-09-22T04:02:10.783Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171, upload-time = "2025-09-22T04:02:12.631Z" }, + { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175, upload-time = "2025-09-22T04:02:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688, upload-time = "2025-09-22T04:02:16.957Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655, upload-time = "2025-09-22T04:02:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695, upload-time = "2025-09-22T04:02:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841, upload-time = "2025-09-22T04:02:22.489Z" }, + { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700, upload-time = "2025-09-22T04:02:24.465Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347, upload-time = "2025-09-22T04:02:26.286Z" }, + { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248, upload-time = "2025-09-22T04:02:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = 
"sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801, upload-time = "2025-09-22T04:02:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403, upload-time = "2025-09-22T04:02:32.119Z" }, + { url = "https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974, upload-time = "2025-09-22T04:02:34.155Z" }, + { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953, upload-time = "2025-09-22T04:02:36.054Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054, upload-time = "2025-09-22T04:02:38.154Z" }, + { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421, upload-time = "2025-09-22T04:02:40.413Z" }, + { url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684, upload-time = "2025-09-22T04:02:42.288Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463, upload-time = "2025-09-22T04:02:44.165Z" }, + { url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437, upload-time = "2025-09-22T04:02:46.524Z" }, + { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890, upload-time = "2025-09-22T04:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185, upload-time = "2025-09-22T04:02:50.746Z" }, + { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895, upload-time = "2025-09-22T04:02:52.968Z" }, + { url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 
5695246, upload-time = "2025-09-22T04:02:54.798Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797, upload-time = "2025-09-22T04:02:57.058Z" }, + { url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404, upload-time = "2025-09-22T04:02:58.966Z" }, + { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072, upload-time = "2025-09-22T04:03:38.05Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617, upload-time = "2025-09-22T04:03:39.835Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930, upload-time = "2025-09-22T04:03:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380, upload-time = "2025-09-22T04:03:01.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", size = 4775632, upload-time = "2025-09-22T04:03:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171, upload-time = "2025-09-22T04:03:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109, upload-time = "2025-09-22T04:03:07.452Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061, upload-time = "2025-09-22T04:03:09.297Z" }, + { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233, upload-time = "2025-09-22T04:03:11.651Z" }, + { url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739, upload-time = "2025-09-22T04:03:13.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119, upload-time = "2025-09-22T04:03:15.408Z" }, + { url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665, upload-time = "2025-09-22T04:03:17.262Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997, upload-time = "2025-09-22T04:03:19.14Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957, upload-time = "2025-09-22T04:03:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372, upload-time = "2025-09-22T04:03:23.27Z" }, + { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 5634653, upload-time = "2025-09-22T04:03:25.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795, upload-time = "2025-09-22T04:03:27.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023, upload-time = "2025-09-22T04:03:30.056Z" }, + { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420, upload-time = "2025-09-22T04:03:32.198Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837, upload-time = "2025-09-22T04:03:34.027Z" }, + { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205, upload-time = "2025-09-22T04:03:36.249Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" }, + { url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" }, + { url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" }, + { url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = 
"sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { 
url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "matplotlib" +version = "3.10.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "contourpy", version = "1.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "contourpy", version = "1.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "cycler" }, + { name = "fonttools" }, + { name = "kiwisolver" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "packaging" }, + { name = "pillow" }, + { name = "pyparsing" }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/be/a30bd917018ad220c400169fba298f2bb7003c8ccbc0c3e24ae2aacad1e8/matplotlib-3.10.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:00270d217d6b20d14b584c521f810d60c5c78406dc289859776550df837dcda7", size = 8239828, upload-time = "2025-12-10T22:55:02.313Z" }, + { url = "https://files.pythonhosted.org/packages/58/27/ca01e043c4841078e82cf6e80a6993dfecd315c3d79f5f3153afbb8e1ec6/matplotlib-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b3c1cc42aa184b3f738cfa18c1c1d72fd496d85467a6cf7b807936d39aa656", size = 8128050, upload-time = "2025-12-10T22:55:04.997Z" }, + { url = "https://files.pythonhosted.org/packages/cb/aa/7ab67f2b729ae6a91bcf9dcac0affb95fb8c56f7fd2b2af894ae0b0cf6fa/matplotlib-3.10.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ee40c27c795bda6a5292e9cff9890189d32f7e3a0bf04e0e3c9430c4a00c37df", size = 8700452, upload-time = "2025-12-10T22:55:07.47Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/2d5817b0acee3c49b7e7ccfbf5b273f284957cc8e270adf36375db353190/matplotlib-3.10.8-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a48f2b74020919552ea25d222d5cc6af9ca3f4eb43a93e14d068457f545c2a17", size = 9534928, upload-time = "2025-12-10T22:55:10.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5b/8e66653e9f7c39cb2e5cab25fce4810daffa2bff02cbf5f3077cea9e942c/matplotlib-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f254d118d14a7f99d616271d6c3c27922c092dac11112670b157798b89bf4933", size = 9586377, upload-time = "2025-12-10T22:55:12.362Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e2/fd0bbadf837f81edb0d208ba8f8cb552874c3b16e27cb91a31977d90875d/matplotlib-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9b587c9c7274c1613a30afabf65a272114cd6cdbe67b3406f818c79d7ab2e2a", size = 8128127, upload-time = "2025-12-10T22:55:14.436Z" }, + { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, + { url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", 
size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, + { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, + { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, + { 
url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" }, + { url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" }, + { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" }, + { url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" }, + { url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/9c0ff7a2f11615e516c3b058e1e6e8f9614ddeca53faca06da267c48345d/matplotlib-3.10.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b53285e65d4fa4c86399979e956235deb900be5baa7fc1218ea67fbfaeaadd6f", size = 8262481, upload-time = "2025-12-10T22:56:10.885Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ca/e8ae28649fcdf039fda5ef554b40a95f50592a3c47e6f7270c9561c12b07/matplotlib-3.10.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32f8dce744be5569bebe789e46727946041199030db8aeb2954d26013a0eb26b", size = 8151473, upload-time = "2025-12-10T22:56:12.377Z" }, + { url = "https://files.pythonhosted.org/packages/f1/6f/009d129ae70b75e88cbe7e503a12a4c0670e08ed748a902c2568909e9eb5/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf267add95b1c88300d96ca837833d4112756045364f5c734a2276038dae27d", size = 9553896, upload-time = "2025-12-10T22:56:14.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/26/4221a741eb97967bc1fd5e4c52b9aa5a91b2f4ec05b59f6def4d820f9df9/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf5bd12cecf46908f286d7838b2abc6c91cda506c0445b8223a7c19a00df008", size = 9824193, upload-time = "2025-12-10T22:56:16.29Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/3abf75f38605772cf48a9daf5821cd4f563472f38b4b828c6fba6fa6d06e/matplotlib-3.10.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:41703cc95688f2516b480f7f339d8851a6035f18e100ee6a32bc0b8536a12a9c", size = 9615444, upload-time = "2025-12-10T22:56:18.155Z" }, + { url = "https://files.pythonhosted.org/packages/93/a5/de89ac80f10b8dc615807ee1133cd99ac74082581196d4d9590bea10690d/matplotlib-3.10.8-cp314-cp314-win_amd64.whl", hash = "sha256:83d282364ea9f3e52363da262ce32a09dfe241e4080dcedda3c0db059d3c1f11", size = 8272719, upload-time = "2025-12-10T22:56:20.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/ce/b006495c19ccc0a137b48083168a37bd056392dee02f87dba0472f2797fe/matplotlib-3.10.8-cp314-cp314-win_arm64.whl", hash = "sha256:2c1998e92cd5999e295a731bcb2911c75f597d937341f3030cc24ef2733d78a8", size = 8144205, upload-time = "2025-12-10T22:56:22.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/d9/b31116a3a855bd313c6fcdb7226926d59b041f26061c6c5b1be66a08c826/matplotlib-3.10.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b5a2b97dbdc7d4f353ebf343744f1d1f1cca8aa8bfddb4262fcf4306c3761d50", size = 8305785, upload-time = "2025-12-10T22:56:24.218Z" }, + { url = "https://files.pythonhosted.org/packages/1e/90/6effe8103f0272685767ba5f094f453784057072f49b393e3ea178fe70a5/matplotlib-3.10.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3f5c3e4da343bba819f0234186b9004faba952cc420fbc522dc4e103c1985908", size = 8198361, upload-time = "2025-12-10T22:56:26.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/65/a73188711bea603615fc0baecca1061429ac16940e2385433cc778a9d8e7/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f62550b9a30afde8c1c3ae450e5eb547d579dd69b25c2fc7a1c67f934c1717a", size = 9561357, upload-time = "2025-12-10T22:56:28.953Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3d/b5c5d5d5be8ce63292567f0e2c43dde9953d3ed86ac2de0a72e93c8f07a1/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:495672de149445ec1b772ff2c9ede9b769e3cb4f0d0aa7fa730d7f59e2d4e1c1", size = 9823610, upload-time = "2025-12-10T22:56:31.455Z" }, + { url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/f5/43/31d59500bb950b0d188e149a2e552040528c13d6e3d6e84d0cccac593dcd/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f97aeb209c3d2511443f8797e3e5a569aebb040d4f8bc79aa3ee78a8fb9e3dd8", size = 8237252, upload-time = "2025-12-10T22:56:39.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/2c/615c09984f3c5f907f51c886538ad785cf72e0e11a3225de2c0f9442aecc/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fb061f596dad3a0f52b60dc6a5dec4a0c300dec41e058a7efe09256188d170b7", size = 8124693, upload-time = "2025-12-10T22:56:41.758Z" }, + { url = "https://files.pythonhosted.org/packages/91/e1/2757277a1c56041e1fc104b51a0f7b9a4afc8eb737865d63cababe30bc61/matplotlib-3.10.8-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12d90df9183093fcd479f4172ac26b322b1248b15729cb57f42f71f24c7e37a3", size = 8702205, upload-time = "2025-12-10T22:56:43.415Z" }, + { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, + { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, 
upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2", size = 81318, upload-time = "2025-10-08T09:14:38.722Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87", size = 83786, upload-time = "2025-10-08T09:14:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251", size = 398240, upload-time = "2025-10-08T09:14:41.151Z" }, + { url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a", size = 406070, upload-time = "2025-10-08T09:14:42.821Z" }, + { url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f", size = 393403, upload-time = "2025-10-08T09:14:44.38Z" }, + { url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f", size = 398947, upload-time = "2025-10-08T09:14:45.56Z" }, + { url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9", size = 64769, upload-time = "2025-10-08T09:14:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa", size = 71293, upload-time = "2025-10-08T09:14:48.665Z" }, + { url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/0b/19348d4c98980c4851d2f943f8ebafdece2ae7ef737adcfa5994ce8e5f10/multidict-6.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c93c3db7ea657dd4637d57e74ab73de31bccefe144d3d4ce370052035bc85fb5", size = 77176, upload-time = "2026-01-26T02:42:59.784Z" }, + { url = "https://files.pythonhosted.org/packages/ef/04/9de3f8077852e3d438215c81e9b691244532d2e05b4270e89ce67b7d103c/multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:974e72a2474600827abaeda71af0c53d9ebbc3c2eb7da37b37d7829ae31232d8", size = 44996, upload-time = "2026-01-26T02:43:01.674Z" }, + { url = "https://files.pythonhosted.org/packages/31/5c/08c7f7fe311f32e83f7621cd3f99d805f45519cd06fafb247628b861da7d/multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdea2e7b2456cfb6694fb113066fd0ec7ea4d67e3a35e1f4cbeea0b448bf5872", size = 44631, upload-time = "2026-01-26T02:43:03.169Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/0e3b1390ae772f27501199996b94b52ceeb64fe6f9120a32c6c3f6b781be/multidict-6.7.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:17207077e29342fdc2c9a82e4b306f1127bf1ea91f8b71e02d4798a70bb99991", size = 242561, upload-time = "2026-01-26T02:43:04.733Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f4/8719f4f167586af317b69dd3e90f913416c91ca610cac79a45c53f590312/multidict-6.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4f49cb5661344764e4c7c7973e92a47a59b8fc19b6523649ec9dc4960e58a03", size = 242223, upload-time = "2026-01-26T02:43:06.695Z" }, + { url = "https://files.pythonhosted.org/packages/47/ab/7c36164cce64a6ad19c6d9a85377b7178ecf3b89f8fd589c73381a5eedfd/multidict-6.7.1-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a9fc4caa29e2e6ae408d1c450ac8bf19892c5fca83ee634ecd88a53332c59981", size = 222322, upload-time = "2026-01-26T02:43:08.472Z" }, + { url = "https://files.pythonhosted.org/packages/f5/79/a25add6fb38035b5337bc5734f296d9afc99163403bbcf56d4170f97eb62/multidict-6.7.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c5f0c21549ab432b57dcc82130f388d84ad8179824cc3f223d5e7cfbfd4143f6", size = 254005, upload-time = "2026-01-26T02:43:10.127Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7b/64a87cf98e12f756fc8bd444b001232ffff2be37288f018ad0d3f0aae931/multidict-6.7.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7dfb78d966b2c906ae1d28ccf6e6712a3cd04407ee5088cd276fe8cb42186190", size = 251173, upload-time = "2026-01-26T02:43:11.731Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ac/b605473de2bb404e742f2cc3583d12aedb2352a70e49ae8fce455b50c5aa/multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b0d9b91d1aa44db9c1f1ecd0d9d2ae610b2f4f856448664e01a3b35899f3f92", size = 243273, upload-time = "2026-01-26T02:43:13.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/65/11492d6a0e259783720f3bc1d9ea55579a76f1407e31ed44045c99542004/multidict-6.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dd96c01a9dcd4889dcfcf9eb5544ca0c77603f239e3ffab0524ec17aea9a93ee", size = 238956, upload-time = "2026-01-26T02:43:14.843Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a7/7ee591302af64e7c196fb63fe856c788993c1372df765102bd0448e7e165/multidict-6.7.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:067343c68cd6612d375710f895337b3a98a033c94f14b9a99eff902f205424e2", size = 233477, upload-time = "2026-01-26T02:43:16.025Z" }, + { url = "https://files.pythonhosted.org/packages/9c/99/c109962d58756c35fd9992fed7f2355303846ea2ff054bb5f5e9d6b888de/multidict-6.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5884a04f4ff56c6120f6ccf703bdeb8b5079d808ba604d4d53aec0d55dc33568", size = 243615, upload-time = "2026-01-26T02:43:17.84Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5f/1973e7c771c86e93dcfe1c9cc55a5481b610f6614acfc28c0d326fe6bfad/multidict-6.7.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8affcf1c98b82bc901702eb73b6947a1bfa170823c153fe8a47b5f5f02e48e40", size = 249930, upload-time = "2026-01-26T02:43:19.06Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a5/f170fc2268c3243853580203378cd522446b2df632061e0a5409817854c7/multidict-6.7.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0d17522c37d03e85c8098ec8431636309b2682cf12e58f4dbc76121fb50e4962", size = 243807, upload-time = "2026-01-26T02:43:20.286Z" }, + { url = "https://files.pythonhosted.org/packages/de/01/73856fab6d125e5bc652c3986b90e8699a95e84b48d72f39ade6c0e74a8c/multidict-6.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24c0cf81544ca5e17cfcb6e482e7a82cd475925242b308b890c9452a074d4505", size = 239103, upload-time = "2026-01-26T02:43:21.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/46/f1220bd9944d8aa40d8ccff100eeeee19b505b857b6f603d6078cb5315b0/multidict-6.7.1-cp310-cp310-win32.whl", hash = "sha256:d82dd730a95e6643802f4454b8fdecdf08667881a9c5670db85bc5a56693f122", size = 41416, upload-time = "2026-01-26T02:43:22.703Z" }, + { url = "https://files.pythonhosted.org/packages/68/00/9b38e272a770303692fc406c36e1a4c740f401522d5787691eb38a8925a8/multidict-6.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf37cbe5ced48d417ba045aca1b21bafca67489452debcde94778a576666a1df", size = 46022, upload-time = "2026-01-26T02:43:23.77Z" }, + { url = "https://files.pythonhosted.org/packages/64/65/d8d42490c02ee07b6bbe00f7190d70bb4738b3cce7629aaf9f213ef730dd/multidict-6.7.1-cp310-cp310-win_arm64.whl", hash = "sha256:59bc83d3f66b41dac1e7460aac1d196edc70c9ba3094965c467715a70ecb46db", size = 43238, upload-time = "2026-01-26T02:43:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = 
"2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, + { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = 
"2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = 
"2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = 
"2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = 
"2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = 
"2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "multiprocess" +version = "0.70.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/fd/2ae3826f5be24c6ed87266bc4e59c46ea5b059a103f3d7e7eb76a52aeecb/multiprocess-0.70.18.tar.gz", hash = "sha256:f9597128e6b3e67b23956da07cf3d2e5cba79e2f4e0fba8d7903636663ec6d0d", size = 1798503, upload-time = "2025-04-17T03:11:27.742Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f8/7f9a8f08bf98cea1dfaa181e05cc8bbcb59cecf044b5a9ac3cce39f9c449/multiprocess-0.70.18-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25d4012dcaaf66b9e8e955f58482b42910c2ee526d532844d8bcf661bbc604df", size = 135083, upload-time = 
"2025-04-17T03:11:04.223Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/b7b10dbfc17b2b3ce07d4d30b3ba8367d0ed32d6d46cd166e298f161dd46/multiprocess-0.70.18-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:06b19433de0d02afe5869aec8931dd5c01d99074664f806c73896b0d9e527213", size = 135128, upload-time = "2025-04-17T03:11:06.045Z" }, + { url = "https://files.pythonhosted.org/packages/c1/a3/5f8d3b9690ea5580bee5868ab7d7e2cfca74b7e826b28192b40aa3881cdc/multiprocess-0.70.18-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6fa1366f994373aaf2d4738b0f56e707caeaa05486e97a7f71ee0853823180c2", size = 135132, upload-time = "2025-04-17T03:11:07.533Z" }, + { url = "https://files.pythonhosted.org/packages/55/4d/9af0d1279c84618bcd35bf5fd7e371657358c7b0a523e54a9cffb87461f8/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b8940ae30139e04b076da6c5b83e9398585ebdf0f2ad3250673fef5b2ff06d6", size = 144695, upload-time = "2025-04-17T03:11:09.161Z" }, + { url = "https://files.pythonhosted.org/packages/17/bf/87323e79dd0562474fad3373c21c66bc6c3c9963b68eb2a209deb4c8575e/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0929ba95831adb938edbd5fb801ac45e705ecad9d100b3e653946b7716cb6bd3", size = 144742, upload-time = "2025-04-17T03:11:10.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/74/cb8c831e58dc6d5cf450b17c7db87f14294a1df52eb391da948b5e0a0b94/multiprocess-0.70.18-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d77f8e4bfe6c6e2e661925bbf9aed4d5ade9a1c6502d5dfc10129b9d1141797", size = 144745, upload-time = "2025-04-17T03:11:11.453Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/0cba6cf51a1a31f20471fbc823a716170c73012ddc4fb85d706630ed6e8f/multiprocess-0.70.18-py310-none-any.whl", hash = "sha256:60c194974c31784019c1f459d984e8f33ee48f10fcf42c309ba97b30d9bd53ea", size = 134948, upload-time = "2025-04-17T03:11:20.223Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/88/9039f2fed1012ef584751d4ceff9ab4a51e5ae264898f0b7cbf44340a859/multiprocess-0.70.18-py311-none-any.whl", hash = "sha256:5aa6eef98e691281b3ad923be2832bf1c55dd2c859acd73e5ec53a66aae06a1d", size = 144462, upload-time = "2025-04-17T03:11:21.657Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b6/5f922792be93b82ec6b5f270bbb1ef031fd0622847070bbcf9da816502cc/multiprocess-0.70.18-py312-none-any.whl", hash = "sha256:9b78f8e5024b573730bfb654783a13800c2c0f2dfc0c25e70b40d184d64adaa2", size = 150287, upload-time = "2025-04-17T03:11:22.69Z" }, + { url = "https://files.pythonhosted.org/packages/ee/25/7d7e78e750bc1aecfaf0efbf826c69a791d2eeaf29cf20cba93ff4cced78/multiprocess-0.70.18-py313-none-any.whl", hash = "sha256:871743755f43ef57d7910a38433cfe41319e72be1bbd90b79c7a5ac523eb9334", size = 151917, upload-time = "2025-04-17T03:11:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c3/ca84c19bd14cdfc21c388fdcebf08b86a7a470ebc9f5c3c084fc2dbc50f7/multiprocess-0.70.18-py38-none-any.whl", hash = "sha256:dbf705e52a154fe5e90fb17b38f02556169557c2dd8bb084f2e06c2784d8279b", size = 132636, upload-time = "2025-04-17T03:11:24.936Z" }, + { url = "https://files.pythonhosted.org/packages/6c/28/dd72947e59a6a8c856448a5e74da6201cb5502ddff644fbc790e4bd40b9a/multiprocess-0.70.18-py39-none-any.whl", hash = "sha256:e78ca805a72b1b810c690b6b4cc32579eba34f403094bbbae962b7b5bf9dfcb8", size = 133478, upload-time = "2025-04-17T03:11:26.253Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = 
"sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = "https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = 
"sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "narwhals" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/b4/02a8add181b8d2cd5da3b667cd102ae536e8c9572ab1a130816d70a89edb/narwhals-2.18.0.tar.gz", hash = "sha256:1de5cee338bc17c338c6278df2c38c0dd4290499fcf70d75e0a51d5f22a6e960", size = 620222, upload-time = "2026-03-10T15:51:27.14Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/75/0b4a10da17a44cf13567d08a9c7632a285297e46253263f1ae119129d10a/narwhals-2.18.0-py3-none-any.whl", hash = "sha256:68378155ee706ac9c5b25868ef62ecddd62947b6df7801a0a156bc0a615d2d0d", size = 444865, upload-time = "2026-03-10T15:51:24.085Z" }, +] + +[[package]] +name = "ndindex" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f5/92/4b9d2f4e0f3eabcfc7b02b48261f6e5ad36a3e2c1bbdcc4e3b7b6c768fa6/ndindex-1.10.1.tar.gz", hash = "sha256:0f6113c1f031248f8818cbee1aa92aa3c9472b7701debcce9fddebcd2f610f11", size = 271395, upload-time = "2025-11-19T20:40:08.899Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/71/aff23bd84111d038efdcdaea4d218b463a0b2129ff49f30613cbc6f535ff/ndindex-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8644c76e74c0fbbdaa54752de30b7c6b98b1e8f6c05f0c6228632a29c862d83f", size = 172022, upload-time = "2025-11-19T20:38:12.429Z" }, + { url = "https://files.pythonhosted.org/packages/99/a6/adcc17b685b24362983b00f965ee5c8607f74e7c68049a20facbd7ceb0b6/ndindex-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a9a211ec2198994cb3600cd46adb335a740f27e4d406b40d48ed7b98d2d2a89b", size = 171057, upload-time = "2025-11-19T20:38:13.846Z" }, + { url = "https://files.pythonhosted.org/packages/ee/28/b0b1bde7818d2ccd5c288802c1f24b69705e03f3975bc948c005eccab25a/ndindex-1.10.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cdb86a4176f2ae23bd4bcd0401ca35d5dad2d1ed0d0dca1ff64480ebe41b75d9", size = 498925, upload-time = "2025-11-19T20:38:17.214Z" }, + { url = "https://files.pythonhosted.org/packages/ec/46/55c3800048ef5310de542f188e1aad00e0b1d37713230c0eae980e88c895/ndindex-1.10.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3ce3bd0882572269ca09285112cf38ce84baa2aaa5891551af968ca7c18f84bb", size = 495662, upload-time = "2025-11-19T20:38:20.026Z" }, + { url = "https://files.pythonhosted.org/packages/48/a4/0103c3ee3778d7079c3ff7dd879c79362afe3a7e9d3b8dcdaa25b49ca413/ndindex-1.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d6442ecce9b395aade5e9f2431e169e01393953a069f6d2d53a63b6c94d1d06", size = 1471263, upload-time = "2025-11-19T20:38:21.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/5a/eaa38b18757c3d8e7b2438faa5001a02f193b51a68a5558d6066f3c407e6/ndindex-1.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bada24abee6bc6ca438b2e6b68a752fc9b58b67bdcb54008e2bc6330ecb0a777", size = 1522878, upload-time = "2025-11-19T20:38:23.064Z" }, + { url = "https://files.pythonhosted.org/packages/a3/93/a40920c849fa128c9439bc3eb0add814696216dde235497eaa415f14d5e7/ndindex-1.10.1-cp310-cp310-win32.whl", hash = "sha256:bc236d1612714cbd80610cf25a6ef92584ff1402e9d5a5c50e926195716f7d22", size = 149268, upload-time = "2025-11-19T20:38:25.12Z" }, + { url = "https://files.pythonhosted.org/packages/85/d9/baf1655d0b2d36eb46134fddf7dd0ef0093203c9c91d17f8ce01b9060366/ndindex-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:4cea15cff221e76abd12e3e940c26124184735cf421c229307f5db6742e14dd7", size = 157151, upload-time = "2025-11-19T20:38:27.229Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d9/c94ab6151c9fdd199c2b560f23e3759a9fb86a7a1275855e0b97291bf05a/ndindex-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e2ad917bcdf8dc5ba1e21f01054c991d26862d4d01c3c203a50e907096d558ac", size = 172128, upload-time = "2025-11-19T20:38:28.977Z" }, + { url = "https://files.pythonhosted.org/packages/3a/34/880c4073750766e44492d51280d025f28e36475394ca3d741b0a4adad4b0/ndindex-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e851990a68937db5f485cd9f3e760c1fd47fa0f2a99f63a5e2cc880908faf3bb", size = 171423, upload-time = "2025-11-19T20:38:30.357Z" }, + { url = "https://files.pythonhosted.org/packages/f0/1e/0342da55dabe4075efc2b2ab91a6a22ed3047c5bd511ef771a7a3f822c90/ndindex-1.10.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27385939f317b55773ea53f6bf9334810cf1d66206034c0a6a6f2a88f2001c3c", size = 519590, upload-time = "2025-11-19T20:38:32.464Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/cb/7a02b6f29b15a16cd0002f4591d14493eff8e9236f7ca4c02ee4d4bcefbd/ndindex-1.10.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fdf3ca16efcdfbb8800aa88fbab1bc6528e6a0504bcb9cf7af4cb9d50e9f5d9", size = 516676, upload-time = "2025-11-19T20:38:34.276Z" }, + { url = "https://files.pythonhosted.org/packages/67/d5/38da808f968a54b0fead2d7e15ca011d3df93c96a07f4914e8ef3974506e/ndindex-1.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3307817bdc92846b18f309fae3582856f567dd6e0742fb0b41ac68682bfc4e2a", size = 1491141, upload-time = "2025-11-19T20:38:35.785Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/8c66ef982a01ae4cbdabba679a2bc711f262cedf23bfb9682293146f8a98/ndindex-1.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae73cd2d66b09ef2f2a7d7f93bad396d6abf168d1ee825e403c6c5fb8ae1341c", size = 1543876, upload-time = "2025-11-19T20:38:37.456Z" }, + { url = "https://files.pythonhosted.org/packages/05/a1/7c7e3a3c6e81b4284fd0d53cbaec51d9e5b90df26dd78e9bde06cb307217/ndindex-1.10.1-cp311-cp311-win32.whl", hash = "sha256:890bb92f0a779e6f16bdbcc8bd2e06c32bcc0239e5893ba246114eb924aecaaa", size = 149149, upload-time = "2025-11-19T20:38:38.911Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/99e1fb0effdef74b883be615ea0053ebcea28a53fd8b896263f4e99b0113/ndindex-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:1827a40301405b44ad709e388c5b48cf35cd90a67f77e63f0f17d87f6000fa81", size = 157246, upload-time = "2025-11-19T20:38:40.197Z" }, + { url = "https://files.pythonhosted.org/packages/65/90/774ddd08b2a1b41faa56da111f0fbfeb4f17ee537214c938ef41d61af949/ndindex-1.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87f83e8c35a7f49a68cd3a3054c406e6c22f8c1315f3905f7a778c657669187e", size = 177348, upload-time = "2025-11-19T20:38:41.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/ee/a423e857f5b45da3adc8ddbcfbfd4a0e9a047edce3915d3e3d6e189b6bd9/ndindex-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf9e05986b2eb8c5993bce0f911d6cedd15bda30b5e35dd354b1ad1f4cc3599d", size = 176561, upload-time = "2025-11-19T20:38:43.06Z" }, + { url = "https://files.pythonhosted.org/packages/1f/40/139b6b050ba2b2a0bb40e0381a352b1eb6551302dcb8f86fb4c97dd34e92/ndindex-1.10.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:046c1e88d46b2bd2fd3483e06d27b4e85132b55bc693f2fca2db0bb56eea1e78", size = 542901, upload-time = "2025-11-19T20:38:44.43Z" }, + { url = "https://files.pythonhosted.org/packages/27/ae/defd665dbbeb2fffa077491365ed160acaec49274ce8d4b979f55db71f18/ndindex-1.10.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03cf1e6cdac876bd8fc92d3b65bb223496b1581d10eab3ba113f7c195121a959", size = 546875, upload-time = "2025-11-19T20:38:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/59/43/6d54d48e8eaee25cdab70d3e4c4f579ddb0255e4f1660040d5ad55e029c6/ndindex-1.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:752e78a5e87911ded117c57a7246596f26c9c6da066de3c2b533b3db694949bb", size = 1510036, upload-time = "2025-11-19T20:38:47.444Z" }, + { url = "https://files.pythonhosted.org/packages/09/61/e28ba3b98eacd18193176526526b34d7d70d2a6f9fd2b4d8309ab5692678/ndindex-1.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9dd58d91220b1c1fe516324bfcf4114566c98e84b1cbbe416abe345c75bd557", size = 1571849, upload-time = "2025-11-19T20:38:48.951Z" }, + { url = "https://files.pythonhosted.org/packages/8f/63/83fff78a3712cb9f478dd84a19ec389acf6f8c7b01dc347a65ae74e6123d/ndindex-1.10.1-cp312-cp312-win32.whl", hash = "sha256:3b0d9ce2c8488444499ab6d40e92e09867bf4413f5cf04c01635de923f44aa67", size = 149792, upload-time = "2025-11-19T20:38:50.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/fd/a5e3c8c043d0dddea6cd4567bfaea568f022ac197301882b3d85d9c1e9b3/ndindex-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:5c026dbbf2455d97ce6456d8a50b349aee8fefa11027d020638c89e9be2c9c4c", size = 158164, upload-time = "2025-11-19T20:38:52.242Z" }, + { url = "https://files.pythonhosted.org/packages/60/ea/03676266cb38cc671679a9d258cc59bfc58c69726db87b0d6eeafb308895/ndindex-1.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:157b5c34a1b779f5d27b790d9bd7e7b156d284e76be83c591a3ba003984f4956", size = 176323, upload-time = "2025-11-19T20:38:53.528Z" }, + { url = "https://files.pythonhosted.org/packages/89/f4/2d350439031b108b0bb8897cad315390c5ad88c14d87419a54c2ffa95c80/ndindex-1.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f99b3e89220da3244d03c9c5473669c7107d361c129fd9b064622744dee1ce15", size = 175584, upload-time = "2025-11-19T20:38:57.968Z" }, + { url = "https://files.pythonhosted.org/packages/77/34/a51b7c6f7159718a6a0a694fc1058b94d793c416d9a4fd649f1924cce5f8/ndindex-1.10.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6928e47fb008903f2e41309b7ff1e59b16abbcd59e2e945454571c28b2433c9e", size = 524127, upload-time = "2025-11-19T20:38:59.412Z" }, + { url = "https://files.pythonhosted.org/packages/21/91/d8f19f0b8fc9c5585b50fda44c05415da0bdc5fa9c9c69011015dac27880/ndindex-1.10.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e69a2cb1ac7be955c3c77f1def83f410775a81525c9ce2d4c0a3f2a61589ed47", size = 528213, upload-time = "2025-11-19T20:39:00.882Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a9/77d9d037e871a3faa8579b354ca2dd09cc5bbf3e085d9e3c67f786d55ee3/ndindex-1.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cb76e0f3f235d8b1c768b17e771de48775d281713795c3aa045e8114ad61bdda", size = 1492172, upload-time = "2025-11-19T20:39:02.387Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/29/ad13676fc9312e0aa1a80a7c04bcb0b502b877ed4956136117ad663eced0/ndindex-1.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7da34a78410c14341d5fff73be5ce924bd36500bf7f640fc59b8607d3a0df95e", size = 1552614, upload-time = "2025-11-19T20:39:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/63/34/e6e6fd81423810c07ae623c4d36e099f42a812994977e8e3bfa182c02472/ndindex-1.10.1-cp313-cp313-win32.whl", hash = "sha256:9599fcb7411ffe601c367f0a5d4bc0ed588e3e7d9dc7604bdb32c8f669456b9e", size = 149330, upload-time = "2025-11-19T20:39:05.727Z" }, + { url = "https://files.pythonhosted.org/packages/4d/d3/830a20626e2ec0e31a926be90e67068a029930f99e6cfebf2f9768e7b7b1/ndindex-1.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:ef3ef22390a892d16286505083ee5b326317b21c255a0c7f744b1290a0b964a6", size = 157309, upload-time = "2025-11-19T20:39:07.394Z" }, + { url = "https://files.pythonhosted.org/packages/4a/73/3bdeecd1f6ec0ad81478a53d96da4ba9be74ed297c95f2b4fbe2b80843e1/ndindex-1.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:72af787dcee3661f36fff9d144d989aacefe32e2c8b51ceef9babd46afb93a18", size = 181022, upload-time = "2025-11-19T20:39:10.487Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b1/0d97ba134b5aa71b5ed638fac193a7ec4d987e091e2f4e4162ebdaacbda1/ndindex-1.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa60637dfae1ee3fc057e420a52cc4ace38cf2c0d1a0451af2a3cba84d281842", size = 181289, upload-time = "2025-11-19T20:39:11.793Z" }, + { url = "https://files.pythonhosted.org/packages/e2/d7/1df02df24880ce3f3c8137b6f3ca5a901a58d9079dcfd8c818419277ff87/ndindex-1.10.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0ebdba2fade3f6916fe21fd49e2a0935af4f58c56100a60f3f2eb26e20baee7", size = 632517, upload-time = "2025-11-19T20:39:13.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/96/b509c2b14e9b10710fe6ab6ba8bda1ee6ce36ab16397ff2f5bbb33bbbba3/ndindex-1.10.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:346a4bf09f5771548665c8206e81daadb6b9925d409746e709894bdd98adc701", size = 616179, upload-time = "2025-11-19T20:39:14.757Z" }, + { url = "https://files.pythonhosted.org/packages/38/e3/f89d60cf351c33a484bf1a4546a5dee6f4e7a6a973613ffa12bd316b14ad/ndindex-1.10.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:23d35696f802548143b5cc199bf2f171efb0061aa7934959251dd3bae56d038c", size = 1588373, upload-time = "2025-11-19T20:39:16.62Z" }, + { url = "https://files.pythonhosted.org/packages/ee/19/002fc1e6a4abeef8d92e9aa2e43aea4d462f6b170090f7752ea8887f4897/ndindex-1.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a91e1a0398120233d5c3b23ccb2d4b78e970d66136f1a7221fa9a53873c3d5c5", size = 1636436, upload-time = "2025-11-19T20:39:18.266Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8f/28b1ad78c787ac8fafd6e26419a80366617784b1779e3857fa687492f6bc/ndindex-1.10.1-cp313-cp313t-win32.whl", hash = "sha256:78bfe25941d2dac406391ddd9baf0b0fce163807b98ecc2c47a3030ee8466319", size = 158780, upload-time = "2025-11-19T20:39:20.454Z" }, + { url = "https://files.pythonhosted.org/packages/d0/56/b81060607a19865bb8be8d705b1b3e8aefb8747c0fbd383e38b4cae4bd71/ndindex-1.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:08bfdc1f7a0b408d15b3ce61d141ebbebdb47a25341967e425e104c5bd512a5c", size = 167485, upload-time = "2025-11-19T20:39:21.733Z" }, + { url = "https://files.pythonhosted.org/packages/da/9b/aac1131e9f3a5635ba7b0312c3bfa610511ab4108f85c0d914a32887aa00/ndindex-1.10.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9b5297f207ebc068c7cdf9e3cd7b95aa5c9ec04295d0a7e56b529f66787d4685", size = 176478, upload-time = "2025-11-19T20:39:23.747Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/05/a0d8ca0432c84550bc17af6d6479a803936895b8b8403a1216c5a55475fb/ndindex-1.10.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c5e9762452b163e33cfb6e821f86e45ba0b53bdfcd23ab5d57b48a8f566898cb", size = 175480, upload-time = "2025-11-19T20:39:25.365Z" }, + { url = "https://files.pythonhosted.org/packages/09/4a/028ab78a9f29fd2a7e86a90337cde4658eaa77b425c63045d83a1d2e4f26/ndindex-1.10.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf80241b40adffdc3276b2c9fb63a96c6c98b4a9d941892738de8add65083962", size = 528125, upload-time = "2025-11-19T20:39:26.798Z" }, + { url = "https://files.pythonhosted.org/packages/00/a9/bd823b345fb06c83ade6ef1c1933521d4357cd04490e684d4fa30126926c/ndindex-1.10.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf5855881884b8467dfcf45764ccf2e4279075be14b155b89c96994bb08d2e6f", size = 527328, upload-time = "2025-11-19T20:39:28.292Z" }, + { url = "https://files.pythonhosted.org/packages/91/4f/40b9c15588cbf9dde43c4fb88a31dd1f636a913fa29649f18f8e3ebca36a/ndindex-1.10.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e81a9bd36fe054b6c9fcc53d26bc9a28cf15d1ab52a0f5b854f894116f3a54e1", size = 1497508, upload-time = "2025-11-19T20:39:30.735Z" }, + { url = "https://files.pythonhosted.org/packages/24/8f/b8048f7837d2e9dff0af507b398307fa84a2aa9ea3db71b4aa800b21da4a/ndindex-1.10.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:588e8875d836a93b3cd9af482c8074bb02288ae1aff92cf277e1f02d9ae0f992", size = 1552625, upload-time = "2025-11-19T20:39:32.404Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/0ecb53c7e690a44769f2f92a843723ccb1d0ce080d93ba1ea811304cca12/ndindex-1.10.1-cp314-cp314-win32.whl", hash = "sha256:28741daca5926adff402247cd406f453ed5bb6042e82d6855938f805190e5ce9", size = 151237, upload-time = "2025-11-19T20:39:34.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/4e/197982fa8b4e6e6b9d15c38505c41076d1c552921f09f4d35acbbbbc0b70/ndindex-1.10.1-cp314-cp314-win_amd64.whl", hash = "sha256:59a3222befc0f7cdc85fb9b90a567ae890f70a864bdeb660517e9ebcb36bf1bc", size = 158925, upload-time = "2025-11-19T20:39:37.149Z" }, + { url = "https://files.pythonhosted.org/packages/24/ad/116b6154046a69fc04e2d4490905801d3839a3f21290c0b4d49b1044e251/ndindex-1.10.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:967b87b88dadb62555ec1039695c347254eccb8ca3d124c0e5dbe084c525fa93", size = 181724, upload-time = "2025-11-19T20:39:38.635Z" }, + { url = "https://files.pythonhosted.org/packages/c4/00/3ce4351366c890bcc87a5e9f1f90102547962eef356ac7c799bfdd0dddce/ndindex-1.10.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c67dde588c0fb89d872931a4ed5f9b4d21c1c70a3d92fdf0812a1de154239816", size = 181653, upload-time = "2025-11-19T20:39:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/4d/05/a6fda696a2f02a3f8dd2ee9d816cb2edff6423bf0110a4876cc3b1259732/ndindex-1.10.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c65ca639a7abf72d79f22424f4abd18dece1f289a2b7b028a0ca455edd2168d4", size = 630898, upload-time = "2025-11-19T20:39:41.495Z" }, + { url = "https://files.pythonhosted.org/packages/73/78/eb2e5d067d4c054451e33eaece74cbdcb58236dc60516e73d783dae34c7e/ndindex-1.10.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c3634a8df43e7928122225a3d64d850c8957bd1edf2e403907deacb478af27b", size = 614419, upload-time = "2025-11-19T20:39:43.254Z" }, + { url = "https://files.pythonhosted.org/packages/78/51/261bfb49eb7920c2a7314cacba5821930a529911dce48c7c6cd786096a5a/ndindex-1.10.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9d581f931e61f182478f18bdf5edd3955899df5da4892ed0d5de547a4cfd5b6f", size = 1587517, upload-time = "2025-11-19T20:39:44.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/37/084a332ecdf8b0049151bd78001a7baf2daf7f500d043beb8a1f95d0f4e3/ndindex-1.10.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:78ce45106ebf67aeba99714818c721d8fd5fb9534daebd2565665a2d64b50fc9", size = 1635372, upload-time = "2025-11-19T20:39:47.231Z" }, + { url = "https://files.pythonhosted.org/packages/28/f4/716580fbb03018ab1daa86ed12c1925c67e79689db5fee82393e840758a2/ndindex-1.10.1-cp314-cp314t-win32.whl", hash = "sha256:fe5341e24dc992b09c258456ac90a09a6d25efdc2cb86dcc91d32c8891e1df9a", size = 162186, upload-time = "2025-11-19T20:39:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/4d/20/28f669c09a470e7f523b0cc10b94336664d9648594015e3f2a1ec29047b1/ndindex-1.10.1-cp314-cp314t-win_amd64.whl", hash = "sha256:37f87f0e7690ae0324334740e0661d6297f2e62c9bf925127d249fb7eddd0ad8", size = 171077, upload-time = "2025-11-19T20:39:50.108Z" }, +] + +[[package]] +name = "networkx" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263, upload-time = "2024-10-21T12:39:36.247Z" }, +] + +[[package]] +name = "networkx" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' 
and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, +] + +[[package]] +name = "numba" +version = "0.64.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "llvmlite" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/c9/a0fb41787d01d621046138da30f6c2100d80857bf34b3390dd68040f27a3/numba-0.64.0.tar.gz", hash = "sha256:95e7300af648baa3308127b1955b52ce6d11889d16e8cfe637b4f85d2fca52b1", size = 2765679, upload-time = "2026-02-18T18:41:20.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5e/604fed821cd7e3426bb3bc99a7ed6ac0bcb489f4cd93052256437d082f95/numba-0.64.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc09b79440952e3098eeebea4bf6e8d2355fb7f12734fcd9fc5039f0dca90727", size = 2683250, upload-time = "2026-02-18T18:40:45.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/9f/9275a723d050b5f1a9b1c7fb7dbfce324fef301a8e50c5f88338569db06c/numba-0.64.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1afe3a80b8c2f376b211fb7a49e536ef9eafc92436afc95a2f41ea5392f8cc65", size = 3742168, upload-time = "2026-02-18T18:40:48.066Z" }, + { url = "https://files.pythonhosted.org/packages/e2/d1/97ca7dddaa36b16f4c46319bdb6b4913ba15d0245317d0d8ccde7b2d7d92/numba-0.64.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23804194b93b8cd416c6444b5fbc4956082a45fed2d25436ef49c594666e7f7e", size = 3449103, upload-time = "2026-02-18T18:40:49.905Z" }, + { url = "https://files.pythonhosted.org/packages/52/0a/b9e137ad78415373e3353564500e8bf29dbce3c0d73633bb384d4e5d7537/numba-0.64.0-cp310-cp310-win_amd64.whl", hash = "sha256:e2a9fe998bb2cf848960b34db02c2c3b5e02cf82c07a26d9eef3494069740278", size = 2749950, upload-time = "2026-02-18T18:40:51.536Z" }, + { url = "https://files.pythonhosted.org/packages/89/a3/1a4286a1c16136c8896d8e2090d950e79b3ec626d3a8dc9620f6234d5a38/numba-0.64.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:766156ee4b8afeeb2b2e23c81307c5d19031f18d5ce76ae2c5fb1429e72fa92b", size = 2682938, upload-time = "2026-02-18T18:40:52.897Z" }, + { url = "https://files.pythonhosted.org/packages/19/16/aa6e3ba3cd45435c117d1101b278b646444ed05b7c712af631b91353f573/numba-0.64.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d17071b4ffc9d39b75d8e6c101a36f0c81b646123859898c9799cb31807c8f78", size = 3747376, upload-time = "2026-02-18T18:40:54.925Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f1/dd2f25e18d75fdf897f730b78c5a7b00cc4450f2405564dbebfaf359f21f/numba-0.64.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ead5630434133bac87fa67526eacb264535e4e9a2d5ec780e0b4fc381a7d275", size = 3453292, upload-time = "2026-02-18T18:40:56.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/29/e09d5630578a50a2b3fa154990b6b839cf95327aa0709e2d50d0b6816cd1/numba-0.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2b1fd93e7aaac07d6fbaed059c00679f591f2423885c206d8c1b55d65ca3f2d", size = 2749824, upload-time = "2026-02-18T18:40:58.392Z" }, + { url = "https://files.pythonhosted.org/packages/70/a6/9fc52cb4f0d5e6d8b5f4d81615bc01012e3cf24e1052a60f17a68deb8092/numba-0.64.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69440a8e8bc1a81028446f06b363e28635aa67bd51b1e498023f03b812e0ce68", size = 2683418, upload-time = "2026-02-18T18:40:59.886Z" }, + { url = "https://files.pythonhosted.org/packages/9b/89/1a74ea99b180b7a5587b0301ed1b183a2937c4b4b67f7994689b5d36fc34/numba-0.64.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13721011f693ba558b8dd4e4db7f2640462bba1b855bdc804be45bbeb55031a", size = 3804087, upload-time = "2026-02-18T18:41:01.699Z" }, + { url = "https://files.pythonhosted.org/packages/91/e1/583c647404b15f807410510fec1eb9b80cb8474165940b7749f026f21cbc/numba-0.64.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0b180b1133f2b5d8b3f09d96b6d7a9e51a7da5dda3c09e998b5bcfac85d222c", size = 3504309, upload-time = "2026-02-18T18:41:03.252Z" }, + { url = "https://files.pythonhosted.org/packages/85/23/0fce5789b8a5035e7ace21216a468143f3144e02013252116616c58339aa/numba-0.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:e63dc94023b47894849b8b106db28ccb98b49d5498b98878fac1a38f83ac007a", size = 2752740, upload-time = "2026-02-18T18:41:05.097Z" }, + { url = "https://files.pythonhosted.org/packages/52/80/2734de90f9300a6e2503b35ee50d9599926b90cbb7ac54f9e40074cd07f1/numba-0.64.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:3bab2c872194dcd985f1153b70782ec0fbbe348fffef340264eacd3a76d59fd6", size = 2683392, upload-time = "2026-02-18T18:41:06.563Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/e8/14b5853ebefd5b37723ef365c5318a30ce0702d39057eaa8d7d76392859d/numba-0.64.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:703a246c60832cad231d2e73c1182f25bf3cc8b699759ec8fe58a2dbc689a70c", size = 3812245, upload-time = "2026-02-18T18:41:07.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/f60dc6c96d19b7185144265a5fbf01c14993d37ff4cd324b09d0212aa7ce/numba-0.64.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e2e49a7900ee971d32af7609adc0cfe6aa7477c6f6cccdf6d8138538cf7756f", size = 3511328, upload-time = "2026-02-18T18:41:09.504Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2a/fe7003ea7e7237ee7014f8eaeeb7b0d228a2db22572ca85bab2648cf52cb/numba-0.64.0-cp313-cp313-win_amd64.whl", hash = "sha256:396f43c3f77e78d7ec84cdfc6b04969c78f8f169351b3c4db814b97e7acf4245", size = 2752668, upload-time = "2026-02-18T18:41:11.455Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8a/77d26afe0988c592dd97cb8d4e80bfb3dfc7dbdacfca7d74a7c5c81dd8c2/numba-0.64.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f565d55eaeff382cbc86c63c8c610347453af3d1e7afb2b6569aac1c9b5c93ce", size = 2683590, upload-time = "2026-02-18T18:41:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/8e/4b/600b8b7cdbc7f9cebee9ea3d13bb70052a79baf28944024ffcb59f0712e3/numba-0.64.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9b55169b18892c783f85e9ad9e6f5297a6d12967e4414e6b71361086025ff0bb", size = 3781163, upload-time = "2026-02-18T18:41:15.377Z" }, + { url = "https://files.pythonhosted.org/packages/ff/73/53f2d32bfa45b7175e9944f6b816d8c32840178c3eee9325033db5bf838e/numba-0.64.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:196bcafa02c9dd1707e068434f6d5cedde0feb787e3432f7f1f0e993cc336c4c", size = 3481172, upload-time = "2026-02-18T18:41:17.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/00/aebd2f7f1e11e38814bb96e95a27580817a7b340608d3ac085fdbab83174/numba-0.64.0-cp314-cp314-win_amd64.whl", hash = "sha256:213e9acbe7f1c05090592e79020315c1749dd52517b90e94c517dca3f014d4a1", size = 2754700, upload-time = "2026-02-18T18:41:19.277Z" }, +] + +[[package]] +name = "numexpr" +version = "2.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/91/ccd504cbe5b88d06987c77f42ba37a13ef05065fdab4afe6dcfeb2961faf/numexpr-2.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d0fab3fd06a04f6b86102552b26aa5d85e20ac7d8296c15764c726eeabae6cc8", size = 163200, upload-time = "2025-10-13T16:16:25.47Z" }, + { url = "https://files.pythonhosted.org/packages/f3/89/6b07977baf2af75fb6692f9e7a1fb612a15f600fc921f3f565366de01f4a/numexpr-2.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:64ae5dfd62d74a3ef82fe0b37f80527247f3626171ad82025900f46ffca4b39a", size = 152085, upload-time = "2025-10-13T16:16:29.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/c2/c5775541256c4bf16b4d88fa1cffa74a0126703e513093c8774d911b0bb7/numexpr-2.14.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:955c92b064f9074d2970cf3138f5e3b965be673b82024962ed526f39bc25a920", size = 449435, upload-time = "2025-10-13T16:13:16.257Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/d4/d1a410901c620f7a6a3c5c2b1fc9dab22170be05a89d2c02ae699e27bd3f/numexpr-2.14.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75440c54fc01e130396650fdf307aa9d41a67dc06ddbfb288971b591c13a395b", size = 440197, upload-time = "2025-10-13T16:14:44.109Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c8/fa85f0cc5c39db587ba4927b862a92477c017ee8476e415e8120a100457b/numexpr-2.14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dde9fa47ed319e1e1728940a539df3cb78326b7754bc7c6ab3152afc91808f9b", size = 1414125, upload-time = "2025-10-13T16:13:19.882Z" }, + { url = "https://files.pythonhosted.org/packages/08/72/a58ddc05e0eabb3fa8d3fcd319f3d97870e6b41520832acfd04a6734c2c0/numexpr-2.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76db0bc6267e591ab9c4df405ffb533598e4c88239db7338d11ae9e4b368a85a", size = 1463041, upload-time = "2025-10-13T16:14:47.502Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c5/bdd1862302bb71a78dba941eaf7060e1274f1cf6af2d1b0f1880bfcb289b/numexpr-2.14.1-cp310-cp310-win32.whl", hash = "sha256:0d1dcbdc4d0374c0d523cee2f94f06b001623cbc1fd163612841017a3495427c", size = 166833, upload-time = "2025-10-13T16:17:03.543Z" }, + { url = "https://files.pythonhosted.org/packages/18/af/26773a246716922794388786529e5640676399efabb0ee217ce034df9d27/numexpr-2.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:823cd82c8e7937981339f634e7a9c6a92cb2d0b9d0a5cf627a5e394fffc05377", size = 160068, upload-time = "2025-10-13T16:17:05.191Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" }, + { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" }, + { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" }, + { url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" }, + { url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" }, + { url = "https://files.pythonhosted.org/packages/73/b4/9f6d637fd79df42be1be29ee7ba1f050fab63b7182cb922a0e08adc12320/numexpr-2.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09078ba73cffe94745abfbcc2d81ab8b4b4e9d7bfbbde6cac2ee5dbf38eee222", size = 162794, upload-time = "2025-10-13T16:16:38.291Z" }, + { url = "https://files.pythonhosted.org/packages/35/ae/d58558d8043de0c49f385ea2fa789e3cfe4d436c96be80200c5292f45f15/numexpr-2.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dce0b5a0447baa7b44bc218ec2d7dcd175b8eee6083605293349c0c1d9b82fb6", size = 152203, upload-time = "2025-10-13T16:16:39.907Z" }, + { url = "https://files.pythonhosted.org/packages/13/65/72b065f9c75baf8f474fd5d2b768350935989d4917db1c6c75b866d4067c/numexpr-2.14.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06855053de7a3a8425429bd996e8ae3c50b57637ad3e757e0fa0602a7874be30", size = 455860, upload-time = "2025-10-13T16:13:35.811Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/f9/c9457652dfe28e2eb898372da2fe786c6db81af9540c0f853ee04a0699cc/numexpr-2.14.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f9366d23a2e991fd5a8b5e61a17558f028ba86158a4552f8f239b005cdf83c", size = 446574, upload-time = "2025-10-13T16:15:17.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/99/8d3879c4d67d3db5560cf2de65ce1778b80b75f6fa415eb5c3e7bd37ba27/numexpr-2.14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c5f1b1605695778896534dfc6e130d54a65cd52be7ed2cd0cfee3981fd676bf5", size = 1417306, upload-time = "2025-10-13T16:13:42.813Z" }, + { url = "https://files.pythonhosted.org/packages/ea/05/6bddac9f18598ba94281e27a6943093f7d0976544b0cb5d92272c64719bd/numexpr-2.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a4ba71db47ea99c659d88ee6233fa77b6dc83392f1d324e0c90ddf617ae3f421", size = 1466145, upload-time = "2025-10-13T16:15:27.464Z" }, + { url = "https://files.pythonhosted.org/packages/24/5d/cbeb67aca0c5a76ead13df7e8bd8dd5e0d49145f90da697ba1d9f07005b0/numexpr-2.14.1-cp313-cp313-win32.whl", hash = "sha256:638dce8320f4a1483d5ca4fda69f60a70ed7e66be6e68bc23fb9f1a6b78a9e3b", size = 166996, upload-time = "2025-10-13T16:17:13.803Z" }, + { url = "https://files.pythonhosted.org/packages/cc/23/9281bceaeb282cead95f0aa5f7f222ffc895670ea689cc1398355f6e3001/numexpr-2.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fdcd4735121658a313f878fd31136d1bfc6a5b913219e7274e9fca9f8dac3bb", size = 160189, upload-time = "2025-10-13T16:17:15.417Z" }, + { url = "https://files.pythonhosted.org/packages/f3/76/7aac965fd93a56803cbe502aee2adcad667253ae34b0badf6c5af7908b6c/numexpr-2.14.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:557887ad7f5d3c2a40fd7310e50597045a68e66b20a77b3f44d7bc7608523b4b", size = 163524, upload-time = "2025-10-13T16:16:42.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/65/79d592d5e63fbfab3b59a60c386853d9186a44a3fa3c87ba26bdc25b6195/numexpr-2.14.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:af111c8fe6fc55d15e4c7cab11920fc50740d913636d486545b080192cd0ad73", size = 152919, upload-time = "2025-10-13T16:16:44.229Z" }, + { url = "https://files.pythonhosted.org/packages/84/78/3c8335f713d4aeb99fa758d7c62f0be1482d4947ce5b508e2052bb7aeee9/numexpr-2.14.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33265294376e7e2ae4d264d75b798a915d2acf37b9dd2b9405e8b04f84d05cfc", size = 465972, upload-time = "2025-10-13T16:13:45.061Z" }, + { url = "https://files.pythonhosted.org/packages/35/81/9ee5f69b811e8f18746c12d6f71848617684edd3161927f95eee7a305631/numexpr-2.14.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83647d846d3eeeb9a9255311236135286728b398d0d41d35dedb532dca807fe9", size = 456953, upload-time = "2025-10-13T16:15:31.186Z" }, + { url = "https://files.pythonhosted.org/packages/6d/39/9b8bc6e294d85cbb54a634e47b833e9f3276a8bdf7ce92aa808718a0212d/numexpr-2.14.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6e575fd3ad41ddf3355d0c7ef6bd0168619dc1779a98fe46693cad5e95d25e6e", size = 1426199, upload-time = "2025-10-13T16:13:48.231Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ce/0d4fcd31ab49319740d934fba1734d7dad13aa485532ca754e555ca16c8b/numexpr-2.14.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:67ea4771029ce818573b1998f5ca416bd255156feea017841b86176a938f7d19", size = 1474214, upload-time = "2025-10-13T16:15:38.893Z" }, + { url = "https://files.pythonhosted.org/packages/b7/47/b2a93cbdb3ba4e009728ad1b9ef1550e2655ea2c86958ebaf03b9615f275/numexpr-2.14.1-cp313-cp313t-win32.whl", hash = "sha256:15015d47d3d1487072d58c0e7682ef2eb608321e14099c39d52e2dd689483611", size = 167676, upload-time = "2025-10-13T16:17:17.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/99/ee3accc589ed032eea68e12172515ed96a5568534c213ad109e1f4411df1/numexpr-2.14.1-cp313-cp313t-win_amd64.whl", hash = "sha256:94c711f6d8f17dfb4606842b403699603aa591ab9f6bf23038b488ea9cfb0f09", size = 161096, upload-time = "2025-10-13T16:17:19.174Z" }, + { url = "https://files.pythonhosted.org/packages/ac/36/9db78dfbfdfa1f8bf0872993f1a334cdd8fca5a5b6567e47dcb128bcb7c2/numexpr-2.14.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ede79f7ff06629f599081de644546ce7324f1581c09b0ac174da88a470d39c21", size = 162848, upload-time = "2025-10-13T16:16:46.216Z" }, + { url = "https://files.pythonhosted.org/packages/13/c1/a5c78ae637402c5550e2e0ba175275d2515d432ec28af0cdc23c9b476e65/numexpr-2.14.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2eac7a5a2f70b3768c67056445d1ceb4ecd9b853c8eda9563823b551aeaa5082", size = 152270, upload-time = "2025-10-13T16:16:47.92Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ed/aabd8678077848dd9a751c5558c2057839f5a09e2a176d8dfcd0850ee00e/numexpr-2.14.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5aedf38d4c0c19d3cecfe0334c3f4099fb496f54c146223d30fa930084bc8574", size = 455918, upload-time = "2025-10-13T16:13:50.338Z" }, + { url = "https://files.pythonhosted.org/packages/88/e1/3db65117f02cdefb0e5e4c440daf1c30beb45051b7f47aded25b7f4f2f34/numexpr-2.14.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439ec4d57b853792ebe5456e3160312281c3a7071ecac5532ded3278ede614de", size = 446512, upload-time = "2025-10-13T16:15:42.313Z" }, + { url = "https://files.pythonhosted.org/packages/9a/fb/7ceb9ee55b5f67e4a3e4d73d5af4c7e37e3c9f37f54bee90361b64b17e3f/numexpr-2.14.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e23b87f744e04e302d82ac5e2189ae20a533566aec76a46885376e20b0645bf8", size = 1417845, upload-time = "2025-10-13T16:13:53.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/2d/9b5764d0eafbbb2889288f80de773791358acf6fad1a55767538d8b79599/numexpr-2.14.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:44f84e0e5af219dbb62a081606156420815890e041b87252fbcea5df55214c4c", size = 1466211, upload-time = "2025-10-13T16:15:48.985Z" }, + { url = "https://files.pythonhosted.org/packages/5d/21/204db708eccd71aa8bc55bcad55bc0fc6c5a4e01ad78e14ee5714a749386/numexpr-2.14.1-cp314-cp314-win32.whl", hash = "sha256:1f1a5e817c534539351aa75d26088e9e1e0ef1b3a6ab484047618a652ccc4fc3", size = 168835, upload-time = "2025-10-13T16:17:20.82Z" }, + { url = "https://files.pythonhosted.org/packages/4f/3e/d83e9401a1c3449a124f7d4b3fb44084798e0d30f7c11e60712d9b94cf11/numexpr-2.14.1-cp314-cp314-win_amd64.whl", hash = "sha256:587c41509bc373dfb1fe6086ba55a73147297247bedb6d588cda69169fc412f2", size = 162608, upload-time = "2025-10-13T16:17:22.228Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d6/ec947806bb57836d6379a8c8a253c2aeaa602b12fef2336bfd2462bb4ed5/numexpr-2.14.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ec368819502b64f190c3f71be14a304780b5935c42aae5bf22c27cc2cbba70b5", size = 163525, upload-time = "2025-10-13T16:16:50.133Z" }, + { url = "https://files.pythonhosted.org/packages/0d/77/048f30dcf661a3d52963a88c29b52b6d5ce996d38e9313a56a922451c1e0/numexpr-2.14.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7e87f6d203ac57239de32261c941e9748f9309cbc0da6295eabd0c438b920d3a", size = 152917, upload-time = "2025-10-13T16:16:52.055Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/956a13e628d722d649fbf2fded615134a308c082e122a48bad0e90a99ce9/numexpr-2.14.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dd72d8c2a165fe45ea7650b16eb8cc1792a94a722022006bb97c86fe51fd2091", size = 466242, upload-time = "2025-10-13T16:13:55.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/dd/abe848678d82486940892f2cacf39e82eec790e8930d4d713d3f9191063b/numexpr-2.14.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70d80fcb418a54ca208e9a38e58ddc425c07f66485176b261d9a67c7f2864f73", size = 457149, upload-time = "2025-10-13T16:15:52.036Z" }, + { url = "https://files.pythonhosted.org/packages/fd/bb/797b583b5fb9da5700a5708ca6eb4f889c94d81abb28de4d642c0f4b3258/numexpr-2.14.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:edea2f20c2040df8b54ee8ca8ebda63de9545b2112872466118e9df4d0ae99f3", size = 1426493, upload-time = "2025-10-13T16:13:59.244Z" }, + { url = "https://files.pythonhosted.org/packages/77/c4/0519ab028fdc35e3e7ee700def7f2b4631b175cd9e1202bd7966c1695c33/numexpr-2.14.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:790447be6879a6c51b9545f79612d24c9ea0a41d537a84e15e6a8ddef0b6268e", size = 1474413, upload-time = "2025-10-13T16:15:59.211Z" }, + { url = "https://files.pythonhosted.org/packages/d4/4a/33044878c8f4a75213cfe9c11d4c02058bb710a7a063fe14f362e8de1077/numexpr-2.14.1-cp314-cp314t-win32.whl", hash = "sha256:538961096c2300ea44240209181e31fae82759d26b51713b589332b9f2a4117e", size = 169502, upload-time = "2025-10-13T16:17:23.829Z" }, + { url = "https://files.pythonhosted.org/packages/41/a2/5a1a2c72528b429337f49911b18c302ecd36eeab00f409147e1aa4ae4519/numexpr-2.14.1-cp314-cp314t-win_amd64.whl", hash = "sha256:a40b350cd45b4446076fa11843fa32bbe07024747aeddf6d467290bf9011b392", size = 163589, upload-time = "2025-10-13T16:17:25.696Z" }, +] + +[[package]] +name = "numpy" +version = "2.2.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = 
"2025-05-17T22:38:04.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", size = 21165245, upload-time = "2025-05-17T21:27:58.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", size = 14360048, upload-time = "2025-05-17T21:28:21.406Z" }, + { url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", size = 5340542, upload-time = "2025-05-17T21:28:30.931Z" }, + { url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", size = 6878301, upload-time = "2025-05-17T21:28:41.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", size = 6527149, upload-time = "2025-05-17T21:30:29.788Z" }, + { url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", size = 12904620, upload-time = "2025-05-17T21:30:48.994Z" }, + { url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963, upload-time = "2025-05-17T21:31:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743, upload-time = "2025-05-17T21:31:41.087Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", 
hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616, upload-time = "2025-05-17T21:31:50.072Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579, upload-time = "2025-05-17T21:32:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" }, + { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" }, + { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866, 
upload-time = "2025-05-17T21:33:50.273Z" }, + { url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455, upload-time = "2025-05-17T21:34:09.135Z" }, + { url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348, upload-time = "2025-05-17T21:34:39.648Z" }, + { url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362, upload-time = "2025-05-17T21:35:01.241Z" }, + { url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103, upload-time = "2025-05-17T21:35:10.622Z" }, + { url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382, upload-time = "2025-05-17T21:35:21.414Z" }, + { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" }, + { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" }, + { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783, upload-time = "2025-05-17T21:36:56.883Z" }, + { url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506, upload-time = "2025-05-17T21:37:07.368Z" }, + { url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190, upload-time = "2025-05-17T21:37:26.213Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", size = 20867828, upload-time = "2025-05-17T21:37:56.699Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", size = 14143006, upload-time = "2025-05-17T21:38:18.291Z" }, + { url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", size = 5076765, upload-time = "2025-05-17T21:38:27.319Z" }, + { url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", size = 6617736, upload-time = "2025-05-17T21:38:38.141Z" }, + { url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719, upload-time = "2025-05-17T21:38:58.433Z" }, + { url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072, upload-time = "2025-05-17T21:39:22.638Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213, upload-time = "2025-05-17T21:39:45.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632, upload-time = "2025-05-17T21:40:13.331Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", size = 6244532, upload-time = "2025-05-17T21:43:46.099Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", size = 12610885, upload-time = "2025-05-17T21:44:05.145Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", size = 20963467, upload-time = "2025-05-17T21:40:44Z" }, + { url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", size = 14225144, upload-time = "2025-05-17T21:41:05.695Z" }, + { url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", size = 5200217, upload-time = "2025-05-17T21:41:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash 
= "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8", size = 6712014, upload-time = "2025-05-17T21:41:27.321Z" }, + { url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935, upload-time = "2025-05-17T21:41:49.738Z" }, + { url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122, upload-time = "2025-05-17T21:42:14.046Z" }, + { url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143, upload-time = "2025-05-17T21:42:37.464Z" }, + { url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260, upload-time = "2025-05-17T21:43:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", size = 6377225, upload-time = "2025-05-17T21:43:16.254Z" }, + { url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", size = 12771374, 
upload-time = "2025-05-17T21:43:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", size = 21040391, upload-time = "2025-05-17T21:44:35.948Z" }, + { url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", size = 6786754, upload-time = "2025-05-17T21:44:47.446Z" }, + { url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" }, + { url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", size = 12812666, upload-time = "2025-05-17T21:45:31.426Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 
'win32'", +] +sdist = { url = "https://files.pythonhosted.org/packages/10/8b/c265f4823726ab832de836cdd184d0986dcf94480f81e8739692a7ac7af2/numpy-2.4.3.tar.gz", hash = "sha256:483a201202b73495f00dbc83796c6ae63137a9bdade074f7648b3e32613412dd", size = 20727743, upload-time = "2026-03-09T07:58:53.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/51/5093a2df15c4dc19da3f79d1021e891f5dcf1d9d1db6ba38891d5590f3fe/numpy-2.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:33b3bf58ee84b172c067f56aeadc7ee9ab6de69c5e800ab5b10295d54c581adb", size = 16957183, upload-time = "2026-03-09T07:55:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/b5/7c/c061f3de0630941073d2598dc271ac2f6cbcf5c83c74a5870fea07488333/numpy-2.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ba7b51e71c05aa1f9bc3641463cd82308eab40ce0d5c7e1fd4038cbf9938147", size = 14968734, upload-time = "2026-03-09T07:56:00.494Z" }, + { url = "https://files.pythonhosted.org/packages/ef/27/d26c85cbcd86b26e4f125b0668e7a7c0542d19dd7d23ee12e87b550e95b5/numpy-2.4.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1988292870c7cb9d0ebb4cc96b4d447513a9644801de54606dc7aabf2b7d920", size = 5475288, upload-time = "2026-03-09T07:56:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/2b/09/3c4abbc1dcd8010bf1a611d174c7aa689fc505585ec806111b4406f6f1b1/numpy-2.4.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:23b46bb6d8ecb68b58c09944483c135ae5f0e9b8d8858ece5e4ead783771d2a9", size = 6805253, upload-time = "2026-03-09T07:56:04.53Z" }, + { url = "https://files.pythonhosted.org/packages/21/bc/e7aa3f6817e40c3f517d407742337cbb8e6fc4b83ce0b55ab780c829243b/numpy-2.4.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a016db5c5dba78fa8fe9f5d80d6708f9c42ab087a739803c0ac83a43d686a470", size = 15969479, upload-time = "2026-03-09T07:56:06.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/51/9f5d7a41f0b51649ddf2f2320595e15e122a40610b233d51928dd6c92353/numpy-2.4.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:715de7f82e192e8cae5a507a347d97ad17598f8e026152ca97233e3666daaa71", size = 16901035, upload-time = "2026-03-09T07:56:09.405Z" }, + { url = "https://files.pythonhosted.org/packages/64/6e/b221dd847d7181bc5ee4857bfb026182ef69499f9305eb1371cbb1aea626/numpy-2.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ddb7919366ee468342b91dea2352824c25b55814a987847b6c52003a7c97f15", size = 17325657, upload-time = "2026-03-09T07:56:12.067Z" }, + { url = "https://files.pythonhosted.org/packages/eb/b8/8f3fd2da596e1063964b758b5e3c970aed1949a05200d7e3d46a9d46d643/numpy-2.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a315e5234d88067f2d97e1f2ef670a7569df445d55400f1e33d117418d008d52", size = 18635512, upload-time = "2026-03-09T07:56:14.629Z" }, + { url = "https://files.pythonhosted.org/packages/5c/24/2993b775c37e39d2f8ab4125b44337ab0b2ba106c100980b7c274a22bee7/numpy-2.4.3-cp311-cp311-win32.whl", hash = "sha256:2b3f8d2c4589b1a2028d2a770b0fc4d1f332fb5e01521f4de3199a896d158ddd", size = 6238100, upload-time = "2026-03-09T07:56:17.243Z" }, + { url = "https://files.pythonhosted.org/packages/76/1d/edccf27adedb754db7c4511d5eac8b83f004ae948fe2d3509e8b78097d4c/numpy-2.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:77e76d932c49a75617c6d13464e41203cd410956614d0a0e999b25e9e8d27eec", size = 12609816, upload-time = "2026-03-09T07:56:19.089Z" }, + { url = "https://files.pythonhosted.org/packages/92/82/190b99153480076c8dce85f4cfe7d53ea84444145ffa54cb58dcd460d66b/numpy-2.4.3-cp311-cp311-win_arm64.whl", hash = "sha256:eb610595dd91560905c132c709412b512135a60f1851ccbd2c959e136431ff67", size = 10485757, upload-time = "2026-03-09T07:56:21.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/ed/6388632536f9788cea23a3a1b629f25b43eaacd7d7377e5d6bc7b9deb69b/numpy-2.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:61b0cbabbb6126c8df63b9a3a0c4b1f44ebca5e12ff6997b80fcf267fb3150ef", size = 16669628, upload-time = "2026-03-09T07:56:24.252Z" }, + { url = "https://files.pythonhosted.org/packages/74/1b/ee2abfc68e1ce728b2958b6ba831d65c62e1b13ce3017c13943f8f9b5b2e/numpy-2.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7395e69ff32526710748f92cd8c9849b361830968ea3e24a676f272653e8983e", size = 14696872, upload-time = "2026-03-09T07:56:26.991Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d1/780400e915ff5638166f11ca9dc2c5815189f3d7cf6f8759a1685e586413/numpy-2.4.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:abdce0f71dcb4a00e4e77f3faf05e4616ceccfe72ccaa07f47ee79cda3b7b0f4", size = 5203489, upload-time = "2026-03-09T07:56:29.414Z" }, + { url = "https://files.pythonhosted.org/packages/0b/bb/baffa907e9da4cc34a6e556d6d90e032f6d7a75ea47968ea92b4858826c4/numpy-2.4.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:48da3a4ee1336454b07497ff7ec83903efa5505792c4e6d9bf83d99dc07a1e18", size = 6550814, upload-time = "2026-03-09T07:56:32.225Z" }, + { url = "https://files.pythonhosted.org/packages/7b/12/8c9f0c6c95f76aeb20fc4a699c33e9f827fa0d0f857747c73bb7b17af945/numpy-2.4.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:32e3bef222ad6b052280311d1d60db8e259e4947052c3ae7dd6817451fc8a4c5", size = 15666601, upload-time = "2026-03-09T07:56:34.461Z" }, + { url = "https://files.pythonhosted.org/packages/bd/79/cc665495e4d57d0aa6fbcc0aa57aa82671dfc78fbf95fe733ed86d98f52a/numpy-2.4.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7dd01a46700b1967487141a66ac1a3cf0dd8ebf1f08db37d46389401512ca97", size = 16621358, upload-time = "2026-03-09T07:56:36.852Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/40/b4ecb7224af1065c3539f5ecfff879d090de09608ad1008f02c05c770cb3/numpy-2.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:76f0f283506c28b12bba319c0fab98217e9f9b54e6160e9c79e9f7348ba32e9c", size = 17016135, upload-time = "2026-03-09T07:56:39.337Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b1/6a88e888052eed951afed7a142dcdf3b149a030ca59b4c71eef085858e43/numpy-2.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737f630a337364665aba3b5a77e56a68cc42d350edd010c345d65a3efa3addcc", size = 18345816, upload-time = "2026-03-09T07:56:42.31Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8f/103a60c5f8c3d7fc678c19cd7b2476110da689ccb80bc18050efbaeae183/numpy-2.4.3-cp312-cp312-win32.whl", hash = "sha256:26952e18d82a1dbbc2f008d402021baa8d6fc8e84347a2072a25e08b46d698b9", size = 5960132, upload-time = "2026-03-09T07:56:44.851Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f5ee1bf6ed888494978046a809df2882aad35d414b622893322df7286879/numpy-2.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:65f3c2455188f09678355f5cae1f959a06b778bc66d535da07bf2ef20cd319d5", size = 12316144, upload-time = "2026-03-09T07:56:47.057Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/8d1cb3f7a00f2fb6394140e7e6623696e54c6318a9d9691bb4904672cf42/numpy-2.4.3-cp312-cp312-win_arm64.whl", hash = "sha256:2abad5c7fef172b3377502bde47892439bae394a71bc329f31df0fd829b41a9e", size = 10220364, upload-time = "2026-03-09T07:56:49.849Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d0/1fe47a98ce0df229238b77611340aff92d52691bcbc10583303181abf7fc/numpy-2.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b346845443716c8e542d54112966383b448f4a3ba5c66409771b8c0889485dd3", size = 16665297, upload-time = "2026-03-09T07:56:52.296Z" }, + { url = "https://files.pythonhosted.org/packages/27/d9/4e7c3f0e68dfa91f21c6fb6cf839bc829ec920688b1ce7ec722b1a6202fb/numpy-2.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2629289168f4897a3c4e23dc98d6f1731f0fc0fe52fb9db19f974041e4cc12b9", size = 14691853, upload-time = "2026-03-09T07:56:54.992Z" }, + { url = "https://files.pythonhosted.org/packages/3a/66/bd096b13a87549683812b53ab211e6d413497f84e794fb3c39191948da97/numpy-2.4.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bb2e3cf95854233799013779216c57e153c1ee67a0bf92138acca0e429aefaee", size = 5198435, upload-time = "2026-03-09T07:56:57.184Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2f/687722910b5a5601de2135c891108f51dfc873d8e43c8ed9f4ebb440b4a2/numpy-2.4.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:7f3408ff897f8ab07a07fbe2823d7aee6ff644c097cc1f90382511fe982f647f", size = 6546347, upload-time = "2026-03-09T07:56:59.531Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ec/7971c4e98d86c564750393fab8d7d83d0a9432a9d78bb8a163a6dc59967a/numpy-2.4.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:decb0eb8a53c3b009b0962378065589685d66b23467ef5dac16cbe818afde27f", size = 15664626, upload-time = "2026-03-09T07:57:01.385Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/7daecbea84ec935b7fc732e18f532073064a3816f0932a40a17f3349185f/numpy-2.4.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5f51900414fc9204a0e0da158ba2ac52b75656e7dce7e77fb9f84bfa343b4cc", size = 16608916, upload-time = "2026-03-09T07:57:04.008Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/2a2b4a817ffd7472dca4421d9f0776898b364154e30c95f42195041dc03b/numpy-2.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6bd06731541f89cdc01b261ba2c9e037f1543df7472517836b78dfb15bd6e476", size = 17015824, upload-time = "2026-03-09T07:57:06.347Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ca/627a828d44e78a418c55f82dd4caea8ea4a8ef24e5144d9e71016e52fb40/numpy-2.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22654fe6be0e5206f553a9250762c653d3698e46686eee53b399ab90da59bd92", size = 
18334581, upload-time = "2026-03-09T07:57:09.114Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c0/76f93962fc79955fcba30a429b62304332345f22d4daec1cb33653425643/numpy-2.4.3-cp313-cp313-win32.whl", hash = "sha256:d71e379452a2f670ccb689ec801b1218cd3983e253105d6e83780967e899d687", size = 5958618, upload-time = "2026-03-09T07:57:11.432Z" }, + { url = "https://files.pythonhosted.org/packages/b1/3c/88af0040119209b9b5cb59485fa48b76f372c73068dbf9254784b975ac53/numpy-2.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:0a60e17a14d640f49146cb38e3f105f571318db7826d9b6fef7e4dce758faecd", size = 12312824, upload-time = "2026-03-09T07:57:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/58/ce/3d07743aced3d173f877c3ef6a454c2174ba42b584ab0b7e6d99374f51ed/numpy-2.4.3-cp313-cp313-win_arm64.whl", hash = "sha256:c9619741e9da2059cd9c3f206110b97583c7152c1dc9f8aafd4beb450ac1c89d", size = 10221218, upload-time = "2026-03-09T07:57:16.183Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/d96b02a91d09e9d97862f4fc8bfebf5400f567d8eb1fe4b0cc4795679c15/numpy-2.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7aa4e54f6469300ebca1d9eb80acd5253cdfa36f2c03d79a35883687da430875", size = 14819570, upload-time = "2026-03-09T07:57:18.564Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ca/0b1aba3905fdfa3373d523b2b15b19029f4f3031c87f4066bd9d20ef6c6b/numpy-2.4.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d1b90d840b25874cf5cd20c219af10bac3667db3876d9a495609273ebe679070", size = 5326113, upload-time = "2026-03-09T07:57:21.052Z" }, + { url = "https://files.pythonhosted.org/packages/c0/63/406e0fd32fcaeb94180fd6a4c41e55736d676c54346b7efbce548b94a914/numpy-2.4.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a749547700de0a20a6718293396ec237bb38218049cfce788e08fcb716e8cf73", size = 6646370, upload-time = "2026-03-09T07:57:22.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/d0/10f7dc157d4b37af92720a196be6f54f889e90dcd30dce9dc657ed92c257/numpy-2.4.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f3c4a151a2e529adf49c1d54f0f57ff8f9b233ee4d44af623a81553ab86368", size = 15723499, upload-time = "2026-03-09T07:57:24.693Z" }, + { url = "https://files.pythonhosted.org/packages/66/f1/d1c2bf1161396629701bc284d958dc1efa3a5a542aab83cf11ee6eb4cba5/numpy-2.4.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22c31dc07025123aedf7f2db9e91783df13f1776dc52c6b22c620870dc0fab22", size = 16657164, upload-time = "2026-03-09T07:57:27.676Z" }, + { url = "https://files.pythonhosted.org/packages/1a/be/cca19230b740af199ac47331a21c71e7a3d0ba59661350483c1600d28c37/numpy-2.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:148d59127ac95979d6f07e4d460f934ebdd6eed641db9c0db6c73026f2b2101a", size = 17081544, upload-time = "2026-03-09T07:57:30.664Z" }, + { url = "https://files.pythonhosted.org/packages/b9/c5/9602b0cbb703a0936fb40f8a95407e8171935b15846de2f0776e08af04c7/numpy-2.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a97cbf7e905c435865c2d939af3d93f99d18eaaa3cabe4256f4304fb51604349", size = 18380290, upload-time = "2026-03-09T07:57:33.763Z" }, + { url = "https://files.pythonhosted.org/packages/ed/81/9f24708953cd30be9ee36ec4778f4b112b45165812f2ada4cc5ea1c1f254/numpy-2.4.3-cp313-cp313t-win32.whl", hash = "sha256:be3b8487d725a77acccc9924f65fd8bce9af7fac8c9820df1049424a2115af6c", size = 6082814, upload-time = "2026-03-09T07:57:36.491Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9e/52f6eaa13e1a799f0ab79066c17f7016a4a8ae0c1aefa58c82b4dab690b4/numpy-2.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1ec84fd7c8e652b0f4aaaf2e6e9cc8eaa9b1b80a537e06b2e3a2fb176eedcb26", size = 12452673, upload-time = "2026-03-09T07:57:38.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/04/b8cece6ead0b30c9fbd99bb835ad7ea0112ac5f39f069788c5558e3b1ab2/numpy-2.4.3-cp313-cp313t-win_arm64.whl", hash = "sha256:120df8c0a81ebbf5b9020c91439fccd85f5e018a927a39f624845be194a2be02", size = 10290907, upload-time = "2026-03-09T07:57:40.747Z" }, + { url = "https://files.pythonhosted.org/packages/70/ae/3936f79adebf8caf81bd7a599b90a561334a658be4dcc7b6329ebf4ee8de/numpy-2.4.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:5884ce5c7acfae1e4e1b6fde43797d10aa506074d25b531b4f54bde33c0c31d4", size = 16664563, upload-time = "2026-03-09T07:57:43.817Z" }, + { url = "https://files.pythonhosted.org/packages/9b/62/760f2b55866b496bb1fa7da2a6db076bef908110e568b02fcfc1422e2a3a/numpy-2.4.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:297837823f5bc572c5f9379b0c9f3a3365f08492cbdc33bcc3af174372ebb168", size = 14702161, upload-time = "2026-03-09T07:57:46.169Z" }, + { url = "https://files.pythonhosted.org/packages/32/af/a7a39464e2c0a21526fb4fb76e346fb172ebc92f6d1c7a07c2c139cc17b1/numpy-2.4.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:a111698b4a3f8dcbe54c64a7708f049355abd603e619013c346553c1fd4ca90b", size = 5208738, upload-time = "2026-03-09T07:57:48.506Z" }, + { url = "https://files.pythonhosted.org/packages/29/8c/2a0cf86a59558fa078d83805589c2de490f29ed4fb336c14313a161d358a/numpy-2.4.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:4bd4741a6a676770e0e97fe9ab2e51de01183df3dcbcec591d26d331a40de950", size = 6543618, upload-time = "2026-03-09T07:57:50.591Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b8/612ce010c0728b1c363fa4ea3aa4c22fe1c5da1de008486f8c2f5cb92fae/numpy-2.4.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54f29b877279d51e210e0c80709ee14ccbbad647810e8f3d375561c45ef613dd", size = 15680676, upload-time = "2026-03-09T07:57:52.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/7e/4f120ecc54ba26ddf3dc348eeb9eb063f421de65c05fc961941798feea18/numpy-2.4.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:679f2a834bae9020f81534671c56fd0cc76dd7e5182f57131478e23d0dc59e24", size = 16613492, upload-time = "2026-03-09T07:57:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/2c/86/1b6020db73be330c4b45d5c6ee4295d59cfeef0e3ea323959d053e5a6909/numpy-2.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d84f0f881cb2225c2dfd7f78a10a5645d487a496c6668d6cc39f0f114164f3d0", size = 17031789, upload-time = "2026-03-09T07:57:57.641Z" }, + { url = "https://files.pythonhosted.org/packages/07/3a/3b90463bf41ebc21d1b7e06079f03070334374208c0f9a1f05e4ae8455e7/numpy-2.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d213c7e6e8d211888cc359bab7199670a00f5b82c0978b9d1c75baf1eddbeac0", size = 18339941, upload-time = "2026-03-09T07:58:00.577Z" }, + { url = "https://files.pythonhosted.org/packages/a8/74/6d736c4cd962259fd8bae9be27363eb4883a2f9069763747347544c2a487/numpy-2.4.3-cp314-cp314-win32.whl", hash = "sha256:52077feedeff7c76ed7c9f1a0428558e50825347b7545bbb8523da2cd55c547a", size = 6007503, upload-time = "2026-03-09T07:58:03.331Z" }, + { url = "https://files.pythonhosted.org/packages/48/39/c56ef87af669364356bb011922ef0734fc49dad51964568634c72a009488/numpy-2.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:0448e7f9caefb34b4b7dd2b77f21e8906e5d6f0365ad525f9f4f530b13df2afc", size = 12444915, upload-time = "2026-03-09T07:58:06.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1f/ab8528e38d295fd349310807496fabb7cf9fe2e1f70b97bc20a483ea9d4a/numpy-2.4.3-cp314-cp314-win_arm64.whl", hash = "sha256:b44fd60341c4d9783039598efadd03617fa28d041fc37d22b62d08f2027fa0e7", size = 10494875, upload-time = "2026-03-09T07:58:08.734Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/ef/b7c35e4d5ef141b836658ab21a66d1a573e15b335b1d111d31f26c8ef80f/numpy-2.4.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0a195f4216be9305a73c0e91c9b026a35f2161237cf1c6de9b681637772ea657", size = 14822225, upload-time = "2026-03-09T07:58:11.034Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8d/7730fa9278cf6648639946cc816e7cc89f0d891602584697923375f801ed/numpy-2.4.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:cd32fbacb9fd1bf041bf8e89e4576b6f00b895f06d00914820ae06a616bdfef7", size = 5328769, upload-time = "2026-03-09T07:58:13.67Z" }, + { url = "https://files.pythonhosted.org/packages/47/01/d2a137317c958b074d338807c1b6a383406cdf8b8e53b075d804cc3d211d/numpy-2.4.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:2e03c05abaee1f672e9d67bc858f300b5ccba1c21397211e8d77d98350972093", size = 6649461, upload-time = "2026-03-09T07:58:15.912Z" }, + { url = "https://files.pythonhosted.org/packages/5c/34/812ce12bc0f00272a4b0ec0d713cd237cb390666eb6206323d1cc9cedbb2/numpy-2.4.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d1ce23cce91fcea443320a9d0ece9b9305d4368875bab09538f7a5b4131938a", size = 15725809, upload-time = "2026-03-09T07:58:17.787Z" }, + { url = "https://files.pythonhosted.org/packages/25/c0/2aed473a4823e905e765fee3dc2cbf504bd3e68ccb1150fbdabd5c39f527/numpy-2.4.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c59020932feb24ed49ffd03704fbab89f22aa9c0d4b180ff45542fe8918f5611", size = 16655242, upload-time = "2026-03-09T07:58:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/f2/c8/7e052b2fc87aa0e86de23f20e2c42bd261c624748aa8efd2c78f7bb8d8c6/numpy-2.4.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9684823a78a6cd6ad7511fc5e25b07947d1d5b5e2812c93fe99d7d4195130720", size = 17080660, upload-time = "2026-03-09T07:58:23.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/3d/0876746044db2adcb11549f214d104f2e1be00f07a67edbb4e2812094847/numpy-2.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0200b25c687033316fb39f0ff4e3e690e8957a2c3c8d22499891ec58c37a3eb5", size = 18380384, upload-time = "2026-03-09T07:58:25.839Z" }, + { url = "https://files.pythonhosted.org/packages/07/12/8160bea39da3335737b10308df4f484235fd297f556745f13092aa039d3b/numpy-2.4.3-cp314-cp314t-win32.whl", hash = "sha256:5e10da9e93247e554bb1d22f8edc51847ddd7dde52d85ce31024c1b4312bfba0", size = 6154547, upload-time = "2026-03-09T07:58:28.289Z" }, + { url = "https://files.pythonhosted.org/packages/42/f3/76534f61f80d74cc9cdf2e570d3d4eeb92c2280a27c39b0aaf471eda7b48/numpy-2.4.3-cp314-cp314t-win_amd64.whl", hash = "sha256:45f003dbdffb997a03da2d1d0cb41fbd24a87507fb41605c0420a3db5bd4667b", size = 12633645, upload-time = "2026-03-09T07:58:30.384Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b6/7c0d4334c15983cec7f92a69e8ce9b1e6f31857e5ee3a413ac424e6bd63d/numpy-2.4.3-cp314-cp314t-win_arm64.whl", hash = "sha256:4d382735cecd7bcf090172489a525cd7d4087bc331f7df9f60ddc9a296cf208e", size = 10565454, upload-time = "2026-03-09T07:58:33.031Z" }, + { url = "https://files.pythonhosted.org/packages/64/e4/4dab9fb43c83719c29241c535d9e07be73bea4bc0c6686c5816d8e1b6689/numpy-2.4.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c6b124bfcafb9e8d3ed09130dbee44848c20b3e758b6bbf006e641778927c028", size = 16834892, upload-time = "2026-03-09T07:58:35.334Z" }, + { url = "https://files.pythonhosted.org/packages/c9/29/f8b6d4af90fed3dfda84ebc0df06c9833d38880c79ce954e5b661758aa31/numpy-2.4.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:76dbb9d4e43c16cf9aa711fcd8de1e2eeb27539dcefb60a1d5e9f12fae1d1ed8", size = 14893070, upload-time = "2026-03-09T07:58:37.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/04/a19b3c91dbec0a49269407f15d5753673a09832daed40c45e8150e6fa558/numpy-2.4.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:29363fbfa6f8ee855d7569c96ce524845e3d726d6c19b29eceec7dd555dab152", size = 5399609, upload-time = "2026-03-09T07:58:39.853Z" }, + { url = "https://files.pythonhosted.org/packages/79/34/4d73603f5420eab89ea8a67097b31364bf7c30f811d4dd84b1659c7476d9/numpy-2.4.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:bc71942c789ef415a37f0d4eab90341425a00d538cd0642445d30b41023d3395", size = 6714355, upload-time = "2026-03-09T07:58:42.365Z" }, + { url = "https://files.pythonhosted.org/packages/58/ad/1100d7229bb248394939a12a8074d485b655e8ed44207d328fdd7fcebc7b/numpy-2.4.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e58765ad74dcebd3ef0208a5078fba32dc8ec3578fe84a604432950cd043d79", size = 15800434, upload-time = "2026-03-09T07:58:44.837Z" }, + { url = "https://files.pythonhosted.org/packages/0c/fd/16d710c085d28ba4feaf29ac60c936c9d662e390344f94a6beaa2ac9899b/numpy-2.4.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e236dbda4e1d319d681afcbb136c0c4a8e0f1a5c58ceec2adebb547357fe857", size = 16729409, upload-time = "2026-03-09T07:58:47.972Z" }, + { url = "https://files.pythonhosted.org/packages/57/a7/b35835e278c18b85206834b3aa3abe68e77a98769c59233d1f6300284781/numpy-2.4.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4b42639cdde6d24e732ff823a3fa5b701d8acad89c4142bc1d0bd6dc85200ba5", size = 12504685, upload-time = "2026-03-09T07:58:50.525Z" }, +] + +[[package]] +name = "odfpy" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "defusedxml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/73/8ade73f6749177003f7ce3304f524774adda96e6aaab30ea79fd8fda7934/odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec", size 
= 717045, upload-time = "2020-01-18T16:55:48.852Z" } + +[[package]] +name = "openpyxl" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "et-xmlfile" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pandas" +version = "2.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "python-dateutil", marker = "python_full_version < '3.11'" }, + { name = "pytz", marker = "python_full_version < '3.11'" }, + { name = "tzdata", marker = 
"python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c", size = 11555763, upload-time = "2025-09-29T23:16:53.287Z" }, + { url = "https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a", size = 10801217, upload-time = "2025-09-29T23:17:04.522Z" }, + { url = "https://files.pythonhosted.org/packages/1d/03/3fc4a529a7710f890a239cc496fc6d50ad4a0995657dccc1d64695adb9f4/pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1", size = 12148791, upload-time = "2025-09-29T23:17:18.444Z" }, + { url = "https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838", size = 12769373, upload-time = "2025-09-29T23:17:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/df/91/82cc5169b6b25440a7fc0ef3a694582418d875c8e3ebf796a6d6470aa578/pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250", size = 13200444, upload-time = "2025-09-29T23:17:49.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/ae/89b3283800ab58f7af2952704078555fa60c807fff764395bb57ea0b0dbd/pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4", size = 13858459, upload-time = "2025-09-29T23:18:03.722Z" }, + { url = "https://files.pythonhosted.org/packages/85/72/530900610650f54a35a19476eca5104f38555afccda1aa11a92ee14cb21d/pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826", size = 11346086, upload-time = "2025-09-29T23:18:18.505Z" }, + { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, + { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, + { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, + { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, + { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, + { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, + { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, + { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, + { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, + { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, + { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, + { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, + { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, + { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, +] + +[[package]] +name = "pandas" +version = "3.0.1" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "python-dateutil", marker = "python_full_version >= '3.11'" }, + { name = "tzdata", marker = "(python_full_version >= '3.11' and sys_platform == 'emscripten') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/07/c7087e003ceee9b9a82539b40414ec557aa795b584a1a346e89180853d79/pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea", size = 10323380, upload-time = "2026-02-17T22:18:16.133Z" }, + { url = "https://files.pythonhosted.org/packages/c1/27/90683c7122febeefe84a56f2cde86a9f05f68d53885cebcc473298dfc33e/pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796", size = 9923455, upload-time = "2026-02-17T22:18:19.13Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/f1/ed17d927f9950643bc7631aa4c99ff0cc83a37864470bc419345b656a41f/pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389", size = 10753464, upload-time = "2026-02-17T22:18:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/2e/7c/870c7e7daec2a6c7ff2ac9e33b23317230d4e4e954b35112759ea4a924a7/pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7", size = 11255234, upload-time = "2026-02-17T22:18:24.175Z" }, + { url = "https://files.pythonhosted.org/packages/5c/39/3653fe59af68606282b989c23d1a543ceba6e8099cbcc5f1d506a7bae2aa/pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf", size = 11767299, upload-time = "2026-02-17T22:18:26.824Z" }, + { url = "https://files.pythonhosted.org/packages/9b/31/1daf3c0c94a849c7a8dab8a69697b36d313b229918002ba3e409265c7888/pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447", size = 12333292, upload-time = "2026-02-17T22:18:28.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/af63f83cd6ca603a00fe8530c10a60f0879265b8be00b5930e8e78c5b30b/pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79", size = 9892176, upload-time = "2026-02-17T22:18:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/79/ab/9c776b14ac4b7b4140788eca18468ea39894bc7340a408f1d1e379856a6b/pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1", size = 9151328, upload-time = "2026-02-17T22:18:35.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, + { url = "https://files.pythonhosted.org/packages/3d/fe/89d77e424365280b79d99b3e1e7d606f5165af2f2ecfaf0c6d24c799d607/pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4", size = 10876435, upload-time = "2026-02-17T22:18:45.954Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a6/2a75320849dd154a793f69c951db759aedb8d1dd3939eeacda9bdcfa1629/pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1", size = 11405133, upload-time = "2026-02-17T22:18:48.533Z" }, + { url = "https://files.pythonhosted.org/packages/58/53/1d68fafb2e02d7881df66aa53be4cd748d25cbe311f3b3c85c93ea5d30ca/pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821", size = 11932065, upload-time = "2026-02-17T22:18:50.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/08/67cc404b3a966b6df27b38370ddd96b3b023030b572283d035181854aac5/pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43", size = 9741627, upload-time = "2026-02-17T22:18:53.905Z" }, + { url = "https://files.pythonhosted.org/packages/86/4f/caf9952948fb00d23795f09b893d11f1cacb384e666854d87249530f7cbe/pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7", size = 9052483, upload-time = "2026-02-17T22:18:57.31Z" }, + { url = "https://files.pythonhosted.org/packages/0b/48/aad6ec4f8d007534c091e9a7172b3ec1b1ee6d99a9cbb936b5eab6c6cf58/pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262", size = 10317509, upload-time = "2026-02-17T22:18:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/a8/14/5990826f779f79148ae9d3a2c39593dc04d61d5d90541e71b5749f35af95/pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56", size = 9860561, upload-time = "2026-02-17T22:19:02.265Z" }, + { url = "https://files.pythonhosted.org/packages/fa/80/f01ff54664b6d70fed71475543d108a9b7c888e923ad210795bef04ffb7d/pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e", size = 10365506, upload-time = "2026-02-17T22:19:05.017Z" }, + { url = "https://files.pythonhosted.org/packages/f2/85/ab6d04733a7d6ff32bfc8382bf1b07078228f5d6ebec5266b91bfc5c4ff7/pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791", size = 10873196, upload-time = "2026-02-17T22:19:07.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/a9/9301c83d0b47c23ac5deab91c6b39fd98d5b5db4d93b25df8d381451828f/pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a", size = 11370859, upload-time = "2026-02-17T22:19:09.436Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/0c1fc5bd2d29c7db2ab372330063ad555fb83e08422829c785f5ec2176ca/pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8", size = 11924584, upload-time = "2026-02-17T22:19:11.562Z" }, + { url = "https://files.pythonhosted.org/packages/d6/7d/216a1588b65a7aa5f4535570418a599d943c85afb1d95b0876fc00aa1468/pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25", size = 9742769, upload-time = "2026-02-17T22:19:13.926Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cb/810a22a6af9a4e97c8ab1c946b47f3489c5bca5adc483ce0ffc84c9cc768/pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59", size = 9043855, upload-time = "2026-02-17T22:19:16.09Z" }, + { url = "https://files.pythonhosted.org/packages/92/fa/423c89086cca1f039cf1253c3ff5b90f157b5b3757314aa635f6bf3e30aa/pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06", size = 10752673, upload-time = "2026-02-17T22:19:18.304Z" }, + { url = "https://files.pythonhosted.org/packages/22/23/b5a08ec1f40020397f0faba72f1e2c11f7596a6169c7b3e800abff0e433f/pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f", size = 10404967, upload-time = "2026-02-17T22:19:20.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/81/94841f1bb4afdc2b52a99daa895ac2c61600bb72e26525ecc9543d453ebc/pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324", size = 10320575, upload-time = "2026-02-17T22:19:24.919Z" }, + { url = "https://files.pythonhosted.org/packages/0a/8b/2ae37d66a5342a83adadfd0cb0b4bf9c3c7925424dd5f40d15d6cfaa35ee/pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9", size = 10710921, upload-time = "2026-02-17T22:19:27.181Z" }, + { url = "https://files.pythonhosted.org/packages/a2/61/772b2e2757855e232b7ccf7cb8079a5711becb3a97f291c953def15a833f/pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76", size = 11334191, upload-time = "2026-02-17T22:19:29.411Z" }, + { url = "https://files.pythonhosted.org/packages/1b/08/b16c6df3ef555d8495d1d265a7963b65be166785d28f06a350913a4fac78/pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098", size = 11782256, upload-time = "2026-02-17T22:19:32.34Z" }, + { url = "https://files.pythonhosted.org/packages/55/80/178af0594890dee17e239fca96d3d8670ba0f5ff59b7d0439850924a9c09/pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35", size = 10485047, upload-time = "2026-02-17T22:19:34.605Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/4bb774a998b97e6c2fd62a9e6cfdaae133b636fd1c468f92afb4ae9a447a/pandas-3.0.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:99d0f92ed92d3083d140bf6b97774f9f13863924cf3f52a70711f4e7588f9d0a", size = 10322465, upload-time = "2026-02-17T22:19:36.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/3a/5b39b51c64159f470f1ca3b1c2a87da290657ca022f7cd11442606f607d1/pandas-3.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3b66857e983208654294bb6477b8a63dee26b37bdd0eb34d010556e91261784f", size = 9910632, upload-time = "2026-02-17T22:19:39.001Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f7/b449ffb3f68c11da12fc06fbf6d2fa3a41c41e17d0284d23a79e1c13a7e4/pandas-3.0.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56cf59638bf24dc9bdf2154c81e248b3289f9a09a6d04e63608c159022352749", size = 10440535, upload-time = "2026-02-17T22:19:41.157Z" }, + { url = "https://files.pythonhosted.org/packages/55/77/6ea82043db22cb0f2bbfe7198da3544000ddaadb12d26be36e19b03a2dc5/pandas-3.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1a9f55e0f46951874b863d1f3906dcb57df2d9be5c5847ba4dfb55b2c815249", size = 10893940, upload-time = "2026-02-17T22:19:43.493Z" }, + { url = "https://files.pythonhosted.org/packages/03/30/f1b502a72468c89412c1b882a08f6eed8a4ee9dc033f35f65d0663df6081/pandas-3.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1849f0bba9c8a2fb0f691d492b834cc8dadf617e29015c66e989448d58d011ee", size = 11442711, upload-time = "2026-02-17T22:19:46.074Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f0/ebb6ddd8fc049e98cabac5c2924d14d1dda26a20adb70d41ea2e428d3ec4/pandas-3.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3d288439e11b5325b02ae6e9cc83e6805a62c40c5a6220bea9beb899c073b1c", size = 11963918, upload-time = "2026-02-17T22:19:48.838Z" }, + { url = "https://files.pythonhosted.org/packages/09/f8/8ce132104074f977f907442790eaae24e27bce3b3b454e82faa3237ff098/pandas-3.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:93325b0fe372d192965f4cca88d97667f49557398bbf94abdda3bf1b591dbe66", size = 9862099, upload-time = "2026-02-17T22:19:51.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/b7/6af9aac41ef2456b768ef0ae60acf8abcebb450a52043d030a65b4b7c9bd/pandas-3.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:97ca08674e3287c7148f4858b01136f8bdfe7202ad25ad04fec602dd1d29d132", size = 9185333, upload-time = "2026-02-17T22:19:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/66/fc/848bb6710bc6061cb0c5badd65b92ff75c81302e0e31e496d00029fe4953/pandas-3.0.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:58eeb1b2e0fb322befcf2bbc9ba0af41e616abadb3d3414a6bc7167f6cbfce32", size = 10772664, upload-time = "2026-02-17T22:19:55.806Z" }, + { url = "https://files.pythonhosted.org/packages/69/5c/866a9bbd0f79263b4b0db6ec1a341be13a1473323f05c122388e0f15b21d/pandas-3.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cd9af1276b5ca9e298bd79a26bda32fa9cc87ed095b2a9a60978d2ca058eaf87", size = 10421286, upload-time = "2026-02-17T22:19:58.091Z" }, + { url = "https://files.pythonhosted.org/packages/51/a4/2058fb84fb1cfbfb2d4a6d485e1940bb4ad5716e539d779852494479c580/pandas-3.0.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f87a04984d6b63788327cd9f79dda62b7f9043909d2440ceccf709249ca988", size = 10342050, upload-time = "2026-02-17T22:20:01.376Z" }, + { url = "https://files.pythonhosted.org/packages/22/1b/674e89996cc4be74db3c4eb09240c4bb549865c9c3f5d9b086ff8fcfbf00/pandas-3.0.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85fe4c4df62e1e20f9db6ebfb88c844b092c22cd5324bdcf94bfa2fc1b391221", size = 10740055, upload-time = "2026-02-17T22:20:04.328Z" }, + { url = "https://files.pythonhosted.org/packages/d0/f8/e954b750764298c22fa4614376531fe63c521ef517e7059a51f062b87dca/pandas-3.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:331ca75a2f8672c365ae25c0b29e46f5ac0c6551fdace8eec4cd65e4fac271ff", size = 11357632, upload-time = "2026-02-17T22:20:06.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/02/c6e04b694ffd68568297abd03588b6d30295265176a5c01b7459d3bc35a3/pandas-3.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15860b1fdb1973fffade772fdb931ccf9b2f400a3f5665aef94a00445d7d8dd5", size = 11810974, upload-time = "2026-02-17T22:20:08.946Z" }, + { url = "https://files.pythonhosted.org/packages/89/41/d7dfb63d2407f12055215070c42fc6ac41b66e90a2946cdc5e759058398b/pandas-3.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:44f1364411d5670efa692b146c748f4ed013df91ee91e9bec5677fb1fd58b937", size = 10884622, upload-time = "2026-02-17T22:20:11.711Z" }, + { url = "https://files.pythonhosted.org/packages/68/b0/34937815889fa982613775e4b97fddd13250f11012d769949c5465af2150/pandas-3.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:108dd1790337a494aa80e38def654ca3f0968cf4f362c85f44c15e471667102d", size = 9452085, upload-time = "2026-02-17T22:20:14.331Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "patsy" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, 
+] +sdist = { url = "https://files.pythonhosted.org/packages/be/44/ed13eccdd0519eff265f44b670d46fbb0ec813e2274932dc1c0e48520f7d/patsy-1.0.2.tar.gz", hash = "sha256:cdc995455f6233e90e22de72c37fcadb344e7586fb83f06696f54d92f8ce74c0", size = 399942, upload-time = "2025-10-20T16:17:37.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/70/ba4b949bdc0490ab78d545459acd7702b211dfccf7eb89bbc1060f52818d/patsy-1.0.2-py2.py3-none-any.whl", hash = "sha256:37bfddbc58fcf0362febb5f54f10743f8b21dd2aa73dec7e7ef59d1b02ae668a", size = 233301, upload-time = "2025-10-20T16:17:36.563Z" }, +] + +[[package]] +name = "pillow" +version = "12.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/30/5bd3d794762481f8c8ae9c80e7b76ecea73b916959eb587521358ef0b2f9/pillow-12.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f1625b72740fdda5d77b4def688eb8fd6490975d06b909fd19f13f391e077e0", size = 5304099, upload-time = "2026-02-11T04:20:06.13Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c1/aab9e8f3eeb4490180e357955e15c2ef74b31f64790ff356c06fb6cf6d84/pillow-12.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:178aa072084bd88ec759052feca8e56cbb14a60b39322b99a049e58090479713", size = 4657880, upload-time = "2026-02-11T04:20:09.291Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0a/9879e30d56815ad529d3985aeff5af4964202425c27261a6ada10f7cbf53/pillow-12.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b66e95d05ba806247aaa1561f080abc7975daf715c30780ff92a20e4ec546e1b", size = 6222587, upload-time = "2026-02-11T04:20:10.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/5f/a1b72ff7139e4f89014e8d451442c74a774d5c43cd938fb0a9f878576b37/pillow-12.1.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89c7e895002bbe49cdc5426150377cbbc04767d7547ed145473f496dfa40408b", size = 8027678, upload-time = "2026-02-11T04:20:12.455Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c2/c7cb187dac79a3d22c3ebeae727abee01e077c8c7d930791dc592f335153/pillow-12.1.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a5cbdcddad0af3da87cb16b60d23648bc3b51967eb07223e9fed77a82b457c4", size = 6335777, upload-time = "2026-02-11T04:20:14.441Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7b/f9b09a7804ec7336effb96c26d37c29d27225783dc1501b7d62dcef6ae25/pillow-12.1.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f51079765661884a486727f0729d29054242f74b46186026582b4e4769918e4", size = 7027140, upload-time = "2026-02-11T04:20:16.387Z" }, + { url = "https://files.pythonhosted.org/packages/98/b2/2fa3c391550bd421b10849d1a2144c44abcd966daadd2f7c12e19ea988c4/pillow-12.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:99c1506ea77c11531d75e3a412832a13a71c7ebc8192ab9e4b2e355555920e3e", size = 6449855, upload-time = "2026-02-11T04:20:18.554Z" }, + { url = "https://files.pythonhosted.org/packages/96/ff/9caf4b5b950c669263c39e96c78c0d74a342c71c4f43fd031bb5cb7ceac9/pillow-12.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36341d06738a9f66c8287cf8b876d24b18db9bd8740fa0672c74e259ad408cff", size = 7151329, upload-time = "2026-02-11T04:20:20.646Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f8/4b24841f582704da675ca535935bccb32b00a6da1226820845fac4a71136/pillow-12.1.1-cp310-cp310-win32.whl", hash = "sha256:6c52f062424c523d6c4db85518774cc3d50f5539dd6eed32b8f6229b26f24d40", size = 6325574, upload-time = "2026-02-11T04:20:22.43Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/f9/9f6b01c0881d7036063aa6612ef04c0e2cad96be21325a1e92d0203f8e91/pillow-12.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6008de247150668a705a6338156efb92334113421ceecf7438a12c9a12dab23", size = 7032347, upload-time = "2026-02-11T04:20:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/c7922edded3dcdaf10c59297540b72785620abc0538872c819915746757d/pillow-12.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:1a9b0ee305220b392e1124a764ee4265bd063e54a751a6b62eff69992f457fa9", size = 2453457, upload-time = "2026-02-11T04:20:25.392Z" }, + { url = "https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" }, + { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" }, + { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e2/53c43334bbbb2d3b938978532fbda8e62bb6e0b23a26ce8592f36bcc4987/pillow-12.1.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090", size = 8038007, upload-time = "2026-02-11T04:20:34.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/a6/3d0e79c8a9d58150dd98e199d7c1c56861027f3829a3a60b3c2784190180/pillow-12.1.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af", size = 6345418, upload-time = "2026-02-11T04:20:35.858Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/46dfeac5825e600579157eea177be43e2f7ff4a99da9d0d0a49533509ac5/pillow-12.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b", size = 7034590, upload-time = "2026-02-11T04:20:37.91Z" }, + { url = "https://files.pythonhosted.org/packages/af/bf/e6f65d3db8a8bbfeaf9e13cc0417813f6319863a73de934f14b2229ada18/pillow-12.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5", size = 6458655, upload-time = "2026-02-11T04:20:39.496Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c2/66091f3f34a25894ca129362e510b956ef26f8fb67a0e6417bc5744e56f1/pillow-12.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d", size = 7159286, upload-time = "2026-02-11T04:20:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5a/24bc8eb526a22f957d0cec6243146744966d40857e3d8deb68f7902ca6c1/pillow-12.1.1-cp311-cp311-win32.whl", hash = "sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c", size = 6328663, upload-time = "2026-02-11T04:20:43.184Z" }, + { url = "https://files.pythonhosted.org/packages/31/03/bef822e4f2d8f9d7448c133d0a18185d3cce3e70472774fffefe8b0ed562/pillow-12.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563", size = 7031448, upload-time = "2026-02-11T04:20:44.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/70/f76296f53610bd17b2e7d31728b8b7825e3ac3b5b3688b51f52eab7c0818/pillow-12.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80", size = 2453651, upload-time = "2026-02-11T04:20:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, + { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, + { url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012, upload-time = "2026-02-11T04:20:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638, upload-time = "2026-02-11T04:20:54.444Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540, upload-time = "2026-02-11T04:20:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613, upload-time = "2026-02-11T04:20:57.542Z" }, + { url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745, upload-time = "2026-02-11T04:20:59.196Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" }, + { url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" }, + { url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" }, + { url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" }, + { url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" }, + { url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" }, + { url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" }, + { url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" }, + { url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" }, + { url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" }, + { url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" }, + { url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" }, + { url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" }, + { url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" }, + { url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" }, + { url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" }, + { url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" }, + { url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" }, + { url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" }, + { url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" }, + { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/5d43209aa4cb58e0cc80127956ff1796a68b928e6324bbf06ef4db34367b/pillow-12.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f", size = 5228606, upload-time = "2026-02-11T04:22:52.106Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d5/3b005b4e4fda6698b371fa6c21b097d4707585d7db99e98d9b0b87ac612a/pillow-12.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9", size = 4622321, upload-time = "2026-02-11T04:22:53.827Z" }, + { url = "https://files.pythonhosted.org/packages/df/36/ed3ea2d594356fd8037e5a01f6156c74bc8d92dbb0fa60746cc96cabb6e8/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e", size = 5247579, upload-time = "2026-02-11T04:22:56.094Z" }, + { url = "https://files.pythonhosted.org/packages/54/9a/9cc3e029683cf6d20ae5085da0dafc63148e3252c2f13328e553aaa13cfb/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9", size = 6989094, upload-time = "2026-02-11T04:22:58.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/98/fc53ab36da80b88df0967896b6c4b4cd948a0dc5aa40a754266aa3ae48b3/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3", size = 5313850, upload-time = "2026-02-11T04:23:00.554Z" }, + { url = "https://files.pythonhosted.org/packages/30/02/00fa585abfd9fe9d73e5f6e554dc36cc2b842898cbfc46d70353dae227f8/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735", size = 5963343, upload-time = "2026-02-11T04:23:02.934Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = 
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = "2025-10-08T19:46:02.083Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, + { url = "https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", 
hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, + { url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, + { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, + { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, + { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" 
}, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size 
= 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = 
"2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = 
"2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, +] + +[[package]] +name = "pyarrow" +version = "23.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3fab8f82571844eb3c460f90a75583801d14ca0cc32b1acc8c361650e006fd56", size = 34307390, upload-time = "2026-02-16T10:08:08.654Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:3f91c038b95f71ddfc865f11d5876c42f343b4495535bd262c7b321b0b94507c", size = 35853761, upload-time = 
"2026-02-16T10:08:17.811Z" }, + { url = "https://files.pythonhosted.org/packages/2e/08/3e56a18819462210432ae37d10f5c8eed3828be1d6c751b6e6a2e93c286a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d0744403adabef53c985a7f8a082b502a368510c40d184df349a0a8754533258", size = 44493116, upload-time = "2026-02-16T10:08:25.792Z" }, + { url = "https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c33b5bf406284fd0bba436ed6f6c3ebe8e311722b441d89397c54f871c6863a2", size = 47564532, upload-time = "2026-02-16T10:08:34.27Z" }, + { url = "https://files.pythonhosted.org/packages/20/bc/73f611989116b6f53347581b02177f9f620efdf3cd3f405d0e83cdf53a83/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ddf743e82f69dcd6dbbcb63628895d7161e04e56794ef80550ac6f3315eeb1d5", size = 48183685, upload-time = "2026-02-16T10:08:42.889Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cc/6c6b3ecdae2a8c3aced99956187e8302fc954cc2cca2a37cf2111dad16ce/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e052a211c5ac9848ae15d5ec875ed0943c0221e2fcfe69eee80b604b4e703222", size = 50605582, upload-time = "2026-02-16T10:08:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/8d/94/d359e708672878d7638a04a0448edf7c707f9e5606cee11e15aaa5c7535a/pyarrow-23.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5abde149bb3ce524782d838eb67ac095cd3fd6090eba051130589793f1a7f76d", size = 27521148, upload-time = "2026-02-16T10:08:58.077Z" }, + { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, + { url = "https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, + { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, + { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, + { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a5/da83046273d990f256cb79796a190bbf7ec999269705ddc609403f8c6b06/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:813d99f31275919c383aab17f0f455a04f5a429c261cc411b1e9a8f5e4aaaa05", size = 47586063, upload-time = "2026-02-16T10:10:17.95Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/b7d2ebcff47a514f47f9da1e74b7949138c58cfeb108cdd4ee62f43f0cf3/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bf5842f960cddd2ef757d486041d57c96483efc295a8c4a0e20e704cbbf39c67", size = 48173045, upload-time = "2026-02-16T10:10:25.363Z" }, + { url = "https://files.pythonhosted.org/packages/43/b2/b40961262213beaba6acfc88698eb773dfce32ecdf34d19291db94c2bd73/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564baf97c858ecc03ec01a41062e8f4698abc3e6e2acd79c01c2e97880a19730", size = 50621741, upload-time = "2026-02-16T10:10:33.477Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/70/1fdda42d65b28b078e93d75d371b2185a61da89dda4def8ba6ba41ebdeb4/pyarrow-23.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:07deae7783782ac7250989a7b2ecde9b3c343a643f82e8a4df03d93b633006f0", size = 27620678, upload-time = "2026-02-16T10:10:39.31Z" }, + { url = "https://files.pythonhosted.org/packages/47/10/2cbe4c6f0fb83d2de37249567373d64327a5e4d8db72f486db42875b08f6/pyarrow-23.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6b8fda694640b00e8af3c824f99f789e836720aa8c9379fb435d4c4953a756b8", size = 34210066, upload-time = "2026-02-16T10:10:45.487Z" }, + { url = "https://files.pythonhosted.org/packages/cb/4f/679fa7e84dadbaca7a65f7cdba8d6c83febbd93ca12fa4adf40ba3b6362b/pyarrow-23.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:8ff51b1addc469b9444b7c6f3548e19dc931b172ab234e995a60aea9f6e6025f", size = 35825526, upload-time = "2026-02-16T10:10:52.266Z" }, + { url = "https://files.pythonhosted.org/packages/f9/63/d2747d930882c9d661e9398eefc54f15696547b8983aaaf11d4a2e8b5426/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:71c5be5cbf1e1cb6169d2a0980850bccb558ddc9b747b6206435313c47c37677", size = 44473279, upload-time = "2026-02-16T10:11:01.557Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/10a48b5e238de6d562a411af6467e71e7aedbc9b87f8d3a35f1560ae30fb/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9b6f4f17b43bc39d56fec96e53fe89d94bac3eb134137964371b45352d40d0c2", size = 47585798, upload-time = "2026-02-16T10:11:09.401Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/476943001c54ef078dbf9542280e22741219a184a0632862bca4feccd666/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fc13fc6c403d1337acab46a2c4346ca6c9dec5780c3c697cf8abfd5e19b6b37", size = 48179446, upload-time = "2026-02-16T10:11:17.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/b6/5dd0c47b335fcd8edba9bfab78ad961bd0fd55ebe53468cc393f45e0be60/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c16ed4f53247fa3ffb12a14d236de4213a4415d127fe9cebed33d51671113e2", size = 50623972, upload-time = "2026-02-16T10:11:26.185Z" }, + { url = "https://files.pythonhosted.org/packages/d5/09/a532297c9591a727d67760e2e756b83905dd89adb365a7f6e9c72578bcc1/pyarrow-23.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:cecfb12ef629cf6be0b1887f9f86463b0dd3dc3195ae6224e74006be4736035a", size = 27540749, upload-time = "2026-02-16T10:12:23.297Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8e/38749c4b1303e6ae76b3c80618f84861ae0c55dd3c2273842ea6f8258233/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:29f7f7419a0e30264ea261fdc0e5fe63ce5a6095003db2945d7cd78df391a7e1", size = 34471544, upload-time = "2026-02-16T10:11:32.535Z" }, + { url = "https://files.pythonhosted.org/packages/a3/73/f237b2bc8c669212f842bcfd842b04fc8d936bfc9d471630569132dc920d/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:33d648dc25b51fd8055c19e4261e813dfc4d2427f068bcecc8b53d01b81b0500", size = 35949911, upload-time = "2026-02-16T10:11:39.813Z" }, + { url = "https://files.pythonhosted.org/packages/0c/86/b912195eee0903b5611bf596833def7d146ab2d301afeb4b722c57ffc966/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd395abf8f91c673dd3589cadc8cc1ee4e8674fa61b2e923c8dd215d9c7d1f41", size = 44520337, upload-time = "2026-02-16T10:11:47.764Z" }, + { url = "https://files.pythonhosted.org/packages/69/c2/f2a717fb824f62d0be952ea724b4f6f9372a17eed6f704b5c9526f12f2f1/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:00be9576d970c31defb5c32eb72ef585bf600ef6d0a82d5eccaae96639cf9d07", size = 47548944, upload-time = "2026-02-16T10:11:56.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a7/90007d476b9f0dc308e3bc57b832d004f848fd6c0da601375d20d92d1519/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c2139549494445609f35a5cda4eb94e2c9e4d704ce60a095b342f82460c73a83", size = 48236269, upload-time = "2026-02-16T10:12:04.47Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3f/b16fab3e77709856eb6ac328ce35f57a6d4a18462c7ca5186ef31b45e0e0/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7044b442f184d84e2351e5084600f0d7343d6117aabcbc1ac78eb1ae11eb4125", size = 50604794, upload-time = "2026-02-16T10:12:11.797Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a1/22df0620a9fac31d68397a75465c344e83c3dfe521f7612aea33e27ab6c0/pyarrow-23.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a35581e856a2fafa12f3f54fce4331862b1cfb0bef5758347a858a4aa9d6bae8", size = 27660642, upload-time = "2026-02-16T10:12:17.746Z" }, + { url = "https://files.pythonhosted.org/packages/8d/1b/6da9a89583ce7b23ac611f183ae4843cd3a6cf54f079549b0e8c14031e73/pyarrow-23.0.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:5df1161da23636a70838099d4aaa65142777185cc0cdba4037a18cee7d8db9ca", size = 34238755, upload-time = "2026-02-16T10:12:32.819Z" }, + { url = "https://files.pythonhosted.org/packages/ae/b5/d58a241fbe324dbaeb8df07be6af8752c846192d78d2272e551098f74e88/pyarrow-23.0.1-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:fa8e51cb04b9f8c9c5ace6bab63af9a1f88d35c0d6cbf53e8c17c098552285e1", size = 35847826, upload-time = "2026-02-16T10:12:38.949Z" }, + { url = "https://files.pythonhosted.org/packages/54/a5/8cbc83f04aba433ca7b331b38f39e000efd9f0c7ce47128670e737542996/pyarrow-23.0.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:0b95a3994f015be13c63148fef8832e8a23938128c185ee951c98908a696e0eb", size = 44536859, upload-time = "2026-02-16T10:12:45.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/2e/c0f017c405fcdc252dbccafbe05e36b0d0eb1ea9a958f081e01c6972927f/pyarrow-23.0.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:4982d71350b1a6e5cfe1af742c53dfb759b11ce14141870d05d9e540d13bc5d1", size = 47614443, upload-time = "2026-02-16T10:12:55.525Z" }, + { url = "https://files.pythonhosted.org/packages/af/6b/2314a78057912f5627afa13ba43809d9d653e6630859618b0fd81a4e0759/pyarrow-23.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c250248f1fe266db627921c89b47b7c06fee0489ad95b04d50353537d74d6886", size = 48232991, upload-time = "2026-02-16T10:13:04.729Z" }, + { url = "https://files.pythonhosted.org/packages/40/f2/1bcb1d3be3460832ef3370d621142216e15a2c7c62602a4ea19ec240dd64/pyarrow-23.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5f4763b83c11c16e5f4c15601ba6dfa849e20723b46aa2617cb4bffe8768479f", size = 50645077, upload-time = "2026-02-16T10:13:14.147Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3f/b1da7b61cd66566a4d4c8383d376c606d1c34a906c3f1cb35c479f59d1aa/pyarrow-23.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:3a4c85ef66c134161987c17b147d6bffdca4566f9a4c1d81a0a01cdf08414ea5", size = 28234271, upload-time = "2026-02-16T10:14:09.397Z" }, + { url = "https://files.pythonhosted.org/packages/b5/78/07f67434e910a0f7323269be7bfbf58699bd0c1d080b18a1ab49ba943fe8/pyarrow-23.0.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:17cd28e906c18af486a499422740298c52d7c6795344ea5002a7720b4eadf16d", size = 34488692, upload-time = "2026-02-16T10:13:21.541Z" }, + { url = "https://files.pythonhosted.org/packages/50/76/34cf7ae93ece1f740a04910d9f7e80ba166b9b4ab9596a953e9e62b90fe1/pyarrow-23.0.1-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:76e823d0e86b4fb5e1cf4a58d293036e678b5a4b03539be933d3b31f9406859f", size = 35964383, upload-time = "2026-02-16T10:13:28.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/90/459b827238936d4244214be7c684e1b366a63f8c78c380807ae25ed92199/pyarrow-23.0.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:a62e1899e3078bf65943078b3ad2a6ddcacf2373bc06379aac61b1e548a75814", size = 44538119, upload-time = "2026-02-16T10:13:35.506Z" }, + { url = "https://files.pythonhosted.org/packages/28/a1/93a71ae5881e99d1f9de1d4554a87be37da11cd6b152239fb5bd924fdc64/pyarrow-23.0.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:df088e8f640c9fae3b1f495b3c64755c4e719091caf250f3a74d095ddf3c836d", size = 47571199, upload-time = "2026-02-16T10:13:42.504Z" }, + { url = "https://files.pythonhosted.org/packages/88/a3/d2c462d4ef313521eaf2eff04d204ac60775263f1fb08c374b543f79f610/pyarrow-23.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:46718a220d64677c93bc243af1d44b55998255427588e400677d7192671845c7", size = 48259435, upload-time = "2026-02-16T10:13:49.226Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f1/11a544b8c3d38a759eb3fbb022039117fd633e9a7b19e4841cc3da091915/pyarrow-23.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a09f3876e87f48bc2f13583ab551f0379e5dfb83210391e68ace404181a20690", size = 50629149, upload-time = "2026-02-16T10:13:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/50/f2/c0e76a0b451ffdf0cf788932e182758eb7558953f4f27f1aff8e2518b653/pyarrow-23.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:527e8d899f14bd15b740cd5a54ad56b7f98044955373a17179d5956ddb93d9ce", size = 28365807, upload-time = "2026-02-16T10:14:03.892Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pynndescent" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "llvmlite" }, + { name = "numba" }, + { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/fb/7f58c397fb31666756457ee2ac4c0289ef2daad57f4ae4be8dec12f80b03/pynndescent-0.6.0.tar.gz", hash = "sha256:7ffde0fb5b400741e055a9f7d377e3702e02250616834231f6c209e39aac24f5", size = 2992987, upload-time = "2026-01-08T21:29:58.943Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/e6/94145d714402fd5ade00b5661f2d0ab981219e07f7db9bfa16786cdb9c04/pynndescent-0.6.0-py3-none-any.whl", hash = "sha256:dc8c74844e4c7f5cbd1e0cd6909da86fdc789e6ff4997336e344779c3d5538ef", size = 73511, upload-time = "2026-01-08T21:29:57.306Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, 
upload-time = "2026-01-21T03:57:59.36Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, +] + +[[package]] +name = "pyreadstat" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "narwhals" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/f3/633b3ab095eb3ecca96f8f332d3ae645de17f07606f944a7c96e3c6c2f27/pyreadstat-1.3.3.tar.gz", hash = "sha256:157afa4974ea76354aeccb03dbdb8cdfc8272b0b231bfb4d55b449e3f75c7071", size = 618316, upload-time = "2026-01-23T17:41:59.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/3e/0c143a423369abdce25b9312119f9ef6e5f0f26e8ab3d8eba619b0e20bf8/pyreadstat-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c35b3c33aa51902227f2f951d7b623fece082dfea1eca6661144c1311beb13ad", size = 577243, upload-time = "2026-01-23T17:40:51.382Z" }, + { url = "https://files.pythonhosted.org/packages/b6/3e/956b47adef507e232ebd7b4a0e5d1ab1b2ea51e4e51da2ff990d7a96528a/pyreadstat-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:866dd57269b6ac40026363e97281ca8ee2b7097bf396cec7a1cd7622dd274222", size = 529910, upload-time = "2026-01-23T17:40:53.701Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fa/2debc8e3bfba72bb61657f7228e3129f9615273f6e32c13d79f5c15e2f77/pyreadstat-1.3.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:a5952b40d449f45637f330476b79a58d629ea36eaa483c11df0b9790dfed47ac", size = 2941953, upload-time = "2026-01-23T17:41:01.056Z" }, + { url = "https://files.pythonhosted.org/packages/c7/58/2f83c8b2a04b13eedc19725abb0eb75a8c20a7eacfadd34a0c918a852b53/pyreadstat-1.3.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a87e5f6fde6c42c172b7ad3b854073f15c05c5931a4ba0d76a0a202831c827cf", size = 3032613, upload-time = "2026-01-23T17:41:05.492Z" }, + { url = "https://files.pythonhosted.org/packages/c2/22/d23520cc311dc0c2cd3fcaa6261a071bfdc69e32e195f7fa5425146746eb/pyreadstat-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:0add0430b987cb7c4433198f8a28578c20b563e644b04d362af9f427c7f7c216", size = 2422715, upload-time = "2026-01-23T17:41:07.669Z" }, + { url = "https://files.pythonhosted.org/packages/68/44/213276a412f6b53aacfd63fc5005c6c0d6daedffab4ffd149edaeac68110/pyreadstat-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f209f3317aab85428c071265e0fc7ce598cb77ff0ff4d4b5bcec1c962ade68f8", size = 575250, upload-time = "2026-01-23T17:41:09.556Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b0/ac1eda0aa744a1e1147836d7ea340ed2578925bfe8b72824deebf006e671/pyreadstat-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4a1a4217450fe837003d0c1965ba6ba5b374d8612ae97a9b532bc437b3a85e87", size = 527980, upload-time = "2026-01-23T17:41:12.303Z" }, + { url = "https://files.pythonhosted.org/packages/dc/10/9c27d52c91c567c4d247dbdd43ddbf6552e8c61f7f28c25084adb3612cf8/pyreadstat-1.3.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:faafe490c4281cca67687fef34169cd95e10110933fc9cf296233b47c63504cb", size = 3048300, upload-time = "2026-01-23T17:41:14.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/c8/bde29307c7fa2a4e37e609e1797883e5d5fdab39886ec177da0d00171b06/pyreadstat-1.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924373e0de19f14a6b733790c265f0670d7789e7c5f5a8b03962e772099cafb", size = 3146294, upload-time = "2026-01-23T17:41:17.135Z" }, + { url = "https://files.pythonhosted.org/packages/da/dc/e22114ae107b344d15124a5d20f5bfe5e1ea715412909824a205cabfaf60/pyreadstat-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:65386b4f785e64323f29a605c9a4ef4689a43342f8702e9fceda05bdcbf50904", size = 2423192, upload-time = "2026-01-23T17:41:19.171Z" }, + { url = "https://files.pythonhosted.org/packages/22/d0/a8daae1e4938866449c6e9f28ba89d934335e4dd921100f04159caf1f9df/pyreadstat-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9abdf57d6f2d32f77b778a45567274297eb7d425f6625df497defbf8e522d442", size = 569177, upload-time = "2026-01-23T17:41:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/b5/47/060e659f1f438d61dac76aabe232140d4447bbe1954a4235cc1a2157d1f4/pyreadstat-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:30c5f2f5856233ea85c1671cca31f50ad7f2ef087cfe57b6a9d0e342c8140e5f", size = 523760, upload-time = "2026-01-23T17:41:23.811Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/0041c569557d15a0249a295a0bf96b5074e167e3fdd97437cac850c9b675/pyreadstat-1.3.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:32e96b0e7ba17340e9ebe008262aef4c5a80a01cf842795a0edbce4f8de39d08", size = 3018665, upload-time = "2026-01-23T17:41:26.303Z" }, + { url = "https://files.pythonhosted.org/packages/91/49/258773d26544fd09cca57369cc77540dfa71c3446469fc4cf891ca24acb2/pyreadstat-1.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7931d10a29f79dfaf8207fdecd241e813bd0d02aa015ca2d997806a477e21de1", size = 3118377, upload-time = 
"2026-01-23T17:41:29.263Z" }, + { url = "https://files.pythonhosted.org/packages/ef/53/03e2043e220f4f3bf1dde7bdcb49e6958123ca89b8ddf134138877a9e20b/pyreadstat-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:417f21aaff6eea3f024b4856fef42b1387ad967e46bddc89ceadd509a52d8ace", size = 2409816, upload-time = "2026-01-23T17:41:32.178Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e9/676f2c6e8200f6d72953a24162c566ae3d2f85d21fd16a69946c5345c4fd/pyreadstat-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d657abc17e30d0b51c9efd4d8e1cc9f8f5ebc2af0b46712bbda4aec2a996065f", size = 568230, upload-time = "2026-01-23T17:41:35.566Z" }, + { url = "https://files.pythonhosted.org/packages/5b/61/a97a32eee5954b606c5d34c9890a4ff041d53216f9b863ff66e7efd768fa/pyreadstat-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92866553c838478185632460d8c18cd479aa3cc550424298e26380c590e0dc87", size = 522045, upload-time = "2026-01-23T17:41:38.246Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/cb19b27259820eafadb4589d087a9226dd526da7c68e37913319c84041be/pyreadstat-1.3.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ad8e1602fe9d8240d7087574d5cba1965b1e85a19daf3cad5743c859d20b65d", size = 3001153, upload-time = "2026-01-23T17:41:41.041Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d7/fa898744ddd55d2becbbece6a880f28f2435d5d061de9772a7ad1de39554/pyreadstat-1.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eac5560ed4233ee3fbe86551fe3bb9af6951cf37ec6fcdf3ad00a55286406fe", size = 3104591, upload-time = "2026-01-23T17:41:44.136Z" }, + { url = "https://files.pythonhosted.org/packages/5c/30/3eda2a99631a247b58a70eb0abb6dcc5998e2a45188169cf418183730aec/pyreadstat-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:297506203b58a521c3b2831386f017a2cde2b8a139cfeb59abbaf81aa7fe9721", size = 2410868, upload-time = "2026-01-23T17:41:46.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/08/448722be52b018955e5a8e9479650e3a43e52f1c14043c31eb2d4e3c598c/pyreadstat-1.3.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:605b32d3ea9313506ce900623afbe15610c4151b5bbee7bf7f96417d59616bae", size = 569453, upload-time = "2026-01-23T17:41:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/2f/9e/d31871d49858654bcf5fb42f5f38e32ebaa7df579629c4afa33e455da2cf/pyreadstat-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bd28cba80f45d9308efe7950f70402f46d4824dacfb1b6e9d1510aeed852b052", size = 523932, upload-time = "2026-01-23T17:41:49.437Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d8/c6de059fd3f98061943d96cbf00af35d1f7cdcca90a513cf35f01bb75415/pyreadstat-1.3.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba8d550697efb9658a835107fb3d63237c313df2fbcb619bbc77ab637b1d32be", size = 3000460, upload-time = "2026-01-23T17:41:52.146Z" }, + { url = "https://files.pythonhosted.org/packages/ec/15/d5d43aab970021bcd898832d4bfb2682ec29ba769be0cf20289ec63eede7/pyreadstat-1.3.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a12a47fbf636f9b4e6a6bc660bff84e1d26f188a6cd79a7f3f9a7b5d585892c", size = 3079691, upload-time = "2026-01-23T17:41:54.798Z" }, + { url = "https://files.pythonhosted.org/packages/73/93/a83664c4a2f173b29fb41827d8842e21af8dbe2507948352df73ca779464/pyreadstat-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:c397928fd7138253f7a8f3f27e5efd80e131645f062d541f80f264656c9e4bd0", size = 2445254, upload-time = "2026-01-23T17:41:57.127Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, 
+ { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pytokens" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/24/f206113e05cb8ef51b3850e7ef88f20da6f4bf932190ceb48bd3da103e10/pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5", size = 161522, upload-time = "2026-01-30T01:02:50.393Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e9/06a6bf1b90c2ed81a9c7d2544232fe5d2891d1cd480e8a1809ca354a8eb2/pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe", size = 246945, upload-time = "2026-01-30T01:02:52.399Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/f6fb1007a4c3d8b682d5d65b7c1fb33257587a5f782647091e3408abe0b8/pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c", size = 259525, upload-time = "2026-01-30T01:02:53.737Z" }, + { url = "https://files.pythonhosted.org/packages/04/92/086f89b4d622a18418bac74ab5db7f68cf0c21cf7cc92de6c7b919d76c88/pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7", size = 
262693, upload-time = "2026-01-30T01:02:54.871Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7b/8b31c347cf94a3f900bdde750b2e9131575a61fdb620d3d3c75832262137/pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2", size = 103567, upload-time = "2026-01-30T01:02:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, + { 
url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = "https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url 
= "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { 
url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +] + +[[package]] +name = "pytz" +version = "2026.1.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = 
"2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, 
upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url 
= "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, + { url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, + { url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, + { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, + { url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, + { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, + { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, + { url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = 
"sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, +] + +[[package]] +name = "scikit-learn" +version = "1.7.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "joblib", marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "threadpoolctl", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/3e/daed796fd69cce768b8788401cc464ea90b306fb196ae1ffed0b98182859/scikit_learn-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b33579c10a3081d076ab403df4a4190da4f4432d443521674637677dc91e61f", size = 9336221, upload-time = "2025-09-09T08:20:19.328Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ce/af9d99533b24c55ff4e18d9b7b4d9919bbc6cd8f22fe7a7be01519a347d5/scikit_learn-1.7.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36749fb62b3d961b1ce4fedf08fa57a1986cd409eff2d783bca5d4b9b5fce51c", size = 8653834, upload-time = "2025-09-09T08:20:22.073Z" }, + { url = "https://files.pythonhosted.org/packages/58/0e/8c2a03d518fb6bd0b6b0d4b114c63d5f1db01ff0f9925d8eb10960d01c01/scikit_learn-1.7.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7a58814265dfc52b3295b1900cfb5701589d30a8bb026c7540f1e9d3499d5ec8", size = 9660938, upload-time = 
"2025-09-09T08:20:24.327Z" }, + { url = "https://files.pythonhosted.org/packages/2b/75/4311605069b5d220e7cf5adabb38535bd96f0079313cdbb04b291479b22a/scikit_learn-1.7.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a847fea807e278f821a0406ca01e387f97653e284ecbd9750e3ee7c90347f18", size = 9477818, upload-time = "2025-09-09T08:20:26.845Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9b/87961813c34adbca21a6b3f6b2bea344c43b30217a6d24cc437c6147f3e8/scikit_learn-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:ca250e6836d10e6f402436d6463d6c0e4d8e0234cfb6a9a47835bd392b852ce5", size = 8886969, upload-time = "2025-09-09T08:20:29.329Z" }, + { url = "https://files.pythonhosted.org/packages/43/83/564e141eef908a5863a54da8ca342a137f45a0bfb71d1d79704c9894c9d1/scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e", size = 9331967, upload-time = "2025-09-09T08:20:32.421Z" }, + { url = "https://files.pythonhosted.org/packages/18/d6/ba863a4171ac9d7314c4d3fc251f015704a2caeee41ced89f321c049ed83/scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1", size = 8648645, upload-time = "2025-09-09T08:20:34.436Z" }, + { url = "https://files.pythonhosted.org/packages/ef/0e/97dbca66347b8cf0ea8b529e6bb9367e337ba2e8be0ef5c1a545232abfde/scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d", size = 9715424, upload-time = "2025-09-09T08:20:36.776Z" }, + { url = "https://files.pythonhosted.org/packages/f7/32/1f3b22e3207e1d2c883a7e09abb956362e7d1bd2f14458c7de258a26ac15/scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1", size = 9509234, upload-time = "2025-09-09T08:20:38.957Z" }, + { 
url = "https://files.pythonhosted.org/packages/9f/71/34ddbd21f1da67c7a768146968b4d0220ee6831e4bcbad3e03dd3eae88b6/scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1", size = 8894244, upload-time = "2025-09-09T08:20:41.166Z" }, + { url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818, upload-time = "2025-09-09T08:20:43.19Z" }, + { url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997, upload-time = "2025-09-09T08:20:45.468Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381, upload-time = "2025-09-09T08:20:47.982Z" }, + { url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296, upload-time = "2025-09-09T08:20:50.366Z" }, + { url = "https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290", size = 8731256, upload-time = "2025-09-09T08:20:52.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382, upload-time = "2025-09-09T08:20:54.731Z" }, + { url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042, upload-time = "2025-09-09T08:20:57.313Z" }, + { url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180, upload-time = "2025-09-09T08:20:59.671Z" }, + { url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660, upload-time = "2025-09-09T08:21:01.71Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c", size = 8702057, upload-time = "2025-09-09T08:21:04.234Z" }, + { url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731, upload-time = "2025-09-09T08:21:06.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852, upload-time = "2025-09-09T08:21:08.628Z" }, + { url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094, upload-time = "2025-09-09T08:21:11.486Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436, upload-time = "2025-09-09T08:21:13.602Z" }, + { url = "https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973", size = 9275749, upload-time = "2025-09-09T08:21:15.96Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906, upload-time = "2025-09-09T08:21:18.557Z" }, + { url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836, upload-time = "2025-09-09T08:21:20.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236, upload-time = "2025-09-09T08:21:22.645Z" }, + { url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593, upload-time = "2025-09-09T08:21:24.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8", size = 8820007, upload-time = "2025-09-09T08:21:26.713Z" }, +] + +[[package]] +name = "scikit-learn" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "joblib", marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11'" }, + { name = "threadpoolctl", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/92/53ea2181da8ac6bf27170191028aee7251f8f841f8d3edbfdcaf2008fde9/scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da", size = 8595835, upload-time = "2025-12-10T07:07:39.385Z" }, + { url = "https://files.pythonhosted.org/packages/01/18/d154dc1638803adf987910cdd07097d9c526663a55666a97c124d09fb96a/scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1", size = 8080381, upload-time = "2025-12-10T07:07:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/44/226142fcb7b7101e64fdee5f49dbe6288d4c7af8abf593237b70fca080a4/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b", size = 8799632, upload-time = "2025-12-10T07:07:43.899Z" }, + { url = "https://files.pythonhosted.org/packages/36/4d/4a67f30778a45d542bbea5db2dbfa1e9e100bf9ba64aefe34215ba9f11f6/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1", size = 9103788, upload-time = "2025-12-10T07:07:45.982Z" }, + { url = "https://files.pythonhosted.org/packages/89/3c/45c352094cfa60050bcbb967b1faf246b22e93cb459f2f907b600f2ceda5/scikit_learn-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57b1b610bd1f40ba43970e11ce62821c2e6569e4d74023db19c6b26f246cb3b", size = 8081706, 
upload-time = "2025-12-10T07:07:48.111Z" }, + { url = "https://files.pythonhosted.org/packages/3d/46/5416595bb395757f754feb20c3d776553a386b661658fb21b7c814e89efe/scikit_learn-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:2838551e011a64e3053ad7618dda9310175f7515f1742fa2d756f7c874c05961", size = 7688451, upload-time = "2025-12-10T07:07:49.873Z" }, + { url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" }, + { url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" }, + { url = "https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c4/0ab22726a04ede56f689476b760f98f8f46607caecff993017ac1b64aa5d/scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809", size = 8019359, upload-time = "2025-12-10T07:07:59.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/90/344a67811cfd561d7335c1b96ca21455e7e472d281c3c279c4d3f2300236/scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb", size = 7641898, upload-time = "2025-12-10T07:08:01.36Z" }, + { url = "https://files.pythonhosted.org/packages/03/aa/e22e0768512ce9255eba34775be2e85c2048da73da1193e841707f8f039c/scikit_learn-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a", size = 8513770, upload-time = "2025-12-10T07:08:03.251Z" }, + { url = "https://files.pythonhosted.org/packages/58/37/31b83b2594105f61a381fc74ca19e8780ee923be2d496fcd8d2e1147bd99/scikit_learn-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e", size = 8044458, upload-time = "2025-12-10T07:08:05.336Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5a/3f1caed8765f33eabb723596666da4ebbf43d11e96550fb18bdec42b467b/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57", size = 8610341, upload-time = "2025-12-10T07:08:07.732Z" }, + { url = "https://files.pythonhosted.org/packages/38/cf/06896db3f71c75902a8e9943b444a56e727418f6b4b4a90c98c934f51ed4/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e", size = 8900022, upload-time = "2025-12-10T07:08:09.862Z" }, + { url = "https://files.pythonhosted.org/packages/1c/f9/9b7563caf3ec8873e17a31401858efab6b39a882daf6c1bfa88879c0aa11/scikit_learn-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:2de443b9373b3b615aec1bb57f9baa6bb3a9bd093f1269ba95c17d870422b271", size = 7989409, upload-time = "2025-12-10T07:08:12.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/bd/1f4001503650e72c4f6009ac0c4413cb17d2d601cef6f71c0453da2732fc/scikit_learn-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:eddde82a035681427cbedded4e6eff5e57fa59216c2e3e90b10b19ab1d0a65c3", size = 7619760, upload-time = "2025-12-10T07:08:13.688Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7d/a630359fc9dcc95496588c8d8e3245cc8fd81980251079bc09c70d41d951/scikit_learn-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735", size = 8826045, upload-time = "2025-12-10T07:08:15.215Z" }, + { url = "https://files.pythonhosted.org/packages/cc/56/a0c86f6930cfcd1c7054a2bc417e26960bb88d32444fe7f71d5c2cfae891/scikit_learn-1.8.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd", size = 8420324, upload-time = "2025-12-10T07:08:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/46/1e/05962ea1cebc1cf3876667ecb14c283ef755bf409993c5946ade3b77e303/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e", size = 8680651, upload-time = "2025-12-10T07:08:19.952Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/a85473cd75f200c9759e3a5f0bcab2d116c92a8a02ee08ccd73b870f8bb4/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb", size = 8925045, upload-time = "2025-12-10T07:08:22.11Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b7/64d8cfa896c64435ae57f4917a548d7ac7a44762ff9802f75a79b77cb633/scikit_learn-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ee787491dbfe082d9c3013f01f5991658b0f38aa8177e4cd4bf434c58f551702", size = 8507994, upload-time = "2025-12-10T07:08:23.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/37/e192ea709551799379958b4c4771ec507347027bb7c942662c7fbeba31cb/scikit_learn-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf97c10a3f5a7543f9b88cbf488d33d175e9146115a451ae34568597ba33dcde", size = 7869518, upload-time = "2025-12-10T07:08:25.71Z" }, + { url = "https://files.pythonhosted.org/packages/24/05/1af2c186174cc92dcab2233f327336058c077d38f6fe2aceb08e6ab4d509/scikit_learn-1.8.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3", size = 8528667, upload-time = "2025-12-10T07:08:27.541Z" }, + { url = "https://files.pythonhosted.org/packages/a8/25/01c0af38fe969473fb292bba9dc2b8f9b451f3112ff242c647fee3d0dfe7/scikit_learn-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7", size = 8066524, upload-time = "2025-12-10T07:08:29.822Z" }, + { url = "https://files.pythonhosted.org/packages/be/ce/a0623350aa0b68647333940ee46fe45086c6060ec604874e38e9ab7d8e6c/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6", size = 8657133, upload-time = "2025-12-10T07:08:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/b8/cb/861b41341d6f1245e6ca80b1c1a8c4dfce43255b03df034429089ca2a2c5/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4", size = 8923223, upload-time = "2025-12-10T07:08:34.166Z" }, + { url = "https://files.pythonhosted.org/packages/76/18/a8def8f91b18cd1ba6e05dbe02540168cb24d47e8dcf69e8d00b7da42a08/scikit_learn-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:56079a99c20d230e873ea40753102102734c5953366972a71d5cb39a32bc40c6", size = 8096518, upload-time = "2025-12-10T07:08:36.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/77/482076a678458307f0deb44e29891d6022617b2a64c840c725495bee343f/scikit_learn-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3bad7565bc9cf37ce19a7c0d107742b320c1285df7aab1a6e2d28780df167242", size = 7754546, upload-time = "2025-12-10T07:08:38.128Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d1/ef294ca754826daa043b2a104e59960abfab4cf653891037d19dd5b6f3cf/scikit_learn-1.8.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7", size = 8848305, upload-time = "2025-12-10T07:08:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e2/b1f8b05138ee813b8e1a4149f2f0d289547e60851fd1bb268886915adbda/scikit_learn-1.8.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9", size = 8432257, upload-time = "2025-12-10T07:08:42.873Z" }, + { url = "https://files.pythonhosted.org/packages/26/11/c32b2138a85dcb0c99f6afd13a70a951bfdff8a6ab42d8160522542fb647/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f", size = 8678673, upload-time = "2025-12-10T07:08:45.362Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/51f2384575bdec454f4fe4e7a919d696c9ebce914590abf3e52d47607ab8/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9", size = 8922467, upload-time = "2025-12-10T07:08:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/35/4d/748c9e2872637a57981a04adc038dacaa16ba8ca887b23e34953f0b3f742/scikit_learn-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:00d6f1d66fbcf4eba6e356e1420d33cc06c70a45bb1363cd6f6a8e4ebbbdece2", size = 8774395, upload-time = "2025-12-10T07:08:49.337Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/22/d7b2ebe4704a5e50790ba089d5c2ae308ab6bb852719e6c3bd4f04c3a363/scikit_learn-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f28dd15c6bb0b66ba09728cf09fd8736c304be29409bd8445a080c1280619e8c", size = 8002647, upload-time = "2025-12-10T07:08:51.601Z" }, +] + +[[package]] +name = "scipy" +version = "1.15.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c", size = 38702770, upload-time = "2025-05-08T16:04:20.849Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253", size = 30094511, upload-time = "2025-05-08T16:04:27.103Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f", size = 22368151, upload-time = "2025-05-08T16:04:31.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92", size = 25121732, upload-time = "2025-05-08T16:04:36.596Z" }, + { url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82", size = 35547617, upload-time = "2025-05-08T16:04:43.546Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40", size = 37662964, upload-time = "2025-05-08T16:04:49.431Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e", size = 37238749, upload-time = "2025-05-08T16:04:55.215Z" }, + { url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c", size = 40022383, upload-time = "2025-05-08T16:05:01.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13", size = 41259201, upload-time = "2025-05-08T16:05:08.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b", size = 38714255, upload-time = "2025-05-08T16:05:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba", size = 30111035, upload-time = "2025-05-08T16:05:20.152Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65", size = 22384499, upload-time = "2025-05-08T16:05:24.494Z" }, + { url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1", size = 25152602, upload-time = "2025-05-08T16:05:29.313Z" }, + { url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889", size = 35503415, upload-time = "2025-05-08T16:05:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982", size = 37652622, upload-time = "2025-05-08T16:05:40.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9", size = 37244796, upload-time = "2025-05-08T16:05:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594", size = 40047684, upload-time = "2025-05-08T16:05:54.22Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb", size = 41246504, upload-time = "2025-05-08T16:06:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019", size = 38766735, upload-time = "2025-05-08T16:06:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6", size = 30173284, upload-time = "2025-05-08T16:06:11.686Z" }, + { url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477", size = 22446958, upload-time = "2025-05-08T16:06:15.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c", size = 25242454, upload-time = "2025-05-08T16:06:20.394Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45", size = 35210199, upload-time = "2025-05-08T16:06:26.159Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49", size = 37309455, upload-time = "2025-05-08T16:06:32.778Z" }, + { url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e", size = 36885140, upload-time = "2025-05-08T16:06:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539", size = 39710549, upload-time = "2025-05-08T16:06:45.729Z" }, + { url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed", size = 40966184, upload-time = "2025-05-08T16:06:52.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759", size = 38728256, upload-time = "2025-05-08T16:06:58.696Z" }, + { url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62", size = 30109540, upload-time = "2025-05-08T16:07:04.209Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb", size = 22383115, upload-time = "2025-05-08T16:07:08.998Z" }, + { url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730", size = 25163884, upload-time = "2025-05-08T16:07:14.091Z" }, + { url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018, upload-time = "2025-05-08T16:07:19.427Z" }, + { url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", size = 37269716, upload-time = "2025-05-08T16:07:25.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342, upload-time = "2025-05-08T16:07:31.468Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869, upload-time = "2025-05-08T16:07:38.002Z" }, + { url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163", size = 40988851, upload-time = "2025-05-08T16:08:33.671Z" }, + { url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8", size = 38863011, upload-time = "2025-05-08T16:07:44.039Z" }, + { url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5", size = 30266407, upload-time = "2025-05-08T16:07:49.891Z" }, + { url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e", size = 22540030, upload-time = "2025-05-08T16:07:54.121Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb", size = 25218709, upload-time = "2025-05-08T16:07:58.506Z" }, + { url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045, upload-time = "2025-05-08T16:08:03.929Z" }, + { url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062, upload-time = "2025-05-08T16:08:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132, upload-time = "2025-05-08T16:08:15.34Z" }, + { url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503, upload-time = "2025-05-08T16:08:21.513Z" }, + { url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca", size = 40308097, upload-time = "2025-05-08T16:08:27.627Z" }, +] + +[[package]] +name = "scipy" +version = "1.17.1" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, + { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = "2026-02-23T00:16:09.456Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, + { 
url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, + { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, + { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, + { url = "https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 36607512, upload-time = "2026-02-23T00:17:23.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, + { url = "https://files.pythonhosted.org/packages/76/27/07ee1b57b65e92645f219b37148a7e7928b82e2b5dbeccecb4dff7c64f0b/scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c", size = 31590199, upload-time = "2026-02-23T00:19:17.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/ae/db19f8ab842e9b724bf5dbb7db29302a91f1e55bc4d04b1025d6d605a2c5/scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f", size = 28154001, upload-time = "2026-02-23T00:19:22.241Z" }, + { url = "https://files.pythonhosted.org/packages/5b/58/3ce96251560107b381cbd6e8413c483bbb1228a6b919fa8652b0d4090e7f/scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d", size = 20325719, upload-time = "2026-02-23T00:19:26.329Z" }, + { url = "https://files.pythonhosted.org/packages/b2/83/15087d945e0e4d48ce2377498abf5ad171ae013232ae31d06f336e64c999/scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b", size = 22683595, upload-time = "2026-02-23T00:19:30.304Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e0/e58fbde4a1a594c8be8114eb4aac1a55bcd6587047efc18a61eb1f5c0d30/scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6", size = 32896429, upload-time = "2026-02-23T00:19:35.536Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/f17563f28ff03c7b6799c50d01d5d856a1d55f2676f537ca8d28c7f627cd/scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464", size = 35203952, upload-time = "2026-02-23T00:19:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a5/9afd17de24f657fdfe4df9a3f1ea049b39aef7c06000c13db1530d81ccca/scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950", size = 34979063, upload-time = "2026-02-23T00:19:47.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/13/88b1d2384b424bf7c924f2038c1c409f8d88bb2a8d49d097861dd64a57b2/scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369", size = 37598449, upload-time = "2026-02-23T00:19:53.238Z" }, + { url = "https://files.pythonhosted.org/packages/35/e5/d6d0e51fc888f692a35134336866341c08655d92614f492c6860dc45bb2c/scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448", size = 36510943, upload-time = "2026-02-23T00:20:50.89Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fd/3be73c564e2a01e690e19cc618811540ba5354c67c8680dce3281123fb79/scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87", size = 24545621, upload-time = "2026-02-23T00:20:55.871Z" }, + { url = "https://files.pythonhosted.org/packages/6f/6b/17787db8b8114933a66f9dcc479a8272e4b4da75fe03b0c282f7b0ade8cd/scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a", size = 31936708, upload-time = "2026-02-23T00:19:58.694Z" }, + { url = "https://files.pythonhosted.org/packages/38/2e/524405c2b6392765ab1e2b722a41d5da33dc5c7b7278184a8ad29b6cb206/scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0", size = 28570135, upload-time = "2026-02-23T00:20:03.934Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c3/5bd7199f4ea8556c0c8e39f04ccb014ac37d1468e6cfa6a95c6b3562b76e/scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce", size = 20741977, upload-time = "2026-02-23T00:20:07.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/b8/8ccd9b766ad14c78386599708eb745f6b44f08400a5fd0ade7cf89b6fc93/scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6", size = 23029601, upload-time = "2026-02-23T00:20:12.161Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a0/3cb6f4d2fb3e17428ad2880333cac878909ad1a89f678527b5328b93c1d4/scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e", size = 33019667, upload-time = "2026-02-23T00:20:17.208Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c3/2d834a5ac7bf3a0c806ad1508efc02dda3c8c61472a56132d7894c312dea/scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475", size = 35264159, upload-time = "2026-02-23T00:20:23.087Z" }, + { url = "https://files.pythonhosted.org/packages/4d/77/d3ed4becfdbd217c52062fafe35a72388d1bd82c2d0ba5ca19d6fcc93e11/scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50", size = 35102771, upload-time = "2026-02-23T00:20:28.636Z" }, + { url = "https://files.pythonhosted.org/packages/bd/12/d19da97efde68ca1ee5538bb261d5d2c062f0c055575128f11a2730e3ac1/scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca", size = 37665910, upload-time = "2026-02-23T00:20:34.743Z" }, + { url = "https://files.pythonhosted.org/packages/06/1c/1172a88d507a4baaf72c5a09bb6c018fe2ae0ab622e5830b703a46cc9e44/scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c", size = 36562980, upload-time = "2026-02-23T00:20:40.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/b0/eb757336e5a76dfa7911f63252e3b7d1de00935d7705cf772db5b45ec238/scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49", size = 24856543, upload-time = "2026-02-23T00:20:45.313Z" }, + { url = "https://files.pythonhosted.org/packages/cf/83/333afb452af6f0fd70414dc04f898647ee1423979ce02efa75c3b0f2c28e/scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717", size = 31584510, upload-time = "2026-02-23T00:21:01.015Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a6/d05a85fd51daeb2e4ea71d102f15b34fedca8e931af02594193ae4fd25f7/scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9", size = 28170131, upload-time = "2026-02-23T00:21:05.888Z" }, + { url = "https://files.pythonhosted.org/packages/db/7b/8624a203326675d7746a254083a187398090a179335b2e4a20e2ddc46e83/scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b", size = 20342032, upload-time = "2026-02-23T00:21:09.904Z" }, + { url = "https://files.pythonhosted.org/packages/c9/35/2c342897c00775d688d8ff3987aced3426858fd89d5a0e26e020b660b301/scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866", size = 22678766, upload-time = "2026-02-23T00:21:14.313Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f2/7cdb8eb308a1a6ae1e19f945913c82c23c0c442a462a46480ce487fdc0ac/scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350", size = 32957007, upload-time = "2026-02-23T00:21:19.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/2e/7eea398450457ecb54e18e9d10110993fa65561c4f3add5e8eccd2b9cd41/scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118", size = 35221333, upload-time = "2026-02-23T00:21:25.278Z" }, + { url = "https://files.pythonhosted.org/packages/d9/77/5b8509d03b77f093a0d52e606d3c4f79e8b06d1d38c441dacb1e26cacf46/scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068", size = 35042066, upload-time = "2026-02-23T00:21:31.358Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/18f80fb99df40b4070328d5ae5c596f2f00fffb50167e31439e932f29e7d/scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118", size = 37612763, upload-time = "2026-02-23T00:21:37.247Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/f0e8ea762a764a9dc52aa7dabcfad51a354819de1f0d4652b6a1122424d6/scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19", size = 37290984, upload-time = "2026-02-23T00:22:35.023Z" }, + { url = "https://files.pythonhosted.org/packages/7c/56/fe201e3b0f93d1a8bcf75d3379affd228a63d7e2d80ab45467a74b494947/scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293", size = 25192877, upload-time = "2026-02-23T00:22:39.798Z" }, + { url = "https://files.pythonhosted.org/packages/96/ad/f8c414e121f82e02d76f310f16db9899c4fcde36710329502a6b2a3c0392/scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6", size = 31949750, upload-time = "2026-02-23T00:21:42.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/b0/c741e8865d61b67c81e255f4f0a832846c064e426636cd7de84e74d209be/scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1", size = 28585858, upload-time = "2026-02-23T00:21:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1b/3985219c6177866628fa7c2595bfd23f193ceebbe472c98a08824b9466ff/scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39", size = 20757723, upload-time = "2026-02-23T00:21:52.039Z" }, + { url = "https://files.pythonhosted.org/packages/c0/19/2a04aa25050d656d6f7b9e7b685cc83d6957fb101665bfd9369ca6534563/scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca", size = 23043098, upload-time = "2026-02-23T00:21:56.185Z" }, + { url = "https://files.pythonhosted.org/packages/86/f1/3383beb9b5d0dbddd030335bf8a8b32d4317185efe495374f134d8be6cce/scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad", size = 33030397, upload-time = "2026-02-23T00:22:01.404Z" }, + { url = "https://files.pythonhosted.org/packages/41/68/8f21e8a65a5a03f25a79165ec9d2b28c00e66dc80546cf5eb803aeeff35b/scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a", size = 35281163, upload-time = "2026-02-23T00:22:07.024Z" }, + { url = "https://files.pythonhosted.org/packages/84/8d/c8a5e19479554007a5632ed7529e665c315ae7492b4f946b0deb39870e39/scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4", size = 35116291, upload-time = "2026-02-23T00:22:12.585Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/52/e57eceff0e342a1f50e274264ed47497b59e6a4e3118808ee58ddda7b74a/scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2", size = 37682317, upload-time = "2026-02-23T00:22:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/11/2f/b29eafe4a3fbc3d6de9662b36e028d5f039e72d345e05c250e121a230dd4/scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484", size = 37345327, upload-time = "2026-02-23T00:22:24.442Z" }, + { url = "https://files.pythonhosted.org/packages/07/39/338d9219c4e87f3e708f18857ecd24d22a0c3094752393319553096b98af/scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21", size = 25489165, upload-time = "2026-02-23T00:22:29.563Z" }, +] + +[[package]] +name = "seaborn" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696, upload-time = "2024-01-25T13:21:52.551Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914, upload-time = "2024-01-25T13:21:49.598Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "statsmodels" +version = "0.14.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { 
registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "packaging" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "patsy" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/81/e8d74b34f85285f7335d30c5e3c2d7c0346997af9f3debf9a0a9a63de184/statsmodels-0.14.6.tar.gz", hash = "sha256:4d17873d3e607d398b85126cd4ed7aad89e4e9d89fc744cdab1af3189a996c2a", size = 20689085, upload-time = "2025-12-05T23:08:39.522Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/6d/9ec309a175956f88eb8420ac564297f37cf9b1f73f89db74da861052dc29/statsmodels-0.14.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4ff0649a2df674c7ffb6fa1a06bffdb82a6adf09a48e90e000a15a6aaa734b0", size = 10142419, upload-time = "2025-12-05T19:27:35.625Z" }, + { url = "https://files.pythonhosted.org/packages/86/8f/338c5568315ec5bf3ac7cd4b71e34b98cb3b0f834919c0c04a0762f878a1/statsmodels-0.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:109012088b3e370080846ab053c76d125268631410142daad2f8c10770e8e8d9", size = 10022819, upload-time = "2025-12-05T19:27:49.385Z" }, + { url = "https://files.pythonhosted.org/packages/b0/77/5fc4cbc2d608f9b483b0675f82704a8bcd672962c379fe4d82100d388dbf/statsmodels-0.14.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e93bd5d220f3cb6fc5fc1bffd5b094966cab8ee99f6c57c02e95710513d6ac3f", size = 10118927, upload-time = "2025-12-05T23:07:51.256Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/55/b86c861c32186403fe121d9ab27bc16d05839b170d92a978beb33abb995e/statsmodels-0.14.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:06eec42d682fdb09fe5d70a05930857efb141754ec5a5056a03304c1b5e32fd9", size = 10413015, upload-time = "2025-12-05T23:08:53.95Z" }, + { url = "https://files.pythonhosted.org/packages/f9/be/daf0dba729ccdc4176605f4a0fd5cfe71cdda671749dca10e74a732b8b1c/statsmodels-0.14.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0444e88557df735eda7db330806fe09d51c9f888bb1f5906cb3a61fb1a3ed4a8", size = 10441248, upload-time = "2025-12-05T23:09:09.353Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1c/2e10b7c7cc44fa418272996bf0427b8016718fd62f995d9c1f7ab37adf35/statsmodels-0.14.6-cp310-cp310-win_amd64.whl", hash = "sha256:e83a9abe653835da3b37fb6ae04b45480c1de11b3134bd40b09717192a1456ea", size = 9583410, upload-time = "2025-12-05T19:28:02.086Z" }, + { url = "https://files.pythonhosted.org/packages/a9/4d/df4dd089b406accfc3bb5ee53ba29bb3bdf5ae61643f86f8f604baa57656/statsmodels-0.14.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ad5c2810fc6c684254a7792bf1cbaf1606cdee2a253f8bd259c43135d87cfb4", size = 10121514, upload-time = "2025-12-05T19:28:16.521Z" }, + { url = "https://files.pythonhosted.org/packages/82/af/ec48daa7f861f993b91a0dcc791d66e1cf56510a235c5cbd2ab991a31d5c/statsmodels-0.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:341fa68a7403e10a95c7b6e41134b0da3a7b835ecff1eb266294408535a06eb6", size = 10003346, upload-time = "2025-12-05T19:28:29.568Z" }, + { url = "https://files.pythonhosted.org/packages/a9/2c/c8f7aa24cd729970728f3f98822fb45149adc216f445a9301e441f7ac760/statsmodels-0.14.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdf1dfe2a3ca56f5529118baf33a13efed2783c528f4a36409b46bbd2d9d48eb", size = 10129872, upload-time = "2025-12-05T23:09:25.724Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/c6/9ae8e9b0721e9b6eb5f340c3a0ce8cd7cce4f66e03dd81f80d60f111987f/statsmodels-0.14.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3764ba8195c9baf0925a96da0743ff218067a269f01d155ca3558deed2658ca", size = 10381964, upload-time = "2025-12-05T23:09:41.326Z" }, + { url = "https://files.pythonhosted.org/packages/28/8c/cf3d30c8c2da78e2ad1f50ade8b7fabec3ff4cdfc56fbc02e097c4577f90/statsmodels-0.14.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e8d2e519852adb1b420e018f5ac6e6684b2b877478adf7fda2cfdb58f5acb5d", size = 10409611, upload-time = "2025-12-05T23:09:57.131Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cc/018f14ecb58c6cb89de9d52695740b7d1f5a982aa9ea312483ea3c3d5f77/statsmodels-0.14.6-cp311-cp311-win_amd64.whl", hash = "sha256:2738a00fca51196f5a7d44b06970ace6b8b30289839e4808d656f8a98e35faa7", size = 9580385, upload-time = "2025-12-05T19:28:42.778Z" }, + { url = "https://files.pythonhosted.org/packages/25/ce/308e5e5da57515dd7cab3ec37ea2d5b8ff50bef1fcc8e6d31456f9fae08e/statsmodels-0.14.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe76140ae7adc5ff0e60a3f0d56f4fffef484efa803c3efebf2fcd734d72ecb5", size = 10091932, upload-time = "2025-12-05T19:28:55.446Z" }, + { url = "https://files.pythonhosted.org/packages/05/30/affbabf3c27fb501ec7b5808230c619d4d1a4525c07301074eb4bda92fa9/statsmodels-0.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26d4f0ed3b31f3c86f83a92f5c1f5cbe63fc992cd8915daf28ca49be14463a1c", size = 9997345, upload-time = "2025-12-05T19:29:10.278Z" }, + { url = "https://files.pythonhosted.org/packages/48/f5/3a73b51e6450c31652c53a8e12e24eac64e3824be816c0c2316e7dbdcb7d/statsmodels-0.14.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8c00a42863e4f4733ac9d078bbfad816249c01451740e6f5053ecc7db6d6368", size = 10058649, upload-time = "2025-12-05T23:10:12.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/68/dddd76117df2ef14c943c6bbb6618be5c9401280046f4ddfc9fb4596a1b8/statsmodels-0.14.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19b58cf7474aa9e7e3b0771a66537148b2df9b5884fbf156096c0e6c1ff0469d", size = 10339446, upload-time = "2025-12-05T23:10:28.503Z" }, + { url = "https://files.pythonhosted.org/packages/56/4a/dce451c74c4050535fac1ec0c14b80706d8fc134c9da22db3c8a0ec62c33/statsmodels-0.14.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81e7dcc5e9587f2567e52deaff5220b175bf2f648951549eae5fc9383b62bc37", size = 10368705, upload-time = "2025-12-05T23:10:44.339Z" }, + { url = "https://files.pythonhosted.org/packages/60/15/3daba2df40be8b8a9a027d7f54c8dedf24f0d81b96e54b52293f5f7e3418/statsmodels-0.14.6-cp312-cp312-win_amd64.whl", hash = "sha256:b5eb07acd115aa6208b4058211138393a7e6c2cf12b6f213ede10f658f6a714f", size = 9543991, upload-time = "2025-12-05T23:10:58.536Z" }, + { url = "https://files.pythonhosted.org/packages/81/59/a5aad5b0cc266f5be013db8cde563ac5d2a025e7efc0c328d83b50c72992/statsmodels-0.14.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47ee7af083623d2091954fa71c7549b8443168f41b7c5dce66510274c50fd73e", size = 10072009, upload-time = "2025-12-05T23:11:14.021Z" }, + { url = "https://files.pythonhosted.org/packages/53/dd/d8cfa7922fc6dc3c56fa6c59b348ea7de829a94cd73208c6f8202dd33f17/statsmodels-0.14.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa60d82e29fcd0a736e86feb63a11d2380322d77a9369a54be8b0965a3985f71", size = 9980018, upload-time = "2025-12-05T23:11:30.907Z" }, + { url = "https://files.pythonhosted.org/packages/ee/77/0ec96803eba444efd75dba32f2ef88765ae3e8f567d276805391ec2c98c6/statsmodels-0.14.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89ee7d595f5939cc20bf946faedcb5137d975f03ae080f300ebb4398f16a5bd4", size = 10060269, upload-time = "2025-12-05T23:11:46.338Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/b9/fd41f1f6af13a1a1212a06bb377b17762feaa6d656947bf666f76300fc05/statsmodels-0.14.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:730f3297b26749b216a06e4327fe0be59b8d05f7d594fb6caff4287b69654589", size = 10324155, upload-time = "2025-12-05T23:12:01.805Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0f/a6900e220abd2c69cd0a07e3ad26c71984be6061415a60e0f17b152ecf08/statsmodels-0.14.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f1c08befa85e93acc992b72a390ddb7bd876190f1360e61d10cf43833463bc9c", size = 10349765, upload-time = "2025-12-05T23:12:18.018Z" }, + { url = "https://files.pythonhosted.org/packages/98/08/b79f0c614f38e566eebbdcff90c0bcacf3c6ba7a5bbb12183c09c29ca400/statsmodels-0.14.6-cp313-cp313-win_amd64.whl", hash = "sha256:8021271a79f35b842c02a1794465a651a9d06ec2080f76ebc3b7adce77d08233", size = 9540043, upload-time = "2025-12-05T23:12:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/71/de/09540e870318e0c7b58316561d417be45eff731263b4234fdd2eee3511a8/statsmodels-0.14.6-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:00781869991f8f02ad3610da6627fd26ebe262210287beb59761982a8fa88cae", size = 10069403, upload-time = "2025-12-05T23:12:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f0/63c1bfda75dc53cee858006e1f46bd6d6f883853bea1b97949d0087766ca/statsmodels-0.14.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:73f305fbf31607b35ce919fae636ab8b80d175328ed38fdc6f354e813b86ee37", size = 9989253, upload-time = "2025-12-05T23:13:05.274Z" }, + { url = "https://files.pythonhosted.org/packages/c1/98/b0dfb4f542b2033a3341aa5f1bdd97024230a4ad3670c5b0839d54e3dcab/statsmodels-0.14.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e443e7077a6e2d3faeea72f5a92c9f12c63722686eb80bb40a0f04e4a7e267ad", size = 10090802, upload-time = "2025-12-05T23:13:20.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/0e/2408735aca9e764643196212f9069912100151414dd617d39ffc72d77eee/statsmodels-0.14.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3414e40c073d725007a6603a18247ab7af3467e1af4a5e5a24e4c27bc26673b4", size = 10337587, upload-time = "2025-12-05T23:13:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/0f/36/4d44f7035ab3c0b2b6a4c4ebb98dedf36246ccbc1b3e2f51ebcd7ac83abb/statsmodels-0.14.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a518d3f9889ef920116f9fa56d0338069e110f823926356946dae83bc9e33e19", size = 10363350, upload-time = "2025-12-05T23:13:53.08Z" }, + { url = "https://files.pythonhosted.org/packages/26/33/f1652d0c59fa51de18492ee2345b65372550501ad061daa38f950be390b6/statsmodels-0.14.6-cp314-cp314-win_amd64.whl", hash = "sha256:151b73e29f01fe619dbce7f66d61a356e9d1fe5e906529b78807df9189c37721", size = 9588010, upload-time = "2025-12-05T23:14:07.28Z" }, +] + +[[package]] +name = "tables" +version = "3.10.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "blosc2", marker = "python_full_version < '3.11'" }, + { name = "numexpr", marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "packaging", marker = "python_full_version < '3.11'" }, + { name = "py-cpuinfo", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/5d/96708a84e9fcd29d1f684d56d4c38a23d29b1c934599a072a49f27ccfa71/tables-3.10.1.tar.gz", hash = "sha256:4aa07ac734b9c037baeaf44aec64ec902ad247f57811b59f30c4e31d31f126cf", size = 4762413, upload-time = "2024-08-17T09:57:47.127Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ff/69/a768ec8104ada032c9be09f521f548766ddd0351bc941c9d42fa5db001de/tables-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca9d11a570ca1bc57f0845e54e55c3093d5a1ace376faee639e09503a73745b", size = 6823691, upload-time = "2024-08-17T09:56:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/e4/2d/074bc14b39de9b552eec02ee583eff2997d903da1355f4450506335a6055/tables-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b62881cb682438d1e92b9178db42b160638aef3ca23341f7d98e9b27821b1eb4", size = 5471221, upload-time = "2024-08-17T09:56:54.84Z" }, + { url = "https://files.pythonhosted.org/packages/4a/30/29411ab804b5ac4bee25c82ba38f4e7a8c0b52c6a1cdbeea7d1db33a53fe/tables-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9cf1bfd8b0e0195196205fc8a134628219cff85d20da537facd67a291e6b347", size = 7170201, upload-time = "2024-08-17T09:56:59.011Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7d/3165c7538b8e89b22fa17ad68e04106cca7023cf68e94011ae7b3b6d2a78/tables-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77f0e6dd45b91d99bf3976c8655c48fe3816baf390b9098e4fb2f0fdf9da7078", size = 7571035, upload-time = "2024-08-17T09:57:03.115Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/985a23d2cf27aad383301a5e99e1851228a1941b868515612b5357bded5f/tables-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d90542ec172d1d60df0b796c48ad446f2b69a5d5cd3077bd6450891b854d1ffb", size = 6311650, upload-time = "2024-08-17T09:57:06.593Z" }, + { url = "https://files.pythonhosted.org/packages/dc/04/957264eb35e60251830a965e2d02332eb36ed14fbd8345df06981bbf3ece/tables-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8917262a2bb3cd79d37e108557e34ec4b365fdcc806e01dd10765a84c65dab6", size = 6790492, upload-time = "2024-08-17T09:57:10.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/19/eb7af9d92aaf6766f5fedfce11a97ab03cf39856561c5f562dc0c769a682/tables-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f93f6db623b484bb6606537c2a71e95ee34fae19b0d891867642dd8c7be05af6", size = 5506835, upload-time = "2024-08-17T09:57:13.883Z" }, + { url = "https://files.pythonhosted.org/packages/b0/8f/897324e1ad543ca439b2c91f04c406f3eeda6e7ff2f43b4cd939f05043e4/tables-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01ca51624bca1a87e703d6d6b796368bc3460ff007ea8b1341be03bedd863833", size = 7166960, upload-time = "2024-08-17T09:57:17.463Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5c/3f21d1135bf60af99ac79a17bbffd333d69763df2197ba04f47dd30bbd4e/tables-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9372516c76be3a05a573df63a69ce38315d03b5816d2a1e89c48129ec8b161b0", size = 7568724, upload-time = "2024-08-17T09:57:23.02Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e3/3ee6b66263902eccadc4e0e23bca7fb480fd190904b7ce0bea4777b5b799/tables-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:09190fb504888aeacafb7739c13d5c5a3e87af3d261f4d2f832b1f8407be133a", size = 6312200, upload-time = "2024-08-17T09:57:26.322Z" }, + { url = "https://files.pythonhosted.org/packages/95/ec/ea6c476e33602c172c797fe8f8ab96d007d964137068276d142b142a28e5/tables-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7090af37909e3bf229d5599fa442633e5a93b6082960b01038dc0106e07a8da", size = 6791597, upload-time = "2024-08-17T09:57:29.598Z" }, + { url = "https://files.pythonhosted.org/packages/74/02/a967a506e9204e3328a8c03f67e6f3c919defc8df11aba83ae5b2abf7b0f/tables-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:203ed50c0c5f30f007df7633089b2a567b99856cd25d68f19d91624a8db2e7ad", size = 5474779, upload-time = "2024-08-17T09:57:32.43Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/26/925793f753664ec698b2c6315c818269313db143da38150897cf260405c2/tables-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e36ce9f10471c69c1f0b06c6966de762558a35d62592c55df7994a8019adaf0c", size = 7130683, upload-time = "2024-08-17T09:57:36.181Z" }, + { url = "https://files.pythonhosted.org/packages/d8/79/2b34f22284459e940a84e71dba19b2a34c7cc0ce3cdf685923c50d5b9611/tables-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f233e78cc9fa4157ec4c3ef2abf01a731fe7969bc6ed73539e5f4cd3b94c98b2", size = 7531367, upload-time = "2024-08-17T09:57:39.864Z" }, + { url = "https://files.pythonhosted.org/packages/3d/27/5a23830f611e26dd7ee104096c6bb82e481b16f3f17ccaed3075f8d48312/tables-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:34357d2f2f75843a44e6fe54d1f11fc2e35a8fd3cb134df3d3362cff78010adb", size = 6295046, upload-time = "2024-08-17T09:57:43.561Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/e7c25df877e054b05f146d6ccb920bcdbe8d39b35a0962868b80547532c7/tables-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6fc5b46a4f359249c3ab9a0a0a2448d7e680e68cffd63fdf3fb7171781edd46e", size = 6824253, upload-time = "2024-11-09T19:26:06.428Z" }, + { url = "https://files.pythonhosted.org/packages/c6/49/091865d75090a24493bd1b66e52d72f4d9627ff42983a13d4dcd89455d02/tables-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2ecabd7f459d40b7f9f5256850dd5f43773fda7b789f827de92c3d26df1e320f", size = 5499587, upload-time = "2024-11-09T19:26:12.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/83/9dac8af333149fa01add439f710d4a312b70faf81c2f59a16b8bfaebb75e/tables-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40a4ee18f3c9339d9dd8fd3777c75cda5768f2ff347064a2796f59161a190af8", size = 7128236, upload-time = "2024-11-09T19:26:15.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/fd/62f31643596f6ab71fc6d2a87acdee0bc01a03fbe1a7f3f6dc0c91e2546d/tables-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757c6ea257c174af8036cf8f273ede756bbcd6db5ac7e2a4d64e788b0f371152", size = 7527953, upload-time = "2024-11-09T19:26:20.229Z" }, +] + +[[package]] +name = "tables" +version = "3.11.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.11' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "blosc2", marker = "python_full_version >= '3.11'" }, + { name = "numexpr", marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "py-cpuinfo", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/a3/d213ebe7376d48055bd55a29cd9f99061afa0dcece608f94a5025d797b0a/tables-3.11.1.tar.gz", hash = "sha256:78abcf413091bc7c1e4e8c10fbbb438d1ac0b5a87436c5b972c3e8253871b6fb", size = 4790533, upload-time = "2026-03-01T11:43:36.036Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/bb/4a9cde6628563388db26fa86c64adb0f2475a757e72af0ec185fd520b72f/tables-3.11.1-cp311-abi3-macosx_10_9_x86_64.whl", hash = "sha256:eb30684c42a77bbecdef2b9c763c4372b0ddc9cc5bd8b2a2055f2042eee67217", size = 
7045977, upload-time = "2026-03-01T11:42:48.605Z" }, + { url = "https://files.pythonhosted.org/packages/78/74/6568c8d3aabf9982ab89fe3e378afbd7aad4894bde4570991a3246169ef4/tables-3.11.1-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:f0367d2e3df0f10ea63ccf4279f3fe58e32ec481767320301a483e2b3cd83efc", size = 6264947, upload-time = "2026-03-01T11:42:53.192Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a3/ec228901fca4c996306b17f5c60a4105144df0bbd07b3a4a816f91f37b4a/tables-3.11.1-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56bf6fb9132ead989b7e76695d7613d6d08f071a8019038d6565ba90c66b9f3e", size = 6903733, upload-time = "2026-03-01T11:42:58.349Z" }, + { url = "https://files.pythonhosted.org/packages/99/29/c2dc674ea70fa9a4819417289a9c0d3e4780835beeed573eb66964cfb763/tables-3.11.1-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e78fe190fdeb4afe430b79651bae2a4f341904eb85aa8dbafe5f1caee1c7f67", size = 7241357, upload-time = "2026-03-01T11:43:03.938Z" }, + { url = "https://files.pythonhosted.org/packages/60/b5/a59b62af4127790c618eb11c06c106706e07509a3fb9e346b2a3ffa74419/tables-3.11.1-cp311-abi3-win_amd64.whl", hash = "sha256:7fa6cb03f6fe55ae4f85e89ec5450e5c40cc4c52d8c3b60eb157a445c2219e89", size = 6526565, upload-time = "2026-03-01T11:43:08.58Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ce/561c82496e7c8c15ebf19b53b12c0ef91b322a66869db762db9711102764/tables-3.11.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a4bbd95036a4d0cc5c86c1f87fbb490b4c53cd70982f1c01b3ed6dcb3085cbb9", size = 7111409, upload-time = "2026-03-01T11:43:13.424Z" }, + { url = "https://files.pythonhosted.org/packages/84/18/bac920aee8239b572c506459607c6dd8742bc6275a43d51d2dd6ae1a1541/tables-3.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e3cfe79484351f7216eb8f3767bfa1217bfd271b04428f79cfa7ef6d7491919d", size = 6380142, upload-time = "2026-03-01T11:43:17.213Z" 
}, + { url = "https://files.pythonhosted.org/packages/59/3c/f4a694aa744d2b14d536e172c28dd70c84445f4787083a82d6d44a39e39f/tables-3.11.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a9c35f87fcb6a48c79fbc4e3ab15ca8f6053c4ce13063d6ca2ec36cbb58f40f", size = 7014135, upload-time = "2026-03-01T11:43:22.359Z" }, + { url = "https://files.pythonhosted.org/packages/45/82/94d4320d6c0fe5bd55230eec90cd142d58cda37b7cce00a318ac2a6abd93/tables-3.11.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4cf3218b76ba78d156d6ee75c19fb757d50682f6c7b4905370441afbfc9d77f3", size = 7349293, upload-time = "2026-03-01T11:43:27.569Z" }, + { url = "https://files.pythonhosted.org/packages/f7/02/a0f61a602ce2f2be8cc2e6146cc51acdaa8a1bb9b823b3863e70d3e0505d/tables-3.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:a6f7a3b82dbf0ae0f30de635ca88bb42dd87938b0950369d0ee4289c52ae6de2", size = 6854713, upload-time = "2026-03-01T11:43:31.934Z" }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = 
"sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { 
url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, +] + +[[package]] +name = "typer" +version = 
"0.24.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "umap-learn" +version = "0.5.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numba" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.4.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pynndescent" }, + { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/9a/a1e4a257a9aa979dac4f6d5781dac929cbb0949959e2003ed82657d10b0f/umap_learn-0.5.11.tar.gz", hash = "sha256:31566ffd495fbf05d7ab3efcba703861c0f5e6fc6998a838d0e2becdd00e54f5", size = 96409, upload-time = "2026-01-12T20:44:47.553Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/d2/fcf7192dd1cd8c090b6cfd53fa223c4fb2887a17c47e06bc356d44f40dfb/umap_learn-0.5.11-py3-none-any.whl", hash = "sha256:cb17adbde9d544ba79481b3ab4d81ac222e940f3d9219307bea6044f869af3cc", size = 90890, upload-time = "2026-01-12T20:44:46.511Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, 
upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = 
"2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time 
= "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = 
"2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, 
upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = 
"2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, 
upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + +[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0d/9cc638702f6fc3c7a3685bcc8cf2a9ed7d6206e932a49f5242658047ef51/yarl-1.23.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cff6d44cb13d39db2663a22b22305d10855efa0fa8015ddeacc40bc59b9d8107", size = 123764, upload-time = "2026-03-01T22:04:09.7Z" }, + { url = "https://files.pythonhosted.org/packages/7a/35/5a553687c5793df5429cd1db45909d4f3af7eee90014888c208d086a44f0/yarl-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c53f8347cd4200f0d70a48ad059cabaf24f5adc6ba08622a23423bc7efa10d", size = 86282, upload-time = "2026-03-01T22:04:11.892Z" }, + { url = "https://files.pythonhosted.org/packages/68/2e/c5a2234238f8ce37a8312b52801ee74117f576b1539eec8404a480434acc/yarl-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a6940a074fb3c48356ed0158a3ca5699c955ee4185b4d7d619be3c327143e05", size = 86053, upload-time = "2026-03-01T22:04:13.292Z" }, + { url = "https://files.pythonhosted.org/packages/74/3f/bbd8ff36fb038622797ffbaf7db314918bb4d76f1cc8a4f9ca7a55fe5195/yarl-1.23.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed5f69ce7be7902e5c70ea19eb72d20abf7d725ab5d49777d696e32d4fc1811d", size = 99395, upload-time = "2026-03-01T22:04:15.133Z" }, + { url = "https://files.pythonhosted.org/packages/77/04/9516bc4e269d2a3ec9c6779fcdeac51ce5b3a9b0156f06ac7152e5bba864/yarl-1.23.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:389871e65468400d6283c0308e791a640b5ab5c83bcee02a2f51295f95e09748", size = 92143, upload-time = "2026-03-01T22:04:16.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/63/88802d1f6b1cb1fc67d67a58cd0cf8a1790de4ce7946e434240f1d60ab4a/yarl-1.23.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dda608c88cf709b1d406bdfcd84d8d63cff7c9e577a403c6108ce8ce9dcc8764", size = 107643, upload-time = "2026-03-01T22:04:18.519Z" }, + { url = "https://files.pythonhosted.org/packages/8e/db/4f9b838f4d8bdd6f0f385aed8bbf21c71ed11a0b9983305c302cbd557815/yarl-1.23.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8c4fe09e0780c6c3bf2b7d4af02ee2394439d11a523bbcf095cf4747c2932007", size = 108700, upload-time = "2026-03-01T22:04:20.373Z" }, + { url = "https://files.pythonhosted.org/packages/50/12/95a1d33f04a79c402664070d43b8b9f72dc18914e135b345b611b0b1f8cc/yarl-1.23.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c9921eb8bd12633b41ad27686bbb0b1a2a9b8452bfdf221e34f311e9942ed4", size = 102769, upload-time = "2026-03-01T22:04:23.055Z" }, + { url = "https://files.pythonhosted.org/packages/86/65/91a0285f51321369fd1a8308aa19207520c5f0587772cfc2e03fc2467e90/yarl-1.23.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5f10fd85e4b75967468af655228fbfd212bdf66db1c0d135065ce288982eda26", size = 101114, upload-time = "2026-03-01T22:04:25.031Z" }, + { url = "https://files.pythonhosted.org/packages/58/80/c7c8244fc3e5bc483dc71a09560f43b619fab29301a0f0a8f936e42865c7/yarl-1.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dbf507e9ef5688bada447a24d68b4b58dd389ba93b7afc065a2ba892bea54769", size = 98883, upload-time = "2026-03-01T22:04:27.281Z" }, + { url = "https://files.pythonhosted.org/packages/86/e7/71ca9cc9ca79c0b7d491216177d1aed559d632947b8ffb0ee60f7d8b23e3/yarl-1.23.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:85e9beda1f591bc73e77ea1c51965c68e98dafd0fec72cdd745f77d727466716", size = 94172, upload-time = 
"2026-03-01T22:04:28.554Z" }, + { url = "https://files.pythonhosted.org/packages/6a/3f/6c6c8a0fe29c26fb2db2e8d32195bb84ec1bfb8f1d32e7f73b787fcf349b/yarl-1.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0e1fdaa14ef51366d7757b45bde294e95f6c8c049194e793eedb8387c86d5993", size = 107010, upload-time = "2026-03-01T22:04:30.385Z" }, + { url = "https://files.pythonhosted.org/packages/56/38/12730c05e5ad40a76374d440ed8b0899729a96c250516d91c620a6e38fc2/yarl-1.23.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:75e3026ab649bf48f9a10c0134512638725b521340293f202a69b567518d94e0", size = 100285, upload-time = "2026-03-01T22:04:31.752Z" }, + { url = "https://files.pythonhosted.org/packages/34/92/6a7be9239f2347234e027284e7a5f74b1140cc86575e7b469d13fba1ebfe/yarl-1.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:80e6d33a3d42a7549b409f199857b4fb54e2103fc44fb87605b6663b7a7ff750", size = 108230, upload-time = "2026-03-01T22:04:33.844Z" }, + { url = "https://files.pythonhosted.org/packages/5e/81/4aebccfa9376bd98b9d8bfad20621a57d3e8cfc5b8631c1fa5f62cdd03f4/yarl-1.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ec2f42d41ccbd5df0270d7df31618a8ee267bfa50997f5d720ddba86c4a83a6", size = 103008, upload-time = "2026-03-01T22:04:35.856Z" }, + { url = "https://files.pythonhosted.org/packages/38/0f/0b4e3edcec794a86b853b0c6396c0a888d72dfce19b2d88c02ac289fb6c1/yarl-1.23.0-cp310-cp310-win32.whl", hash = "sha256:debe9c4f41c32990771be5c22b56f810659f9ddf3d63f67abfdcaa2c6c9c5c1d", size = 83073, upload-time = "2026-03-01T22:04:38.268Z" }, + { url = "https://files.pythonhosted.org/packages/a0/71/ad95c33da18897e4c636528bbc24a1dd23fe16797de8bc4ec667b8db0ba4/yarl-1.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f043cb8a2d71c981c09c510da013bc79fd661f5c60139f00dd3c3cc4f2ffb", size = 87328, upload-time = "2026-03-01T22:04:39.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/14/dfa369523c79bccf9c9c746b0a63eb31f65db9418ac01275f7950962e504/yarl-1.23.0-cp310-cp310-win_arm64.whl", hash = "sha256:263cd4f47159c09b8b685890af949195b51d1aa82ba451c5847ca9bc6413c220", size = 82463, upload-time = "2026-03-01T22:04:41.454Z" }, + { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" }, + { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" }, + { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" }, + { url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = 
"2026-03-01T22:05:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" }, + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = 
"2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = 
"2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = 
"2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = 
"sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +]