diff --git a/.github/.gitignore b/.github/.gitignore
new file mode 100644
index 0000000..690ed16
--- /dev/null
+++ b/.github/.gitignore
@@ -0,0 +1,3 @@
+# Generated documentation (regenerate with: python scripts/generate-docs.py)
+docs/workflows/
+docs/actions/
diff --git a/.github/docs/assets/fonts/Roobert-Bold.woff2 b/.github/docs/assets/fonts/Roobert-Bold.woff2
new file mode 100755
index 0000000..bb15dcf
Binary files /dev/null and b/.github/docs/assets/fonts/Roobert-Bold.woff2 differ
diff --git a/.github/docs/assets/fonts/Roobert-Regular.woff2 b/.github/docs/assets/fonts/Roobert-Regular.woff2
new file mode 100755
index 0000000..5ee34c7
Binary files /dev/null and b/.github/docs/assets/fonts/Roobert-Regular.woff2 differ
diff --git a/.github/docs/assets/fonts/Roobert-SemiBold.woff2 b/.github/docs/assets/fonts/Roobert-SemiBold.woff2
new file mode 100755
index 0000000..00c93fc
Binary files /dev/null and b/.github/docs/assets/fonts/Roobert-SemiBold.woff2 differ
diff --git a/.github/docs/assets/images/favicon.png b/.github/docs/assets/images/favicon.png
new file mode 100644
index 0000000..54d52cf
Binary files /dev/null and b/.github/docs/assets/images/favicon.png differ
diff --git a/.github/docs/index.md b/.github/docs/index.md
new file mode 100644
index 0000000..2059e9f
--- /dev/null
+++ b/.github/docs/index.md
@@ -0,0 +1,60 @@
+# Futured CI/CD Workflows
+
+Reusable GitHub Actions workflows and composite actions for **iOS**, **Android**, and **Kotlin Multiplatform** projects at Futured.
+
+---
+
+## What's Inside
+
+<div class="grid cards" markdown>
+
+- :material-apple:{ .lg .middle } **iOS Workflows**
+
+ ---
+
+ Self-hosted runner workflows for testing, building, and releasing iOS apps via Fastlane.
+
+ [:octicons-arrow-right-24: iOS Workflows](workflows/ios/index.md)
+
+- :material-android:{ .lg .middle } **Android Workflows**
+
+ ---
+
+ Cloud-based workflows for PR checks, nightly builds, and releases to Firebase & Google Play.
+
+ [:octicons-arrow-right-24: Android Workflows](workflows/android/index.md)
+
+- :material-language-kotlin:{ .lg .middle } **KMP Workflows**
+
+ ---
+
+ Workflows for Kotlin Multiplatform projects — change detection and combined builds.
+
+ [:octicons-arrow-right-24: KMP Workflows](workflows/kmp/index.md)
+
+- :material-cog:{ .lg .middle } **Composite Actions**
+
+ ---
+
+ Reusable building blocks used by the workflows — environment setup, Fastlane steps, and utilities.
+
+ [:octicons-arrow-right-24: Actions](actions/index.md)
+
+
+</div>
+---
+
+## Quick Links
+
+| Platform | Test | Build | Release |
+|----------|------|-------|---------|
+| **iOS** | [selfhosted-test](workflows/ios/selfhosted-test.md) | [selfhosted-nightly-build](workflows/ios/selfhosted-nightly-build.md) | [selfhosted-release](workflows/ios/selfhosted-release.md) |
+| **iOS + KMP** | [selfhosted-test](workflows/ios-kmp/selfhosted-test.md) | [selfhosted-build](workflows/ios-kmp/selfhosted-build.md) | [selfhosted-release](workflows/ios-kmp/selfhosted-release.md) |
+| **Android** | [cloud-check](workflows/android/cloud-check.md) | [cloud-nightly-build](workflows/android/cloud-nightly-build.md) | [Firebase](workflows/android/cloud-release-firebase.md) / [Google Play](workflows/android/cloud-release-googleplay.md) |
+| **KMP** | — | [combined-nightly-build](workflows/kmp/combined-nightly-build.md) | — |
+
+---
+
+## Repository
+
+[:fontawesome-brands-github: futuredapp/.github](https://github.com/futuredapp/.github){ .md-button }
diff --git a/.github/docs/overrides/.icons/futured/logo.svg b/.github/docs/overrides/.icons/futured/logo.svg
new file mode 100644
index 0000000..aedd25d
--- /dev/null
+++ b/.github/docs/overrides/.icons/futured/logo.svg
@@ -0,0 +1,8 @@
+
\ No newline at end of file
diff --git a/.github/docs/stylesheets/extra.css b/.github/docs/stylesheets/extra.css
new file mode 100644
index 0000000..5fde3c6
--- /dev/null
+++ b/.github/docs/stylesheets/extra.css
@@ -0,0 +1,176 @@
+@font-face {
+ font-family: 'Roobert';
+ src: url('../assets/fonts/Roobert-Regular.woff2') format('woff2');
+ font-weight: 400;
+ font-style: normal;
+ font-display: swap;
+}
+
+@font-face {
+ font-family: 'Roobert';
+ src: url('../assets/fonts/Roobert-SemiBold.woff2') format('woff2');
+ font-weight: 600;
+ font-style: normal;
+ font-display: swap;
+}
+
+@font-face {
+ font-family: 'Roobert';
+ src: url('../assets/fonts/Roobert-Bold.woff2') format('woff2');
+ font-weight: 700;
+ font-style: normal;
+ font-display: swap;
+}
+
+:root {
+ --md-text-font: "Roobert"
+}
+
+[data-md-color-scheme="default"] {
+ --ftrd-background: #FFFFFF;
+ --ftrd-background-variant: #0000001A;
+ --ftrd-background-variant2: #0000000D;
+ --ftrd-foreground: #222222;
+ --ftrd-foreground-variant: #00000099;
+ --ftrd-foreground-variant2: #00000066;
+ --ftrd-foreground-variant3: #22222299;
+ --ftrd-border: #E7E8E9;
+ --ftrd-brand: #FF5F00;
+
+ --md-primary-fg-color: var(--ftrd-brand);
+ --md-primary-bg-color: var(--ftrd-foreground);
+
+ --md-default-fg-color: var(--ftrd-foreground);
+ --md-default-bg-color: var(--ftrd-background);
+
+ --md-accent-fg-color: var(--ftrd-brand);
+ --md-typeset-a-color: var(--ftrd-brand) !important;
+}
+
+[data-md-color-scheme="slate"] {
+ --ftrd-background: #1B1B1B;
+ --ftrd-background-variant: #FFFFFF1A;
+ --ftrd-background-variant2: #FFFFFF0D;
+ --ftrd-foreground: #E7E8E9;
+ --ftrd-foreground-variant: #C2C5C8;
+ --ftrd-foreground-variant2: #C2C5C8;
+ --ftrd-foreground-variant3: #C2C5C8;
+ --ftrd-border: #292929;
+ --ftrd-brand: #FF5F00;
+
+ --md-primary-fg-color: var(--ftrd-brand);
+ --md-primary-bg-color: var(--ftrd-foreground);
+
+ --md-default-fg-color: var(--ftrd-foreground);
+ --md-default-bg-color: var(--ftrd-background);
+
+ --md-accent-fg-color: var(--ftrd-brand);
+
+ --md-code-bg-color: var(--ftrd-background-variant);
+ --md-typeset-a-color: var(--ftrd-brand) !important;
+}
+
+/* Header background */
+.md-header {
+ background-color: var(--ftrd-background);
+}
+
+/* Header title margin */
+.md-header__title {
+ margin-left: 0.2rem !important;
+}
+
+/* Nav title background in drawer */
+.md-nav--primary .md-nav__title[for=__drawer] {
+ background-color: var(--ftrd-background);
+}
+
+/* Search widget - icon */
+.md-search__icon[for=__search] {
+ color: var(--ftrd-foreground-variant);
+}
+
+/* Search widget - background */
+.md-search__form {
+ background-color: var(--ftrd-background-variant);
+}
+
+/* Search widget - hover */
+.md-search__form:hover {
+ background-color: var(--ftrd-background-variant2);
+}
+
+/* Search widget - input placeholder color */
+.md-search__input::placeholder {
+ color: var(--ftrd-foreground-variant);
+}
+
+/* Palette button colors */
+.md-header__button[for="__palette_0"],
+.md-header__button[for="__palette_1"],
+.md-header__button[for="__palette_2"] {
+ color: var(--ftrd-foreground-variant);
+}
+
+/* Tab background */
+.md-tabs {
+ background-color: var(--ftrd-background);
+ border-bottom: 1px solid var(--ftrd-border);
+}
+
+/* Tab item color */
+.md-tabs__item {
+ color: var(--ftrd-foreground) !important;
+}
+
+/* Tab item opacity, gets rid of transition animation, did not find a way to do transition to brand color */
+.md-tabs__link {
+ opacity: 1;
+}
+
+/* Tab item color -- selected */
+.md-tabs__item--active {
+ color: var(--ftrd-brand) !important;
+}
+
+/* Navigation - scrollbar color */
+.md-sidebar__scrollwrap:hover {
+ scrollbar-color: var(--ftrd-foreground-variant) #0000;
+}
+
+/* Navigation - section color */
+.md-nav__item--section>.md-nav__link[for] {
+ color: var(--ftrd-foreground-variant);
+}
+
+/* Typography - Page title */
+.md-typeset h1 {
+ color: var(--ftrd-foreground) !important;
+}
+
+/* Footer - background*/
+.md-footer-meta {
+ background-color: var(--ftrd-background);
+ border-top: 1px solid var(--ftrd-border);
+}
+
+/* Footer - Made with ❤️🔥 at Futured */
+.md-copyright__highlight {
+ color: var(--ftrd-foreground);
+}
+
+/* Footer - Made with mkdocs */
+.md-copyright {
+ color: var(--ftrd-foreground-variant3);
+}
+
+/* Footer - Made with mkdocs - link */
+.md-copyright a {
+ color: var(--ftrd-foreground-variant3) !important;
+ text-decoration: underline;
+}
+
+/* Footer - social icons */
+.md-social a {
+ color: var(--ftrd-foreground) !important;
+}
\ No newline at end of file
diff --git a/.github/mkdocs.yml b/.github/mkdocs.yml
new file mode 100644
index 0000000..5798728
--- /dev/null
+++ b/.github/mkdocs.yml
@@ -0,0 +1,140 @@
+# yaml-language-server: $schema=https://squidfunk.github.io/mkdocs-material/schema.json
+
+site_name: Futured CI/CD Workflows
+site_url: https://futuredapp.github.io/.github
+theme:
+ name: material
+ language: en
+
+ custom_dir: docs/overrides
+
+ favicon: assets/images/favicon.png
+ icon:
+ logo: futured/logo
+
+ font:
+ code: JetBrains Mono
+
+ palette:
+ # Palette toggle for automatic mode
+ - media: "(prefers-color-scheme)"
+ toggle:
+ icon: material/brightness-auto
+ name: Switch to auto mode
+
+ # Palette toggle for light mode
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+
+ # Palette toggle for dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+
+ features:
+ - navigation.instant
+ - navigation.tabs
+ - navigation.sections
+ - navigation.indexes
+ - navigation.expand
+ - content.code.copy
+
+plugins:
+ - search
+ - glightbox:
+ background: light-dark(white, black)
+ shadow: false
+
+nav:
+ - Home: index.md
+ - Workflows:
+ - Overview: workflows/index.md
+ - iOS:
+ - workflows/ios/index.md
+ - Test: workflows/ios/selfhosted-test.md
+ - Nightly Build: workflows/ios/selfhosted-nightly-build.md
+ - On-demand Build: workflows/ios/selfhosted-on-demand-build.md
+ - Release: workflows/ios/selfhosted-release.md
+ - Build (Deprecated): workflows/ios/selfhosted-build.md
+ - iOS + KMP:
+ - workflows/ios-kmp/index.md
+ - Test: workflows/ios-kmp/selfhosted-test.md
+ - Build: workflows/ios-kmp/selfhosted-build.md
+ - Release: workflows/ios-kmp/selfhosted-release.md
+ - Android:
+ - workflows/android/index.md
+ - PR Check: workflows/android/cloud-check.md
+ - Nightly Build: workflows/android/cloud-nightly-build.md
+ - Release (Firebase): workflows/android/cloud-release-firebase.md
+ - Release (Google Play): workflows/android/cloud-release-googleplay.md
+ - Generate Baseline Profiles: workflows/android/cloud-generate-baseline-profiles.md
+ - KMP:
+ - workflows/kmp/index.md
+ - Detect Changes: workflows/kmp/cloud-detect-changes.md
+ - Combined Nightly Build: workflows/kmp/combined-nightly-build.md
+ - Universal:
+ - workflows/universal/index.md
+ - Workflows Lint: workflows/universal/workflows-lint.md
+ - Cloud Backup: workflows/universal/cloud-backup.md
+ - Self-hosted Backup: workflows/universal/selfhosted-backup.md
+ - Actions:
+ - Overview: actions/index.md
+ - Android:
+ - actions/android/index.md
+ - Setup Environment: actions/android/setup-environment.md
+ - Check: actions/android/check.md
+ - Build Firebase: actions/android/build-firebase.md
+ - Build Google Play: actions/android/build-googleplay.md
+ - Generate Baseline Profiles: actions/android/generate-baseline-profiles.md
+ - iOS:
+ - actions/ios/index.md
+ - Export Secrets: actions/ios/export-secrets.md
+ - Fastlane Test: actions/ios/fastlane-test.md
+ - Fastlane Beta: actions/ios/fastlane-beta.md
+ - Fastlane Release: actions/ios/fastlane-release.md
+ - KMP Build: actions/ios/kmp-build.md
+ - Utility:
+ - actions/utility/index.md
+ - KMP Detect Changes: actions/utility/kmp-detect-changes.md
+ - Detect Changes & Changelog: actions/utility/detect-changes-changelog.md
+ - JIRA Transition Tickets: actions/utility/jira-transition-tickets.md
+
+copyright: Made with ❤️🔥 at Futured
+
+extra:
+ version:
+ provider: mike
+ social:
+ - icon: material/web
+ name: Web
+ link: https://www.futured.app
+ - icon: fontawesome/brands/github
+ name: GitHub
+ link: https://www.github.com/futuredapp
+ - icon: fontawesome/brands/linkedin
+ name: LinkedIn
+ link: https://www.linkedin.com/company/futuredapps
+ - icon: fontawesome/brands/instagram
+ name: Instagram
+ link: https://www.instagram.com/futuredapps
+
+extra_css:
+ - stylesheets/extra.css
+
+markdown_extensions:
+ - admonition
+ - attr_list
+ - md_in_html
+ - pymdownx.details
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
+ - pymdownx.blocks.caption
+ - pymdownx.emoji:
+ emoji_index: !!python/name:material.extensions.emoji.twemoji
+ emoji_generator: !!python/name:material.extensions.emoji.to_svg
diff --git a/.github/requirements-docs.txt b/.github/requirements-docs.txt
new file mode 100644
index 0000000..d9c7df7
--- /dev/null
+++ b/.github/requirements-docs.txt
@@ -0,0 +1,6 @@
+mkdocs>=1.6,<2
+mkdocs-material>=9.5
+mkdocs-glightbox>=0.4
+mike>=2.1
+Jinja2>=3.1
+PyYAML>=6.0
diff --git a/.github/scripts/config.py b/.github/scripts/config.py
new file mode 100644
index 0000000..fb0e2b6
--- /dev/null
+++ b/.github/scripts/config.py
@@ -0,0 +1,210 @@
+"""Auto-discovered registry of workflows and actions for documentation."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import yaml
+
+ROOT_DIR = Path(__file__).resolve().parent.parent
+
+# ---------------------------------------------------------------------------
+# Manual configuration
+# ---------------------------------------------------------------------------
+
+CATEGORY_LABELS: dict[str, str] = {
+ "ios": "iOS",
+ "ios-kmp": "iOS + KMP",
+ "android": "Android",
+ "kmp": "KMP",
+ "universal": "Universal",
+ "utility": "Utility",
+}
+
+EXCLUDE: set[str] = {"deploy-docs"}
+
+# Per-entry overrides keyed by workflow/action name (filename stem or dir name).
+# Any key set here replaces the auto-discovered value.
+#
+# Supported keys (workflows):
+# source – relative path to the YAML file
+# category – category id (must exist in CATEGORY_LABELS)
+# title – display title (default: YAML `name:` field)
+# output – output markdown path
+# runner – runner label shown in docs
+# not_reusable – bool; True hides the "Usage" snippet (auto-detected
+# when `workflow_call` trigger is absent)
+# deprecated – bool; True marks the workflow as deprecated
+# deprecated_message – markdown string shown in the deprecation banner
+# readme – relative path to a companion .md to embed
+# (auto-detected from workflows/{key}.md)
+#
+# Supported keys (actions):
+# source – relative path to action.yml
+# category – category id (must exist in CATEGORY_LABELS)
+# title – display title (default: YAML `name:` field)
+# output – output markdown path
+# readme – relative path to a README.md to embed (auto-detected)
+#
+OVERRIDES: dict[str, dict] = {
+ "ios-selfhosted-build": {
+ "title": "iOS Build (Deprecated)",
+ "deprecated": True,
+ "deprecated_message": "Use `ios-selfhosted-nightly-build` instead.",
+ },
+ "workflows-lint": {
+ "not_reusable": True,
+ },
+}
+
+# Ordered longest-first so "ios-kmp" matches before "ios".
+CATEGORY_PREFIXES: list[str] = sorted(
+ ["ios-kmp", "ios", "android", "kmp", "universal"],
+ key=len,
+ reverse=True,
+)
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _match_category(key: str, fallback: str) -> tuple[str, str]:
+ """Return (category, slug) by matching the longest category prefix.
+
+ The slug is the remainder of *key* after stripping the prefix and its
+ trailing hyphen. If no prefix matches, *fallback* is used as category
+ and the full key becomes the slug.
+ """
+ for prefix in CATEGORY_PREFIXES:
+ if key.startswith(prefix + "-"):
+ slug = key[len(prefix) + 1 :]
+ return prefix, slug
+ if key == prefix:
+ return prefix, key
+ return fallback, key
+
+
+def _derive_runner(key: str, yaml_text: str) -> str:
+ """Derive the runner label from filename convention, falling back to YAML."""
+ if "-combined-" in key:
+ return "Self-hosted + ubuntu-latest"
+ if "-selfhosted-" in key:
+ return "Self-hosted"
+ if "-cloud-" in key:
+ return "ubuntu-latest"
+
+ # Fallback: parse first runs-on value from YAML.
+ for line in yaml_text.splitlines():
+ stripped = line.strip()
+ if stripped.startswith("runs-on:"):
+ value = stripped.split(":", 1)[1].strip()
+ # Normalise common variations.
+ if "self-hosted" in value.lower():
+ return "Self-hosted"
+ return value
+ return "ubuntu-latest"
+
+
+def _parse_yaml_name(path: Path) -> str:
+ """Return the top-level ``name`` field from a YAML file."""
+ with open(path) as f:
+ data = yaml.safe_load(f)
+ if data and isinstance(data, dict):
+ return data.get("name", path.stem)
+ return path.stem
+
+
+def _has_workflow_call(path: Path) -> bool:
+ """Return True if the workflow declares a ``workflow_call`` trigger."""
+ with open(path) as f:
+ text = f.read()
+ return "workflow_call" in text
+
+
+# ---------------------------------------------------------------------------
+# Auto-discovery
+# ---------------------------------------------------------------------------
+
+
+def discover_workflows(root: Path) -> dict[str, dict]:
+ """Scan ``workflows/*.yml`` and build the config dict."""
+ workflows: dict[str, dict] = {}
+ workflows_dir = root / "workflows"
+ if not workflows_dir.is_dir():
+ return workflows
+
+ for path in sorted(workflows_dir.glob("*.yml")):
+ key = path.stem
+ if key in EXCLUDE:
+ continue
+
+ category, slug = _match_category(key, "universal")
+ title = _parse_yaml_name(path)
+
+ with open(path) as f:
+ yaml_text = f.read()
+ runner = _derive_runner(key, yaml_text)
+
+ entry: dict = {
+ "source": f"workflows/{path.name}",
+ "category": category,
+ "title": title,
+ "output": f"docs/workflows/{category}/{slug}.md",
+ "runner": runner,
+ }
+
+ if not _has_workflow_call(path):
+ entry["not_reusable"] = True
+
+ readme = path.with_suffix(".md")
+ if readme.exists():
+ entry["readme"] = f"workflows/{readme.name}"
+
+ # Merge overrides on top.
+ if key in OVERRIDES:
+ entry.update(OVERRIDES[key])
+
+ workflows[key] = entry
+
+ return workflows
+
+
+def discover_actions(root: Path) -> dict[str, dict]:
+ """Scan ``actions/*/action.yml`` and build the config dict."""
+ actions: dict[str, dict] = {}
+ actions_dir = root / "actions"
+ if not actions_dir.is_dir():
+ return actions
+
+ for path in sorted(actions_dir.glob("*/action.yml")):
+ key = path.parent.name
+ category, slug = _match_category(key, "utility")
+ title = _parse_yaml_name(path)
+
+ entry: dict = {
+ "source": f"actions/{key}/action.yml",
+ "category": category,
+ "title": title,
+ "output": f"docs/actions/{category}/{slug}.md",
+ }
+
+ readme = path.parent / "README.md"
+ if readme.exists():
+ entry["readme"] = f"actions/{key}/README.md"
+
+ # Merge overrides on top.
+ if key in OVERRIDES:
+ entry.update(OVERRIDES[key])
+
+ actions[key] = entry
+
+ return actions
+
+
+# ---------------------------------------------------------------------------
+# Exported registries (same interface as before)
+# ---------------------------------------------------------------------------
+
+WORKFLOWS: dict[str, dict] = discover_workflows(ROOT_DIR)
+ACTIONS: dict[str, dict] = discover_actions(ROOT_DIR)
diff --git a/.github/scripts/enrichers/__init__.py b/.github/scripts/enrichers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.github/scripts/enrichers/ai_enricher.py b/.github/scripts/enrichers/ai_enricher.py
new file mode 100644
index 0000000..d338899
--- /dev/null
+++ b/.github/scripts/enrichers/ai_enricher.py
@@ -0,0 +1,76 @@
+"""AI-powered documentation enricher (stub).
+
+This enricher is a documented no-op that serves as the integration point
+for future AI-generated documentation. When activated, it will use an LLM
+to generate additional descriptions, usage tips, and examples for workflows
+and actions that lack hand-written READMEs.
+
+Activation:
+ Pass ``--enrich`` flag to ``generate-docs.py`` AND set the
+ ``AI_DOCS_API_KEY`` environment variable. Optionally provide
+ ``--ai-config path/to/config.json`` with the following structure::
+
+ {
+ "api_key_env": "AI_DOCS_API_KEY",
+ "model": "claude-sonnet-4-20250514",
+ "prompt_template": "scripts/templates/ai_prompt.txt",
+ "max_tokens": 1024
+ }
+
+Implementation notes for future activation:
+ 1. Read the source YAML file content for the spec.
+ 2. Construct a prompt from the template with the YAML content and
+ existing enrichment results.
+ 3. Call the AI API to generate additional documentation.
+ 4. Parse the response into an EnrichmentResult.
+"""
+
+from __future__ import annotations
+
+import json
+import os
+from pathlib import Path
+
+from .base import BaseEnricher, EnrichmentResult
+
+
+class AIEnricher(BaseEnricher):
+ """AI-powered enricher — currently a documented no-op.
+
+ Activates only when ``--enrich`` flag is passed AND the API key
+ environment variable is set. Without both conditions met, this
+ enricher silently produces empty results.
+ """
+
+ def __init__(
+ self,
+ enabled: bool = False,
+ config_path: str | Path | None = None,
+ ) -> None:
+ self._enabled = enabled
+ self._config: dict = {}
+
+ if config_path and Path(config_path).exists():
+ with open(config_path) as f:
+ self._config = json.load(f)
+
+ # Check for API key
+ api_key_env = self._config.get("api_key_env", "AI_DOCS_API_KEY")
+ self._api_key = os.environ.get(api_key_env, "")
+
+ def name(self) -> str:
+ return "ai"
+
+ def can_enrich(self, spec: object, config: dict) -> bool:
+ return self._enabled and bool(self._api_key)
+
+ def enrich(
+ self,
+ spec: object,
+ config: dict,
+ prior_results: list[EnrichmentResult],
+ ) -> EnrichmentResult:
+ # Stub — no-op until AI integration is implemented.
+ # When implementing, use self._config for model/prompt settings
+ # and self._api_key for authentication.
+ return EnrichmentResult()
diff --git a/.github/scripts/enrichers/base.py b/.github/scripts/enrichers/base.py
new file mode 100644
index 0000000..10a9057
--- /dev/null
+++ b/.github/scripts/enrichers/base.py
@@ -0,0 +1,54 @@
+"""Abstract base class for documentation enrichers."""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+
+
+@dataclass
+class EnrichmentResult:
+ """Result of an enrichment pass on a workflow or action spec."""
+
+ additional_description: str | None = None
+ usage_tips: str | None = None
+ examples: list[str] = field(default_factory=list)
+
+
+class BaseEnricher(ABC):
+ """Interface for documentation enrichers.
+
+ Enrichers augment auto-generated documentation with additional content
+ beyond what the YAML metadata provides. They run in sequence; each
+ enricher receives the results of all prior enrichers so it can avoid
+ duplicating content.
+ """
+
+ @abstractmethod
+ def name(self) -> str:
+ """Human-readable name for this enricher."""
+ ...
+
+ @abstractmethod
+ def can_enrich(self, spec: object, config: dict) -> bool:
+ """Return True if this enricher has content to add for the given spec."""
+ ...
+
+ @abstractmethod
+ def enrich(
+ self,
+ spec: object,
+ config: dict,
+ prior_results: list[EnrichmentResult],
+ ) -> EnrichmentResult:
+ """Produce enrichment content for the given spec.
+
+ Args:
+ spec: A WorkflowSpec or ActionSpec instance.
+ config: The registry entry dict for this item.
+ prior_results: Results from enrichers that ran before this one.
+
+ Returns:
+ An EnrichmentResult with any additional content to inject.
+ """
+ ...
diff --git a/.github/scripts/enrichers/readme_enricher.py b/.github/scripts/enrichers/readme_enricher.py
new file mode 100644
index 0000000..6f93edd
--- /dev/null
+++ b/.github/scripts/enrichers/readme_enricher.py
@@ -0,0 +1,108 @@
+"""Enricher that extracts content from existing README files."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+
+from .base import BaseEnricher, EnrichmentResult
+
+
+class ReadmeEnricher(BaseEnricher):
+ """Extracts documentation sections from existing README.md files.
+
+ For actions that already have hand-written READMEs (e.g.
+ jira-transition-tickets, universal-detect-changes-and-generate-changelog),
+ this enricher pulls in sections that go beyond what the YAML metadata
+ provides — How It Works, Usage Examples, Testing, architecture details, etc.
+ """
+
+ def __init__(self, base_dir: str | Path) -> None:
+ self._base_dir = Path(base_dir)
+
+ def name(self) -> str:
+ return "readme"
+
+ def can_enrich(self, spec: object, config: dict) -> bool:
+ return "readme" in config
+
+ def enrich(
+ self,
+ spec: object,
+ config: dict,
+ prior_results: list[EnrichmentResult],
+ ) -> EnrichmentResult:
+ readme_path = self._base_dir / config["readme"]
+ if not readme_path.exists():
+ return EnrichmentResult()
+
+ content = readme_path.read_text(encoding="utf-8")
+
+ # Extract sections beyond Inputs/Outputs (those are already in the
+ # generated tables). Keep sections like How It Works, Usage Examples,
+ # Testing, Features, Scripts, etc.
+ skip_headings = {
+ "inputs",
+ "outputs",
+ "overview", # Usually duplicates description
+ }
+
+ sections: list[str] = []
+ current_section: list[str] = []
+ current_heading = ""
+ in_skip = False
+ in_code_fence = False
+
+ for line in content.splitlines():
+ # Track fenced code blocks to avoid matching headings inside them
+ if line.startswith("```"):
+ in_code_fence = not in_code_fence
+ if not in_skip:
+ current_section.append(line)
+ continue
+
+ if in_code_fence:
+ if not in_skip:
+ current_section.append(line)
+ continue
+
+ heading_match = re.match(r"^(#{1,3})\s+(.+)", line)
+ if heading_match:
+ # Save previous section if not skipped
+ if current_section and not in_skip:
+ sections.append("\n".join(current_section))
+
+ current_heading = heading_match.group(2).strip()
+ heading_key = re.sub(r"[`*]", "", current_heading).lower()
+
+ # Skip the title line (first H1) and known metadata sections
+ level = len(heading_match.group(1))
+ if level == 1:
+ in_skip = True
+ current_section = []
+ continue
+
+ in_skip = heading_key in skip_headings
+ current_section = [line] if not in_skip else []
+ else:
+ if not in_skip:
+ current_section.append(line)
+
+ # Don't forget the last section
+ if current_section and not in_skip:
+ sections.append("\n".join(current_section))
+
+ additional = "\n\n".join(s.strip() for s in sections if s.strip())
+
+ # Extract usage examples separately
+ examples: list[str] = []
+ example_blocks = re.findall(
+ r"```yaml\n(.*?)```", content, re.DOTALL
+ )
+ for block in example_blocks:
+ examples.append(block.strip())
+
+ return EnrichmentResult(
+ additional_description=additional if additional else None,
+ examples=examples if examples else [],
+ )
diff --git a/.github/scripts/generate-docs.py b/.github/scripts/generate-docs.py
new file mode 100644
index 0000000..aec2e92
--- /dev/null
+++ b/.github/scripts/generate-docs.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python3
+"""Generate documentation markdown files from workflow and action YAML specs.
+
+Usage:
+ python scripts/generate-docs.py [--enrich] [--ai-config PATH]
+
+Pipeline:
+ 1. Load config registry
+ 2. Parse all workflow YAMLs + action YAMLs
+ 3. Run enricher pipeline (README enricher always; AI enricher if --enrich)
+ 4. Render markdown via Jinja2 templates
+ 5. Write generated .md files to docs/
+ 6. Generate category index pages
+"""
+
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+from collections import defaultdict
+from pathlib import Path
+
+# Ensure the scripts package is importable
+SCRIPT_DIR = Path(__file__).resolve().parent
+ROOT_DIR = SCRIPT_DIR.parent
+sys.path.insert(0, str(ROOT_DIR))
+
+from scripts.config import ACTIONS, CATEGORY_LABELS, WORKFLOWS
+from scripts.enrichers.ai_enricher import AIEnricher
+from scripts.enrichers.base import BaseEnricher, EnrichmentResult
+from scripts.enrichers.readme_enricher import ReadmeEnricher
+from scripts.parsers.action_parser import parse_action
+from scripts.parsers.workflow_parser import parse_workflow
+from scripts.renderers.markdown_renderer import (
+ render_action,
+ render_index,
+ render_workflow,
+)
+
+
+def _run_enrichers(
+ enrichers: list[BaseEnricher],
+ spec: object,
+ config: dict,
+) -> list[EnrichmentResult]:
+ """Run all enrichers in sequence, passing prior results forward."""
+ results: list[EnrichmentResult] = []
+ for enricher in enrichers:
+ if enricher.can_enrich(spec, config):
+ result = enricher.enrich(spec, config, results)
+ results.append(result)
+ return results
+
+
+def _build_workflow_index_items(
+ category: str,
+ configs: list[tuple[str, dict]],
+ specs: dict,
+) -> list[dict]:
+ """Build index items for workflows in a category."""
+ items = []
+ for key, cfg in configs:
+ spec = specs.get(key)
+ # Relative link from index to the workflow page
+ filename = Path(cfg["output"]).name
+ items.append(
+ {
+ "title": cfg["title"],
+ "link": filename,
+ "description": spec.name if spec else cfg["title"],
+ }
+ )
+ return items
+
+
+def _build_action_index_items(
+ category: str,
+ configs: list[tuple[str, dict]],
+ specs: dict,
+) -> list[dict]:
+ """Build index items for actions in a category."""
+ items = []
+ for key, cfg in configs:
+ spec = specs.get(key)
+ filename = Path(cfg["output"]).name
+ items.append(
+ {
+ "title": cfg["title"],
+ "link": filename,
+ "description": spec.description if spec else cfg["title"],
+ }
+ )
+ return items
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser(description="Generate documentation site")
+ parser.add_argument(
+ "--enrich",
+ action="store_true",
+ help="Enable AI enricher (requires AI_DOCS_API_KEY env var)",
+ )
+ parser.add_argument(
+ "--ai-config",
+ type=str,
+ default=None,
+ help="Path to AI enricher configuration JSON",
+ )
+ parser.add_argument(
+ "--ref",
+ type=str,
+ default="main",
+ help="Git ref for usage snippets (e.g. 'main' or '2.1.0')",
+ )
+ args = parser.parse_args()
+
+ templates_dir = SCRIPT_DIR / "templates"
+
+ # Initialize enrichers
+ enrichers: list[BaseEnricher] = [
+ ReadmeEnricher(ROOT_DIR),
+ AIEnricher(enabled=args.enrich, config_path=args.ai_config),
+ ]
+
+ # -------------------------------------------------------------------
+ # Parse all workflow YAML files
+ # -------------------------------------------------------------------
+ print("Parsing workflows...")
+ workflow_specs = {}
+ for key, cfg in WORKFLOWS.items():
+ source = ROOT_DIR / cfg["source"]
+ if not source.exists():
+ print(f" WARNING: {source} not found, skipping {key}")
+ continue
+ spec = parse_workflow(source)
+ workflow_specs[key] = spec
+ print(f" Parsed: {key} ({spec.name})")
+
+ # -------------------------------------------------------------------
+ # Parse all action YAML files
+ # -------------------------------------------------------------------
+ print("\nParsing actions...")
+ action_specs = {}
+ for key, cfg in ACTIONS.items():
+ source = ROOT_DIR / cfg["source"]
+ if not source.exists():
+ print(f" WARNING: {source} not found, skipping {key}")
+ continue
+ spec = parse_action(source)
+ action_specs[key] = spec
+ print(f" Parsed: {key} ({spec.name})")
+
+ # -------------------------------------------------------------------
+ # Render workflow pages
+ # -------------------------------------------------------------------
+ print("\nRendering workflow pages...")
+ for key, cfg in WORKFLOWS.items():
+ spec = workflow_specs.get(key)
+ if not spec:
+ continue
+ enrichments = _run_enrichers(enrichers, spec, cfg)
+ path = render_workflow(spec, cfg, enrichments, templates_dir, ROOT_DIR, ref=args.ref)
+ print(f" Written: {path.relative_to(ROOT_DIR)}")
+
+ # -------------------------------------------------------------------
+ # Render action pages
+ # -------------------------------------------------------------------
+ print("\nRendering action pages...")
+ for key, cfg in ACTIONS.items():
+ spec = action_specs.get(key)
+ if not spec:
+ continue
+ enrichments = _run_enrichers(enrichers, spec, cfg)
+ path = render_action(spec, cfg, enrichments, templates_dir, ROOT_DIR, ref=args.ref)
+ print(f" Written: {path.relative_to(ROOT_DIR)}")
+
+ # -------------------------------------------------------------------
+ # Generate category index pages
+ # -------------------------------------------------------------------
+ print("\nGenerating index pages...")
+
+ # Group workflows by category
+ wf_by_category: dict[str, list[tuple[str, dict]]] = defaultdict(list)
+ for key, cfg in WORKFLOWS.items():
+ wf_by_category[cfg["category"]].append((key, cfg))
+
+ # Group actions by category
+ act_by_category: dict[str, list[tuple[str, dict]]] = defaultdict(list)
+ for key, cfg in ACTIONS.items():
+ act_by_category[cfg["category"]].append((key, cfg))
+
+ # Workflow category index pages
+ for category, entries in wf_by_category.items():
+ label = CATEGORY_LABELS.get(category, category.title())
+ items = _build_workflow_index_items(category, entries, workflow_specs)
+ index_path = ROOT_DIR / "docs" / "workflows" / category / "index.md"
+ render_index(
+ title=f"{label} Workflows",
+ description=f"Reusable GitHub Actions workflows for {label} projects.",
+ items=items,
+ templates_dir=templates_dir,
+ output_path=index_path,
+ )
+ print(f" Written: {index_path.relative_to(ROOT_DIR)}")
+
+ # Action category index pages
+ for category, entries in act_by_category.items():
+ label = CATEGORY_LABELS.get(category, category.title())
+ items = _build_action_index_items(category, entries, action_specs)
+ index_path = ROOT_DIR / "docs" / "actions" / category / "index.md"
+ render_index(
+ title=f"{label} Actions",
+ description=f"Composite GitHub Actions for {label} projects.",
+ items=items,
+ templates_dir=templates_dir,
+ output_path=index_path,
+ )
+ print(f" Written: {index_path.relative_to(ROOT_DIR)}")
+
+ # Top-level workflow index
+ all_wf_items = []
+ wf_categories = [c for c in CATEGORY_LABELS if c in wf_by_category]
+ for category in wf_categories:
+ label = CATEGORY_LABELS.get(category, category.title())
+ all_wf_items.append(
+ {
+ "title": f"{label} Workflows",
+ "link": f"{category}/index.md",
+ "description": f"{len(wf_by_category[category])} workflow(s)",
+ }
+ )
+ render_index(
+ title="Workflows",
+ description="All reusable GitHub Actions workflows organized by platform.",
+ items=all_wf_items,
+ templates_dir=templates_dir,
+ output_path=ROOT_DIR / "docs" / "workflows" / "index.md",
+ )
+ print(f" Written: docs/workflows/index.md")
+
+ # Top-level action index
+ all_act_items = []
+ act_categories = [c for c in CATEGORY_LABELS if c in act_by_category]
+ for category in act_categories:
+ label = CATEGORY_LABELS.get(category, category.title())
+ all_act_items.append(
+ {
+ "title": f"{label} Actions",
+ "link": f"{category}/index.md",
+ "description": f"{len(act_by_category[category])} action(s)",
+ }
+ )
+ render_index(
+ title="Actions",
+ description="All composite GitHub Actions organized by platform.",
+ items=all_act_items,
+ templates_dir=templates_dir,
+ output_path=ROOT_DIR / "docs" / "actions" / "index.md",
+ )
+ print(f" Written: docs/actions/index.md")
+
+ # -------------------------------------------------------------------
+ # Summary
+ # -------------------------------------------------------------------
+ total_wf = len(workflow_specs)
+ total_act = len(action_specs)
+ print(f"\nDone! Generated {total_wf} workflow pages + {total_act} action pages.")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/scripts/parsers/__init__.py b/.github/scripts/parsers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.github/scripts/parsers/action_parser.py b/.github/scripts/parsers/action_parser.py
new file mode 100644
index 0000000..c500d10
--- /dev/null
+++ b/.github/scripts/parsers/action_parser.py
@@ -0,0 +1,31 @@
+"""Parser for composite GitHub Actions (action.yml) files."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import yaml
+
+from .types import ActionSpec, parse_inputs, parse_outputs
+
+
+def parse_action(path: str | Path) -> ActionSpec:
+    """Parse a composite action YAML file into an ActionSpec.
+
+    Args:
+        path: Path to an ``action.yml`` file.
+
+    Returns:
+        ActionSpec carrying the action's name, description, inputs and
+        outputs as extracted from the YAML document.
+    """
+    path = Path(path)
+    with open(path) as f:
+        data = yaml.safe_load(f)
+    # An empty file parses as None; normalize to an empty mapping.
+    data = data or {}
+
+    # Actions live at actions/<name>/action.yml, so the parent directory
+    # name is a sensible fallback when the YAML has no explicit "name".
+    name = data.get("name", path.parent.name)
+    description = data.get("description", "")
+
+    # "inputs:"/"outputs:" present but empty parse as None; guard with {}.
+    inputs = parse_inputs(data.get("inputs") or {})
+    outputs = parse_outputs(data.get("outputs") or {})
+
+    return ActionSpec(
+        name=name,
+        description=description,
+        source_path=str(path),
+        inputs=inputs,
+        outputs=outputs,
+    )
diff --git a/.github/scripts/parsers/types.py b/.github/scripts/parsers/types.py
new file mode 100644
index 0000000..2ac77d1
--- /dev/null
+++ b/.github/scripts/parsers/types.py
@@ -0,0 +1,78 @@
+"""Shared dataclasses and helpers for parsed workflow and action specifications."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+
+@dataclass
+class InputSpec:
+    """A single input parameter of a workflow or composite action."""
+
+    name: str
+    description: str
+    # Declared type; treated as "string" when the YAML omits it
+    # (see parse_inputs).
+    type: str = "string"
+    required: bool = False
+    # Default value stringified by parse_inputs; None means no default.
+    default: str | None = None
+
+
+@dataclass
+class SecretSpec:
+    """A secret declared under a reusable workflow's workflow_call trigger."""
+
+    name: str
+    description: str
+    required: bool = False
+
+
+@dataclass
+class OutputSpec:
+    """A named output exposed by a workflow or action."""
+
+    name: str
+    description: str
+
+
+@dataclass
+class WorkflowSpec:
+    """Parsed representation of a reusable workflow YAML file."""
+
+    name: str
+    # Path of the source YAML file as passed to the parser.
+    source_path: str
+    inputs: list[InputSpec] = field(default_factory=list)
+    secrets: list[SecretSpec] = field(default_factory=list)
+    outputs: list[OutputSpec] = field(default_factory=list)
+    # Per-job summary: job id -> {"runs-on": ..., "uses": [refs]}
+    # (shape produced by workflow_parser._parse_jobs).
+    jobs: dict[str, dict] = field(default_factory=dict)
+
+
+@dataclass
+class ActionSpec:
+    """Parsed representation of a composite action (action.yml) file."""
+
+    name: str
+    description: str
+    # Path of the source YAML file as passed to the parser.
+    source_path: str
+    inputs: list[InputSpec] = field(default_factory=list)
+    outputs: list[OutputSpec] = field(default_factory=list)
+
+
+def parse_inputs(raw: dict) -> list[InputSpec]:
+    """Parse a raw inputs dict into a list of InputSpec.
+
+    Args:
+        raw: Mapping of input name -> YAML spec mapping. Values may be
+            None for inputs declared with a bare "name:" and no body.
+
+    Returns:
+        InputSpec entries in declaration (dict iteration) order.
+    """
+    inputs = []
+    for name, spec in raw.items():
+        spec = spec or {}  # bare "name:" parses as None
+        default = spec.get("default")
+        inputs.append(
+            InputSpec(
+                name=name,
+                description=spec.get("description", ""),
+                type=spec.get("type", "string"),
+                required=spec.get("required", False),
+                # Stringify YAML scalars (bool/int/float) to match the
+                # declared str field; keep None to mean "no default".
+                default=str(default) if default is not None else None,
+            )
+        )
+    return inputs
+
+
+def parse_outputs(raw: dict) -> list[OutputSpec]:
+    """Parse a raw outputs dict into a list of OutputSpec.
+
+    Args:
+        raw: Mapping of output name -> YAML spec mapping (values may be
+            None for outputs declared with no body).
+
+    Returns:
+        OutputSpec entries in declaration (dict iteration) order.
+    """
+    outputs = []
+    for name, spec in raw.items():
+        spec = spec or {}  # bare "name:" parses as None
+        outputs.append(
+            OutputSpec(
+                name=name,
+                description=spec.get("description", ""),
+            )
+        )
+    return outputs
diff --git a/.github/scripts/parsers/workflow_parser.py b/.github/scripts/parsers/workflow_parser.py
new file mode 100644
index 0000000..bb06712
--- /dev/null
+++ b/.github/scripts/parsers/workflow_parser.py
@@ -0,0 +1,77 @@
+"""Parser for reusable GitHub Actions workflow files (on.workflow_call)."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import yaml
+
+from .types import InputSpec, OutputSpec, SecretSpec, WorkflowSpec, parse_inputs, parse_outputs
+
+
+def parse_workflow(path: str | Path) -> WorkflowSpec:
+    """Parse a reusable workflow YAML file into a WorkflowSpec.
+
+    Args:
+        path: Path to a workflow YAML file.
+
+    Returns:
+        WorkflowSpec with the workflow_call inputs/secrets/outputs and a
+        per-job summary (runner label plus `uses:` references).
+    """
+    path = Path(path)
+    with open(path) as f:
+        data = yaml.safe_load(f)
+    data = data or {}  # empty file parses as None
+
+    # Fall back to the filename (without extension) when "name" is absent.
+    name = data.get("name", path.stem)
+
+    # Extract workflow_call trigger definition
+    # PyYAML follows YAML 1.1, where a bare "on" key is the boolean True,
+    # so the trigger block may be stored under True instead of "on".
+    on_block = data.get("on") or data.get(True) or {}
+    workflow_call = {}
+    if isinstance(on_block, dict):
+        workflow_call = on_block.get("workflow_call", {}) or {}
+
+    # Each section may be present but empty (None); guard with {}.
+    inputs = parse_inputs(workflow_call.get("inputs") or {})
+    secrets = _parse_secrets(workflow_call.get("secrets") or {})
+    outputs = parse_outputs(workflow_call.get("outputs") or {})
+    jobs = _parse_jobs(data.get("jobs") or {})
+
+    return WorkflowSpec(
+        name=name,
+        source_path=str(path),
+        inputs=inputs,
+        secrets=secrets,
+        outputs=outputs,
+        jobs=jobs,
+    )
+
+
+def _parse_secrets(raw: dict) -> list[SecretSpec]:
+    """Convert the workflow_call `secrets:` mapping into SecretSpec objects."""
+    secrets = []
+    for name, spec in raw.items():
+        spec = spec or {}  # bare "name:" parses as None
+        secrets.append(
+            SecretSpec(
+                name=name,
+                description=spec.get("description", ""),
+                required=spec.get("required", False),
+            )
+        )
+    return secrets
+
+
+def _parse_jobs(raw: dict) -> dict[str, dict]:
+    """Summarize each job: runner label plus any `uses:` references.
+
+    Returns:
+        Mapping of job id -> {"runs-on": <label or "">}, with an optional
+        "uses" key listing job-level and step-level `uses:` refs in order.
+    """
+    jobs = {}
+    for job_name, job_spec in raw.items():
+        job_spec = job_spec or {}  # bare "job:" parses as None
+        job_info: dict = {
+            "runs-on": job_spec.get("runs-on", ""),
+        }
+
+        # Collect all 'uses' references from steps and job-level reuse
+        uses_refs: list[str] = []
+        # A job-level `uses:` means the job calls another reusable workflow.
+        if "uses" in job_spec:
+            uses_refs.append(job_spec["uses"])
+
+        for step in job_spec.get("steps") or []:
+            if "uses" in step:
+                uses_refs.append(step["uses"])
+
+        # Only attach the key when something was found, so consumers can
+        # use job_info.get("uses", []).
+        if uses_refs:
+            job_info["uses"] = uses_refs
+
+        jobs[job_name] = job_info
+    return jobs
diff --git a/.github/scripts/renderers/__init__.py b/.github/scripts/renderers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.github/scripts/renderers/markdown_renderer.py b/.github/scripts/renderers/markdown_renderer.py
new file mode 100644
index 0000000..93ff067
--- /dev/null
+++ b/.github/scripts/renderers/markdown_renderer.py
@@ -0,0 +1,220 @@
+"""Renders parsed specs + enrichment results into markdown files via Jinja2."""
+
+from __future__ import annotations
+
+import os
+import re
+from pathlib import Path
+
+from jinja2 import Environment, FileSystemLoader
+
+from scripts.config import ACTIONS, CATEGORY_LABELS, WORKFLOWS
+from scripts.enrichers.base import EnrichmentResult
+from scripts.parsers.types import ActionSpec, InputSpec, WorkflowSpec
+
+
+def _build_env(templates_dir: str | Path) -> Environment:
+    """Build the Jinja2 environment shared by all renderers.
+
+    trim_blocks/lstrip_blocks keep {% ... %} tags from leaving stray
+    newlines or indentation in the rendered markdown, and
+    keep_trailing_newline preserves the template's final newline.
+    """
+    return Environment(
+        loader=FileSystemLoader(str(templates_dir)),
+        keep_trailing_newline=True,
+        trim_blocks=True,
+        lstrip_blocks=True,
+    )
+
+
+def _usage_placeholder(inp: InputSpec) -> str:
+    """Generate a placeholder value for the usage snippet.
+
+    Prefers the input's declared default; otherwise falls back to a
+    type-appropriate dummy value ("true" for boolean, "0" for number,
+    the literal '...' for everything else).
+    """
+    if inp.default is not None:
+        return inp.default
+    type_map = {
+        "boolean": "true",
+        "number": "0",
+    }
+    return type_map.get(inp.type, "'...'")
+
+
+def _resolve_action_link(uses_ref: str, from_output_path: str) -> dict | None:
+    """Resolve a uses: reference to a cross-link if it's an internal action.
+
+    Args:
+        uses_ref: Raw `uses:` value from a job or step, e.g.
+            "futuredapp/.github/.github/actions/<name>@<ref>".
+        from_output_path: Output path of the page being rendered; the
+            returned link is made relative to its directory.
+
+    Returns:
+        {"name": title, "link": relative_path} for references into this
+        repository's configured actions/workflows, or None for external
+        or unrecognized references.
+    """
+    # Match futuredapp/.github/.github/actions/@...
+    match = re.match(
+        r"futuredapp/\.github/\.github/actions/([^@]+)@", uses_ref
+    )
+    if not match:
+        # Also match internal workflow references
+        wf_match = re.match(
+            r"futuredapp/\.github/\.github/workflows/([^@]+)@", uses_ref
+        )
+        if not wf_match:
+            return None
+        wf_file = wf_match.group(1)
+        # Find matching workflow config
+        for _key, cfg in WORKFLOWS.items():
+            if cfg["source"].endswith(wf_file):
+                rel = os.path.relpath(cfg["output"], os.path.dirname(from_output_path))
+                return {"name": cfg["title"], "link": rel}
+        return None
+
+    action_name = match.group(1)
+    for _key, cfg in ACTIONS.items():
+        # Exact match on the configured source path for this action.
+        if cfg["source"] == f"actions/{action_name}/action.yml":
+            rel = os.path.relpath(cfg["output"], os.path.dirname(from_output_path))
+            return {"name": cfg["title"], "link": rel}
+    return None
+
+
+def render_workflow(
+    spec: WorkflowSpec,
+    config: dict,
+    enrichments: list[EnrichmentResult],
+    templates_dir: str | Path,
+    output_base: str | Path,
+    ref: str = "main",
+) -> Path:
+    """Render a workflow spec to a markdown file.
+
+    Args:
+        spec: Parsed workflow specification.
+        config: Workflow config mapping with "title", "source", "output"
+            and optional "runner", "deprecated", "deprecated_message",
+            "not_reusable" keys.
+        enrichments: Results whose description/tips text is appended to
+            the page's "Additional Details" section.
+        templates_dir: Directory containing workflow.md.j2.
+        output_base: Base directory that config["output"] is joined to.
+        ref: Git ref rendered into the usage snippet's `uses:` line.
+
+    Returns:
+        Path of the markdown file that was written.
+    """
+    env = _build_env(templates_dir)
+    template = env.get_template("workflow.md.j2")
+
+    # Prepare inputs with usage placeholders
+    inputs = []
+    required_inputs = []
+    for inp in spec.inputs:
+        inp_dict = {
+            "name": inp.name,
+            "type": inp.type,
+            "required": inp.required,
+            "default": inp.default,
+            "description": inp.description,
+            "usage_placeholder": _usage_placeholder(inp),
+        }
+        inputs.append(inp_dict)
+        if inp.required:
+            required_inputs.append(inp_dict)
+
+    required_secrets = [s for s in spec.secrets if s.required]
+
+    # Resolve internal action cross-links
+    internal_actions: list[dict] = []
+    seen_actions: set[str] = set()  # de-duplicate links by display name
+    for _job_name, job_info in spec.jobs.items():
+        for uses_ref in job_info.get("uses", []):
+            link = _resolve_action_link(uses_ref, config["output"])
+            if link and link["name"] not in seen_actions:
+                internal_actions.append(link)
+                seen_actions.add(link["name"])
+
+    # Merge enrichment results
+    enrichment_parts: list[str] = []
+    for er in enrichments:
+        if er.additional_description:
+            enrichment_parts.append(er.additional_description)
+        if er.usage_tips:
+            enrichment_parts.append(er.usage_tips)
+    enrichment_text = "\n\n".join(enrichment_parts) if enrichment_parts else ""
+
+    # Generate a sensible job name for the usage snippet
+    # (lowercased title, spaces -> hyphens, other characters stripped).
+    usage_job_name = config.get("title", "build").lower().replace(" ", "-")
+    usage_job_name = re.sub(r"[^a-z0-9-]", "", usage_job_name)
+
+    rendered = template.render(
+        title=config["title"],
+        source_path=config["source"],
+        runner=config.get("runner", ""),
+        deprecated=config.get("deprecated", False),
+        deprecated_message=config.get("deprecated_message", ""),
+        not_reusable=config.get("not_reusable", False),
+        spec=spec,
+        inputs=inputs,
+        required_inputs=required_inputs,
+        secrets=spec.secrets,
+        required_secrets=required_secrets,
+        outputs=spec.outputs,
+        internal_actions=internal_actions,
+        enrichment=enrichment_text,
+        usage_job_name=usage_job_name,
+        ref=ref,
+    )
+
+    output_path = Path(output_base) / config["output"]
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+    output_path.write_text(rendered, encoding="utf-8")
+    return output_path
+
+
+def render_action(
+    spec: ActionSpec,
+    config: dict,
+    enrichments: list[EnrichmentResult],
+    templates_dir: str | Path,
+    output_base: str | Path,
+    ref: str = "main",
+) -> Path:
+    """Render an action spec to a markdown file.
+
+    Args:
+        spec: Parsed composite action specification.
+        config: Action config mapping with "title", "source" and "output"
+            keys.
+        enrichments: Results whose description/tips text is appended to
+            the page's "Additional Details" section.
+        templates_dir: Directory containing action.md.j2.
+        output_base: Base directory that config["output"] is joined to.
+        ref: Git ref rendered into the usage snippet's `uses:` line.
+
+    Returns:
+        Path of the markdown file that was written.
+    """
+    env = _build_env(templates_dir)
+    template = env.get_template("action.md.j2")
+
+    # Prepare inputs with usage placeholders for the template.
+    inputs = []
+    required_inputs = []
+    for inp in spec.inputs:
+        inp_dict = {
+            "name": inp.name,
+            "type": inp.type,
+            "required": inp.required,
+            "default": inp.default,
+            "description": inp.description,
+            "usage_placeholder": _usage_placeholder(inp),
+        }
+        inputs.append(inp_dict)
+        if inp.required:
+            required_inputs.append(inp_dict)
+
+    # Merge enrichment results
+    enrichment_parts: list[str] = []
+    for er in enrichments:
+        if er.additional_description:
+            enrichment_parts.append(er.additional_description)
+        if er.usage_tips:
+            enrichment_parts.append(er.usage_tips)
+    enrichment_text = "\n\n".join(enrichment_parts) if enrichment_parts else ""
+
+    # Action path (e.g. actions/android-setup-environment)
+    action_path = str(Path(config["source"]).parent)
+
+    rendered = template.render(
+        title=config["title"],
+        source_path=config["source"],
+        spec=spec,
+        inputs=inputs,
+        required_inputs=required_inputs,
+        outputs=spec.outputs,
+        enrichment=enrichment_text,
+        action_path=action_path,
+        ref=ref,
+    )
+
+    output_path = Path(output_base) / config["output"]
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+    output_path.write_text(rendered, encoding="utf-8")
+    return output_path
+
+
+def render_index(
+    title: str,
+    description: str,
+    items: list[dict],
+    templates_dir: str | Path,
+    output_path: str | Path,
+) -> Path:
+    """Render a category index page.
+
+    Args:
+        title: Page heading.
+        description: Intro paragraph under the heading.
+        items: Entries with "title", "link" and "description" keys,
+            rendered as a table by index.md.j2.
+        templates_dir: Directory containing index.md.j2.
+        output_path: File to write; parent directories are created.
+
+    Returns:
+        Path of the written file.
+    """
+    env = _build_env(templates_dir)
+    template = env.get_template("index.md.j2")
+
+    rendered = template.render(
+        title=title,
+        description=description,
+        items=items,
+    )
+
+    output_path = Path(output_path)
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+    output_path.write_text(rendered, encoding="utf-8")
+    return output_path
diff --git a/.github/scripts/templates/action.md.j2 b/.github/scripts/templates/action.md.j2
new file mode 100644
index 0000000..90bbc95
--- /dev/null
+++ b/.github/scripts/templates/action.md.j2
@@ -0,0 +1,46 @@
+# {{ title }}
+
+**Source:** [`{{ source_path }}`](https://github.com/futuredapp/.github/blob/main/.github/{{ source_path }})
+
+{{ spec.description }}
+
+{% if inputs %}
+## Usage
+
+```yaml
+- uses: futuredapp/.github/.github/{{ action_path }}@{{ ref }}
+{% if required_inputs %}
+ with:
+{% for inp in required_inputs %}
+ {{ inp.name }}: {{ inp.usage_placeholder }}
+{% endfor %}
+{% endif %}
+```
+
+{% endif %}
+{% if inputs %}
+## Inputs
+
+| Name | Type | Required | Default | Description |
+|------|------|----------|---------|-------------|
+{% for inp in inputs %}
+| `{{ inp.name }}` | `{{ inp.type }}` | {{ "Yes" if inp.required else "No" }} | {{ "`" + inp.default + "`" if inp.default is not none and inp.default != "" else "—" }} | {{ inp.description }} |
+{% endfor %}
+
+{% endif %}
+{% if outputs %}
+## Outputs
+
+| Name | Description |
+|------|-------------|
+{% for out in outputs %}
+| `{{ out.name }}` | {{ out.description }} |
+{% endfor %}
+
+{% endif %}
+{% if enrichment %}
+## Additional Details
+
+{{ enrichment }}
+
+{% endif %}
diff --git a/.github/scripts/templates/index.md.j2 b/.github/scripts/templates/index.md.j2
new file mode 100644
index 0000000..a9a969b
--- /dev/null
+++ b/.github/scripts/templates/index.md.j2
@@ -0,0 +1,12 @@
+# {{ title }}
+
+{{ description }}
+
+{% if items %}
+| Name | Description |
+|------|-------------|
+{% for item in items %}
+| [{{ item.title }}]({{ item.link }}) | {{ item.description }} |
+{% endfor %}
+
+{% endif %}
diff --git a/.github/scripts/templates/workflow.md.j2 b/.github/scripts/templates/workflow.md.j2
new file mode 100644
index 0000000..d06e5ad
--- /dev/null
+++ b/.github/scripts/templates/workflow.md.j2
@@ -0,0 +1,88 @@
+# {{ title }}
+
+{% if deprecated %}
+!!! warning "Deprecated"
+ {{ deprecated_message }}
+
+{% endif %}
+{% if not_reusable %}
+!!! info "Internal Workflow"
+ This is not a reusable workflow — it runs directly on `pull_request` events in this repository.
+
+{% endif %}
+**Source:** [`{{ source_path }}`](https://github.com/futuredapp/.github/blob/main/.github/{{ source_path }})
+{% if runner %}
+
+**Runner:** `{{ runner }}`
+{% endif %}
+
+{% if spec.name != title %}
+*{{ spec.name }}*
+{% endif %}
+
+{% if inputs %}
+## Usage
+
+```yaml
+jobs:
+ {{ usage_job_name }}:
+ uses: futuredapp/.github/.github/{{ source_path }}@{{ ref }}
+{% if required_inputs %}
+ with:
+{% for inp in required_inputs %}
+ {{ inp.name }}: {{ inp.usage_placeholder }}
+{% endfor %}
+{% endif %}
+{% if required_secrets %}
+ secrets:
+{% for sec in required_secrets %}
+ {{ sec.name }}: {{"${{"}} secrets.{{ sec.name }} {{"}}"}}
+{% endfor %}
+{% endif %}
+```
+
+{% endif %}
+{% if inputs %}
+## Inputs
+
+| Name | Type | Required | Default | Description |
+|------|------|----------|---------|-------------|
+{% for inp in inputs %}
+| `{{ inp.name }}` | `{{ inp.type }}` | {{ "Yes" if inp.required else "No" }} | {{ "`" + inp.default + "`" if inp.default is not none and inp.default != "" else "—" }} | {{ inp.description }} |
+{% endfor %}
+
+{% endif %}
+{% if secrets %}
+## Secrets
+
+| Name | Required | Description |
+|------|----------|-------------|
+{% for sec in secrets %}
+| `{{ sec.name }}` | {{ "Yes" if sec.required else "No" }} | {{ sec.description }} |
+{% endfor %}
+
+{% endif %}
+{% if outputs %}
+## Outputs
+
+| Name | Description |
+|------|-------------|
+{% for out in outputs %}
+| `{{ out.name }}` | {{ out.description }} |
+{% endfor %}
+
+{% endif %}
+{% if internal_actions %}
+## Internal Actions Used
+
+{% for action in internal_actions %}
+- [`{{ action.name }}`]({{ action.link }})
+{% endfor %}
+
+{% endif %}
+{% if enrichment %}
+## Additional Details
+
+{{ enrichment }}
+
+{% endif %}
diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
new file mode 100644
index 0000000..48b2019
--- /dev/null
+++ b/.github/workflows/deploy-docs.yml
@@ -0,0 +1,66 @@
+name: Deploy Documentation
+
+on:
+  push:
+    branches: [main]
+    tags: ['*']
+    # NOTE(review): path filters match against the repository root, but the
+    # generator, templates and docs sources added in this change live under
+    # .github/ (.github/scripts/, .github/docs/). Confirm that scripts/**,
+    # docs/**, mkdocs.yml and requirements-docs.txt actually exist at the
+    # root, or prefix these filters (and the run steps below) with .github/
+    # or set a working-directory.
+    paths:
+      - '.github/workflows/*.yml'
+      - '.github/actions/*/action.yml'
+      - '.github/actions/*/README.md'
+      - 'docs/**'
+      - 'mkdocs.yml'
+      - 'scripts/**'
+      - 'requirements-docs.txt'
+  workflow_dispatch:
+
+# contents: write lets the job push the generated site (mike --push).
+permissions:
+  contents: write
+
+# Serialize deployments so concurrent pushes don't race; a newer run
+# cancels an in-flight one.
+concurrency:
+  group: deploy-docs
+  cancel-in-progress: true
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v6
+        with:
+          # Full history + tags: mike commits onto an existing branch and
+          # needs refs beyond a shallow clone.
+          fetch-depth: 0
+          fetch-tags: true
+      - uses: actions/setup-python@v6
+        with:
+          python-version: '3.x'
+          cache: 'pip'
+          cache-dependency-path: requirements-docs.txt
+      - run: pip install -r requirements-docs.txt
+      # Tag push -> deploy that version and alias it to "latest";
+      # branch push -> deploy/update the rolling "main" version.
+      - name: Determine version and alias
+        id: version
+        run: |
+          if [[ "$GITHUB_REF" == refs/tags/* ]]; then
+            VERSION="${GITHUB_REF#refs/tags/}"
+            {
+              echo "version=$VERSION"
+              echo "alias=latest"
+              echo "ref=$VERSION"
+            } >> "$GITHUB_OUTPUT"
+          else
+            {
+              echo "version=main"
+              echo "alias="
+              echo "ref=main"
+            } >> "$GITHUB_OUTPUT"
+          fi
+      # "ref" controls the @<ref> rendered into usage snippets in the docs.
+      - run: python scripts/generate-docs.py --ref ${{ steps.version.outputs.ref }}
+      - run: |
+          git config user.name github-actions[bot]
+          git config user.email 41898282+github-actions[bot]@users.noreply.github.com
+      # NOTE(review): every pushed tag (including pre-releases) moves the
+      # "latest" alias and the site default — confirm that's intended.
+      - name: Deploy versioned docs
+        run: |
+          if [[ -n "${{ steps.version.outputs.alias }}" ]]; then
+            mike deploy --push --update-aliases ${{ steps.version.outputs.version }} ${{ steps.version.outputs.alias }}
+            mike set-default --push latest
+          else
+            mike deploy --push main
+          fi
diff --git a/.gitignore b/.gitignore
index 4a6f485..90c9f29 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,4 @@
.idea
*.DS_Store
+.github/.venv/
+.github/site/