Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
130 changes: 130 additions & 0 deletions adr_kit/enforcement/adapters/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
"""Base adapter interface for enforcement config generation.

All adapters must implement BaseAdapter. Capabilities are declared via
properties so the PolicyRouter can match adapters to contract policy keys
and the detected technology stack without instantiating every adapter.

ConfigFragment is the in-memory artifact produced by generate_fragments();
the pipeline is responsible for writing it to disk.
"""

from abc import ABC, abstractmethod
from dataclasses import dataclass, field

from ...contract.models import MergedConstraints


@dataclass
class ConfigFragment:
    """In-memory config fragment produced by an adapter before it is written to disk.

    Attributes:
        adapter: Adapter that produced this fragment, e.g. 'eslint'.
        target_file: Relative path (from project root) where this fragment
            should be written.
        content: Serialized content ready to write (JSON string, TOML
            string, etc.).
        fragment_type: Format hint: 'json_file', 'toml_file', 'ini_file', etc.
        policy_keys: Contract policy keys covered by this fragment,
            e.g. ['imports.disallow.axios'].
    """

    adapter: str
    target_file: str
    content: str
    fragment_type: str
    policy_keys: list[str] = field(default_factory=list)


class BaseAdapter(ABC):
    """Common interface that every enforcement adapter implements.

    Capabilities are exposed as properties so the PolicyRouter can match an
    adapter against contract policy keys and the detected stack without
    running it; generate_fragments() is only invoked on adapters the router
    actually selects.
    """

    # ------------------------------------------------------------------
    # Required capability declarations
    # ------------------------------------------------------------------

    @property
    @abstractmethod
    def name(self) -> str:
        """Unique adapter identifier, e.g. 'eslint', 'ruff'."""
        ...

    @property
    @abstractmethod
    def supported_policy_keys(self) -> list[str]:
        """Constraint fields of the contract this adapter can enforce.

        Values match field names on MergedConstraints (e.g. ['imports',
        'python']); the router maps policy keys to adapters through this list.
        """
        ...

    @property
    @abstractmethod
    def supported_languages(self) -> list[str]:
        """Languages / ecosystems this adapter targets.

        Lower-case identifiers as emitted by StackDetector, e.g.
        ['javascript', 'typescript']; the router filters adapters against the
        detected project stack using this list.
        """
        ...

    @property
    @abstractmethod
    def config_targets(self) -> list[str]:
        """File paths (relative to project root) this adapter may write to."""
        ...

    # ------------------------------------------------------------------
    # Optional capability declarations (with sensible defaults)
    # ------------------------------------------------------------------

    @property
    def supported_clause_kinds(self) -> list[str]:
        """Clause families this adapter can enforce, e.g. 'forbidden_import'.

        Provisional — the canonical vocabulary will come from ENF-CLA; until
        then free-form strings are fine. An empty list (the default) means
        the adapter handles all clause kinds.
        """
        return []

    @property
    def output_modes(self) -> list[str]:
        """Kinds of artifacts this adapter emits.

        One of: native_config, native_rules, generated_checker, policy_file,
        script_fallback. ENF-MODE will turn these into a first-class enum.
        Defaults to native_config.
        """
        return ["native_config"]

    @property
    def supported_stages(self) -> list[str]:
        """Enforcement stages this adapter targets (commit, push, ci).

        Defaults to ['ci'].
        """
        return ["ci"]

    # ------------------------------------------------------------------
    # Core method
    # ------------------------------------------------------------------

    @abstractmethod
    def generate_fragments(
        self, constraints: MergedConstraints
    ) -> list[ConfigFragment]:
        """Generate in-memory config fragments from the merged constraints.

        Called by the pipeline after routing. Implementations must be
        deterministic — identical inputs must produce identical outputs.

        Args:
            constraints: The merged policy constraints from the
                ConstraintsContract.

        Returns:
            List of ConfigFragment objects (may be empty if nothing to emit).
        """
        ...
61 changes: 61 additions & 0 deletions adr_kit/enforcement/adapters/eslint.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,11 @@
from pathlib import Path
from typing import Any, TypedDict

from ...contract.models import MergedConstraints
from ...core.model import ADR, ADRStatus
from ...core.parse import ParseError, find_adr_files, parse_adr_file
from ...core.policy_extractor import PolicyExtractor
from .base import BaseAdapter, ConfigFragment


class ADRMetadata(TypedDict):
Expand Down Expand Up @@ -437,3 +439,62 @@ def generate_eslint_overrides(
)

return {"overrides": overrides}


class ESLintAdapter(BaseAdapter):
    """Enforcement adapter that generates ESLint configuration from contract constraints."""

    @property
    def name(self) -> str:
        return "eslint"

    @property
    def supported_policy_keys(self) -> list[str]:
        # Only the 'imports' constraint family maps onto ESLint rules.
        return ["imports"]

    @property
    def supported_languages(self) -> list[str]:
        return ["javascript", "typescript"]

    @property
    def config_targets(self) -> list[str]:
        return [".eslintrc.adrs.json"]

    @property
    def supported_clause_kinds(self) -> list[str]:
        return ["forbidden_import", "preferred_import"]

    @property
    def output_modes(self) -> list[str]:
        return ["native_config"]

    @property
    def supported_stages(self) -> list[str]:
        return ["commit", "ci"]

    def generate_fragments(
        self, constraints: MergedConstraints
    ) -> list[ConfigFragment]:
        """Generate a single ESLint config fragment from the merged constraints.

        Args:
            constraints: The merged policy constraints from the
                ConstraintsContract.

        Returns:
            A one-element list containing the JSON fragment for the ESLint
            config (the config content itself may be empty of rules if no
            import constraints are present).
        """
        config = generate_eslint_config_from_contract(constraints)
        content = json.dumps(config, indent=2)

        # Record which contract policy keys this fragment covers so the
        # pipeline can trace generated rules back to the contract.
        policy_keys: list[str] = []
        imports = constraints.imports
        if imports and imports.disallow:
            policy_keys.extend(f"imports.disallow.{pkg}" for pkg in imports.disallow)
        if imports and imports.prefer:
            policy_keys.extend(f"imports.prefer.{pkg}" for pkg in imports.prefer)

        return [
            ConfigFragment(
                adapter=self.name,
                # Single source of truth: reuse the declared config target
                # rather than duplicating the path literal (was hard-coded,
                # risking drift from config_targets).
                target_file=self.config_targets[0],
                content=content,
                fragment_type="json_file",
                policy_keys=policy_keys,
            )
        ]
63 changes: 63 additions & 0 deletions adr_kit/enforcement/adapters/ruff.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,10 @@

import toml

from ...contract.models import MergedConstraints
from ...core.model import ADR, ADRStatus
from ...core.parse import ParseError, find_adr_files, parse_adr_file
from .base import BaseAdapter, ConfigFragment


class PythonRuleExtractor:
Expand Down Expand Up @@ -396,3 +398,64 @@ def generate_pyproject_ruff_section(
}
}
}


class RuffAdapter(BaseAdapter):
    """Enforcement adapter that generates Ruff configuration from contract constraints."""

    @property
    def name(self) -> str:
        return "ruff"

    @property
    def supported_policy_keys(self) -> list[str]:
        # Ruff can enforce both Python-specific and generic import constraints.
        return ["python", "imports"]

    @property
    def supported_languages(self) -> list[str]:
        return ["python"]

    @property
    def config_targets(self) -> list[str]:
        return [".ruff-adr.toml"]

    @property
    def supported_clause_kinds(self) -> list[str]:
        return ["forbidden_import"]

    @property
    def output_modes(self) -> list[str]:
        return ["native_config"]

    @property
    def supported_stages(self) -> list[str]:
        return ["commit", "ci"]

    def generate_fragments(
        self, constraints: MergedConstraints
    ) -> list[ConfigFragment]:
        """Generate a single Ruff config fragment from the merged constraints.

        Args:
            constraints: The merged policy constraints from the
                ConstraintsContract.

        Returns:
            A one-element list containing the TOML fragment for the Ruff
            config (the config content itself may be empty of rules if no
            matching constraints are present).
        """
        config_toml = generate_ruff_config_from_contract(constraints)

        # Record which contract policy keys this fragment covers so the
        # pipeline can trace generated rules back to the contract.
        policy_keys: list[str] = []
        python = constraints.python
        if python and python.disallow_imports:
            policy_keys.extend(
                f"python.disallow_imports.{pkg}" for pkg in python.disallow_imports
            )
        imports = constraints.imports
        if imports and imports.disallow:
            policy_keys.extend(f"imports.disallow.{pkg}" for pkg in imports.disallow)

        return [
            ConfigFragment(
                adapter=self.name,
                # Single source of truth: reuse the declared config target
                # rather than duplicating the path literal (was hard-coded,
                # risking drift from config_targets).
                target_file=self.config_targets[0],
                content=config_toml,
                fragment_type="toml_file",
                policy_keys=policy_keys,
            )
        ]
79 changes: 79 additions & 0 deletions adr_kit/enforcement/detection/stack.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
"""Technology stack detection for the enforcement router.

Scans a project directory for file extensions to determine which languages
and ecosystems are present. The router uses this to filter adapters by stack.
"""

from collections.abc import Generator
from pathlib import Path

# Extension → language identifier (lower-case)
_EXT_TO_LANGUAGE: dict[str, str] = {
    ".py": "python",
    ".js": "javascript",
    ".jsx": "javascript",
    ".ts": "typescript",
    ".tsx": "typescript",
}

# Directories to always skip during scanning
_SKIP_DIRS = {
    ".git",
    ".venv",
    "venv",
    "node_modules",
    "__pycache__",
    ".mypy_cache",
    ".ruff_cache",
    ".pytest_cache",
    "dist",
    "build",
    ".testenv",
}


class StackDetector:
    """Detect which technology stacks are present in a project directory.

    Scans file extensions (skipping common build/cache dirs) and returns
    a deduplicated, sorted list of language identifiers.

    Example::

        detector = StackDetector(project_root=Path("/my/project"))
        stack = detector.detect()
        # e.g. ["python", "typescript"]
    """

    def __init__(self, project_root: Path) -> None:
        self.project_root = project_root

    def detect(self) -> list[str]:
        """Return detected language identifiers for this project.

        Scans all files under project_root (excluding common ignored dirs)
        and maps file extensions to language names.

        Returns:
            Sorted, deduplicated list of language identifiers,
            e.g. ['python', 'typescript'].
        """
        found: set[str] = set()

        for path in self._iter_files():
            lang = _EXT_TO_LANGUAGE.get(path.suffix.lower())
            if lang:
                found.add(lang)

        return sorted(found)

    def _iter_files(self) -> "Generator[Path, None, None]":
        """Iterate over project files, skipping ignored directories.

        The skip check is applied to the path *relative to project_root*:
        checking the absolute path's components would wrongly skip the whole
        project whenever the root's own path contains a skip-list name
        (e.g. a checkout under '/home/ci/build/myproject').
        """
        try:
            for item in self.project_root.rglob("*"):
                # Skip if any component below the project root is ignored.
                rel_parts = item.relative_to(self.project_root).parts
                if any(part in _SKIP_DIRS for part in rel_parts):
                    continue
                if item.is_file():
                    yield item
        except PermissionError:
            # Best-effort: an unreadable directory ends the scan quietly
            # rather than crashing detection.
            pass
Loading
Loading