Skip to content
This repository was archived by the owner on Mar 4, 2026. It is now read-only.
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it1_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it1"

def main():
"""Generates the Iteration 1 scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 1,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it2_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it2"

def main():
"""Generates the Iteration 2 scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 2,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it3_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it3"

def main():
"""Generates the Iteration 3 scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 3,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it4_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it4"

def main():
"""Generates the Iteration 4 scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 4,
Expand Down
124 changes: 90 additions & 34 deletions HeadySystems_v13/codex_builder_v13.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,17 +25,27 @@
DESTRUCTIVE_PATTERNS = ["write", "delete", "rm", "exec", "shell", "edit_file"]

class AtomicWriter:
"""Provides atomic file writing capabilities using temporary files."""

@staticmethod
def write_json(path: str, data: Dict[str, Any]) -> str:
def _write_to_temp_and_replace(path: str, content_bytes: bytes) -> str:
"""
Writes bytes to a temporary file and atomically replaces the target file.

Args:
path: The target file path.
content_bytes: The content to write in bytes.

Returns:
The SHA256 hash of the content.
"""
# FIX: Ensure directory defaults to '.' if empty to prevent cross-device link errors
directory = os.path.dirname(path)
if not directory:
directory = "."
elif not os.path.exists(directory):
os.makedirs(directory, exist_ok=True)

json_content = json.dumps(data, indent=2, sort_keys=True)
content_bytes = json_content.encode('utf-8')
file_hash = hashlib.sha256(content_bytes).hexdigest()

with tempfile.NamedTemporaryFile(mode='wb', dir=directory, delete=False) as tf:
Expand All @@ -55,34 +65,40 @@ def write_json(path: str, data: Dict[str, Any]) -> str:
return file_hash

@staticmethod
def write_text(path: str, content: str) -> str:
directory = os.path.dirname(path)
if not directory:
directory = "."
elif not os.path.exists(directory):
os.makedirs(directory, exist_ok=True)
def write_json(path: str, data: Dict[str, Any]) -> str:
    """Atomically serialize *data* as pretty-printed JSON to *path*.

    Args:
        path: Destination file path.
        data: JSON-serializable dictionary to persist.

    Returns:
        The SHA256 hex digest of the bytes written.
    """
    serialized = json.dumps(data, indent=2, sort_keys=True)
    return AtomicWriter._write_to_temp_and_replace(path, serialized.encode('utf-8'))

with tempfile.NamedTemporaryFile(mode='wb', dir=directory, delete=False) as tf:
tf.write(content_bytes)
tf.flush()
os.fsync(tf.fileno())
temp_name = tf.name
@staticmethod
def write_text(path: str, content: str) -> str:
    """Atomically write *content* to *path* as UTF-8 text.

    Args:
        path: Destination file path.
        content: Text to persist.

    Returns:
        The SHA256 hex digest of the bytes written.
    """
    encoded = content.encode('utf-8')
    return AtomicWriter._write_to_temp_and_replace(path, encoded)

class GovernanceGenerator:
"""Generates governance-related configurations."""

@staticmethod
def generate_lock_file() -> Dict[str, Any]:
"""Generates the governance lock file structure."""
return {
"mode": "release",
"repo": "HeadyConnection-Org/governance",
Expand All @@ -93,8 +109,11 @@ def generate_lock_file() -> Dict[str, Any]:
}

class GatewayConfigurator:
"""Generates the MCP Gateway configuration."""

@staticmethod
def generate_config() -> Dict[str, Any]:
"""Generates the gateway configuration dictionary."""
return {
"bind": "127.0.0.1",
"allowHosts": ["127.0.0.1", "localhost"],
Expand All @@ -118,13 +137,13 @@ def generate_config() -> Dict[str, Any]:
]
}

def main():
print(f"Starting Codex Builder {GENERATOR_VERSION}...")
def _generate_registry(manifest: Dict[str, Any]) -> None:
"""
Generates the REGISTRY.json file and updates the manifest.

# Use timezone-aware UTC datetime
manifest = {"files": [], "generated_at": datetime.datetime.now(datetime.timezone.utc).isoformat()}

# 1. Registry
Args:
manifest: The manifest dictionary to update.
"""
registry_data = {
"schema_version": "1.0.0",
"as_of_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
Expand All @@ -142,17 +161,35 @@ def main():
h_reg = AtomicWriter.write_json("REGISTRY.json", registry_data)
manifest["files"].append({"path": "REGISTRY.json", "sha256": h_reg})

# 2. Governance Lock
def _generate_governance(manifest: Dict[str, Any]) -> None:
    """Write governance.lock and record its hash in *manifest*.

    Args:
        manifest: Build manifest whose "files" list is appended to.
    """
    lock_data = GovernanceGenerator.generate_lock_file()
    digest = AtomicWriter.write_json("governance.lock", lock_data)
    manifest["files"].append({"path": "governance.lock", "sha256": digest})

# 3. Gateway Config
def _generate_gateway_config(manifest: Dict[str, Any]) -> None:
    """Write mcp-gateway-config.json and record its hash in *manifest*.

    Args:
        manifest: Build manifest whose "files" list is appended to.
    """
    config = GatewayConfigurator.generate_config()
    digest = AtomicWriter.write_json("mcp-gateway-config.json", config)
    manifest["files"].append({"path": "mcp-gateway-config.json", "sha256": digest})

# 4. Directories
def _create_structure(manifest: Dict[str, Any]) -> None:
"""
Creates the directory structure and .gitkeep files, updating the manifest.

Args:
manifest: The manifest dictionary to update.
"""
dirs_to_create = [
"prompts/registry",
"prompts/receipts",
Expand All @@ -167,7 +204,13 @@ def main():
h_keep = AtomicWriter.write_text(keep_path, "")
manifest["files"].append({"path": keep_path, "sha256": h_keep})

# 5. Context Docs
def _generate_context(manifest: Dict[str, Any]) -> None:
"""
Generates the CONTEXT.md file and updates the manifest.

Args:
manifest: The manifest dictionary to update.
"""
context_md = f"""# Heady Sovereign Node
> Generated by {GENERATOR_NAME} {GENERATOR_VERSION}
> DO NOT EDIT. This file is deterministically derived from REGISTRY.json.
Expand All @@ -185,7 +228,20 @@ def main():
h_ctx = AtomicWriter.write_text("CONTEXT.md", context_md)
manifest["files"].append({"path": "CONTEXT.md", "sha256": h_ctx})

# 6. Manifest
def main():
    """Entry point: generate all repository artifacts, then the manifest."""
    print(f"Starting Codex Builder {GENERATOR_VERSION}...")

    # Timezone-aware UTC timestamp recorded for provenance.
    manifest = {
        "files": [],
        "generated_at": datetime.datetime.now(datetime.timezone.utc).isoformat(),
    }

    # Each step writes its artifact(s) and appends entries to manifest["files"].
    for step in (
        _generate_registry,
        _generate_governance,
        _generate_gateway_config,
        _create_structure,
        _generate_context,
    ):
        step(manifest)

    # Manifest is written last so it covers every file generated above.
    AtomicWriter.write_json("manifest.json", manifest)
    print("\n Repository generation complete.")

Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/scripts/docs/check_drift.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
WATCH_PATHS = ["ops/", "ai/", "docs/", "web/modules/custom/"]

def main() -> int:
"""Compares git diffs to detect undocumented changes."""
parser = argparse.ArgumentParser()
parser.add_argument("--base", required=True)
parser.add_argument("--head", required=True)
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/scripts/docs/validate_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
ROOT = Path(__file__).resolve().parents[2]

def main() -> int:
"""Checks for missing documentation files."""
missing = [path for path in REQUIRED_FILES if not (ROOT / path).exists()]
if missing:
print("Missing required docs:", missing)
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/scripts/ops/validate_status_feed.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from pathlib import Path

def main() -> int:
"""Validates the JSON structure of the status feed."""
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/scripts/patents/validate_patent_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from pathlib import Path

def main() -> int:
    """Validate the patent index (currently a stub that always passes).

    Returns:
        Process exit code: 0 on success.
    """
    # Real validation checks are not implemented yet.
    print("Patent index validation passed")
    return 0
Expand Down