Skip to content
This repository was archived by the owner on Mar 4, 2026. It is now read-only.
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it1_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it1"

def main():
"""Generates the iteration scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 1,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it2_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it2"

def main():
"""Generates the iteration scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 2,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it3_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it3"

def main():
"""Generates the iteration scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 3,
Expand Down
1 change: 1 addition & 0 deletions HeadySystems_v13/Heady_it4_v_1_0_0.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
OUTPUT_DIR = ROOT / "heady_iterations" / "it4"

def main():
"""Generates the iteration scaffold."""
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
payload = {
"iteration": 4,
Expand Down
118 changes: 69 additions & 49 deletions HeadySystems_v13/codex_builder_v13.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,64 +25,53 @@
DESTRUCTIVE_PATTERNS = ["write", "delete", "rm", "exec", "shell", "edit_file"]

class AtomicWriter:
"""Handles atomic file operations to prevent corruption."""

@staticmethod
def write_json(path: str, data: Dict[str, Any]) -> str:
def _write_to_temp_and_replace(path: str, content_bytes: bytes) -> str:
"""Internal helper to write bytes to a temp file and replace target."""
# FIX: Ensure directory defaults to '.' if empty to prevent cross-device link errors
directory = os.path.dirname(path)
if not directory:
directory = "."
elif not os.path.exists(directory):
os.makedirs(directory, exist_ok=True)

json_content = json.dumps(data, indent=2, sort_keys=True)
content_bytes = json_content.encode('utf-8')
file_hash = hashlib.sha256(content_bytes).hexdigest()

with tempfile.NamedTemporaryFile(mode='wb', dir=directory, delete=False) as tf:
tf.write(content_bytes)
tf.flush()
os.fsync(tf.fileno())
temp_name = tf.name

try:
os.replace(temp_name, path)
except OSError as e:
os.remove(temp_name)
print(f" Atomic write failed for {path}: {e}")
sys.exit(1)

print(f"[Generate] {path} (SHA256: {file_hash[:8]}...)")
return file_hash

@staticmethod
def write_text(path: str, content: str) -> str:
directory = os.path.dirname(path)
if not directory:
directory = "."
elif not os.path.exists(directory):
os.makedirs(directory, exist_ok=True)

content_bytes = content.encode('utf-8')
file_hash = hashlib.sha256(content_bytes).hexdigest()

with tempfile.NamedTemporaryFile(mode='wb', dir=directory, delete=False) as tf:
tf.write(content_bytes)
tf.flush()
os.fsync(tf.fileno())
temp_name = tf.name

try:
os.replace(temp_name, path)
except OSError:
os.remove(temp_name)
raise
def write_json(path: str, data: Dict[str, Any]) -> str:
"""Writes a dictionary as JSON to a file atomically."""
json_content = json.dumps(data, indent=2, sort_keys=True)
return AtomicWriter._write_to_temp_and_replace(path, json_content.encode('utf-8'))

print(f"[Generate] {path} (SHA256: {file_hash[:8]}...)")
return file_hash
@staticmethod
def write_text(path: str, content: str) -> str:
"""Writes text content to a file atomically."""
return AtomicWriter._write_to_temp_and_replace(path, content.encode('utf-8'))

class GovernanceGenerator:
"""Generates governance lock configurations."""

@staticmethod
def generate_lock_file() -> Dict[str, Any]:
"""Generates the governance lock file structure."""
return {
"mode": "release",
"repo": "HeadyConnection-Org/governance",
Expand All @@ -93,8 +82,11 @@ def generate_lock_file() -> Dict[str, Any]:
}

class GatewayConfigurator:
"""Generates the MCP Gateway configuration."""

@staticmethod
def generate_config() -> Dict[str, Any]:
"""Generates the gateway configuration dictionary."""
return {
"bind": "127.0.0.1",
"allowHosts": ["127.0.0.1", "localhost"],
Expand All @@ -118,13 +110,8 @@ def generate_config() -> Dict[str, Any]:
]
}

def main():
print(f"Starting Codex Builder {GENERATOR_VERSION}...")

# Use timezone-aware UTC datetime
manifest = {"files": [], "generated_at": datetime.datetime.now(datetime.timezone.utc).isoformat()}

# 1. Registry
def _generate_registry() -> Dict[str, str]:
"""Generates the registry file."""
registry_data = {
"schema_version": "1.0.0",
"as_of_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
Expand All @@ -139,20 +126,27 @@ def main():
"audit_enabled": True
}
}
h_reg = AtomicWriter.write_json("REGISTRY.json", registry_data)
manifest["files"].append({"path": "REGISTRY.json", "sha256": h_reg})
file_hash = AtomicWriter.write_json("REGISTRY.json", registry_data)
return {"path": "REGISTRY.json", "sha256": file_hash}

# 2. Governance Lock

def _generate_governance() -> Dict[str, str]:
"""Generates the governance lock file."""
gov_lock = GovernanceGenerator.generate_lock_file()
h_gov = AtomicWriter.write_json("governance.lock", gov_lock)
manifest["files"].append({"path": "governance.lock", "sha256": h_gov})
file_hash = AtomicWriter.write_json("governance.lock", gov_lock)
return {"path": "governance.lock", "sha256": file_hash}

# 3. Gateway Config

def _generate_gateway_config() -> Dict[str, str]:
"""Generates the gateway configuration file."""
gateway_conf = GatewayConfigurator.generate_config()
h_gw = AtomicWriter.write_json("mcp-gateway-config.json", gateway_conf)
manifest["files"].append({"path": "mcp-gateway-config.json", "sha256": h_gw})
file_hash = AtomicWriter.write_json("mcp-gateway-config.json", gateway_conf)
return {"path": "mcp-gateway-config.json", "sha256": file_hash}

# 4. Directories

def _create_structure() -> List[Dict[str, str]]:
"""Creates directory structure and gitkeeps."""
files = []
dirs_to_create = [
"prompts/registry",
"prompts/receipts",
Expand All @@ -164,10 +158,13 @@ def main():
os.makedirs(d, exist_ok=True)
keep_path = os.path.join(d, ".gitkeep")
if not os.path.exists(keep_path):
h_keep = AtomicWriter.write_text(keep_path, "")
manifest["files"].append({"path": keep_path, "sha256": h_keep})
file_hash = AtomicWriter.write_text(keep_path, "")
files.append({"path": keep_path, "sha256": file_hash})
return files

# 5. Context Docs

def _generate_context() -> Dict[str, str]:
"""Generates the context documentation."""
context_md = f"""# Heady Sovereign Node
> Generated by {GENERATOR_NAME} {GENERATOR_VERSION}
> DO NOT EDIT. This file is deterministically derived from REGISTRY.json.
Expand All @@ -182,8 +179,31 @@ def main():
* **Governance:** Locked (v1.2.0)
* **PromptOps:** Enforced
"""
h_ctx = AtomicWriter.write_text("CONTEXT.md", context_md)
manifest["files"].append({"path": "CONTEXT.md", "sha256": h_ctx})
file_hash = AtomicWriter.write_text("CONTEXT.md", context_md)
return {"path": "CONTEXT.md", "sha256": file_hash}


def main():
"""Main entry point for the Codex Builder."""
print(f"Starting Codex Builder {GENERATOR_VERSION}...")

# Use timezone-aware UTC datetime
manifest = {"files": [], "generated_at": datetime.datetime.now(datetime.timezone.utc).isoformat()}

# 1. Registry
manifest["files"].append(_generate_registry())

# 2. Governance Lock
manifest["files"].append(_generate_governance())

# 3. Gateway Config
manifest["files"].append(_generate_gateway_config())

# 4. Directories
manifest["files"].extend(_create_structure())

# 5. Context Docs
manifest["files"].append(_generate_context())

# 6. Manifest
AtomicWriter.write_json("manifest.json", manifest)
Expand Down
2 changes: 2 additions & 0 deletions HeadySystems_v13/scripts/docs/check_drift.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
#!/usr/bin/env python3
"""Checks for documentation drift between git commits."""
import argparse
import subprocess
import sys

WATCH_PATHS = ["ops/", "ai/", "docs/", "web/modules/custom/"]

def main() -> int:
"""Compares git diffs to detect undocumented changes."""
parser = argparse.ArgumentParser()
parser.add_argument("--base", required=True)
parser.add_argument("--head", required=True)
Expand Down
2 changes: 2 additions & 0 deletions HeadySystems_v13/scripts/docs/validate_docs.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
"""Validates the existence of required documentation files."""
import os
import sys
from pathlib import Path
Expand All @@ -15,6 +16,7 @@
ROOT = Path(__file__).resolve().parents[2]

def main() -> int:
"""Checks for missing documentation files."""
missing = [path for path in REQUIRED_FILES if not (ROOT / path).exists()]
if missing:
print("Missing required docs:", missing)
Expand Down
2 changes: 2 additions & 0 deletions HeadySystems_v13/scripts/ops/validate_status_feed.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
#!/usr/bin/env python3
"""Validates the status feed JSON."""
import argparse
import json
import sys
from pathlib import Path

def main() -> int:
"""Validates the JSON structure of the status feed."""
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
Expand Down
2 changes: 2 additions & 0 deletions HeadySystems_v13/scripts/patents/validate_patent_index.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
#!/usr/bin/env python3
"""Validates the patent index."""
import sys
from pathlib import Path

def main() -> int:
    """Validate the patent index (stub implementation).

    Returns:
        Process exit code; always 0 until real validation logic lands.
    """
    # Placeholder: no real checks are performed yet.
    print("Patent index validation passed")
    return 0
Expand Down