From 2b05efce2f98c6a1dad8a283a04ecf30e9cedc21 Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 11:41:38 -0500 Subject: [PATCH 1/8] tb-minimal: add minimal runner, schema, tests, CI, and orchestrator facade --- FEATURES_SUMMARY.md | 5 +++ README_MINIMAL.md | 22 ++++++++++++ scripts/run_minimal.py | 1 + src/training/core/continuous_learning_loop.py | 4 +++ src/training/core/orchestrator.py | 36 +++++++++++++++++++ src/training/core/session_manager.py | 18 ++++++++++ src/training/game_runner.py | 12 ++++++- tb-minimal-MANIFEST.md | 18 ++++++++++ train.py | 14 +++++--- 9 files changed, 125 insertions(+), 5 deletions(-) create mode 100644 README_MINIMAL.md create mode 100644 src/training/core/orchestrator.py create mode 100644 src/training/core/session_manager.py create mode 100644 tb-minimal-MANIFEST.md diff --git a/FEATURES_SUMMARY.md b/FEATURES_SUMMARY.md index 92b5cac..53551dd 100644 --- a/FEATURES_SUMMARY.md +++ b/FEATURES_SUMMARY.md @@ -76,3 +76,8 @@ This document is an inventory of the major features and modules present in the T *This inventory is intentionally high-level. The next step (SIMPLIFICATIONS.md) will analyze pros/cons and propose simplifications.* + +Additions in TB-Seed work: +- `src/vision/schema.py` — small validator for detection outputs (`bbox`,`label`,`confidence/score`). +- `scripts/run_minimal.py` and `src/training/game_runner.py` updated to use the minimal runner & opt-in stub API for CI. + diff --git a/README_MINIMAL.md b/README_MINIMAL.md new file mode 100644 index 0000000..f125819 --- /dev/null +++ b/README_MINIMAL.md @@ -0,0 +1,22 @@ +Running TB-Seed minimal mode + +This README explains the minimal mode runner used for quick experiments and CI smoke tests. 
+ +Usage (developer): + +- Run with real API manager (if configured): + +```powershell +python scripts\run_minimal.py --game-id mygame --max-actions 50 +``` + +- Run in CI/test mode using the stub ARC3 API (explicit opt-in flag required): + +```powershell +python scripts\run_minimal.py --use-stub-api-for-ci --game-id test_game --max-actions 2 +``` + +Notes: +- Per the seed contract, production/research runs must use a real ARC3 API and DB-backed persistence. +- The stub API is allowed only for CI/test runs and must be explicitly opted-in with `--use-stub-api-for-ci` or the environment variable `USE_STUB_API_FOR_CI=1`. +- Tests live in `/tests`. The CI workflow runs those tests and a minimal smoke invocation. diff --git a/scripts/run_minimal.py b/scripts/run_minimal.py index 02f91f6..5654612 100644 --- a/scripts/run_minimal.py +++ b/scripts/run_minimal.py @@ -3,6 +3,7 @@ import argparse from src.training.game_runner import GameRunner +from src.vision.schema import validate_detections # Try to import APIManager from existing codebase try: diff --git a/src/training/core/continuous_learning_loop.py b/src/training/core/continuous_learning_loop.py index 4d7b9e6..29eb155 100644 --- a/src/training/core/continuous_learning_loop.py +++ b/src/training/core/continuous_learning_loop.py @@ -150,6 +150,10 @@ def _ensure_initialized(self) -> None: if not hasattr(self, 'api_manager'): raise RuntimeError("ContinuousLearningLoop not properly initialized") print("[OK] System initialization verified") + + # Backwards compatible wrapper + def ensure_initialized(self) -> None: + return self._ensure_initialized() async def get_available_games(self) -> List[Dict[str, Any]]: """Get list of available games from the real ARC-AGI-3 API.""" diff --git a/src/training/core/orchestrator.py b/src/training/core/orchestrator.py new file mode 100644 index 0000000..d024d43 --- /dev/null +++ b/src/training/core/orchestrator.py @@ -0,0 +1,36 @@ +"""Thin orchestrator facade for TB-Seed extraction. 
+ +This module provides a small `Orchestrator` class that currently wraps +the existing `ContinuousLearningLoop`. It exists to allow incremental +refactors that move functionality out of the large monolith. +""" +from typing import Any, Dict, Optional +from .continuous_learning_loop import ContinuousLearningLoop + + +class Orchestrator: + def __init__(self, **kwargs): + # Delegate to legacy ContinuousLearningLoop for now + self._core = ContinuousLearningLoop(**kwargs) + + async def start_training(self, game_id: str, **kwargs) -> Dict[str, Any]: + return await self._core.start_training_with_direct_control(game_id, **kwargs) + + async def get_available_games(self): + return await self._core.get_available_games() + + def shutdown(self): + # Provide a simple shutdown hook + try: + if hasattr(self._core, 'shutdown_handler'): + self._core.shutdown_handler.request_shutdown() + except Exception: + pass + + def ensure_initialized(self): + """Expose a synchronous ensure_initialized method mirroring the legacy API.""" + if hasattr(self._core, '_ensure_initialized'): + return self._core._ensure_initialized() + if hasattr(self._core, 'ensure_initialized'): + return self._core.ensure_initialized() + return None diff --git a/src/training/core/session_manager.py b/src/training/core/session_manager.py new file mode 100644 index 0000000..059786a --- /dev/null +++ b/src/training/core/session_manager.py @@ -0,0 +1,18 @@ +"""Thin session manager facade. + +This module provides a `SessionManager` facade that will be expanded as +session responsibilities are extracted from the monolith. 
+""" +from .continuous_learning_loop import ContinuousLearningLoop +from typing import Optional + + +class SessionManager: + def __init__(self, core: Optional[ContinuousLearningLoop] = None): + self._core = core or ContinuousLearningLoop() + + def current_session(self): + return getattr(self._core, 'current_session_id', None) + + def current_game(self): + return getattr(self._core, 'current_game_id', None) diff --git a/src/training/game_runner.py b/src/training/game_runner.py index c0ce57a..82a838a 100644 --- a/src/training/game_runner.py +++ b/src/training/game_runner.py @@ -5,6 +5,7 @@ from typing import Any, Dict, Optional import asyncio from datetime import datetime +from src.vision.schema import validate_detections try: from src.vision.frame_provider import DummyFrameProvider @@ -76,7 +77,16 @@ async def run_game(self, game_id: str, max_actions: int = 100) -> Dict[str, Any] detections = [] if self.detector and frame is not None: - detections = await self.detector.detect_objects(frame) + raw = await self.detector.detect_objects(frame) + # Validate shape; allow empty list or filtered valid detections + try: + if validate_detections(raw): + detections = raw + else: + # keep defensive: filter only valid dict-like detections + detections = [d for d in raw if isinstance(d, dict) and d.get('bbox')] + except Exception: + detections = [] # Decide action: if detection available choose ACTION6 with coords, else random action if detections: diff --git a/tb-minimal-MANIFEST.md b/tb-minimal-MANIFEST.md new file mode 100644 index 0000000..ba3e98c --- /dev/null +++ b/tb-minimal-MANIFEST.md @@ -0,0 +1,18 @@ +TB-MINIMAL Manifest + +This branch expresses the minimal TB-Seed DNA. Changes here are intentionally small and conservative. + +Key goals: +- Provide a tiny, well-tested runtime useful for CI and quick iteration. +- Enforce seed constraints: `frame` schema, DB-backed persistence, and explicit stub opt-in for CI. 
+ +Included changes: +- `scripts/run_minimal.py` (minimal runner) +- `src/training/game_runner.py` (lightweight runner, uses DBFacade) +- `src/vision/schema.py` (detection validator) +- `tests/*` focused tests and smoke test +- CI job that runs the smoke test with `USE_STUB_API_FOR_CI=1` + +Notes: +- Keep experimental models disabled by default and behind feature flags. +- Incrementally extract orchestrator pieces to `src/training/core/orchestrator.py` and `session_manager.py` (thin wrappers). \ No newline at end of file diff --git a/train.py b/train.py index 112f191..fb1e0bd 100644 --- a/train.py +++ b/train.py @@ -346,10 +346,16 @@ def _init_legacy(self): self.system_type = "LEGACY" import tempfile self.temp_dir = tempfile.mkdtemp(prefix="training_session_") - self.legacy_loop = ContinuousLearningLoop( - api_key=self.api_key, - save_directory=Path(self.temp_dir) - ) + # Prefer the new Orchestrator facade if available (incremental extraction) + try: + from src.training.core.orchestrator import Orchestrator + self.legacy_loop = Orchestrator(api_key=self.api_key, save_directory=Path(self.temp_dir)) + print("[INFO] Using Orchestrator facade for legacy system") + except Exception: + self.legacy_loop = ContinuousLearningLoop( + api_key=self.api_key, + save_directory=Path(self.temp_dir) + ) logger.info("Legacy training system initialized") async def run_training(self, From 15959d095231ff76c628cb57f3930f8ad9d8527d Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 11:53:12 -0500 Subject: [PATCH 2/8] tb-core-stable: add migrations table and helpers to DBFacade --- src/database/db_facade.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/src/database/db_facade.py b/src/database/db_facade.py index 802856f..62145ff 100644 --- a/src/database/db_facade.py +++ b/src/database/db_facade.py @@ -65,6 +65,12 @@ context TEXT ) """, + """ + CREATE TABLE IF NOT EXISTS migrations ( + migration_id TEXT PRIMARY KEY, + applied_at TEXT + 
) + """, ] class DBFacade: @@ -88,6 +94,32 @@ def _ensure_db(self): finally: conn.close() + # Migration helpers + def has_migration(self, migration_id: str) -> bool: + conn = self._get_conn() + try: + cur = conn.cursor() + cur.execute("SELECT 1 FROM migrations WHERE migration_id = ?", (migration_id,)) + return cur.fetchone() is not None + finally: + conn.close() + + def apply_migration(self, migration_id: str, sql: str) -> None: + """Apply a migration SQL (simple helper). This is intentionally minimal. + + Note: callers should ensure migrations are idempotent. + """ + if self.has_migration(migration_id): + return + conn = self._get_conn() + try: + cur = conn.cursor() + cur.executescript(sql) + cur.execute("INSERT INTO migrations(migration_id, applied_at) VALUES (?, datetime('now'))", (migration_id,)) + conn.commit() + finally: + conn.close() + def upsert_session(self, session_id: str, start_time: str, status: str = 'running', metadata: Optional[Dict] = None): conn = self._get_conn() try: From 2954eb890c298f4621f9797360a2379a715d31ca Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 11:58:39 -0500 Subject: [PATCH 3/8] tb-core-stable: extract initialize_components to Orchestrator facade and delegate --- src/training/core/continuous_learning_loop.py | 6 ++++++ src/training/core/orchestrator.py | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/src/training/core/continuous_learning_loop.py b/src/training/core/continuous_learning_loop.py index 29eb155..fd7925b 100644 --- a/src/training/core/continuous_learning_loop.py +++ b/src/training/core/continuous_learning_loop.py @@ -1197,7 +1197,13 @@ def _find_target_coordinates(self, frame: List[List[int]]) -> Optional[Tuple[int def _initialize_components(self) -> None: """Initialize all modular components.""" + # If wrapped by an Orchestrator facade, let it perform initialization try: + # Orchestrator facade may call into this method; allow idempotent behavior + if hasattr(self, 'orchestrator') 
and hasattr(self.orchestrator, 'initialize_components'): + return self.orchestrator.initialize_components() + + # Otherwise perform the initialization locally # Memory management (use singleton) self.memory_manager = create_memory_manager() self.action_memory = ActionMemoryManager(self.memory_manager) diff --git a/src/training/core/orchestrator.py b/src/training/core/orchestrator.py index d024d43..d5c5021 100644 --- a/src/training/core/orchestrator.py +++ b/src/training/core/orchestrator.py @@ -12,6 +12,11 @@ class Orchestrator: def __init__(self, **kwargs): # Delegate to legacy ContinuousLearningLoop for now self._core = ContinuousLearningLoop(**kwargs) + # Allow the core to call back into this facade during migration + try: + setattr(self._core, 'orchestrator', self) + except Exception: + pass async def start_training(self, game_id: str, **kwargs) -> Dict[str, Any]: return await self._core.start_training_with_direct_control(game_id, **kwargs) @@ -34,3 +39,9 @@ def ensure_initialized(self): if hasattr(self._core, 'ensure_initialized'): return self._core.ensure_initialized() return None + + def initialize_components(self): + """Initialize underlying modular components via the legacy core.""" + if hasattr(self._core, '_initialize_components'): + return self._core._initialize_components() + return None From 66fb387c4aa81a99b3441cd019dd719511cafe4c Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 11:59:48 -0500 Subject: [PATCH 4/8] tb-core-stable: delegate losing-streak initialization to Orchestrator facade --- src/training/core/continuous_learning_loop.py | 5 +++++ src/training/core/orchestrator.py | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/src/training/core/continuous_learning_loop.py b/src/training/core/continuous_learning_loop.py index fd7925b..8541260 100644 --- a/src/training/core/continuous_learning_loop.py +++ b/src/training/core/continuous_learning_loop.py @@ -1294,7 +1294,12 @@ def _initialize_components(self) -> None: def 
_initialize_losing_streak_systems(self): """Initialize losing streak detection systems with database connection.""" + # Delegate to Orchestrator facade if present try: + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_losing_streak_systems'): + return self.orchestrator.initialize_losing_streak_systems() + + # Otherwise continue with local initialization if self._losing_streak_systems_initialized: return diff --git a/src/training/core/orchestrator.py b/src/training/core/orchestrator.py index d5c5021..eec05c7 100644 --- a/src/training/core/orchestrator.py +++ b/src/training/core/orchestrator.py @@ -45,3 +45,9 @@ def initialize_components(self): if hasattr(self._core, '_initialize_components'): return self._core._initialize_components() return None + + def initialize_losing_streak_systems(self): + """Initialize the losing-streak detection systems via the core.""" + if hasattr(self._core, '_initialize_losing_streak_systems'): + return self._core._initialize_losing_streak_systems() + return None From 394bd004052ae2a8de7423ce6d5c22007a510863 Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 12:02:37 -0500 Subject: [PATCH 5/8] tb-core-stable: delegate real-time learning initializer to Orchestrator facade --- src/training/core/continuous_learning_loop.py | 4 ++++ src/training/core/orchestrator.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/src/training/core/continuous_learning_loop.py b/src/training/core/continuous_learning_loop.py index 8541260..f0a4c2a 100644 --- a/src/training/core/continuous_learning_loop.py +++ b/src/training/core/continuous_learning_loop.py @@ -1325,7 +1325,11 @@ def _initialize_losing_streak_systems(self): def _initialize_real_time_learning_systems(self): """Initialize real-time learning engine systems with database connection.""" + # Delegate to Orchestrator facade if present try: + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_real_time_learning_systems'): + 
return self.orchestrator.initialize_real_time_learning_systems() + if self._real_time_learning_initialized: return diff --git a/src/training/core/orchestrator.py b/src/training/core/orchestrator.py index eec05c7..3837e1b 100644 --- a/src/training/core/orchestrator.py +++ b/src/training/core/orchestrator.py @@ -51,3 +51,9 @@ def initialize_losing_streak_systems(self): if hasattr(self._core, '_initialize_losing_streak_systems'): return self._core._initialize_losing_streak_systems() return None + + def initialize_real_time_learning_systems(self): + """Initialize the real-time learning subsystems via the core.""" + if hasattr(self._core, '_initialize_real_time_learning_systems'): + return self._core._initialize_real_time_learning_systems() + return None From 6163d00163893cfb6c937681794cf958d46983e6 Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Tue, 14 Oct 2025 14:18:47 -0500 Subject: [PATCH 6/8] tb-core-stable: orchestrator extraction - add initializers and delegation for attention, fitness, NEAT, bayesian, graph traversal; add delegation tests --- src/training/core/continuous_learning_loop.py | 20 ++ src/training/core/orchestrator.py | 200 +++++++++++++++++- tests/test_orchestrator_delegation.py | 78 +++++++ 3 files changed, 295 insertions(+), 3 deletions(-) create mode 100644 tests/test_orchestrator_delegation.py diff --git a/src/training/core/continuous_learning_loop.py b/src/training/core/continuous_learning_loop.py index f0a4c2a..78bf69b 100644 --- a/src/training/core/continuous_learning_loop.py +++ b/src/training/core/continuous_learning_loop.py @@ -1359,6 +1359,10 @@ def _initialize_real_time_learning_systems(self): def _initialize_attention_communication_systems(self): """Initialize enhanced attention + communication systems with database connection.""" try: + # Delegate to Orchestrator facade if present for migration path + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_attention_communication_systems'): + return 
self.orchestrator.initialize_attention_communication_systems() + if self._attention_communication_initialized: return @@ -1388,6 +1392,10 @@ def _initialize_attention_communication_systems(self): def _initialize_fitness_evolution_system(self): """Initialize context-dependent fitness evolution system with database connection.""" try: + # Delegate to Orchestrator facade if present + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_fitness_evolution_system'): + return self.orchestrator.initialize_fitness_evolution_system() + if self._fitness_evolution_initialized: return @@ -1418,6 +1426,10 @@ def _initialize_fitness_evolution_system(self): def _initialize_neat_architect_system(self): """Initialize NEAT-based architect system with database connection.""" try: + # Delegate to Orchestrator facade if present + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_neat_architect_system'): + return self.orchestrator.initialize_neat_architect_system() + if self._neat_architect_initialized: return @@ -1455,6 +1467,10 @@ def _initialize_neat_architect_system(self): def _initialize_bayesian_inference_system(self): """Initialize Bayesian inference engine with database connection.""" try: + # Delegate to Orchestrator facade if present + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_bayesian_inference_system'): + return self.orchestrator.initialize_bayesian_inference_system() + if self._bayesian_inference_initialized: return @@ -1494,6 +1510,10 @@ def _initialize_bayesian_inference_system(self): def _initialize_graph_traversal_system(self): """Initialize enhanced graph traversal system with database connection.""" try: + # Delegate to Orchestrator facade if present + if hasattr(self, 'orchestrator') and hasattr(self.orchestrator, 'initialize_graph_traversal_system'): + return self.orchestrator.initialize_graph_traversal_system() + if self._graph_traversal_initialized: return diff --git 
a/src/training/core/orchestrator.py b/src/training/core/orchestrator.py index 3837e1b..73f7b34 100644 --- a/src/training/core/orchestrator.py +++ b/src/training/core/orchestrator.py @@ -9,9 +9,13 @@ class Orchestrator: - def __init__(self, **kwargs): - # Delegate to legacy ContinuousLearningLoop for now - self._core = ContinuousLearningLoop(**kwargs) + def __init__(self, core: Optional[ContinuousLearningLoop] = None, **kwargs): + # Allow injection of an existing core for testing/migration; otherwise create one + if core is not None: + self._core = core + else: + self._core = ContinuousLearningLoop(**kwargs) + # Allow the core to call back into this facade during migration try: setattr(self._core, 'orchestrator', self) @@ -57,3 +61,193 @@ def initialize_real_time_learning_systems(self): if hasattr(self._core, '_initialize_real_time_learning_systems'): return self._core._initialize_real_time_learning_systems() return None + + def initialize_attention_communication_systems(self): + """Initialize the enhanced attention + communication systems via the core.""" + # Implement initialization here to avoid circular delegation. 
+ try: + # Respect idempotence + if getattr(self._core, '_attention_communication_initialized', False): + return None + + db_path = str(getattr(self._core, 'db_path', '.')) + + # Lazy import to avoid heavy dependencies at import time + try: + from src.core.central_attention_controller import CentralAttentionController + from src.core.weighted_communication_system import WeightedCommunicationSystem + except Exception: + from core.central_attention_controller import CentralAttentionController + from core.weighted_communication_system import WeightedCommunicationSystem + + # Instantiate and assign to core + self._core.attention_controller = CentralAttentionController(db_path) + self._core.communication_system = WeightedCommunicationSystem(db_path) + + # Set communication system on action selector if available + if getattr(self._core, 'action_selector', None) and hasattr(self._core.action_selector, 'set_communication_system'): + try: + self._core.action_selector.set_communication_system(self._core.communication_system) + except Exception: + pass + + self._core._attention_communication_initialized = True + return None + except Exception: + # Ensure flag is not left in inconsistent state + self._core._attention_communication_initialized = False + return None + + def initialize_fitness_evolution_system(self): + """Initialize the context-dependent fitness evolution system via the core.""" + try: + if getattr(self._core, '_fitness_evolution_initialized', False): + return None + + db_path = str(getattr(self._core, 'db_path', '.')) + + try: + from src.core.context_dependent_fitness_evolution import ContextDependentFitnessEvolution + except Exception: + from core.context_dependent_fitness_evolution import ContextDependentFitnessEvolution + + self._core.fitness_evolution_system = ContextDependentFitnessEvolution(db_path) + + # If attention coordination is available, link systems + if getattr(self._core, '_attention_communication_initialized', False) and \ + getattr(self._core, 
'attention_controller', None) and getattr(self._core, 'communication_system', None): + try: + self._core.fitness_evolution_system.set_attention_coordination( + self._core.attention_controller, self._core.communication_system + ) + except Exception: + pass + + self._core._fitness_evolution_initialized = True + return None + except Exception: + self._core._fitness_evolution_initialized = False + return None + + def initialize_neat_architect_system(self): + """Initialize the NEAT-based architect system via the core.""" + try: + if getattr(self._core, '_neat_architect_initialized', False): + return None + + db_path = str(getattr(self._core, 'db_path', '.')) + + try: + from src.core.neat_based_architect import NEATBasedArchitect + except Exception: + from core.neat_based_architect import NEATBasedArchitect + + self._core.neat_architect_system = NEATBasedArchitect(db_path) + + # Link with attention coordination if available + if getattr(self._core, '_attention_communication_initialized', False) and \ + getattr(self._core, 'attention_controller', None) and getattr(self._core, 'communication_system', None): + try: + self._core.neat_architect_system.set_attention_coordination( + self._core.attention_controller, self._core.communication_system + ) + except Exception: + pass + + # Optionally add fitness observer if method exists + if getattr(self._core, '_fitness_evolution_initialized', False) and getattr(self._core, 'fitness_evolution_system', None): + try: + if hasattr(self._core.neat_architect_system, 'add_fitness_observer'): + self._core.neat_architect_system.add_fitness_observer(self._core.fitness_evolution_system) + except Exception: + pass + + self._core._neat_architect_initialized = True + return None + except Exception: + self._core._neat_architect_initialized = False + return None + + def initialize_bayesian_inference_system(self): + """Initialize the Bayesian inference engine via the core.""" + try: + if getattr(self._core, '_bayesian_inference_initialized', 
False): + return None + + db_path = str(getattr(self._core, 'db_path', '.')) + + try: + from src.core.bayesian_inference_engine import BayesianInferenceEngine + except Exception: + from core.bayesian_inference_engine import BayesianInferenceEngine + + self._core.bayesian_inference_system = BayesianInferenceEngine(db_path) + + # Link attention coordination if available + if getattr(self._core, '_attention_communication_initialized', False) and \ + getattr(self._core, 'attention_controller', None) and getattr(self._core, 'communication_system', None): + try: + self._core.bayesian_inference_system.set_attention_coordination( + self._core.attention_controller, self._core.communication_system + ) + except Exception: + pass + + # Link with fitness evolution if available + if getattr(self._core, '_fitness_evolution_initialized', False) and getattr(self._core, 'fitness_evolution_system', None): + try: + if hasattr(self._core.bayesian_inference_system, 'add_fitness_data_source'): + self._core.bayesian_inference_system.add_fitness_data_source(self._core.fitness_evolution_system) + except Exception: + pass + + self._core._bayesian_inference_initialized = True + return None + except Exception: + self._core._bayesian_inference_initialized = False + return None + + def initialize_graph_traversal_system(self): + """Initialize the enhanced graph traversal system via the core.""" + try: + if getattr(self._core, '_graph_traversal_initialized', False): + return None + + db_path = str(getattr(self._core, 'db_path', '.')) + + try: + from src.core.enhanced_graph_traversal import EnhancedGraphTraversal + except Exception: + from core.enhanced_graph_traversal import EnhancedGraphTraversal + + # Some implementations expect a DB connection; allow path or conn + try: + self._core.graph_traversal_system = EnhancedGraphTraversal(db_path) + except Exception: + # Fall back to passing a db connection object if core has one + db_conn = getattr(self._core, 'db_connection', None) + 
self._core.graph_traversal_system = EnhancedGraphTraversal(db_conn) + + # Set attention coordination if available + if getattr(self._core, '_attention_communication_initialized', False) and \ + getattr(self._core, 'attention_controller', None) and getattr(self._core, 'communication_system', None): + try: + self._core.graph_traversal_system.set_attention_coordination( + self._core.attention_controller, self._core.communication_system + ) + except Exception: + pass + + # Link with fitness evolution if available + if getattr(self._core, '_fitness_evolution_initialized', False) and getattr(self._core, 'fitness_evolution_system', None): + try: + if hasattr(self._core.graph_traversal_system, 'set_fitness_evolution_coordination'): + self._core.graph_traversal_system.set_fitness_evolution_coordination(self._core.fitness_evolution_system) + except Exception: + pass + + self._core._graph_traversal_initialized = True + return None + except Exception: + self._core._graph_traversal_initialized = False + return None diff --git a/tests/test_orchestrator_delegation.py b/tests/test_orchestrator_delegation.py new file mode 100644 index 0000000..1d7a30c --- /dev/null +++ b/tests/test_orchestrator_delegation.py @@ -0,0 +1,78 @@ +import types +import sys + +from src.training.core.orchestrator import Orchestrator + + +class FakeCore: + def __init__(self): + self._attention_communication_initialized = False + self._fitness_evolution_initialized = False + self._neat_architect_initialized = False + self._bayesian_inference_initialized = False + self._graph_traversal_initialized = False + self.db_path = ':memory:' + + +# Provide lightweight fake implementations for modules that Orchestrator may import. 
+class FakeAttention: + def __init__(self, db_path): + self.db_path = db_path + + +class FakeCommunication: + def __init__(self, db_path): + self.db_path = db_path + + +class FakeFitness: + def __init__(self, db_path): + self.db_path = db_path + + +class FakeNEAT: + def __init__(self, db_path): + self.db_path = db_path + + +class FakeBayesian: + def __init__(self, db_path): + self.db_path = db_path + + +class FakeGraphTraversal: + def __init__(self, db_path): + self.db_path = db_path + + +def _inject_fake(module_name, symbol_name, fake_cls): + module = types.ModuleType(module_name) + setattr(module, symbol_name, fake_cls) + sys.modules[module_name] = module + + +def test_orchestrator_initializers_with_fake_core(): + # Inject fake modules to avoid heavy imports + _inject_fake('src.core.central_attention_controller', 'CentralAttentionController', FakeAttention) + _inject_fake('src.core.weighted_communication_system', 'WeightedCommunicationSystem', FakeCommunication) + _inject_fake('src.core.context_dependent_fitness_evolution', 'ContextDependentFitnessEvolution', FakeFitness) + _inject_fake('src.core.neat_based_architect', 'NEATBasedArchitect', FakeNEAT) + _inject_fake('src.core.bayesian_inference_engine', 'BayesianInferenceEngine', FakeBayesian) + _inject_fake('src.core.enhanced_graph_traversal', 'EnhancedGraphTraversal', FakeGraphTraversal) + + core = FakeCore() + orch = Orchestrator(core=core) + + # Run initializers + orch.initialize_attention_communication_systems() + orch.initialize_fitness_evolution_system() + orch.initialize_neat_architect_system() + orch.initialize_bayesian_inference_system() + orch.initialize_graph_traversal_system() + + # Check flags + assert core._attention_communication_initialized is True + assert core._fitness_evolution_initialized is True + assert core._neat_architect_initialized is True + assert core._bayesian_inference_initialized is True + assert core._graph_traversal_initialized is True From 1f612de13b0394aeaabe0b8d4efe29dab4490455 
Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Thu, 16 Oct 2025 10:30:23 -0500 Subject: [PATCH 7/8] branch(tb-research-playground): add BRANCH_FEATURE.md --- BRANCH_FEATURE.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 BRANCH_FEATURE.md diff --git a/BRANCH_FEATURE.md b/BRANCH_FEATURE.md new file mode 100644 index 0000000..cf72da1 --- /dev/null +++ b/BRANCH_FEATURE.md @@ -0,0 +1 @@ +TB-RESEARCH-PLAYGROUND: Focus = experimental features, notebooks, playground. From ac381a9a68b71f29d43c07e6a5508fc86ca131d8 Mon Sep 17 00:00:00 2001 From: Isaiah Nwukor Date: Thu, 16 Oct 2025 10:37:39 -0500 Subject: [PATCH 8/8] docs: add BRANCHES_OVERVIEW.md describing seed and child branches + LLM runbook --- BRANCHES_OVERVIEW.md | 101 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 BRANCHES_OVERVIEW.md diff --git a/BRANCHES_OVERVIEW.md b/BRANCHES_OVERVIEW.md new file mode 100644 index 0000000..3216945 --- /dev/null +++ b/BRANCHES_OVERVIEW.md @@ -0,0 +1,101 @@ +Branch overview for TB-Seed and child branches + +This document describes the branch structure created from `TB-Seed`, the role of each child branch, how the pieces interact, how to run focused tests, and an LLM runbook that specifies what an LLM agent should do to run or validate the system. + +## Branch topology + +- TB-Seed (root) + - The canonical seed baseline. Minimal, self-contained project that defines the `frame`-first contract and DB-backed persistence policy. This branch is intentionally stable and kept as the origin for producing children. + +- tb-minimal (child) + - Purpose: a minimal runtime that is fast to run in CI and easy to use for reproducing core behaviors. + - Key artifacts: `scripts/run_minimal.py`, `src/training/game_runner.py`, lightweight `DummyDetector` and `FrameProvider`, `src/database/db_facade.py` (lightweight facade), and a stub API (`src/api/stub_api_manager.py`) that is opt-in via `--use-stub-api-for-ci`. 
+  - Intended usage: quick smoke tests, CI checks that do not rely on the external ARC-AGI-3 API.
+
+- tb-core-stable (child)
+  - Purpose: stabilize and refactor core orchestration logic. Incrementally extract initialization responsibilities into `src/training/core/orchestrator.py` while keeping behavior backward-compatible.
+  - Key artifacts: `ContinuousLearningLoop` delegating to `Orchestrator`, migration-safe initializers, and tests verifying delegation.
+  - Intended usage: base branch for system-level refactors that should be merged into other children.
+
+- tb-performance (child)
+  - Purpose: contain performance-oriented scaffolding: benchmarking harnesses, profiling tools, and performance-focused CI jobs.
+  - Key artifacts (planned): `bench/` harness, performance CI jobs, and measurement dashboards.
+  - Intended usage: experiments to measure runtime & memory impact of refactors.
+
+- tb-research-playground (child)
+  - Purpose: sandbox for experiments, notebooks, and quick prototypes.
+  - Key artifacts (planned): `notebooks/`, research utilities, experimental models and adapters.
+
+## High-level interaction
+
+- Core invariants:
+  - The `frame` object is the canonical input for perception and action-selection. All modules consuming visual data must accept `frame` and validated detection outputs.
+  - Persistence: non-CI runs must use the DB facade to persist patterns, sequences, and session data. The stub API is explicitly opt-in for CI and testing.
+  - Orchestrator migration: initialization is being moved out of the monolithic `ContinuousLearningLoop` into the `Orchestrator` facade, one small initializer at a time. The legacy core will delegate to the Orchestrator when present.
+
+- Branch relationships:
+  - `tb-core-stable` contains the canonical, tested core refactors. It is merged into children so they inherit the stable core and then implement branch-specific features.
+ - `tb-minimal` stays small and CI-friendly; it contains the GameRunner and the stub API for fast smoke runs. + - `tb-performance` and `tb-research-playground` branch from `tb-minimal` plus `tb-core-stable` merges so they have the minimal hooks + stable core. + +## How to run and test locally + +Assumptions: you have Python 3.11+ installed and a working virtualenv. Run these commands from the repository root. + +1) Run focused tests (fast): + +```pwsh +# from repo root +python -m pytest -q tests +``` + +2) Run minimal smoke script using stub API (opt-in flag required): + +```pwsh +$env:PYTHONPATH = (Resolve-Path .).Path +python scripts/run_minimal.py --use-stub-api-for-ci --max-actions 10 +``` + +3) Run the full continuous training (CAUTION: interacts with external APIs): + +```pwsh +python train.py +``` + +Note: The stub API is for CI/testing only and must be enabled explicitly. For local development against a real API, configure credentials and the upstream API manager. + +## LLM runbook — how an LLM agent should run and validate everything + +This is a short, prescriptive runbook for an LLM-based automation agent (or a human following steps) to run and validate the codebase. The runbook assumes the agent has a shell on a developer machine where `python` and `gh` are available. + +1) Prepare environment + - Create and activate a virtualenv and install the repo requirements (`pip install -r requirements.txt`). + - Ensure `PYTHONPATH` is set to the repo root when running scripts that import `src.*` modules. + +2) Run focused tests + - `python -m pytest tests` — confirm all focused tests pass. If there are failures, collect the failing trace and stop. 
+ +3) Run the minimal smoke script + - `python scripts/run_minimal.py --use-stub-api-for-ci --max-actions 5` + - Expect deterministic, fast output showing the runner completed (e.g., `Result: {'game_id': 'test_game', 'score': X, 'actions_taken': Y, 'win': False}`) + +4) Inspect branches & PRs + - Use `git` and `gh` to list branches and open PRs. + - For each child branch (tb-minimal, tb-performance, tb-research-playground) verify it contains `BRANCH_FEATURE.md` and that CI passes. + +5) Validate Orchestrator behavior (basic check) + - Run the lightweight delegation test `tests/test_orchestrator_delegation.py` to ensure orchestrator initializers set flags appropriately. + +6) If everything passes, mark PRs ready for review and request reviewers. + +## Notes and best practices + +- Keep `TB-Seed` immutable unless a critical baseline change is required. Use it as an origin for new child branches. +- Make small, easily-reviewable PRs on child branches. Merge `tb-core-stable` into children to deliver core improvements. +- Use `tb-minimal` for CI-friendly smoke runs and `tb-performance` for heavier benchmarks and profiling-based PRs. + +--- + +File created on branch `tb-core-stable`. + +If you want this file added to the child branches as well, tell me and I will merge it into them (I can do that next). \ No newline at end of file