diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 0000000..ecf49da --- /dev/null +++ b/PLAN.md @@ -0,0 +1,745 @@ +# MeshCore Sidekick - Implementation Plan + +## Project Overview + +MeshCore companion application that: +- Subscribes to all MeshCore events via Serial/BLE +- Persists events in SQLite database with configurable retention +- Provides REST API for querying data and sending commands +- Includes mock MeshCore for development without hardware +- Exposes Prometheus metrics for monitoring +- Generates OpenAPI/Swagger documentation + +## Technology Stack + +- **Language**: Python 3.11+ +- **Database**: SQLite with SQLAlchemy ORM +- **API Framework**: FastAPI +- **MeshCore Library**: meshcore_py (v2.2.1+) +- **Metrics**: Prometheus +- **Configuration**: CLI arguments > Environment variables > Defaults + +--- + +## Phase 1: Foundation ✅ COMPLETE + +**Goal**: Working application with database persistence and mock support + +### Completed Components + +#### 1.1 Project Setup ✅ +- [x] Python package structure with pyproject.toml +- [x] Dependencies: meshcore, FastAPI, SQLAlchemy, Prometheus, etc. 
+- [x] README with quick start guide +- [x] .gitignore configuration + +#### 1.2 Database Layer ✅ +- [x] SQLAlchemy models (14 tables): + - `nodes` - Node tracking with prefix indexing + - `messages` - Direct and channel messages + - `advertisements` - Node advertisements + - `paths` - Routing path information + - `trace_paths` - Trace path results with SNR + - `telemetry` - Sensor data from nodes + - `acknowledgments` - Message confirmations + - `status_responses` - Node status data + - `statistics` - Device statistics (core/radio/packets) + - `binary_responses` - Binary protocol responses + - `control_data` - Control packet data + - `raw_data` - Raw packet data + - `device_info` - Companion device information + - `events_log` - Raw event log +- [x] Database engine with connection pooling +- [x] Session management with context managers +- [x] Data cleanup for retention policy +- [x] Indexes for fast prefix queries + +#### 1.3 MeshCore Interface ✅ +- [x] Abstract `MeshCoreInterface` base class +- [x] `RealMeshCore` - meshcore_py wrapper +- [x] `MockMeshCore` - Two operation modes: + - Random event generation (configurable intervals) + - Scenario playback (5 built-in scenarios) + +#### 1.4 Built-in Mock Scenarios ✅ +- [x] `simple_chat` - Two nodes exchanging messages +- [x] `trace_path_test` - Multi-hop network tracing +- [x] `telemetry_collection` - Periodic sensor data +- [x] `network_stress` - High-traffic simulation +- [x] `battery_drain` - Battery degradation over time + +#### 1.5 Event Subscriber ✅ +- [x] Event handler for all MeshCore event types +- [x] Database persistence logic +- [x] Node upsert (create/update) logic +- [x] Error handling and logging +- [x] Prometheus metrics collection points + +#### 1.6 Configuration Management ✅ +- [x] CLI argument parsing with argparse +- [x] Environment variable support +- [x] Priority: CLI > Env > Defaults +- [x] 20+ configuration options +- [x] Connection settings (serial/mock) +- [x] Database settings (path, 
retention) +- [x] API settings (host, port) +- [x] Logging settings (level, format) + +#### 1.7 Utilities ✅ +- [x] Public key address utilities: + - Normalization (lowercase) + - Validation (hex check) + - Prefix extraction + - Prefix matching +- [x] Logging setup: + - JSON formatter for structured logs + - Text formatter with colors + - Configurable log levels +- [x] Prometheus metrics collectors (defined, not yet wired) + +#### 1.8 Main Application ✅ +- [x] Application lifecycle management +- [x] MeshCore connection handling +- [x] Event subscription setup +- [x] Background cleanup task +- [x] Signal handlers (SIGINT, SIGTERM) +- [x] Graceful shutdown + +### Test Results ✅ +- Database created with 14 tables +- Events captured and persisted +- Mock scenarios working (simple_chat verified) +- Node tracking functional +- Message/advertisement storage working +- Configuration system operational + +--- + +## Phase 2: REST API 🚧 IN PROGRESS + +**Goal**: Full REST API with OpenAPI docs + +### 2.1 FastAPI Application Setup +- [ ] Create FastAPI app with metadata +- [ ] Configure CORS middleware +- [ ] Add exception handlers +- [ ] Setup startup/shutdown events +- [ ] Configure OpenAPI customization: + - Title, version, description + - Contact and license information + - API grouping with tags + - Example values for all models + +### 2.2 Pydantic Models +- [ ] Request models for all command endpoints +- [ ] Response models for all endpoints +- [ ] Validation rules and constraints +- [ ] Field descriptions and examples +- [ ] Nested models for complex data + +### 2.3 Command Endpoints (POST) +- [ ] `POST /api/v1/commands/send_message` + - Send direct message to node + - Input: destination, text, text_type + - Output: message_id, estimated_delivery_ms +- [ ] `POST /api/v1/commands/send_channel_message` + - Send channel broadcast + - Input: text, flood +- [ ] `POST /api/v1/commands/send_advert` + - Send self-advertisement + - Input: flood +- [ ] `POST 
/api/v1/commands/send_trace_path` + - Initiate trace path + - Input: destination + - Output: trace_id, initiator_tag +- [ ] `POST /api/v1/commands/ping` + - Ping a node + - Input: destination +- [ ] `POST /api/v1/commands/send_telemetry_request` + - Request telemetry + - Input: destination + +### 2.4 Query Endpoints (GET) +- [ ] `GET /api/v1/messages` + - List messages with filters: + - from/to (public key prefix) + - type (contact/channel) + - start_date/end_date + - limit/offset (pagination) +- [ ] `GET /api/v1/advertisements` + - List advertisements with filters: + - node (public key prefix) + - adv_type + - date range, pagination +- [ ] `GET /api/v1/telemetry` + - List telemetry data + - Filters: node, date range, pagination +- [ ] `GET /api/v1/trace_paths` + - List trace path results + - Filters: destination, date range, pagination +- [ ] `GET /api/v1/statistics` + - Get latest statistics + - Query param: stat_type (core/radio/packets) +- [ ] `GET /api/v1/device_info` + - Get companion device information + +### 2.5 Node Endpoints (GET) +- [ ] `GET /api/v1/nodes` + - List all nodes + - Filters: sort, order, pagination +- [ ] `GET /api/v1/nodes/{prefix}` + - Search by prefix (2-64 chars) + - Returns all matching nodes +- [ ] `GET /api/v1/nodes/{public_key}/messages` + - Get messages for specific node + - Filters: date range, pagination +- [ ] `GET /api/v1/nodes/{public_key}/paths` + - Get routing paths for node +- [ ] `GET /api/v1/nodes/{public_key}/telemetry` + - Get telemetry for node + - Filters: date range, pagination + +### 2.6 Health Endpoints (GET) +- [ ] `GET /api/v1/health` + - Overall health status + - MeshCore connection status + - Database connection status + - Uptime, events processed +- [ ] `GET /api/v1/health/db` + - Database connectivity check + - Database size + - Table row counts +- [ ] `GET /api/v1/health/meshcore` + - MeshCore connection status + - Device info (if connected) + +### 2.7 Dependencies and Middleware +- [ ] Database session 
dependency +- [ ] MeshCore instance dependency +- [ ] Request logging middleware +- [ ] Error response formatting +- [ ] CORS configuration + +### 2.8 Integration +- [ ] Integrate FastAPI with main application +- [ ] Run API server in background task +- [ ] Share MeshCore instance with API routes +- [ ] Add API configuration options + +--- + +## Phase 3: Observability 📋 PLANNED + +**Goal**: Production-ready observability + +### 3.1 Prometheus Integration +- [ ] Wire up metrics collectors in event handler +- [ ] Add `/metrics` endpoint +- [ ] Implement all metric types: + - Event counters by type + - Message latency histograms + - Node connectivity gauges + - Signal quality histograms (SNR/RSSI) + - Battery/storage gauges + - Radio statistics + - Database metrics + - Error counters +- [ ] Add FastAPI metrics middleware +- [ ] Document Prometheus queries + +### 3.2 Enhanced Logging +- [ ] Add contextual logging throughout +- [ ] Log request/response for API calls +- [ ] Log event processing errors +- [ ] Add correlation IDs for tracing +- [ ] Performance logging for slow queries + +### 3.3 Database Monitoring +- [ ] Periodic database size updates +- [ ] Table row count metrics +- [ ] Query performance tracking +- [ ] Cleanup operation metrics + +### 3.4 Health Monitoring +- [ ] Connection status tracking +- [ ] Auto-reconnect attempts +- [ ] Event processing lag monitoring +- [ ] Alert on connection failures + +--- + +## Phase 4: Docker Deployment 📋 PLANNED + +**Goal**: Production-ready Docker deployment + +### 4.1 Dockerfile +- [ ] Multi-stage build for smaller image +- [ ] Python 3.11+ base image +- [ ] Install dependencies +- [ ] Non-root user +- [ ] Health check +- [ ] Expose ports (API: 8000) + +### 4.2 Docker Compose (Development) +- [ ] meshcore-sidekick service (mock mode) +- [ ] Volume mounts for development +- [ ] Environment variable configuration +- [ ] Port mappings +- [ ] Optional: Prometheus service +- [ ] Optional: Grafana service + +### 4.3 Docker 
Compose (Production) +- [ ] meshcore-sidekick service (real hardware) +- [ ] Serial device mapping +- [ ] Persistent volume for database +- [ ] Restart policy +- [ ] Logging configuration +- [ ] Health checks + +### 4.4 Prometheus Configuration +- [ ] prometheus.yml scrape config +- [ ] Target: meshcore-sidekick:8000/metrics +- [ ] Scrape interval: 15s + +### 4.5 Grafana Dashboard +- [ ] Dashboard JSON configuration +- [ ] Panels: + - Message rate over time + - Active nodes gauge + - Round-trip latency histogram + - Battery voltage gauge + - Signal quality graphs (SNR/RSSI) + - Event type distribution + - Database size gauge + +### 4.6 Documentation +- [ ] Docker build instructions +- [ ] Docker run examples +- [ ] docker-compose usage +- [ ] Environment variable reference +- [ ] Volume mounting guide +- [ ] Serial device access setup + +--- + +## Phase 5: Testing & Documentation 📋 PLANNED + +**Goal**: Comprehensive testing and documentation + +### 5.1 Unit Tests +- [ ] Database model tests +- [ ] Address utility tests +- [ ] Configuration tests +- [ ] Mock MeshCore tests +- [ ] Event handler tests + +### 5.2 Integration Tests +- [ ] API endpoint tests +- [ ] Database persistence tests +- [ ] Mock scenario tests +- [ ] Configuration priority tests + +### 5.3 API Documentation +- [ ] Complete OpenAPI schema +- [ ] Request/response examples +- [ ] Authentication documentation (future) +- [ ] Error code reference +- [ ] Rate limiting info (future) + +### 5.4 User Documentation +- [ ] Installation guide +- [ ] Configuration guide +- [ ] CLI reference +- [ ] Environment variable reference +- [ ] API usage examples +- [ ] Mock scenario guide +- [ ] Troubleshooting guide + +### 5.5 Developer Documentation +- [ ] Architecture overview +- [ ] Database schema documentation +- [ ] Adding new scenarios +- [ ] Contributing guide +- [ ] Code style guide + +--- + +## Phase 6: Advanced Features 📋 FUTURE + +**Goal**: Additional functionality and integrations + +### 6.1 MCP Server 
Integration +- [ ] Define MCP protocol schemas +- [ ] Implement MCP tool endpoints +- [ ] Read operations: + - Query battery status + - Query messages + - Query node list + - Query telemetry +- [ ] Write operations: + - Send message + - Send advertisement + - Ping node + - Send telemetry request +- [ ] MCP server documentation +- [ ] Example MCP client usage + +### 6.2 Web UI (Optional) +- [ ] React/Vue frontend +- [ ] Dashboard with real-time updates +- [ ] Node map visualization +- [ ] Message history viewer +- [ ] Network topology graph +- [ ] Configuration interface + +### 6.3 Real-time Features +- [ ] WebSocket endpoint for live events +- [ ] Server-Sent Events (SSE) support +- [ ] Real-time node status updates +- [ ] Live message notifications + +### 6.4 Advanced Querying +- [ ] Full-text search on messages +- [ ] Geographic queries (nodes within radius) +- [ ] Network topology queries +- [ ] Path analysis tools +- [ ] Message threading/conversations + +### 6.5 Alert System +- [ ] Alert rules engine +- [ ] Node offline alerts +- [ ] Low battery alerts +- [ ] Message delivery failures +- [ ] Network congestion alerts +- [ ] Alert delivery (webhook, email) + +### 6.6 Data Export +- [ ] CSV export for all tables +- [ ] JSON export +- [ ] GPX export for node locations +- [ ] Message archive export +- [ ] Statistics reports + +### 6.7 Authentication & Authorization +- [ ] API key authentication +- [ ] JWT token support +- [ ] Role-based access control +- [ ] Rate limiting per API key +- [ ] Usage tracking + +### 6.8 Performance Enhancements +- [ ] PostgreSQL backend option +- [ ] Redis caching layer +- [ ] Message queue for event processing +- [ ] Horizontal scaling support +- [ ] Read replicas + +--- + +## Configuration Reference + +### CLI Arguments + +```bash +Connection: + --serial-port TEXT Serial port device + --serial-baud INTEGER Serial baud rate + --use-mock Use mock MeshCore + --mock-scenario TEXT Scenario name for playback + --mock-loop Loop scenario 
indefinitely + --mock-nodes INTEGER Number of simulated nodes + --mock-min-interval FLOAT Min event interval (seconds) + --mock-max-interval FLOAT Max event interval (seconds) + --mock-center-lat FLOAT Center latitude + --mock-center-lon FLOAT Center longitude + +Database: + --db-path TEXT Database file path + --retention-days INTEGER Data retention days + --cleanup-interval-hours INTEGER Cleanup interval hours + +API: + --api-host TEXT API host + --api-port INTEGER API port + --api-title TEXT API title + --api-version TEXT API version + +Metrics: + --no-metrics Disable Prometheus metrics + +Logging: + --log-level LEVEL Log level (DEBUG/INFO/WARNING/ERROR/CRITICAL) + --log-format TYPE Log format (json/text) +``` + +### Environment Variables + +```bash +# Connection +MESHCORE_SERIAL_PORT=/dev/ttyUSB0 +MESHCORE_SERIAL_BAUD=115200 +MESHCORE_USE_MOCK=true +MESHCORE_MOCK_SCENARIO=simple_chat +MESHCORE_MOCK_LOOP=true +MESHCORE_MOCK_NODES=10 +MESHCORE_MOCK_MIN_INTERVAL=1.0 +MESHCORE_MOCK_MAX_INTERVAL=10.0 +MESHCORE_MOCK_CENTER_LAT=45.5231 +MESHCORE_MOCK_CENTER_LON=-122.6765 + +# Database +MESHCORE_DB_PATH=/data/meshcore.db +MESHCORE_RETENTION_DAYS=30 +MESHCORE_CLEANUP_INTERVAL_HOURS=1 + +# API +MESHCORE_API_HOST=0.0.0.0 +MESHCORE_API_PORT=8000 +MESHCORE_API_TITLE="MeshCore Sidekick API" +MESHCORE_API_VERSION="1.0.0" + +# Metrics +MESHCORE_METRICS_ENABLED=true + +# Logging +MESHCORE_LOG_LEVEL=INFO +MESHCORE_LOG_FORMAT=json +``` + +--- + +## Database Schema + +### Tables + +1. **nodes** - Node tracking with prefix indexing +2. **messages** - Direct and channel messages +3. **advertisements** - Node advertisements with GPS +4. **paths** - Routing path information +5. **trace_paths** - Trace results with SNR data +6. **telemetry** - Sensor telemetry data +7. **acknowledgments** - Message confirmations with timing +8. **status_responses** - Node status data +9. **statistics** - Device statistics (core/radio/packets) +10. **binary_responses** - Binary protocol responses +11. 
**control_data** - Control packet data +12. **raw_data** - Raw packet data +13. **device_info** - Companion device information +14. **events_log** - Raw event log for all events + +### Key Indexes + +- `nodes.public_key` (unique) +- `nodes.public_key_prefix_2` (for fast 2-char prefix queries) +- `nodes.public_key_prefix_8` (for fast 8-char prefix queries) +- `messages.from_public_key` +- `messages.to_public_key` +- `messages.timestamp` +- `advertisements.public_key` +- `events_log.event_type` +- `events_log.created_at` (for cleanup) + +--- + +## Mock Scenarios + +### simple_chat +Two nodes (Alice & Bob) exchanging messages +- Duration: 10 seconds +- Events: 2 advertisements, 2 messages, 1 ACK + +### trace_path_test +Trace path through multi-hop network +- Duration: 5 seconds +- Events: 3 advertisements, 1 trace result + +### telemetry_collection +Periodic telemetry from sensor node +- Duration: 15 seconds +- Events: 1 advertisement, 3 telemetry responses + +### network_stress +High-traffic scenario with many nodes +- Duration: 30 seconds +- Events: 10 advertisements, 20 channel messages + +### battery_drain +Simulated battery drain over time +- Duration: 200 seconds +- Events: 20 battery status updates + +--- + +## Metrics Reference + +### Event Counters +- `meshcore_events_total{event_type}` - Total events by type +- `meshcore_messages_total{direction,message_type}` - Messages by direction/type +- `meshcore_advertisements_total{adv_type}` - Advertisements by type + +### Latency +- `meshcore_message_roundtrip_seconds` - Message round-trip time +- `meshcore_ack_latency_seconds` - ACK latency + +### Connectivity +- `meshcore_nodes_total` - Total unique nodes +- `meshcore_nodes_active{node_type}` - Active nodes (last hour) +- `meshcore_path_hop_count` - Path hop distribution + +### Signal Quality +- `meshcore_snr_db` - SNR histogram +- `meshcore_rssi_dbm` - RSSI histogram + +### Device +- `meshcore_battery_voltage` - Battery voltage +- `meshcore_battery_percentage` - 
Battery percentage +- `meshcore_storage_used_bytes` - Storage used +- `meshcore_storage_total_bytes` - Storage total + +### Radio +- `meshcore_radio_noise_floor_dbm` - Noise floor +- `meshcore_radio_airtime_percent` - Airtime utilization +- `meshcore_packets_total{direction,status}` - Packet counts + +### Database +- `meshcore_db_table_rows{table}` - Rows per table +- `meshcore_db_size_bytes` - Database size +- `meshcore_db_cleanup_rows_deleted{table}` - Cleanup counts + +### Application +- `meshcore_connection_status` - Connection status (1=connected) +- `meshcore_errors_total{component,error_type}` - Error counts + +--- + +## Development Workflow + +### Setup +```bash +# Clone repository +git clone https://github.com/ipnet-mesh/meshcore-sidekick.git +cd meshcore-sidekick + +# Install dependencies +pip install -r requirements.txt + +# Or with Poetry +poetry install +``` + +### Running +```bash +# Development with mock +python -m meshcore_sidekick --use-mock --log-level DEBUG + +# With scenario +python -m meshcore_sidekick --use-mock --mock-scenario simple_chat + +# Production with hardware +python -m meshcore_sidekick --serial-port /dev/ttyUSB0 +``` + +### Testing +```bash +# Run tests +pytest + +# With coverage +pytest --cov=meshcore_sidekick + +# Specific test +pytest tests/test_database.py +``` + +### Code Quality +```bash +# Format +black src/ tests/ + +# Lint +ruff check src/ tests/ + +# Type check +mypy src/ +``` + +--- + +## Deployment + +### Local Development +```bash +python -m meshcore_sidekick --use-mock +``` + +### Docker (Mock) +```bash +docker-compose up --build +``` + +### Docker (Production) +```bash +docker-compose -f docker-compose.prod.yml up -d +``` + +### Systemd Service +```ini +[Unit] +Description=MeshCore Sidekick +After=network.target + +[Service] +Type=simple +User=meshcore +WorkingDirectory=/opt/meshcore-sidekick +Environment="MESHCORE_SERIAL_PORT=/dev/ttyUSB0" +Environment="MESHCORE_DB_PATH=/var/lib/meshcore/data.db" 
+ExecStart=/usr/bin/python3 -m meshcore_sidekick +Restart=always + +[Install] +WantedBy=multi-user.target +``` + +--- + +## Future Considerations + +### Scalability +- Separate API server from event collector +- Use message queue (Redis/RabbitMQ) for events +- PostgreSQL for multi-instance deployments +- Read replicas for query performance + +### Security +- API authentication (API keys, JWT) +- Rate limiting +- Input validation +- SQL injection prevention +- Encrypted storage option + +### Data Privacy +- Optional message content exclusion +- GDPR compliance features +- Data export tools +- User data deletion + +### Performance +- Connection pooling optimization +- Query optimization +- Caching frequently accessed data +- Batch inserts for events + +--- + +## Contributing + +1. Create feature branch from `main` +2. Implement changes with tests +3. Follow code style (black, ruff) +4. Update documentation +5. Submit pull request + +## License + +See LICENSE file for details. diff --git a/README.md b/README.md index c8ea55a..bf056ca 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,136 @@ -# meshcore-sidekick -MeshCore Companion Node Data Harvesting and Messaging Bridge +# MeshCore Sidekick + +MeshCore companion application for event collection, persistence, and REST API access. 
+ +## Features + +- Subscribe to all MeshCore events via Serial/BLE connection +- Persist events in SQLite database with configurable retention +- REST API for querying collected data and sending commands +- Mock MeshCore implementation for development without hardware +- Prometheus metrics for monitoring +- OpenAPI/Swagger documentation +- Docker deployment ready + +## Quick Start + +### Development (Mock Mode) + +```bash +# Install dependencies +pip install -r requirements.txt + +# Run with mock MeshCore +python -m meshcore_sidekick --use-mock --log-level DEBUG +``` + +### Production (Real Hardware) + +```bash +# Run with real MeshCore device +python -m meshcore_sidekick \ + --serial-port /dev/ttyUSB0 \ + --serial-baud 115200 \ + --db-path /data/meshcore.db \ + --retention-days 90 +``` + +### Docker + +```bash +# Development with mock +docker-compose up --build + +# Production with real hardware +docker-compose -f docker-compose.prod.yml up -d +``` + +## Configuration + +Configuration priority: **CLI Arguments > Environment Variables > Defaults** + +### CLI Arguments + +```bash +python -m meshcore_sidekick --help +``` + +### Environment Variables + +```bash +MESHCORE_SERIAL_PORT=/dev/ttyUSB0 +MESHCORE_USE_MOCK=true +MESHCORE_DB_PATH=/data/meshcore.db +MESHCORE_RETENTION_DAYS=30 +MESHCORE_API_PORT=8000 +MESHCORE_LOG_LEVEL=INFO +``` + +See full configuration options in documentation. 
+ +## Querying the Database + +View captured data with the query tool: + +```bash +# Full report (all tables and statistics) +python -m meshcore_sidekick.query + +# Summary statistics only +python -m meshcore_sidekick.query --summary + +# Recent messages (last 20) +python -m meshcore_sidekick.query --messages 20 + +# Discovered nodes +python -m meshcore_sidekick.query --nodes 10 + +# Recent advertisements +python -m meshcore_sidekick.query --advertisements 10 + +# Telemetry data +python -m meshcore_sidekick.query --telemetry 5 + +# Trace paths +python -m meshcore_sidekick.query --traces 5 + +# Activity in last 6 hours +python -m meshcore_sidekick.query --activity 6 + +# Custom database location +python -m meshcore_sidekick.query --db-path /data/meshcore.db +``` + +## API Documentation + +Once running, access interactive API docs at: +- Swagger UI: http://localhost:8000/docs +- ReDoc: http://localhost:8000/redoc +- OpenAPI Schema: http://localhost:8000/openapi.json + +## Prometheus Metrics + +Metrics available at: http://localhost:8000/metrics + +## Development + +```bash +# Install dependencies +pip install -r requirements.txt + +# Run tests +pytest + +# Format code +black src/ tests/ + +# Lint +ruff check src/ tests/ + +# Type check +mypy src/ +``` + +## License + +See LICENSE file. 
diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0e81fd0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,49 @@ +[tool.poetry] +name = "meshcore-sidekick" +version = "1.0.0" +description = "MeshCore companion application for event collection and REST API" +authors = ["Your Name "] +readme = "README.md" +packages = [{include = "meshcore_sidekick", from = "src"}] + +[tool.poetry.dependencies] +python = "^3.11" +meshcore = "^2.2.1" +fastapi = "^0.115.0" +uvicorn = {extras = ["standard"], version = "^0.31.0"} +sqlalchemy = "^2.0.0" +alembic = "^1.13.0" +pydantic = "^2.9.0" +prometheus-client = "^0.21.0" +prometheus-fastapi-instrumentator = "^7.0.0" +python-multipart = "^0.0.12" + +[tool.poetry.group.dev.dependencies] +pytest = "^8.3.0" +pytest-asyncio = "^0.24.0" +pytest-cov = "^6.0.0" +black = "^24.10.0" +ruff = "^0.7.0" +mypy = "^1.13.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 100 +target-version = ['py312'] + +[tool.ruff] +line-length = 100 +target-version = "py312" + +[tool.mypy] +python_version = "3.12" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = false + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..88b1d84 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +meshcore>=2.2.1 +fastapi>=0.115.0 +uvicorn[standard]>=0.31.0 +sqlalchemy>=2.0.0 +alembic>=1.13.0 +pydantic>=2.9.0 +prometheus-client>=0.21.0 +prometheus-fastapi-instrumentator>=7.0.0 +python-multipart>=0.0.12 diff --git a/src/meshcore_sidekick/__init__.py b/src/meshcore_sidekick/__init__.py new file mode 100644 index 0000000..93e17ba --- /dev/null +++ b/src/meshcore_sidekick/__init__.py @@ -0,0 +1,3 @@ +"""MeshCore Sidekick - Event collector and REST API for MeshCore devices.""" + +__version__ = "1.0.0" diff --git a/src/meshcore_sidekick/__main__.py 
b/src/meshcore_sidekick/__main__.py new file mode 100644 index 0000000..f4e7c4a --- /dev/null +++ b/src/meshcore_sidekick/__main__.py @@ -0,0 +1,197 @@ +"""Main application entry point.""" + +import asyncio +import logging +import signal +import sys +from typing import Optional + +from .config import Config +from .database.engine import init_database +from .database.cleanup import DataCleanup +from .meshcore import RealMeshCore, MockMeshCore, MeshCoreInterface +from .subscriber.event_handler import EventHandler +from .subscriber.metrics import get_metrics +from .utils.logging import setup_logging + +logger = logging.getLogger(__name__) + + +class Application: + """Main application controller.""" + + def __init__(self, config: Config): + """ + Initialize application. + + Args: + config: Application configuration + """ + self.config = config + self.meshcore: Optional[MeshCoreInterface] = None + self.event_handler: Optional[EventHandler] = None + self.cleanup_task: Optional[asyncio.Task] = None + self.running = False + + async def start(self) -> None: + """Start the application.""" + logger.info("Starting MeshCore Sidekick") + logger.info(f"\n{self.config.display()}") + + # Initialize database + logger.info("Initializing database...") + init_database(self.config.db_path) + + # Initialize event handler + self.event_handler = EventHandler() + + # Initialize metrics + if self.config.metrics_enabled: + metrics = get_metrics() + metrics.set_connection_status(False) + + # Initialize MeshCore (real or mock) + if self.config.use_mock: + logger.info("Initializing Mock MeshCore") + self.meshcore = MockMeshCore( + scenario_name=self.config.mock_scenario, + loop_scenario=self.config.mock_loop, + num_nodes=self.config.mock_nodes, + min_interval=self.config.mock_min_interval, + max_interval=self.config.mock_max_interval, + center_lat=self.config.mock_center_lat, + center_lon=self.config.mock_center_lon, + ) + else: + logger.info("Initializing Real MeshCore") + self.meshcore = 
RealMeshCore( + serial_port=self.config.serial_port, + baud_rate=self.config.serial_baud, + ) + + # Connect to MeshCore + logger.info("Connecting to MeshCore...") + connected = await self.meshcore.connect() + + if not connected: + logger.error("Failed to connect to MeshCore") + sys.exit(1) + + logger.info("Connected to MeshCore successfully") + + if self.config.metrics_enabled: + metrics.set_connection_status(True) + + # Subscribe to events + logger.info("Subscribing to MeshCore events...") + await self.meshcore.subscribe_to_events(self.event_handler.handle_event) + + # Start cleanup task + if self.config.retention_days > 0: + logger.info(f"Starting cleanup task (every {self.config.cleanup_interval_hours} hours)") + self.cleanup_task = asyncio.create_task(self._cleanup_loop()) + + self.running = True + logger.info("Application started successfully") + + async def stop(self) -> None: + """Stop the application.""" + logger.info("Stopping MeshCore Sidekick...") + self.running = False + + # Cancel cleanup task + if self.cleanup_task: + self.cleanup_task.cancel() + try: + await self.cleanup_task + except asyncio.CancelledError: + pass + + # Disconnect from MeshCore + if self.meshcore: + await self.meshcore.disconnect() + + # Update metrics + if self.config.metrics_enabled: + metrics = get_metrics() + metrics.set_connection_status(False) + + logger.info("Application stopped") + + async def _cleanup_loop(self) -> None: + """Background task for periodic database cleanup.""" + cleanup = DataCleanup(self.config.retention_days) + + while self.running: + try: + # Wait for interval + await asyncio.sleep(self.config.cleanup_interval_hours * 3600) + + # Run cleanup + logger.info("Running database cleanup...") + deleted_counts = cleanup.cleanup_old_data() + + # Update metrics + if self.config.metrics_enabled: + metrics = get_metrics() + for table, count in deleted_counts.items(): + if count > 0: + metrics.record_cleanup(table, count) + + except asyncio.CancelledError: + break + 
except Exception as e: + logger.error(f"Error in cleanup loop: {e}", exc_info=True) + if self.config.metrics_enabled: + metrics = get_metrics() + metrics.record_error("cleanup", "cleanup_failed") + + async def run(self) -> None: + """Run the application until interrupted.""" + try: + await self.start() + + # Keep running until interrupted + while self.running: + await asyncio.sleep(1) + + except KeyboardInterrupt: + logger.info("Received keyboard interrupt") + except Exception as e: + logger.error(f"Application error: {e}", exc_info=True) + finally: + await self.stop() + + +def main() -> None: + """Main entry point.""" + # Load configuration + config = Config.from_args_and_env() + + # Setup logging + setup_logging(level=config.log_level, format_type=config.log_format) + + # Create and run application + app = Application(config) + + # Setup signal handlers + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + def signal_handler(sig, frame): + logger.info(f"Received signal {sig}") + loop.create_task(app.stop()) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + try: + loop.run_until_complete(app.run()) + except KeyboardInterrupt: + logger.info("Keyboard interrupt") + finally: + loop.close() + + +if __name__ == "__main__": + main() diff --git a/src/meshcore_sidekick/api/__init__.py b/src/meshcore_sidekick/api/__init__.py new file mode 100644 index 0000000..f55861e --- /dev/null +++ b/src/meshcore_sidekick/api/__init__.py @@ -0,0 +1,5 @@ +"""FastAPI REST API application.""" + +from .app import create_app + +__all__ = ["create_app"] diff --git a/src/meshcore_sidekick/api/routes/__init__.py b/src/meshcore_sidekick/api/routes/__init__.py new file mode 100644 index 0000000..fb0a2f8 --- /dev/null +++ b/src/meshcore_sidekick/api/routes/__init__.py @@ -0,0 +1 @@ +"""API route modules.""" diff --git a/src/meshcore_sidekick/config.py b/src/meshcore_sidekick/config.py new file mode 100644 index 0000000..2b78108 --- 
@dataclass
class Config:
    """Application configuration.

    The field defaults below are the lowest-priority values; they can be
    overridden by ``MESHCORE_*`` environment variables, which are in turn
    overridden by CLI arguments (see :meth:`from_args_and_env`).
    """

    # === Connection ===
    serial_port: str = "/dev/ttyUSB0"
    serial_baud: int = 115200
    use_mock: bool = False
    mock_scenario: Optional[str] = None
    mock_loop: bool = False
    mock_nodes: int = 10
    mock_min_interval: float = 1.0
    mock_max_interval: float = 10.0
    mock_center_lat: float = 45.5231
    mock_center_lon: float = -122.6765

    # === Database ===
    db_path: str = "./meshcore.db"
    retention_days: int = 30
    cleanup_interval_hours: int = 1

    # === API ===
    api_host: str = "0.0.0.0"
    api_port: int = 8000
    api_title: str = "MeshCore Sidekick API"
    api_version: str = "1.0.0"

    # === Prometheus ===
    metrics_enabled: bool = True

    # === Logging ===
    log_level: str = "INFO"
    log_format: str = "json"  # json|text

    @classmethod
    def from_args_and_env(cls, argv: Optional[list] = None) -> "Config":
        """
        Load configuration from CLI arguments, environment variables, and defaults.

        Priority: CLI args > Environment variables > Defaults

        Args:
            argv: Optional explicit argument vector (defaults to sys.argv);
                exposed so tests can drive the parser deterministically.

        Returns:
            Config instance
        """
        # One row per option: (attribute, env var, converter, argparse kwargs).
        # converter=None marks a store_true flag. The CLI flag name is derived
        # from the attribute name, which keeps the dataclass field, the CLI
        # flag, and the env-var resolution step from drifting apart.
        option_specs = {
            "Connection": [
                ("serial_port", "MESHCORE_SERIAL_PORT", str,
                 {"help": "Serial port device (e.g., /dev/ttyUSB0)"}),
                ("serial_baud", "MESHCORE_SERIAL_BAUD", int,
                 {"help": "Serial baud rate"}),
                ("use_mock", "MESHCORE_USE_MOCK", None,
                 {"help": "Use mock MeshCore instead of real device"}),
                ("mock_scenario", "MESHCORE_MOCK_SCENARIO", str,
                 {"help": "Mock scenario name for playback"}),
                ("mock_loop", "MESHCORE_MOCK_LOOP", None,
                 {"help": "Loop mock scenario indefinitely"}),
                ("mock_nodes", "MESHCORE_MOCK_NODES", int,
                 {"help": "Number of simulated nodes in random mode"}),
                ("mock_min_interval", "MESHCORE_MOCK_MIN_INTERVAL", float,
                 {"help": "Minimum interval between random events (seconds)"}),
                ("mock_max_interval", "MESHCORE_MOCK_MAX_INTERVAL", float,
                 {"help": "Maximum interval between random events (seconds)"}),
                ("mock_center_lat", "MESHCORE_MOCK_CENTER_LAT", float,
                 {"help": "Center latitude for simulated nodes"}),
                ("mock_center_lon", "MESHCORE_MOCK_CENTER_LON", float,
                 {"help": "Center longitude for simulated nodes"}),
            ],
            "Database": [
                ("db_path", "MESHCORE_DB_PATH", str,
                 {"help": "SQLite database file path"}),
                ("retention_days", "MESHCORE_RETENTION_DAYS", int,
                 {"help": "Data retention period in days"}),
                ("cleanup_interval_hours", "MESHCORE_CLEANUP_INTERVAL_HOURS", int,
                 {"help": "Cleanup task interval in hours"}),
            ],
            "API": [
                ("api_host", "MESHCORE_API_HOST", str,
                 {"help": "API server host"}),
                ("api_port", "MESHCORE_API_PORT", int,
                 {"help": "API server port"}),
                ("api_title", "MESHCORE_API_TITLE", str,
                 {"help": "API title for OpenAPI documentation"}),
                ("api_version", "MESHCORE_API_VERSION", str,
                 {"help": "API version"}),
            ],
            "Metrics": [
                ("no_metrics", "MESHCORE_METRICS_ENABLED", None,
                 {"help": "Disable Prometheus metrics"}),
            ],
            "Logging": [
                ("log_level", "MESHCORE_LOG_LEVEL", str,
                 {"help": "Logging level",
                  "choices": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]}),
                ("log_format", "MESHCORE_LOG_FORMAT", str,
                 {"help": "Log output format",
                  "choices": ["json", "text"]}),
            ],
        }

        parser = argparse.ArgumentParser(
            description="MeshCore Sidekick - Event collector and API server",
            formatter_class=argparse.ArgumentDefaultsHelpFormatter
        )
        for group_name, rows in option_specs.items():
            group = parser.add_argument_group(group_name)
            for attr, _env_var, converter, kwargs in rows:
                flag = "--" + attr.replace("_", "-")
                if converter is None:
                    group.add_argument(flag, action="store_true", **kwargs)
                else:
                    group.add_argument(flag, type=converter, **kwargs)

        args = parser.parse_args(argv)

        def env_bool(raw: str) -> bool:
            """Interpret common truthy spellings used in environment variables."""
            return raw.lower() in ("true", "1", "yes", "on")

        def env_flag(env_var: str, default: bool) -> bool:
            raw = os.getenv(env_var)
            return default if raw is None else env_bool(raw)

        config = cls()

        # Generic resolution for value-taking options:
        # CLI (not None) > env var (set) > dataclass default.
        for rows in option_specs.values():
            for attr, env_var, converter, _kwargs in rows:
                if converter is None:
                    continue  # boolean flags are resolved below
                cli_value = getattr(args, attr)
                if cli_value is not None:
                    setattr(config, attr, cli_value)
                    continue
                raw = os.getenv(env_var)
                if raw is not None:
                    setattr(config, attr, converter(raw))

        # Boolean flags need special treatment: store_true yields False (not
        # None) when the flag is absent, so "flag not given" must fall
        # through to the env var rather than overriding it.
        config.use_mock = args.use_mock or env_flag(
            "MESHCORE_USE_MOCK", config.use_mock
        )
        config.mock_loop = args.mock_loop or env_flag(
            "MESHCORE_MOCK_LOOP", config.mock_loop
        )
        # --no-metrics is inverted: passing it forces metrics off regardless
        # of MESHCORE_METRICS_ENABLED.
        config.metrics_enabled = (
            not args.no_metrics
            and env_flag("MESHCORE_METRICS_ENABLED", config.metrics_enabled)
        )

        return config

    def display(self) -> str:
        """
        Display configuration in human-readable format.

        Returns:
            Formatted configuration string
        """
        lines = [
            "Configuration:",
            "  Connection:",
            f"    Mode: {'Mock' if self.use_mock else 'Real'}",
        ]

        # Mock and real connections expose different relevant settings.
        if self.use_mock:
            lines.extend([
                f"    Scenario: {self.mock_scenario or 'Random'}",
                f"    Loop: {self.mock_loop}",
                f"    Nodes: {self.mock_nodes}",
                f"    Event Interval: {self.mock_min_interval}-{self.mock_max_interval}s",
            ])
        else:
            lines.extend([
                f"    Serial Port: {self.serial_port}",
                f"    Baud Rate: {self.serial_baud}",
            ])

        lines.extend([
            "  Database:",
            f"    Path: {self.db_path}",
            f"    Retention: {self.retention_days} days",
            f"    Cleanup Interval: {self.cleanup_interval_hours} hours",
            "  API:",
            f"    Host: {self.api_host}",
            f"    Port: {self.api_port}",
            f"    Metrics: {'Enabled' if self.metrics_enabled else 'Disabled'}",
            "  Logging:",
            f"    Level: {self.log_level}",
            f"    Format: {self.log_format}",
        ])

        return "\n".join(lines)
class DataCleanup:
    """Handles automatic cleanup of old data based on retention policy."""

    # One row per retention target: (result key, model, age column).
    # nodes/paths/device_info are deliberately absent here — they are not
    # deleted by the visible cleanup pass, only the history tables below are.
    _TARGETS = (
        ("messages", Message, Message.received_at),
        ("advertisements", Advertisement, Advertisement.received_at),
        ("telemetry", Telemetry, Telemetry.received_at),
        ("trace_paths", TracePath, TracePath.completed_at),
        ("acknowledgments", Acknowledgment, Acknowledgment.confirmed_at),
        ("status_responses", StatusResponse, StatusResponse.received_at),
        ("statistics", Statistics, Statistics.recorded_at),
        ("binary_responses", BinaryResponse, BinaryResponse.received_at),
        ("control_data", ControlData, ControlData.received_at),
        ("raw_data", RawData, RawData.received_at),
        ("events_log", EventLog, EventLog.created_at),
    )

    def __init__(self, retention_days: int):
        """
        Initialize cleanup handler.

        Args:
            retention_days: Number of days to retain data
        """
        self.retention_days = retention_days

    def cleanup_old_data(self) -> dict:
        """
        Remove data older than retention period.

        Returns:
            Dictionary with counts of deleted records per table
        """
        # Local import keeps the module surface unchanged.
        from datetime import timezone

        # Naive UTC, equivalent to the deprecated datetime.utcnow(), so the
        # comparison semantics against the naive stored timestamps are kept.
        cutoff_date = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(
            days=self.retention_days
        )
        deleted_counts = {}

        logger.info(f"Starting cleanup of data older than {cutoff_date.isoformat()}")

        # Single transactional pass over every retention target; the
        # table-driven loop replaces eleven copy-pasted delete stanzas.
        with session_scope() as session:
            for key, model, age_column in self._TARGETS:
                result = session.execute(
                    delete(model).where(age_column < cutoff_date)
                )
                deleted_counts[key] = result.rowcount

        total_deleted = sum(deleted_counts.values())
        logger.info(f"Cleanup complete: {total_deleted} total records deleted")
        logger.debug(f"Deleted by table: {deleted_counts}")

        return deleted_counts
class DatabaseEngine:
    """Manages database connection and session lifecycle."""

    def __init__(self, db_path: str):
        """
        Initialize database engine.

        Args:
            db_path: Path to SQLite database file
        """
        self.db_path = db_path
        self.engine: Optional[Engine] = None
        self.session_factory: Optional[sessionmaker] = None

    def initialize(self) -> None:
        """Initialize database engine and create tables if needed."""
        # Ensure parent directory exists
        db_file = Path(self.db_path)
        db_file.parent.mkdir(parents=True, exist_ok=True)

        # Create engine with SQLite-specific settings
        connection_string = f"sqlite:///{self.db_path}"
        self.engine = create_engine(
            connection_string,
            echo=False,
            pool_pre_ping=True,
            connect_args={"check_same_thread": False}  # Allow multi-threaded access
        )

        # Enable foreign keys and WAL for SQLite.
        # BUGFIX: listen on THIS engine instance, not the Engine class.
        # A class-level listener is global to the process, so every
        # initialize() call (or second DatabaseEngine instance) would stack
        # another duplicate listener that fires for ALL engines.
        @event.listens_for(self.engine, "connect")
        def set_sqlite_pragma(dbapi_conn, connection_record):
            cursor = dbapi_conn.cursor()
            cursor.execute("PRAGMA foreign_keys=ON")
            cursor.execute("PRAGMA journal_mode=WAL")  # Write-Ahead Logging for better concurrency
            cursor.close()

        # Create all tables
        Base.metadata.create_all(self.engine)
        logger.info(f"Database initialized at {self.db_path}")

        # Create session factory
        self.session_factory = sessionmaker(bind=self.engine)

    def get_session(self) -> Session:
        """
        Create a new database session.

        Returns:
            SQLAlchemy Session instance

        Raises:
            RuntimeError: If database not initialized
        """
        if not self.session_factory:
            raise RuntimeError("Database not initialized. Call initialize() first.")
        return self.session_factory()

    @contextmanager
    def session_scope(self) -> Generator[Session, None, None]:
        """
        Provide a transactional scope for database operations.

        Commits on success, rolls back on any exception, and always closes
        the session.

        Yields:
            SQLAlchemy Session

        Example:
            with db_engine.session_scope() as session:
                node = Node(public_key="01ab2186...")
                session.add(node)
        """
        session = self.get_session()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

    def close(self) -> None:
        """Close database engine."""
        if self.engine:
            self.engine.dispose()
            logger.info("Database engine closed")
class Node(Base):
    """Represents a MeshCore node (repeater or companion device).

    Besides the full public key, two short prefix columns are stored and
    indexed separately so that short-prefix lookups can use a narrow index.
    """

    __tablename__ = "nodes"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    public_key: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    public_key_prefix_2: Mapped[str] = mapped_column(String(2), nullable=False, index=True)
    public_key_prefix_8: Mapped[str] = mapped_column(String(8), nullable=False, index=True)
    node_type: Mapped[Optional[str]] = mapped_column(String(32))  # chat/repeater/room/none
    name: Mapped[Optional[str]] = mapped_column(String(128))
    latitude: Mapped[Optional[float]] = mapped_column(Float)
    longitude: Mapped[Optional[float]] = mapped_column(Float)
    last_seen: Mapped[Optional[datetime]] = mapped_column(DateTime)
    first_seen: Mapped[datetime] = mapped_column(DateTime, default=func.now())
    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())

    @classmethod
    def find_by_prefix(cls, session, prefix: str) -> List["Node"]:
        """Find all nodes matching a public key prefix (case-insensitive)."""
        needle = prefix.lower()
        # Route the LIKE through the narrowest indexed column that can still
        # contain the whole needle.
        if len(needle) <= 2:
            column = cls.public_key_prefix_2
        elif len(needle) <= 8:
            column = cls.public_key_prefix_8
        else:
            column = cls.public_key
        return session.query(cls).filter(column.like(f"{needle}%")).all()
class TracePath(Base):
    """Represents a trace path result with SNR data.

    One row per completed trace: the hop-by-hop hashes and the SNR observed
    at each hop are stored as JSON-encoded text columns.
    """

    __tablename__ = "trace_paths"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Tag supplied by the trace initiator; used to match results to requests
    initiator_tag: Mapped[int] = mapped_column(Integer, nullable=False)
    destination_public_key: Mapped[Optional[str]] = mapped_column(String(64))
    path_hashes: Mapped[Optional[str]] = mapped_column(Text)  # JSON array of 2-char hashes
    snr_values: Mapped[Optional[str]] = mapped_column(Text)  # JSON array of SNR values
    hop_count: Mapped[Optional[int]] = mapped_column(Integer)
    # Indexed: retention cleanup filters on this column
    completed_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True)
class Statistics(Base):
    """Represents device statistics (core/radio/packets).

    Each snapshot is stored as an opaque JSON blob keyed by its family
    (stat_type), rather than as typed columns.
    """

    __tablename__ = "statistics"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Which statistics family this snapshot belongs to
    stat_type: Mapped[str] = mapped_column(String(32), nullable=False)  # core/radio/packets
    data: Mapped[str] = mapped_column(Text, nullable=False)  # JSON statistics data
    # Indexed: retention cleanup and time-range queries filter on this column
    recorded_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True)
class DeviceInfo(Base):
    """Represents companion device information and status.

    Append-only snapshots of the locally attached companion device; every
    field except the primary key and timestamp is optional because firmware
    responses may omit values.
    """

    __tablename__ = "device_info"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    battery_voltage: Mapped[Optional[float]] = mapped_column(Float)
    battery_percentage: Mapped[Optional[int]] = mapped_column(Integer)
    # Storage counters; units not established here — presumably bytes, verify
    # against the firmware response before relying on them.
    storage_used: Mapped[Optional[int]] = mapped_column(Integer)
    storage_total: Mapped[Optional[int]] = mapped_column(Integer)
    # Clock as reported by the device (may drift from host time)
    device_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
    firmware_version: Mapped[Optional[str]] = mapped_column(String(32))
    capabilities: Mapped[Optional[str]] = mapped_column(Text)  # JSON
    # Indexed: retention/time-range queries filter on this column
    recorded_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True)
class MeshCoreInterface(ABC):
    """Abstract base class for MeshCore implementations.

    Concrete implementations (a real device wrapper, an in-process mock)
    must provide every method below. All methods are coroutines; the
    command-style methods return an Event describing the device's
    confirmation or response payload.
    """

    @abstractmethod
    async def connect(self) -> bool:
        """
        Connect to MeshCore device.

        Returns:
            True if connection successful, False otherwise
        """
        pass

    @abstractmethod
    async def disconnect(self) -> None:
        """Disconnect from MeshCore device."""
        pass

    @abstractmethod
    async def is_connected(self) -> bool:
        """
        Check if connected to MeshCore device.

        Returns:
            True if connected, False otherwise
        """
        pass

    @abstractmethod
    async def subscribe_to_events(self, handler: Callable[[Event], None]) -> None:
        """
        Subscribe to all MeshCore events.

        Args:
            handler: Async callback function to handle events
        """
        pass

    @abstractmethod
    async def send_message(self, destination: str, text: str, text_type: str = "plain") -> Event:
        """
        Send a direct message to a node.

        Args:
            destination: Destination public key (full or prefix)
            text: Message content
            text_type: Message type (plain/cli_data/signed_plain)

        Returns:
            Event with send confirmation
        """
        pass

    @abstractmethod
    async def send_channel_message(self, text: str, flood: bool = True) -> Event:
        """
        Send a channel broadcast message.

        Args:
            text: Message content
            flood: Whether to flood the message

        Returns:
            Event with send confirmation
        """
        pass

    @abstractmethod
    async def send_advert(self, flood: bool = True) -> Event:
        """
        Send self-advertisement.

        Args:
            flood: Whether to flood the advertisement

        Returns:
            Event with send confirmation
        """
        pass

    @abstractmethod
    async def send_trace_path(self, destination: str) -> Event:
        """
        Initiate trace path to destination.

        Args:
            destination: Destination public key

        Returns:
            Event with trace initiation confirmation
        """
        pass

    @abstractmethod
    async def ping(self, destination: str) -> Event:
        """
        Ping a node.

        Args:
            destination: Destination public key

        Returns:
            Event with ping confirmation
        """
        pass

    @abstractmethod
    async def send_telemetry_request(self, destination: str) -> Event:
        """
        Request telemetry from a node.

        Args:
            destination: Destination public key

        Returns:
            Event with request confirmation
        """
        pass

    @abstractmethod
    async def get_device_info(self) -> Event:
        """
        Get companion device information.

        Returns:
            Event with device info payload
        """
        pass

    @abstractmethod
    async def get_battery(self) -> Event:
        """
        Get battery status.

        Returns:
            Event with battery info payload
        """
        pass

    @abstractmethod
    async def get_contacts(self) -> List[Contact]:
        """
        Get list of contacts.

        Returns:
            List of Contact objects
        """
        pass

    @abstractmethod
    async def get_statistics(self, stat_type: str = "core") -> Event:
        """
        Get device statistics.

        Args:
            stat_type: Type of statistics (core/radio/packets)

        Returns:
            Event with statistics payload
        """
        pass
+ + Args: + scenario_name: Name of scenario to play back (None for random) + loop_scenario: Whether to loop scenario indefinitely + num_nodes: Number of simulated nodes for random mode + min_interval: Minimum interval between random events (seconds) + max_interval: Maximum interval between random events (seconds) + center_lat: Center latitude for simulated nodes + center_lon: Center longitude for simulated nodes + gps_radius_km: Radius in km for random GPS coordinates + """ + self.scenario_name = scenario_name + self.loop_scenario = loop_scenario + self.num_nodes = num_nodes + self.min_interval = min_interval + self.max_interval = max_interval + self.center_lat = center_lat + self.center_lon = center_lon + self.gps_radius_km = gps_radius_km + + self._connected = False + self._event_handlers: List[Callable] = [] + self._background_task: Optional[asyncio.Task] = None + self._simulated_nodes: List[dict] = [] + self._message_counter = 0 + + async def connect(self) -> bool: + """Connect to mock MeshCore.""" + logger.info("Connecting to Mock MeshCore") + self._connected = True + + # Generate simulated nodes + self._generate_simulated_nodes() + + # Start background event generation + if self.scenario_name: + self._background_task = asyncio.create_task(self._playback_scenario()) + else: + self._background_task = asyncio.create_task(self._generate_random_events()) + + logger.info(f"Mock MeshCore connected with {len(self._simulated_nodes)} simulated nodes") + return True + + async def disconnect(self) -> None: + """Disconnect from mock MeshCore.""" + logger.info("Disconnecting from Mock MeshCore") + self._connected = False + + if self._background_task: + self._background_task.cancel() + try: + await self._background_task + except asyncio.CancelledError: + pass + + async def is_connected(self) -> bool: + """Check if connected.""" + return self._connected + + async def subscribe_to_events(self, handler: Callable[[Event], None]) -> None: + """Subscribe to events.""" + 
self._event_handlers.append(handler) + logger.debug(f"Added event handler: {handler.__name__}") + + def _generate_simulated_nodes(self) -> None: + """Generate simulated node data.""" + node_names = [ + "Alice", "Bob", "Charlie", "Diana", "Eve", "Frank", + "Grace", "Henry", "Ivy", "Jack", "Kate", "Leo", + "Repeater-01", "Repeater-02", "Gateway-01", "Sensor-01", + "Sensor-02", "Mobile-01", "Mobile-02", "Base-Station" + ] + + node_types = ["chat", "repeater", "room", "none"] + + for i in range(self.num_nodes): + # Generate random public key + public_key = "".join([f"{random.randint(0, 255):02x}" for _ in range(32)]) + + # Random GPS within radius + lat_offset = random.uniform(-1, 1) * (self.gps_radius_km / 111.0) + lon_offset = random.uniform(-1, 1) * (self.gps_radius_km / 111.0) + + node = { + "public_key": public_key, + "name": node_names[i % len(node_names)] if i < len(node_names) else f"Node-{i}", + "node_type": random.choice(node_types), + "latitude": self.center_lat + lat_offset, + "longitude": self.center_lon + lon_offset, + } + self._simulated_nodes.append(node) + + async def _dispatch_event(self, event: Event) -> None: + """Dispatch event to all handlers.""" + for handler in self._event_handlers: + try: + await handler(event) + except Exception as e: + logger.error(f"Error in event handler {handler.__name__}: {e}") + + async def _generate_random_events(self) -> None: + """Background task generating random events.""" + logger.info("Starting random event generation") + + while self._connected: + try: + # Select random event type + event_type = self._select_random_event_type() + + # Generate event + event = await self._create_random_event(event_type) + + # Dispatch event + await self._dispatch_event(event) + + # Wait random interval + delay = random.uniform(self.min_interval, self.max_interval) + await asyncio.sleep(delay) + + except asyncio.CancelledError: + break + except Exception as e: + logger.error(f"Error generating random event: {e}") + await 
asyncio.sleep(1.0) + + def _select_random_event_type(self) -> str: + """Select random event type with weighted probabilities.""" + event_types = [ + ("ADVERTISEMENT", 30), + ("CONTACT_MSG_RECV", 25), + ("CHANNEL_MSG_RECV", 15), + ("PATH_UPDATED", 10), + ("SEND_CONFIRMED", 8), + ("TELEMETRY_RESPONSE", 5), + ("TRACE_DATA", 3), + ("BATTERY", 2), + ("STATUS_RESPONSE", 2), + ] + + total_weight = sum(weight for _, weight in event_types) + rand = random.uniform(0, total_weight) + + cumulative = 0 + for event_type, weight in event_types: + cumulative += weight + if rand <= cumulative: + return event_type + + return "ADVERTISEMENT" + + async def _create_random_event(self, event_type: str) -> Event: + """Create random event of specified type.""" + node = random.choice(self._simulated_nodes) + + if event_type == "ADVERTISEMENT": + return Event( + type="ADVERTISEMENT", + payload={ + "public_key": node["public_key"], + "name": node["name"], + "adv_type": node["node_type"], + "latitude": node["latitude"], + "longitude": node["longitude"], + "flags": random.randint(0, 255), + } + ) + + elif event_type == "CONTACT_MSG_RECV": + from_node = node + to_node = random.choice(self._simulated_nodes) + messages = [ + "Hello!", "How are you?", "Testing 123", "Roger that", + "Message received", "All good here", "Check", + "Standing by", "Copy that", "Acknowledged" + ] + return Event( + type="CONTACT_MSG_RECV", + payload={ + "from_public_key": from_node["public_key"], + "to_public_key": to_node["public_key"], + "text": random.choice(messages), + "snr": random.uniform(-5, 30), + "rssi": random.uniform(-110, -50), + "timestamp": datetime.utcnow().isoformat() + "Z", + } + ) + + elif event_type == "CHANNEL_MSG_RECV": + messages = [ + "Hello everyone!", "Anyone online?", "Network test", + "All stations check in", "Repeater operational", + "Good morning", "Weather update", "Checking coverage" + ] + return Event( + type="CHANNEL_MSG_RECV", + payload={ + "from_public_key": node["public_key"], + 
"text": random.choice(messages), + "snr": random.uniform(-5, 30), + "timestamp": datetime.utcnow().isoformat() + "Z", + } + ) + + elif event_type == "PATH_UPDATED": + hop_count = random.randint(1, 5) + return Event( + type="PATH_UPDATED", + payload={ + "node_public_key": node["public_key"], + "hop_count": hop_count, + } + ) + + elif event_type == "SEND_CONFIRMED": + return Event( + type="SEND_CONFIRMED", + payload={ + "destination_public_key": node["public_key"], + "round_trip_ms": random.randint(500, 10000), + } + ) + + elif event_type == "TELEMETRY_RESPONSE": + return Event( + type="TELEMETRY_RESPONSE", + payload={ + "node_public_key": node["public_key"][:12], + "parsed_data": { + "temperature": random.uniform(15, 35), + "humidity": random.randint(30, 80), + "battery": random.uniform(3.0, 4.2), + } + } + ) + + elif event_type == "TRACE_DATA": + hop_count = random.randint(1, 5) + path_hashes = [node["public_key"][:2] for _ in range(hop_count)] + snr_values = [random.uniform(10, 50) for _ in range(hop_count)] + return Event( + type="TRACE_DATA", + payload={ + "initiator_tag": random.randint(0, 0xFFFFFFFF), + "destination_public_key": node["public_key"], + "path_hashes": path_hashes, + "snr_values": snr_values, + "hop_count": hop_count, + } + ) + + elif event_type == "BATTERY": + return Event( + type="BATTERY", + payload={ + "battery_voltage": random.uniform(3.2, 4.2), + "battery_percentage": random.randint(20, 100), + } + ) + + elif event_type == "STATUS_RESPONSE": + return Event( + type="STATUS_RESPONSE", + payload={ + "node_public_key": node["public_key"], + "status_data": { + "uptime": random.randint(0, 86400), + "messages": random.randint(0, 1000), + } + } + ) + + return Event(type="UNKNOWN", payload={}) + + async def _playback_scenario(self) -> None: + """Play back predefined scenario.""" + if self.scenario_name not in SCENARIOS: + logger.error(f"Unknown scenario: {self.scenario_name}") + return + + scenario = SCENARIOS[self.scenario_name] + 
logger.info(f"Playing scenario: {scenario['description']}") + + while self._connected: + start_time = time.time() + + for event_def in scenario["events"]: + if not self._connected: + break + + # Wait until event delay + target_time = start_time + event_def["delay"] + await asyncio.sleep(max(0, target_time - time.time())) + + # Process dynamic values + payload = process_dynamic_values(event_def["data"]) + + # Create and dispatch event + event = Event(type=event_def["type"], payload=payload) + await self._dispatch_event(event) + + if not self.loop_scenario: + logger.info("Scenario playback complete") + break + + logger.info("Looping scenario...") + + async def send_message(self, destination: str, text: str, text_type: str = "plain") -> Event: + """Send a direct message (mock).""" + self._message_counter += 1 + logger.info(f"Mock: Sending message to {destination}: {text}") + return Event( + type="MSG_SENT", + payload={ + "message_id": self._message_counter, + "destination": destination, + "text": text, + "estimated_delivery_ms": random.randint(1000, 5000), + } + ) + + async def send_channel_message(self, text: str, flood: bool = True) -> Event: + """Send a channel message (mock).""" + self._message_counter += 1 + logger.info(f"Mock: Sending channel message: {text}") + return Event( + type="MSG_SENT", + payload={ + "message_id": self._message_counter, + "text": text, + "flood": flood, + } + ) + + async def send_advert(self, flood: bool = True) -> Event: + """Send advertisement (mock).""" + logger.info("Mock: Sending advertisement") + return Event( + type="ADVERT_SENT", + payload={"flood": flood} + ) + + async def send_trace_path(self, destination: str) -> Event: + """Send trace path request (mock).""" + tag = random.randint(0, 0xFFFFFFFF) + logger.info(f"Mock: Sending trace path to {destination}, tag={tag}") + return Event( + type="TRACE_INITIATED", + payload={ + "destination": destination, + "initiator_tag": tag, + } + ) + + async def ping(self, destination: str) -> 
Event: + """Ping node (mock).""" + logger.info(f"Mock: Pinging {destination}") + return Event( + type="PING_SENT", + payload={"destination": destination} + ) + + async def send_telemetry_request(self, destination: str) -> Event: + """Request telemetry (mock).""" + logger.info(f"Mock: Requesting telemetry from {destination}") + return Event( + type="TELEMETRY_REQUEST_SENT", + payload={"destination": destination} + ) + + async def get_device_info(self) -> Event: + """Get device info (mock).""" + return Event( + type="DEVICE_INFO", + payload={ + "firmware_version": "1.21.0-mock", + "capabilities": {"ble": True, "gps": True, "lora": True} + } + ) + + async def get_battery(self) -> Event: + """Get battery status (mock).""" + return Event( + type="BATTERY", + payload={ + "battery_voltage": random.uniform(3.5, 4.2), + "battery_percentage": random.randint(50, 100), + } + ) + + async def get_contacts(self) -> List[Contact]: + """Get contacts (mock).""" + contacts = [] + for node in self._simulated_nodes: + contact = Contact( + public_key=node["public_key"], + name=node["name"], + node_type=node["node_type"], + latitude=node["latitude"], + longitude=node["longitude"], + ) + contacts.append(contact) + return contacts + + async def get_statistics(self, stat_type: str = "core") -> Event: + """Get statistics (mock).""" + if stat_type == "core": + data = { + "battery": random.uniform(3.5, 4.2), + "uptime": random.randint(0, 86400), + "errors": random.randint(0, 10), + "queue_depth": random.randint(0, 20), + } + elif stat_type == "radio": + data = { + "noise_floor": random.uniform(-120, -100), + "rssi": random.uniform(-90, -50), + "snr": random.uniform(5, 30), + "airtime_percent": random.uniform(5, 50), + } + else: # packets + data = { + "tx_count": random.randint(100, 10000), + "rx_count": random.randint(200, 20000), + "tx_errors": random.randint(0, 100), + } + + return Event( + type="STATISTICS", + payload={"stat_type": stat_type, "data": data} + ) diff --git 
"""Real MeshCore implementation using meshcore_py library."""

import logging
from typing import Callable, List, Optional

from meshcore import MeshCore as MeshCorePy
from meshcore import EventType

from .interface import MeshCoreInterface, Event, Contact

logger = logging.getLogger(__name__)


class RealMeshCore(MeshCoreInterface):
    """Real MeshCore implementation wrapping the meshcore_py library.

    All command methods share the same contract: raise RuntimeError when not
    connected, translate the meshcore_py result into our Event type, and
    degrade a library failure into an ERROR event rather than raising.
    """

    def __init__(self, serial_port: str, baud_rate: int = 115200):
        """
        Initialize real MeshCore connection.

        Args:
            serial_port: Serial port device path (e.g., /dev/ttyUSB0)
            baud_rate: Serial baud rate (default 115200)
        """
        self.serial_port = serial_port
        self.baud_rate = baud_rate
        self.meshcore: Optional[MeshCorePy] = None
        self._event_handlers: List[Callable] = []

    @staticmethod
    def _to_event(result) -> Event:
        """Convert a meshcore_py result/event object into our Event type.

        meshcore_py objects are duck-typed here: ``type`` may be an enum or a
        plain value, and ``payload`` may be absent — hence the hasattr checks.
        """
        event_type = result.type.name if hasattr(result.type, 'name') else str(result.type)
        payload = result.payload if hasattr(result, 'payload') else {}
        return Event(type=event_type, payload=payload)

    def _require_connection(self) -> None:
        """Raise RuntimeError unless connect() has succeeded."""
        if not self.meshcore:
            raise RuntimeError("Not connected to MeshCore")

    async def connect(self) -> bool:
        """Connect to the MeshCore device via serial and subscribe to all events.

        Returns:
            True on success, False on any failure (logged with traceback).
        """
        try:
            logger.info(f"Connecting to MeshCore on {self.serial_port} at {self.baud_rate} baud")
            self.meshcore = await MeshCorePy.create_serial(
                self.serial_port,
                self.baud_rate,
                debug=False
            )

            # Subscribe to every event type the library defines so nothing
            # is dropped before our handlers see it.
            logger.info(f"Subscribing to {len(list(EventType))} event types")
            for event_type in EventType:
                logger.debug(f"Subscribing to {event_type.name}")
                self.meshcore.subscribe(event_type, self._handle_meshcore_event)

            # Optional API: only present in newer meshcore_py releases.
            if hasattr(self.meshcore, 'start_auto_message_fetching'):
                logger.info("Starting auto message fetching")
                await self.meshcore.start_auto_message_fetching()

            logger.info("Connected to MeshCore device")
            return True

        except Exception as e:
            logger.error(f"Failed to connect to MeshCore: {e}", exc_info=True)
            return False

    async def disconnect(self) -> None:
        """Disconnect from the MeshCore device; always clears the handle."""
        if self.meshcore:
            try:
                await self.meshcore.disconnect()
                logger.info("Disconnected from MeshCore device")
            except Exception as e:
                logger.error(f"Error disconnecting: {e}")
            finally:
                self.meshcore = None

    async def is_connected(self) -> bool:
        """Return True when a live device connection exists."""
        if not self.meshcore:
            return False
        try:
            return self.meshcore.is_connected
        except Exception:
            # Treat a broken handle as disconnected rather than raising.
            return False

    async def subscribe_to_events(self, handler: Callable[[Event], None]) -> None:
        """Register an async handler invoked for every MeshCore event."""
        self._event_handlers.append(handler)
        logger.info(f"Added event handler: {handler.__name__} (total handlers: {len(self._event_handlers)})")

    async def _handle_meshcore_event(self, event) -> None:
        """
        Internal handler that converts meshcore_py events to our Event format.

        Args:
            event: meshcore_py Event object
        """
        try:
            logger.debug(f"Received MeshCore event: {event}")

            our_event = self._to_event(event)
            logger.info(f"Processing event: {our_event.type}")

            # Dispatch to all registered handlers; one failing handler must
            # not prevent the rest from running.
            logger.debug(f"Dispatching to {len(self._event_handlers)} handlers")
            for handler in self._event_handlers:
                try:
                    await handler(our_event)
                except Exception as e:
                    logger.error(f"Error in event handler {handler.__name__}: {e}", exc_info=True)

        except Exception as e:
            logger.error(f"Error processing MeshCore event: {e}", exc_info=True)

    async def send_message(self, destination: str, text: str, text_type: str = "plain") -> Event:
        """Send a direct message to a node."""
        self._require_connection()
        try:
            result = await self.meshcore.send_msg(destination, text)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to send message: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def send_channel_message(self, text: str, flood: bool = True) -> Event:
        """Send a channel broadcast message."""
        self._require_connection()
        try:
            # Note: meshcore_py may have a different method name for channel messages
            # This is a placeholder - adjust based on actual API
            result = await self.meshcore.send_channel_msg(text, flood=flood)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to send channel message: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def send_advert(self, flood: bool = True) -> Event:
        """Send self-advertisement."""
        self._require_connection()
        try:
            result = await self.meshcore.send_advert(flood=flood)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to send advertisement: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def send_trace_path(self, destination: str) -> Event:
        """Initiate trace path to destination."""
        self._require_connection()
        try:
            result = await self.meshcore.send_trace_path(destination)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to send trace path: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def ping(self, destination: str) -> Event:
        """Ping a node (MeshCore has no dedicated ping; uses a status request)."""
        self._require_connection()
        try:
            result = await self.meshcore.send_status_req(destination)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to ping node: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def send_telemetry_request(self, destination: str) -> Event:
        """Request telemetry from a node."""
        self._require_connection()
        try:
            result = await self.meshcore.send_telemetry_req(destination)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to send telemetry request: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def get_device_info(self) -> Event:
        """Get companion device information."""
        self._require_connection()
        try:
            result = await self.meshcore.send_device_query()
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to get device info: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def get_battery(self) -> Event:
        """Get battery status."""
        self._require_connection()
        try:
            result = await self.meshcore.get_bat()
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to get battery status: {e}")
            return Event(type="ERROR", payload={"error": str(e)})

    async def get_contacts(self) -> List[Contact]:
        """Get the contact list; returns [] on any failure."""
        self._require_connection()
        try:
            contacts_result = await self.meshcore.get_contacts()
            contacts = []

            # Convert meshcore_py contact dicts into our Contact records.
            if hasattr(contacts_result, 'payload') and 'contacts' in contacts_result.payload:
                for mc_contact in contacts_result.payload['contacts']:
                    contacts.append(Contact(
                        public_key=mc_contact.get('public_key', ''),
                        name=mc_contact.get('name'),
                        node_type=mc_contact.get('node_type'),
                        latitude=mc_contact.get('latitude'),
                        longitude=mc_contact.get('longitude')
                    ))

            return contacts

        except Exception as e:
            logger.error(f"Failed to get contacts: {e}")
            return []

    async def get_statistics(self, stat_type: str = "core") -> Event:
        """Get device statistics ('core', 'radio', or 'packets')."""
        self._require_connection()
        try:
            result = await self.meshcore.get_stats(stat_type)
            return self._to_event(result)
        except Exception as e:
            logger.error(f"Failed to get statistics: {e}")
            return Event(type="ERROR", payload={"error": str(e)})
def process_dynamic_values(data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Process dynamic placeholder values in event data.

    Supported placeholders:
    - {{now}}: Current timestamp (ISO format, 'Z' suffix)
    - {{random_snr}}: Random SNR value (-20 to 30 dB)
    - {{random_rssi}}: Random RSSI value (-110 to -50 dBm)
    - {{uuid}}: Random UUID
    - {{counter}}: Globally incrementing counter (unique per substitution)

    Nested dicts are processed recursively, as are dict items inside lists;
    non-dict list items pass through unchanged.

    Args:
        data: Event data dictionary

    Returns:
        New dictionary with placeholders replaced (input is not mutated).
    """
    result: Dict[str, Any] = {}

    for key, value in data.items():
        if isinstance(value, str):
            if value == "{{now}}":
                result[key] = datetime.utcnow().isoformat() + "Z"
            elif value == "{{random_snr}}":
                result[key] = random.uniform(-20, 30)
            elif value == "{{random_rssi}}":
                result[key] = random.uniform(-110, -50)
            elif value == "{{uuid}}":
                result[key] = str(uuid.uuid4())
            elif value == "{{counter}}":
                result[key] = _next_counter()
            else:
                result[key] = value
        elif isinstance(value, dict):
            result[key] = process_dynamic_values(value)
        elif isinstance(value, list):
            result[key] = [
                process_dynamic_values(item) if isinstance(item, dict) else item
                for item in value
            ]
        else:
            result[key] = value

    return result


def _next_counter() -> int:
    """Return the next {{counter}} value.

    The previous implementation kept a per-call local copy of the counter and
    wrote it back to the function attribute only on return, so recursive calls
    (nested dicts) read the stale value and handed out duplicates. Updating
    the shared attribute at the point of use keeps every issued value unique.
    """
    value = getattr(process_dynamic_values, "_counter", 0)
    process_dynamic_values._counter = value + 1
    return value


SCENARIOS = {
    "simple_chat": {
        "description": "Two nodes exchanging messages",
        "events": [
            {
                "delay": 0.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "01ab2186c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1",
                    "name": "Alice",
                    "adv_type": "chat",
                    "latitude": 45.5231,
                    "longitude": -122.6765,
                    "flags": 0,
                }
            },
            {
                "delay": 2.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "b3f4e5d6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2c3d4",
                    "name": "Bob",
                    "adv_type": "chat",
                    "latitude": 45.5345,
                    "longitude": -122.6543,
                    "flags": 0,
                }
            },
            {
                "delay": 5.0,
                "type": "CONTACT_MSG_RECV",
                "data": {
                    "from_public_key": "01ab2186c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1",
                    "to_public_key": "b3f4e5d6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2c3d4",
                    "text": "Hello Bob!",
                    "snr": 15.5,
                    "rssi": -75.0,
                    "timestamp": "{{now}}"
                }
            },
            {
                "delay": 8.0,
                "type": "SEND_CONFIRMED",
                "data": {
                    "destination_public_key": "01ab2186c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1",
                    "round_trip_ms": 2500
                }
            },
            {
                "delay": 10.0,
                "type": "CONTACT_MSG_RECV",
                "data": {
                    "from_public_key": "b3f4e5d6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2c3d4",
                    "to_public_key": "01ab2186c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1",
                    "text": "Hi Alice! How are you?",
                    "snr": 14.8,
                    "rssi": -78.0,
                    "timestamp": "{{now}}"
                }
            }
        ]
    },

    "trace_path_test": {
        "description": "Trace path through multi-hop network",
        "events": [
            {
                "delay": 0.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "01abc123456789abcdef0123456789abcdef0123456789abcdef0123456789ab",
                    "name": "NodeA",
                    "adv_type": "chat",
                    "latitude": 45.5231,
                    "longitude": -122.6765,
                }
            },
            {
                "delay": 1.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "b3def456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
                    "name": "NodeB",
                    "adv_type": "repeater",
                    "latitude": 45.5345,
                    "longitude": -122.6543,
                }
            },
            {
                "delay": 2.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "fa9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedc",
                    "name": "NodeC",
                    "adv_type": "chat",
                    "latitude": 45.5456,
                    "longitude": -122.6321,
                }
            },
            {
                "delay": 5.0,
                "type": "TRACE_DATA",
                "data": {
                    "initiator_tag": 305419896,
                    "destination_public_key": "fa9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedc",
                    "path_hashes": ["b3", "fa"],
                    "snr_values": [48.0, 45.2],
                    "hop_count": 2,
                }
            }
        ]
    },

    "telemetry_collection": {
        "description": "Periodic telemetry from sensor nodes",
        "events": [
            {
                "delay": 0.0,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": "sensor01aabbccddeeff00112233445566778899aabbccddeeff00112233445566",
                    "name": "TempSensor",
                    "adv_type": "chat",
                    "latitude": 45.5231,
                    "longitude": -122.6765,
                }
            },
            {
                "delay": 5.0,
                "type": "TELEMETRY_RESPONSE",
                "data": {
                    "node_public_key": "sensor01aabb",
                    "parsed_data": {
                        "temperature": 22.5,
                        "humidity": 65,
                        "battery": 3.8
                    }
                }
            },
            {
                "delay": 10.0,
                "type": "TELEMETRY_RESPONSE",
                "data": {
                    "node_public_key": "sensor01aabb",
                    "parsed_data": {
                        "temperature": 23.1,
                        "humidity": 63,
                        "battery": 3.75
                    }
                }
            },
            {
                "delay": 15.0,
                "type": "TELEMETRY_RESPONSE",
                "data": {
                    "node_public_key": "sensor01aabb",
                    "parsed_data": {
                        "temperature": 23.8,
                        "humidity": 61,
                        "battery": 3.72
                    }
                }
            }
        ]
    },

    "network_stress": {
        "description": "High-traffic scenario with many nodes",
        "events": [
            # 10 nodes advertising
            # NOTE(review): these synthetic keys are 66 chars, not the 64-char
            # hex keys used elsewhere — confirm whether consumers care.
            *[{
                "delay": i * 0.5,
                "type": "ADVERTISEMENT",
                "data": {
                    "public_key": f"node{i:02d}{'ab' * 30}",
                    "name": f"Node{i:02d}",
                    "adv_type": "chat",
                    "latitude": 45.52 + (i * 0.01),
                    "longitude": -122.67 + (i * 0.01),
                }
            } for i in range(10)],

            # Flood of channel messages
            *[{
                "delay": 10.0 + i * 1.0,
                "type": "CHANNEL_MSG_RECV",
                "data": {
                    "from_public_key": f"node{i % 10:02d}{'ab' * 30}",
                    "text": f"Channel message {i}",
                    "snr": "{{random_snr}}",
                    "rssi": "{{random_rssi}}",
                    "timestamp": "{{now}}"
                }
            } for i in range(20)]
        ]
    },

    "battery_drain": {
        "description": "Simulated battery drain over time",
        "events": [
            *[{
                "delay": i * 10.0,
                "type": "BATTERY",
                "data": {
                    "battery_voltage": max(3.0, 4.2 - (i * 0.05)),
                    "battery_percentage": max(0, 100 - (i * 5)),
                }
            } for i in range(20)]
        ]
    }
}
"""Database query tool for viewing captured MeshCore data."""

import argparse
import json
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional

try:
    import sqlite3
except ImportError:
    # sqlite3 is stdlib but may be missing if Python was built without it.
    print("Error: sqlite3 module not available", file=sys.stderr)
    sys.exit(1)


class DatabaseQuery:
    """Read-only query/report tool for the MeshCore SQLite database.

    Usable as a context manager so the connection is always released:
        with DatabaseQuery(path) as db: db.print_summary()
    """

    def __init__(self, db_path: str):
        """
        Initialize database query tool.

        Args:
            db_path: Path to SQLite database file

        Raises:
            FileNotFoundError: If the database file does not exist.
        """
        self.db_path = db_path
        if not Path(db_path).exists():
            raise FileNotFoundError(f"Database not found: {db_path}")
        self.conn = sqlite3.connect(db_path)
        self.cursor = self.conn.cursor()

    def close(self):
        """Close database connection."""
        if self.conn:
            self.conn.close()

    def __enter__(self):
        """Context-manager entry: return self."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit: close the connection, propagate exceptions."""
        self.close()
        return False

    def print_summary(self):
        """Print database summary statistics (file size + per-table counts)."""
        print("=" * 80)
        print("MESHCORE DATABASE SUMMARY")
        print("=" * 80)
        print(f"\nDatabase: {self.db_path}")

        db_size = Path(self.db_path).stat().st_size
        print(f"Size: {db_size:,} bytes ({db_size / 1024:.1f} KB)")

        print("\n" + "-" * 80)
        print("TABLE STATISTICS")
        print("-" * 80)

        tables = [
            ("events_log", "All Events"),
            ("nodes", "Nodes"),
            ("messages", "Messages"),
            ("advertisements", "Advertisements"),
            ("paths", "Routing Paths"),
            ("trace_paths", "Trace Paths"),
            ("telemetry", "Telemetry"),
            ("acknowledgments", "Acknowledgments"),
            ("status_responses", "Status Responses"),
            ("statistics", "Statistics"),
            ("binary_responses", "Binary Responses"),
            ("control_data", "Control Data"),
            ("raw_data", "Raw Data"),
            ("device_info", "Device Info"),
        ]

        for table, description in tables:
            # Table names come from the fixed list above, never user input,
            # so interpolating them into SQL is safe here.
            try:
                self.cursor.execute(f"SELECT COUNT(*) FROM {table}")
                count = self.cursor.fetchone()[0]
                print(f"  {description:.<30} {count:>8,}")
            except sqlite3.Error as e:
                # A missing table (older schema) should not abort the report.
                print(f"  {description:.<30} ERROR: {e}")

    def print_event_breakdown(self):
        """Print breakdown of events by type."""
        print("\n" + "-" * 80)
        print("EVENT TYPE BREAKDOWN")
        print("-" * 80)

        self.cursor.execute(
            "SELECT event_type, COUNT(*) as count FROM events_log "
            "GROUP BY event_type ORDER BY count DESC"
        )
        results = self.cursor.fetchall()

        if results:
            for event_type, count in results:
                print(f"  {event_type:.<40} {count:>8,}")
        else:
            print("  No events recorded")

    def print_recent_events(self, limit: int = 10):
        """Print the most recent events (newest first)."""
        print("\n" + "-" * 80)
        print(f"RECENT EVENTS (last {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT event_type, created_at FROM events_log "
            "ORDER BY created_at DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for event_type, created_at in results:
                print(f"  [{created_at}] {event_type}")
        else:
            print("  No events recorded")

    def print_nodes(self, limit: int = 10):
        """Print discovered nodes, most recently seen first."""
        print("\n" + "-" * 80)
        print(f"NODES (showing up to {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT name, public_key_prefix_8, node_type, last_seen, first_seen "
            "FROM nodes ORDER BY last_seen DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for name, prefix, node_type, last_seen, first_seen in results:
                name_display = name or "Unknown"
                type_display = node_type or "unknown"
                print(f"\n  Node: {name_display}")
                print(f"    Public Key: {prefix}...")
                print(f"    Type: {type_display}")
                print(f"    First Seen: {first_seen}")
                print(f"    Last Seen: {last_seen}")
        else:
            print("  No nodes discovered")

    def print_messages(self, limit: int = 10):
        """Print recent messages (newest first)."""
        print("\n" + "-" * 80)
        print(f"MESSAGES (last {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT direction, message_type, from_public_key, to_public_key, "
            "content, snr, rssi, timestamp, received_at "
            "FROM messages ORDER BY received_at DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for idx, (direction, msg_type, from_key, to_key, content, snr, rssi, ts, recv) in enumerate(results, 1):
                print(f"\n  Message #{idx}")
                print(f"    Direction: {direction}")
                print(f"    Type: {msg_type}")
                print(f"    From: {from_key[:16] if from_key else 'Unknown'}...")
                if to_key:
                    print(f"    To: {to_key[:16]}...")
                # content can be NULL in the database; don't crash on it.
                print(f"    Content: {(content or '')[:100]}")
                if snr is not None:
                    print(f"    SNR: {snr:.1f} dB")
                if rssi is not None:
                    print(f"    RSSI: {rssi:.1f} dBm")
                print(f"    Timestamp: {ts}")
                print(f"    Received: {recv}")
        else:
            print("  No messages recorded")

    def print_advertisements(self, limit: int = 10):
        """Print recent advertisements (newest first)."""
        print("\n" + "-" * 80)
        print(f"ADVERTISEMENTS (last {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT public_key, adv_type, name, latitude, longitude, received_at "
            "FROM advertisements ORDER BY received_at DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for idx, (pub_key, adv_type, name, lat, lon, recv) in enumerate(results, 1):
                print(f"\n  Advertisement #{idx}")
                print(f"    Public Key: {pub_key[:16]}...")
                print(f"    Type: {adv_type or 'unknown'}")
                print(f"    Name: {name or 'Unknown'}")
                if lat is not None and lon is not None:
                    print(f"    Location: {lat:.4f}, {lon:.4f}")
                print(f"    Received: {recv}")
        else:
            print("  No advertisements recorded")

    def print_telemetry(self, limit: int = 5):
        """Print recent telemetry data, expanding the stored JSON blob."""
        print("\n" + "-" * 80)
        print(f"TELEMETRY (last {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT node_public_key, parsed_data, received_at "
            "FROM telemetry ORDER BY received_at DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for idx, (node_key, parsed, recv) in enumerate(results, 1):
                print(f"\n  Telemetry #{idx}")
                print(f"    Node: {node_key}...")
                print(f"    Received: {recv}")
                if parsed:
                    try:
                        data = json.loads(parsed)
                        print("    Data:")
                        for key, value in data.items():
                            print(f"      {key}: {value}")
                    except json.JSONDecodeError:
                        # Fall back to the raw stored text if it isn't JSON.
                        print(f"    Data: {parsed}")
        else:
            print("  No telemetry recorded")

    def print_trace_paths(self, limit: int = 5):
        """Print recent trace paths with hop-by-hop SNR where available."""
        print("\n" + "-" * 80)
        print(f"TRACE PATHS (last {limit})")
        print("-" * 80)

        self.cursor.execute(
            "SELECT initiator_tag, destination_public_key, path_hashes, "
            "snr_values, hop_count, completed_at "
            "FROM trace_paths ORDER BY completed_at DESC LIMIT ?",
            (limit,)
        )
        results = self.cursor.fetchall()

        if results:
            for idx, (tag, dest, hashes, snrs, hops, completed) in enumerate(results, 1):
                print(f"\n  Trace Path #{idx}")
                # tag can be NULL; formatting None with :08x would raise.
                tag_display = f"0x{tag:08x}" if tag is not None else "N/A"
                print(f"    Initiator Tag: {tag_display}")
                if dest:
                    print(f"    Destination: {dest[:16]}...")
                print(f"    Hop Count: {hops or 'N/A'}")
                print(f"    Completed: {completed}")
                if hashes:
                    try:
                        path = json.loads(hashes)
                        print(f"    Path: {' -> '.join(path)}")
                    except json.JSONDecodeError:
                        pass
                if snrs:
                    try:
                        snr_list = json.loads(snrs)
                        print(f"    SNR values: {', '.join(f'{s:.1f}' for s in snr_list)}")
                    except json.JSONDecodeError:
                        pass
        else:
            print("  No trace paths recorded")

    def print_activity_timeline(self, hours: int = 24):
        """Print per-event-type activity counts within the last `hours`."""
        print("\n" + "-" * 80)
        print(f"ACTIVITY TIMELINE (last {hours} hours)")
        print("-" * 80)

        cutoff = datetime.now() - timedelta(hours=hours)
        cutoff_str = cutoff.strftime("%Y-%m-%d %H:%M:%S")

        self.cursor.execute(
            "SELECT event_type, COUNT(*) as count "
            "FROM events_log WHERE created_at >= ? "
            "GROUP BY event_type ORDER BY count DESC",
            (cutoff_str,)
        )
        results = self.cursor.fetchall()

        if results:
            total = sum(count for _, count in results)
            print(f"\n  Total events: {total:,}")
            print(f"  Since: {cutoff_str}")
            print("\n  Breakdown:")
            for event_type, count in results:
                pct = (count / total) * 100
                print(f"    {event_type:.<40} {count:>6,} ({pct:>5.1f}%)")
        else:
            print(f"  No activity in the last {hours} hours")

    def print_full_report(self):
        """Print full database report (all sections with default limits)."""
        self.print_summary()
        self.print_event_breakdown()
        self.print_activity_timeline()
        self.print_recent_events()
        self.print_nodes()
        self.print_messages()
        self.print_advertisements()
        self.print_telemetry()
        self.print_trace_paths()
        print("\n" + "=" * 80)


def main():
    """Main entry point for query tool."""
    parser = argparse.ArgumentParser(
        description="Query MeshCore Sidekick database",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Full report
  python -m meshcore_sidekick.query

  # Summary only
  python -m meshcore_sidekick.query --summary

  # Recent messages
  python -m meshcore_sidekick.query --messages 20

  # Nodes discovered
  python -m meshcore_sidekick.query --nodes

  # Activity in last 6 hours
  python -m meshcore_sidekick.query --activity 6
        """
    )

    parser.add_argument(
        "--db-path",
        type=str,
        default="./meshcore.db",
        help="Path to database file (default: ./meshcore.db)"
    )
    parser.add_argument(
        "--summary",
        action="store_true",
        help="Show summary statistics only"
    )
    parser.add_argument(
        "--events",
        type=int,
        metavar="N",
        help="Show N recent events"
    )
    parser.add_argument(
        "--nodes",
        type=int,
        metavar="N",
        nargs="?",
        const=10,
        help="Show N discovered nodes (default: 10)"
    )
    parser.add_argument(
        "--messages",
        type=int,
        metavar="N",
        nargs="?",
        const=10,
        help="Show N recent messages (default: 10)"
    )
    parser.add_argument(
        "--advertisements",
        type=int,
        metavar="N",
        nargs="?",
        const=10,
        help="Show N recent advertisements (default: 10)"
    )
    parser.add_argument(
        "--telemetry",
        type=int,
        metavar="N",
        nargs="?",
        const=5,
        help="Show N recent telemetry entries (default: 5)"
    )
    parser.add_argument(
        "--traces",
        type=int,
        metavar="N",
        nargs="?",
        const=5,
        help="Show N recent trace paths (default: 5)"
    )
    parser.add_argument(
        "--activity",
        type=int,
        metavar="HOURS",
        nargs="?",
        const=24,
        help="Show activity timeline for last N hours (default: 24)"
    )

    args = parser.parse_args()

    db: Optional[DatabaseQuery] = None
    try:
        db = DatabaseQuery(args.db_path)

        # Absent flags are None; test identity, not truthiness, so that an
        # explicit 0 (e.g. `--events 0`) is honored instead of ignored.
        section_args = (
            args.events, args.nodes, args.messages, args.advertisements,
            args.telemetry, args.traces, args.activity,
        )
        if not args.summary and all(v is None for v in section_args):
            # No specific options: show the full report.
            db.print_full_report()
        else:
            # Show only the requested sections.
            if args.summary or any(
                v is not None
                for v in (args.events, args.nodes, args.messages, args.advertisements)
            ):
                db.print_summary()

            if args.events is not None:
                db.print_recent_events(args.events)

            if args.nodes is not None:
                db.print_nodes(args.nodes)

            if args.messages is not None:
                db.print_messages(args.messages)

            if args.advertisements is not None:
                db.print_advertisements(args.advertisements)

            if args.telemetry is not None:
                db.print_telemetry(args.telemetry)

            if args.traces is not None:
                db.print_trace_paths(args.traces)

            if args.activity is not None:
                db.print_activity_timeline(args.activity)

        print()  # Final newline

    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except sqlite3.Error as e:
        print(f"Database error: {e}", file=sys.stderr)
        sys.exit(1)
    except KeyboardInterrupt:
        print("\nInterrupted", file=sys.stderr)
        sys.exit(130)
    except Exception as e:
        print(f"Unexpected error: {e}", file=sys.stderr)
        sys.exit(1)
    finally:
        # Always release the connection, even on the error paths above
        # (the original only closed on the success path).
        if db is not None:
            db.close()


if __name__ == "__main__":
    main()
__name__ == "__main__": + main() diff --git a/src/meshcore_sidekick/subscriber/__init__.py b/src/meshcore_sidekick/subscriber/__init__.py new file mode 100644 index 0000000..6ccbdc6 --- /dev/null +++ b/src/meshcore_sidekick/subscriber/__init__.py @@ -0,0 +1,6 @@ +"""Event subscription and persistence.""" + +from .event_handler import EventHandler +from .metrics import MetricsCollector + +__all__ = ["EventHandler", "MetricsCollector"] diff --git a/src/meshcore_sidekick/subscriber/event_handler.py b/src/meshcore_sidekick/subscriber/event_handler.py new file mode 100644 index 0000000..0d1fd9f --- /dev/null +++ b/src/meshcore_sidekick/subscriber/event_handler.py @@ -0,0 +1,389 @@ +"""Event handler for processing and persisting MeshCore events.""" + +import json +import logging +from datetime import datetime +from typing import Optional +from ..meshcore.interface import Event +from ..database.models import ( + Node, + Message, + Advertisement, + Path, + TracePath, + Telemetry, + Acknowledgment, + StatusResponse, + Statistics, + BinaryResponse, + ControlData, + RawData, + DeviceInfo, + EventLog, +) +from ..database.engine import session_scope +from ..utils.address import normalize_public_key, extract_prefix + +logger = logging.getLogger(__name__) + + +class EventHandler: + """Handles MeshCore events and persists them to database.""" + + def __init__(self): + """Initialize event handler.""" + self.event_count = 0 + + async def handle_event(self, event: Event) -> None: + """ + Handle incoming MeshCore event. 
+ + Args: + event: Event to process and persist + """ + self.event_count += 1 + + try: + logger.debug(f"Processing event: {event.type}") + + # Log all events to events_log table + await self._log_event(event) + + # Handle specific event types + handler_map = { + "ADVERTISEMENT": self._handle_advertisement, + "NEW_ADVERT": self._handle_advertisement, + "CONTACT_MSG_RECV": self._handle_contact_message, + "CHANNEL_MSG_RECV": self._handle_channel_message, + "PATH_UPDATED": self._handle_path_updated, + "TRACE_DATA": self._handle_trace_data, + "TELEMETRY_RESPONSE": self._handle_telemetry, + "SEND_CONFIRMED": self._handle_acknowledgment, + "STATUS_RESPONSE": self._handle_status_response, + "STATISTICS": self._handle_statistics, + "BATTERY": self._handle_battery, + "DEVICE_INFO": self._handle_device_info, + "BINARY_RESPONSE": self._handle_binary_response, + "CONTROL_DATA": self._handle_control_data, + "RAW_DATA": self._handle_raw_data, + # Internal meshcore_py events (informational only) + "MESSAGES_WAITING": None, # Logged but not persisted separately + "NO_MORE_MSGS": None, # Logged but not persisted separately + "RX_LOG_DATA": None, # Logged but not persisted separately + } + + handler = handler_map.get(event.type) + if handler: + await handler(event) + elif handler is None and event.type in handler_map: + # Event type is known but no specific handler (informational) + logger.debug(f"Informational event (not persisted separately): {event.type}") + else: + logger.info(f"Unknown event type (logged to events_log): {event.type}") + + except Exception as e: + logger.error(f"Error handling event {event.type}: {e}", exc_info=True) + + async def _log_event(self, event: Event) -> None: + """Log raw event to events_log table.""" + try: + with session_scope() as session: + event_log = EventLog( + event_type=event.type, + event_data=json.dumps(event.payload), + ) + session.add(event_log) + except Exception as e: + logger.error(f"Failed to log event: {e}") + + async def 
_upsert_node(self, public_key: str, **kwargs) -> Optional[Node]: + """ + Create or update node record. + + Args: + public_key: Node public key + **kwargs: Additional node attributes + + Returns: + Node object or None on error + """ + try: + normalized_key = normalize_public_key(public_key) + + with session_scope() as session: + # Try to find existing node + node = session.query(Node).filter_by(public_key=normalized_key).first() + + if node: + # Update existing node + for key, value in kwargs.items(): + if value is not None: + setattr(node, key, value) + node.last_seen = datetime.utcnow() + else: + # Create new node + node = Node( + public_key=normalized_key, + public_key_prefix_2=extract_prefix(normalized_key, 2), + public_key_prefix_8=extract_prefix(normalized_key, 8), + last_seen=datetime.utcnow(), + **kwargs + ) + session.add(node) + + session.flush() + return node + + except Exception as e: + logger.error(f"Failed to upsert node {public_key}: {e}") + return None + + async def _handle_advertisement(self, event: Event) -> None: + """Handle ADVERTISEMENT event.""" + payload = event.payload + public_key = payload.get("public_key") + + if not public_key: + logger.warning("Advertisement missing public_key") + return + + # Upsert node + await self._upsert_node( + public_key, + node_type=payload.get("adv_type"), + name=payload.get("name"), + latitude=payload.get("latitude"), + longitude=payload.get("longitude"), + ) + + # Store advertisement + with session_scope() as session: + advert = Advertisement( + public_key=normalize_public_key(public_key), + adv_type=payload.get("adv_type"), + name=payload.get("name"), + latitude=payload.get("latitude"), + longitude=payload.get("longitude"), + flags=payload.get("flags"), + ) + session.add(advert) + + async def _handle_contact_message(self, event: Event) -> None: + """Handle CONTACT_MSG_RECV event.""" + payload = event.payload + from_key = payload.get("from_public_key") + to_key = payload.get("to_public_key") + + # Upsert sender 
node + if from_key: + await self._upsert_node(from_key) + + # Store message + with session_scope() as session: + message = Message( + direction="inbound", + message_type="contact", + from_public_key=normalize_public_key(from_key) if from_key else None, + to_public_key=normalize_public_key(to_key) if to_key else None, + content=payload.get("text", ""), + text_type=payload.get("text_type", "plain"), + snr=payload.get("snr"), + rssi=payload.get("rssi"), + timestamp=self._parse_timestamp(payload.get("timestamp")), + ) + session.add(message) + + async def _handle_channel_message(self, event: Event) -> None: + """Handle CHANNEL_MSG_RECV event.""" + payload = event.payload + from_key = payload.get("from_public_key") + + # Upsert sender node + if from_key: + await self._upsert_node(from_key) + + # Store message + with session_scope() as session: + message = Message( + direction="inbound", + message_type="channel", + from_public_key=normalize_public_key(from_key) if from_key else None, + to_public_key=None, + content=payload.get("text", ""), + text_type=payload.get("text_type", "plain"), + snr=payload.get("snr"), + rssi=payload.get("rssi"), + timestamp=self._parse_timestamp(payload.get("timestamp")), + ) + session.add(message) + + async def _handle_path_updated(self, event: Event) -> None: + """Handle PATH_UPDATED event.""" + payload = event.payload + node_key = payload.get("node_public_key") + + if not node_key: + return + + # Upsert node + await self._upsert_node(node_key) + + # Store path + with session_scope() as session: + path = Path( + node_public_key=normalize_public_key(node_key), + path_data=payload.get("path_data"), + hop_count=payload.get("hop_count"), + ) + session.add(path) + + async def _handle_trace_data(self, event: Event) -> None: + """Handle TRACE_DATA event.""" + payload = event.payload + + with session_scope() as session: + trace = TracePath( + initiator_tag=payload.get("initiator_tag"), + destination_public_key=payload.get("destination_public_key"), + 
path_hashes=json.dumps(payload.get("path_hashes", [])), + snr_values=json.dumps(payload.get("snr_values", [])), + hop_count=payload.get("hop_count"), + ) + session.add(trace) + + async def _handle_telemetry(self, event: Event) -> None: + """Handle TELEMETRY_RESPONSE event.""" + payload = event.payload + node_key = payload.get("node_public_key") + + if not node_key: + return + + with session_scope() as session: + telemetry = Telemetry( + node_public_key=node_key, + lpp_data=payload.get("lpp_data"), + parsed_data=json.dumps(payload.get("parsed_data")) if payload.get("parsed_data") else None, + ) + session.add(telemetry) + + async def _handle_acknowledgment(self, event: Event) -> None: + """Handle SEND_CONFIRMED event.""" + payload = event.payload + + with session_scope() as session: + ack = Acknowledgment( + message_id=payload.get("message_id"), + destination_public_key=payload.get("destination_public_key"), + round_trip_ms=payload.get("round_trip_ms"), + ) + session.add(ack) + + async def _handle_status_response(self, event: Event) -> None: + """Handle STATUS_RESPONSE event.""" + payload = event.payload + node_key = payload.get("node_public_key") + + if not node_key: + return + + with session_scope() as session: + status = StatusResponse( + node_public_key=normalize_public_key(node_key), + status_data=json.dumps(payload.get("status_data", {})), + ) + session.add(status) + + async def _handle_statistics(self, event: Event) -> None: + """Handle STATISTICS event.""" + payload = event.payload + + with session_scope() as session: + stats = Statistics( + stat_type=payload.get("stat_type", "core"), + data=json.dumps(payload.get("data", {})), + ) + session.add(stats) + + async def _handle_battery(self, event: Event) -> None: + """Handle BATTERY event.""" + payload = event.payload + + with session_scope() as session: + device_info = DeviceInfo( + battery_voltage=payload.get("battery_voltage"), + battery_percentage=payload.get("battery_percentage"), + ) + 
session.add(device_info) + + async def _handle_device_info(self, event: Event) -> None: + """Handle DEVICE_INFO event.""" + payload = event.payload + + with session_scope() as session: + device_info = DeviceInfo( + battery_voltage=payload.get("battery_voltage"), + battery_percentage=payload.get("battery_percentage"), + storage_used=payload.get("storage_used"), + storage_total=payload.get("storage_total"), + device_time=self._parse_timestamp(payload.get("device_time")), + firmware_version=payload.get("firmware_version"), + capabilities=json.dumps(payload.get("capabilities")) if payload.get("capabilities") else None, + ) + session.add(device_info) + + async def _handle_binary_response(self, event: Event) -> None: + """Handle BINARY_RESPONSE event.""" + payload = event.payload + + with session_scope() as session: + binary_resp = BinaryResponse( + tag=payload.get("tag"), + payload=payload.get("payload", b""), + ) + session.add(binary_resp) + + async def _handle_control_data(self, event: Event) -> None: + """Handle CONTROL_DATA event.""" + payload = event.payload + + with session_scope() as session: + control = ControlData( + from_public_key=payload.get("from_public_key"), + payload=payload.get("payload", b""), + ) + session.add(control) + + async def _handle_raw_data(self, event: Event) -> None: + """Handle RAW_DATA event.""" + payload = event.payload + + with session_scope() as session: + raw = RawData( + from_public_key=payload.get("from_public_key"), + payload=payload.get("payload", b""), + ) + session.add(raw) + + def _parse_timestamp(self, timestamp_str: Optional[str]) -> datetime: + """ + Parse timestamp string to datetime. 
+ + Args: + timestamp_str: ISO format timestamp string + + Returns: + datetime object (current time if parsing fails) + """ + if not timestamp_str: + return datetime.utcnow() + + try: + # Remove 'Z' suffix if present + if timestamp_str.endswith('Z'): + timestamp_str = timestamp_str[:-1] + + return datetime.fromisoformat(timestamp_str) + except Exception as e: + logger.warning(f"Failed to parse timestamp '{timestamp_str}': {e}") + return datetime.utcnow() diff --git a/src/meshcore_sidekick/subscriber/metrics.py b/src/meshcore_sidekick/subscriber/metrics.py new file mode 100644 index 0000000..9fe7ed7 --- /dev/null +++ b/src/meshcore_sidekick/subscriber/metrics.py @@ -0,0 +1,226 @@ +"""Prometheus metrics collector.""" + +import logging +from prometheus_client import Counter, Gauge, Histogram + +logger = logging.getLogger(__name__) + + +class MetricsCollector: + """Collects and exposes Prometheus metrics.""" + + def __init__(self): + """Initialize metrics.""" + + # Event counters + self.events_total = Counter( + 'meshcore_events_total', + 'Total MeshCore events received', + ['event_type'] + ) + + self.messages_total = Counter( + 'meshcore_messages_total', + 'Total messages processed', + ['direction', 'message_type'] + ) + + self.advertisements_total = Counter( + 'meshcore_advertisements_total', + 'Total advertisements received', + ['adv_type'] + ) + + # Latency metrics + self.message_roundtrip_seconds = Histogram( + 'meshcore_message_roundtrip_seconds', + 'Message round-trip time in seconds', + buckets=[0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 30.0, 60.0] + ) + + # Node connectivity + self.nodes_total = Gauge( + 'meshcore_nodes_total', + 'Total unique nodes in database' + ) + + self.nodes_active = Gauge( + 'meshcore_nodes_active', + 'Nodes seen in last hour', + ['node_type'] + ) + + self.path_hop_count = Histogram( + 'meshcore_path_hop_count', + 'Distribution of path hop counts', + buckets=[1, 2, 3, 4, 5, 6, 7, 8, 10, 15, 20] + ) + + # Signal quality + self.snr_db = Histogram( + 
'meshcore_snr_db', + 'Signal-to-noise ratio in dB', + buckets=[-20, -10, 0, 5, 10, 15, 20, 25, 30, 40] + ) + + self.rssi_dbm = Histogram( + 'meshcore_rssi_dbm', + 'Received signal strength in dBm', + buckets=[-120, -110, -100, -90, -80, -70, -60, -50, -40] + ) + + # Device statistics + self.battery_voltage = Gauge( + 'meshcore_battery_voltage', + 'Device battery voltage' + ) + + self.battery_percentage = Gauge( + 'meshcore_battery_percentage', + 'Device battery percentage' + ) + + self.storage_used_bytes = Gauge( + 'meshcore_storage_used_bytes', + 'Storage used in bytes' + ) + + self.storage_total_bytes = Gauge( + 'meshcore_storage_total_bytes', + 'Total storage capacity in bytes' + ) + + # Radio statistics + self.radio_noise_floor_dbm = Gauge( + 'meshcore_radio_noise_floor_dbm', + 'Radio noise floor in dBm' + ) + + self.radio_airtime_percent = Gauge( + 'meshcore_radio_airtime_percent', + 'Radio airtime utilization percentage' + ) + + self.packets_total = Counter( + 'meshcore_packets_total', + 'Total packets', + ['direction', 'status'] + ) + + # Database metrics + self.db_table_rows = Gauge( + 'meshcore_db_table_rows', + 'Number of rows in database tables', + ['table'] + ) + + self.db_size_bytes = Gauge( + 'meshcore_db_size_bytes', + 'Database file size in bytes' + ) + + self.db_cleanup_rows_deleted = Counter( + 'meshcore_db_cleanup_rows_deleted', + 'Rows deleted during retention cleanup', + ['table'] + ) + + # Application health + self.connection_status = Gauge( + 'meshcore_connection_status', + 'MeshCore connection status (1=connected, 0=disconnected)' + ) + + self.errors_total = Counter( + 'meshcore_errors_total', + 'Total errors encountered', + ['component', 'error_type'] + ) + + def record_event(self, event_type: str) -> None: + """Record an event.""" + self.events_total.labels(event_type=event_type).inc() + + def record_message(self, direction: str, message_type: str) -> None: + """Record a message.""" + self.messages_total.labels( + direction=direction, + 
message_type=message_type + ).inc() + + def record_advertisement(self, adv_type: str) -> None: + """Record an advertisement.""" + self.advertisements_total.labels(adv_type=adv_type or "unknown").inc() + + def record_roundtrip(self, milliseconds: int) -> None: + """Record message round-trip time.""" + self.message_roundtrip_seconds.observe(milliseconds / 1000.0) + + def record_hop_count(self, hops: int) -> None: + """Record path hop count.""" + self.path_hop_count.observe(hops) + + def record_snr(self, snr: float) -> None: + """Record SNR measurement.""" + self.snr_db.observe(snr) + + def record_rssi(self, rssi: float) -> None: + """Record RSSI measurement.""" + self.rssi_dbm.observe(rssi) + + def update_battery(self, voltage: float = None, percentage: int = None) -> None: + """Update battery metrics.""" + if voltage is not None: + self.battery_voltage.set(voltage) + if percentage is not None: + self.battery_percentage.set(percentage) + + def update_storage(self, used: int = None, total: int = None) -> None: + """Update storage metrics.""" + if used is not None: + self.storage_used_bytes.set(used) + if total is not None: + self.storage_total_bytes.set(total) + + def update_radio_stats(self, noise_floor: float = None, airtime: float = None) -> None: + """Update radio statistics.""" + if noise_floor is not None: + self.radio_noise_floor_dbm.set(noise_floor) + if airtime is not None: + self.radio_airtime_percent.set(airtime) + + def record_packet(self, direction: str, status: str) -> None: + """Record packet transmission.""" + self.packets_total.labels(direction=direction, status=status).inc() + + def update_db_table_rows(self, table: str, count: int) -> None: + """Update database table row count.""" + self.db_table_rows.labels(table=table).set(count) + + def update_db_size(self, size_bytes: int) -> None: + """Update database size.""" + self.db_size_bytes.set(size_bytes) + + def record_cleanup(self, table: str, count: int) -> None: + """Record cleanup operation.""" + 
def is_valid_public_key(key: str) -> bool:
    """
    Check if a string is a valid hexadecimal public key.

    Args:
        key: Public key string to validate

    Returns:
        True if the string is non-empty and entirely hex digits, else False
    """
    if not key:
        return False
    return re.fullmatch(r'[0-9a-fA-F]+', key) is not None


def normalize_public_key(key: str) -> str:
    """
    Normalize public key to lowercase hex.

    Args:
        key: Public key string

    Returns:
        Lowercase hex string

    Raises:
        ValueError: If key is not valid hex
    """
    if not is_valid_public_key(key):
        raise ValueError(f"Invalid public key: {key}")
    return key.lower()


def extract_prefix(key: str, length: int = 2) -> str:
    """
    Extract prefix of specified length from public key.

    Args:
        key: Public key string
        length: Number of characters to extract (default 2)

    Returns:
        Lowercase prefix of the requested length

    Raises:
        ValueError: If key is too short or invalid
    """
    normalized = normalize_public_key(key)
    if len(normalized) < length:
        raise ValueError(f"Public key too short for prefix length {length}")
    return normalized[:length]


def matches_prefix(full_key: str, prefix: str) -> bool:
    """
    Check if a full public key matches a given prefix.

    Args:
        full_key: Full public key to check
        prefix: Prefix to match against

    Returns:
        True if full_key starts with prefix (case-insensitive); False when
        either argument is not valid hex.
    """
    if not (is_valid_public_key(full_key) and is_valid_public_key(prefix)):
        return False
    return normalize_public_key(full_key).startswith(normalize_public_key(prefix))
class TextFormatter(logging.Formatter):
    """Human-readable log formatter with optional ANSI colors."""

    # ANSI color codes per level name.
    COLORS = {
        "DEBUG": "\033[36m",     # Cyan
        "INFO": "\033[32m",      # Green
        "WARNING": "\033[33m",   # Yellow
        "ERROR": "\033[31m",     # Red
        "CRITICAL": "\033[35m",  # Magenta
    }
    RESET = "\033[0m"

    def __init__(self, use_colors: bool = True):
        """
        Initialize formatter.

        Args:
            use_colors: Whether to use ANSI color codes (only honored when
                stderr is a terminal)
        """
        super().__init__(
            fmt="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S"
        )
        self.use_colors = use_colors and sys.stderr.isatty()

    def format(self, record: logging.LogRecord) -> str:
        """Format log record, colorizing the level name when enabled."""
        color = self.COLORS.get(record.levelname)
        if self.use_colors and color:
            record.levelname = f"{color}{record.levelname}{self.RESET}"
        return super().format(record)


def setup_logging(level: str = "INFO", format_type: str = "json") -> None:
    """
    Configure application logging.

    Args:
        level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL); unknown
            names fall back to INFO
        format_type: Format type ('json' or 'text')
    """
    numeric_level = getattr(logging, level.upper(), logging.INFO)

    # Single stderr handler with the chosen formatter.
    handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(numeric_level)
    if format_type.lower() == "json":
        handler.setFormatter(JSONFormatter())
    else:
        handler.setFormatter(TextFormatter(use_colors=True))

    # Replace any existing root handlers so repeated calls don't duplicate output.
    root = logging.getLogger()
    root.setLevel(numeric_level)
    root.handlers.clear()
    root.addHandler(handler)

    # Third-party libraries are chatty at INFO; cap them at WARNING.
    for noisy in ("urllib3", "asyncio"):
        logging.getLogger(noisy).setLevel(logging.WARNING)