diff --git a/README.md b/README.md index 49592bff..d3ae513d 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,7 @@ flowchart LR ## PostgreSQL Schema (DDL) See `backend/app/db/schema.sql`. Key tables: - users, categories, expenses, bills, reminders +- **accounts** (multi-account support: name, type, balance, currency) - ad_impressions, subscription_plans, user_subscriptions - refresh_tokens (optional if rotating), audit_logs @@ -66,6 +67,8 @@ OpenAPI: `backend/app/openapi.yaml` - Bills: CRUD `/bills`, pay/mark `/bills/{id}/pay` - Reminders: CRUD `/reminders`, trigger `/reminders/run` - Insights: `/insights/monthly`, `/insights/budget-suggestion` +- **Accounts**: CRUD `/accounts` (multi-account management) +- **Dashboard**: `/dashboard/summary`, `/dashboard/overview` (multi-account financial overview) ## MVP UI/UX Plan - Auth screens: register/login. @@ -183,6 +186,37 @@ finmind/ - Primary: schedule via APScheduler in-process with persistence in Postgres (job table) and a simple daily trigger. Alternatively, use Railway/Render cron to hit `/reminders/run`. - Twilio WhatsApp free trial supports sandbox; email via SMTP (e.g., SendGrid free tier). +## Multi-Account Dashboard + +FinMind supports tracking finances across multiple accounts (checking, savings, credit, investment, cash, etc.). 
+ +### Account Types +- `CHECKING` - Primary bank account for daily transactions +- `SAVINGS` - Savings accounts +- `CREDIT` - Credit card accounts (can have negative balance) +- `INVESTMENT` - Investment/portfolio accounts +- `CASH` - Physical cash tracking +- `OTHER` - Custom account types + +### Dashboard Overview Endpoint +`GET /dashboard/overview` returns: +- `net_worth.total_balance` - Sum of all account balances +- `net_worth.by_currency` - Balance breakdown by currency +- `accounts` - List of all active accounts with balances +- `recent_transactions` - Last 10 transactions with account associations +- `summary` - Monthly income/expenses and account counts + +### Account Management +- `GET /accounts/` - List all accounts (optionally include inactive) +- `POST /accounts/` - Create new account +- `GET /accounts/{id}` - Get specific account +- `PUT /accounts/{id}` - Update account (name, type, balance) +- `DELETE /accounts/{id}` - Soft delete (deactivate) +- `DELETE /accounts/{id}/hard` - Permanent deletion + +### Linking Expenses to Accounts +Expenses can optionally be linked to accounts via `account_id` field for detailed tracking per account. + ## Security & Scalability - JWT access/refresh, secure cookies OR Authorization header. - RBAC-ready via roles on `users.role`. 
diff --git a/packages/backend/app/db/schema.sql b/packages/backend/app/db/schema.sql index 410189de..0ea59ea0 100644 --- a/packages/backend/app/db/schema.sql +++ b/packages/backend/app/db/schema.sql @@ -11,6 +11,26 @@ CREATE TABLE IF NOT EXISTS users ( ALTER TABLE users ADD COLUMN IF NOT EXISTS preferred_currency VARCHAR(10) NOT NULL DEFAULT 'INR'; +-- Multi-account support +DO $$ BEGIN + CREATE TYPE account_type AS ENUM ('CHECKING','SAVINGS','CREDIT','INVESTMENT','CASH','OTHER'); +EXCEPTION + WHEN duplicate_object THEN NULL; +END $$; + +CREATE TABLE IF NOT EXISTS accounts ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(200) NOT NULL, + account_type account_type NOT NULL DEFAULT 'CHECKING', + balance NUMERIC(12,2) NOT NULL DEFAULT 0.00, + currency VARCHAR(10) NOT NULL DEFAULT 'INR', + is_active BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW() +); +CREATE INDEX IF NOT EXISTS idx_accounts_user ON accounts(user_id); + CREATE TABLE IF NOT EXISTS categories ( id SERIAL PRIMARY KEY, user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, @@ -22,6 +42,7 @@ CREATE TABLE IF NOT EXISTS expenses ( id SERIAL PRIMARY KEY, user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, category_id INT REFERENCES categories(id) ON DELETE SET NULL, + account_id INT REFERENCES accounts(id) ON DELETE SET NULL, amount NUMERIC(12,2) NOT NULL, currency VARCHAR(10) NOT NULL DEFAULT 'INR', expense_type VARCHAR(20) NOT NULL DEFAULT 'EXPENSE', @@ -30,10 +51,14 @@ CREATE TABLE IF NOT EXISTS expenses ( created_at TIMESTAMP NOT NULL DEFAULT NOW() ); CREATE INDEX IF NOT EXISTS idx_expenses_user_spent_at ON expenses(user_id, spent_at DESC); +CREATE INDEX IF NOT EXISTS idx_expenses_account ON expenses(account_id); ALTER TABLE expenses ADD COLUMN IF NOT EXISTS expense_type VARCHAR(20) NOT NULL DEFAULT 'EXPENSE'; +ALTER TABLE expenses + ADD COLUMN IF NOT EXISTS 
account_id INT REFERENCES accounts(id) ON DELETE SET NULL; + + DO $$ BEGIN CREATE TYPE recurring_cadence AS ENUM ('DAILY','WEEKLY','MONTHLY','YEARLY'); EXCEPTION @@ -123,3 +148,47 @@ CREATE TABLE IF NOT EXISTS audit_logs ( action VARCHAR(100) NOT NULL, created_at TIMESTAMP NOT NULL DEFAULT NOW() ); + + +-- Rule-based auto-tagging +DO $$ BEGIN + CREATE TYPE rule_field AS ENUM ('payee','amount','description','notes'); +EXCEPTION WHEN duplicate_object THEN NULL; +END $$; + +DO $$ BEGIN + CREATE TYPE rule_operator AS ENUM ('contains','equals','regex','gt','lt','gte','lte','startswith','endswith'); +EXCEPTION WHEN duplicate_object THEN NULL; +END $$; + +DO $$ BEGIN + CREATE TYPE condition_type AS ENUM ('AND','OR'); +EXCEPTION WHEN duplicate_object THEN NULL; +END $$; + +CREATE TABLE IF NOT EXISTS categorization_rules ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(100) NOT NULL, + field rule_field NOT NULL, + operator rule_operator NOT NULL, + value VARCHAR(500) NOT NULL, + category_id INT REFERENCES categories(id) ON DELETE SET NULL, + tag VARCHAR(100), + priority INT NOT NULL DEFAULT 0, + condition_type condition_type NOT NULL DEFAULT 'AND', + active BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW() +); +CREATE INDEX IF NOT EXISTS idx_categorization_rules_user_priority ON categorization_rules(user_id, priority DESC); + +CREATE TABLE IF NOT EXISTS rule_conditions ( + id SERIAL PRIMARY KEY, + rule_id INT NOT NULL REFERENCES categorization_rules(id) ON DELETE CASCADE, + field rule_field NOT NULL, + operator rule_operator NOT NULL, + value VARCHAR(500) NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW() +); +CREATE INDEX IF NOT EXISTS idx_rule_conditions_rule ON rule_conditions(rule_id); diff --git a/packages/backend/app/models.py b/packages/backend/app/models.py index 64d44810..66958e08 100644 ---
a/packages/backend/app/models.py +++ b/packages/backend/app/models.py @@ -4,6 +4,42 @@ from .extensions import db +class AccountType(str, Enum): + CHECKING = "CHECKING" + SAVINGS = "SAVINGS" + CREDIT = "CREDIT" + INVESTMENT = "INVESTMENT" + CASH = "CASH" + OTHER = "OTHER" + + +class Account(db.Model): + """Financial account for multi-account tracking.""" + __tablename__ = "accounts" + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) + name = db.Column(db.String(200), nullable=False) + account_type = db.Column(SAEnum(AccountType), default=AccountType.CHECKING, nullable=False) + balance = db.Column(db.Numeric(12, 2), default=0.00, nullable=False) + currency = db.Column(db.String(10), default="INR", nullable=False) + is_active = db.Column(db.Boolean, default=True, nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) + updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + def to_dict(self): + return { + "id": self.id, + "user_id": self.user_id, + "name": self.name, + "account_type": self.account_type.value if self.account_type else None, + "balance": float(self.balance or 0), + "currency": self.currency, + "is_active": self.is_active, + "created_at": self.created_at.isoformat() if self.created_at else None, + "updated_at": self.updated_at.isoformat() if self.updated_at else None, + } + + class Role(str, Enum): USER = "USER" ADMIN = "ADMIN" @@ -32,6 +68,7 @@ class Expense(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) category_id = db.Column(db.Integer, db.ForeignKey("categories.id"), nullable=True) + account_id = db.Column(db.Integer, db.ForeignKey("accounts.id"), nullable=True) amount = db.Column(db.Numeric(12, 2), nullable=False) currency = db.Column(db.String(10), default="INR", nullable=False) expense_type = 
db.Column(db.String(20), default="EXPENSE", nullable=False) @@ -133,3 +170,71 @@ class AuditLog(db.Model): user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) action = db.Column(db.String(100), nullable=False) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) + + +class RuleField(str, Enum): + PAYEE = 'payee' + AMOUNT = 'amount' + DESCRIPTION = 'description' + NOTES = 'notes' + +class RuleOperator(str, Enum): + CONTAINS = 'contains' + EQUALS = 'equals' + REGEX = 'regex' + GT = 'gt' + LT = 'lt' + GTE = 'gte' + LTE = 'lte' + STARTSWITH = 'startswith' + ENDSWITH = 'endswith' + +class ConditionType(str, Enum): + AND = 'AND' + OR = 'OR' + +class CategorizationRule(db.Model): + __tablename__ = 'categorization_rules' + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) + name = db.Column(db.String(100), nullable=False) + field = db.Column(SAEnum(RuleField), nullable=False) + operator = db.Column(SAEnum(RuleOperator), nullable=False) + value = db.Column(db.String(500), nullable=False) + category_id = db.Column(db.Integer, db.ForeignKey('categories.id'), nullable=True) + tag = db.Column(db.String(100), nullable=True) + priority = db.Column(db.Integer, default=0, nullable=False) + condition_type = db.Column(SAEnum(ConditionType), default=ConditionType.AND, nullable=False) + active = db.Column(db.Boolean, default=True, nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) + updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + def to_dict(self): + return { + 'id': self.id, 'name': self.name, + 'field': self.field.value if self.field else None, + 'operator': self.operator.value if self.operator else None, + 'value': self.value, 'category_id': self.category_id, + 'tag': self.tag, 'priority': self.priority, + 'condition_type': self.condition_type.value if 
self.condition_type else None, + 'active': self.active, + 'created_at': self.created_at.isoformat() if self.created_at else None, + 'updated_at': self.updated_at.isoformat() if self.updated_at else None, + } + +class RuleCondition(db.Model): + __tablename__ = 'rule_conditions' + id = db.Column(db.Integer, primary_key=True) + rule_id = db.Column(db.Integer, db.ForeignKey('categorization_rules.id', ondelete='CASCADE'), nullable=False) + field = db.Column(SAEnum(RuleField), nullable=False) + operator = db.Column(SAEnum(RuleOperator), nullable=False) + value = db.Column(db.String(500), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) + + def to_dict(self): + return { + 'id': self.id, 'rule_id': self.rule_id, + 'field': self.field.value if self.field else None, + 'operator': self.operator.value if self.operator else None, + 'value': self.value, + } diff --git a/packages/backend/app/routes/__init__.py b/packages/backend/app/routes/__init__.py index f13b0f89..b227a005 100644 --- a/packages/backend/app/routes/__init__.py +++ b/packages/backend/app/routes/__init__.py @@ -7,14 +7,20 @@ from .categories import bp as categories_bp from .docs import bp as docs_bp from .dashboard import bp as dashboard_bp +from .savings_opportunities import bp as savings_opportunities_bp +from .rules import bp as rules_bp +from .analytics import bp as analytics_bp def register_routes(app: Flask): - app.register_blueprint(auth_bp, url_prefix="/auth") - app.register_blueprint(expenses_bp, url_prefix="/expenses") - app.register_blueprint(bills_bp, url_prefix="/bills") - app.register_blueprint(reminders_bp, url_prefix="/reminders") - app.register_blueprint(insights_bp, url_prefix="/insights") - app.register_blueprint(categories_bp, url_prefix="/categories") - app.register_blueprint(docs_bp, url_prefix="/docs") - app.register_blueprint(dashboard_bp, url_prefix="/dashboard") + app.register_blueprint(auth_bp, url_prefix='/auth') + 
app.register_blueprint(expenses_bp, url_prefix='/expenses') + app.register_blueprint(bills_bp, url_prefix='/bills') + app.register_blueprint(reminders_bp, url_prefix='/reminders') + app.register_blueprint(insights_bp, url_prefix='/insights') + app.register_blueprint(categories_bp, url_prefix='/categories') + app.register_blueprint(docs_bp, url_prefix='/docs') + app.register_blueprint(dashboard_bp, url_prefix='/dashboard') + app.register_blueprint(savings_opportunities_bp, url_prefix='/savings-opportunities') + app.register_blueprint(rules_bp, url_prefix='/rules') + app.register_blueprint(analytics_bp, url_prefix='/analytics') diff --git a/packages/backend/app/routes/accounts.py b/packages/backend/app/routes/accounts.py new file mode 100644 index 00000000..b93b7078 --- /dev/null +++ b/packages/backend/app/routes/accounts.py @@ -0,0 +1,174 @@ +"""Account CRUD endpoints for multi-account financial tracking.""" +from flask import Blueprint, jsonify, request +from flask_jwt_extended import jwt_required, get_jwt_identity +from sqlalchemy import exc + +from ..extensions import db +from ..models import Account, AccountType + +bp = Blueprint("accounts", __name__) + + +@bp.get("/") +@jwt_required() +def list_accounts(): + """List all accounts for the authenticated user.""" + uid = int(get_jwt_identity()) + include_inactive = request.args.get("include_inactive", "false").lower() == "true" + + query = Account.query.filter(Account.user_id == uid) + if not include_inactive: + query = query.filter(Account.is_active.is_(True)) + + accounts = query.order_by(Account.created_at.desc()).all() + return jsonify({"accounts": [a.to_dict() for a in accounts]}), 200 + + +@bp.get("/<int:account_id>") +@jwt_required() +def get_account(account_id): + """Get a specific account by ID.""" + uid = int(get_jwt_identity()) + account = Account.query.filter( + Account.id == account_id, + Account.user_id == uid + ).first() + + if not account: + return jsonify(error="Account not found"), 404 + + return
jsonify(account.to_dict()), 200 + + +@bp.post("/") +@jwt_required() +def create_account(): + """Create a new financial account.""" + uid = int(get_jwt_identity()) + data = request.get_json() + + if not data: + return jsonify(error="Request body required"), 400 + + name = data.get("name", "").strip() + if not name: + return jsonify(error="Account name is required"), 400 + + account_type_str = data.get("account_type", "CHECKING").upper() + try: + account_type = AccountType(account_type_str) + except ValueError: + return jsonify(error=f"Invalid account type. Valid types: {[t.value for t in AccountType]}"), 400 + + balance = data.get("balance", 0) + try: + balance = float(balance) + except (TypeError, ValueError): + return jsonify(error="Balance must be a valid number"), 400 + + currency = data.get("currency", "INR").strip().upper()[:10] + + account = Account( + user_id=uid, + name=name, + account_type=account_type, + balance=balance, + currency=currency, + is_active=data.get("is_active", True), + ) + + try: + db.session.add(account) + db.session.commit() + return jsonify(account.to_dict()), 201 + except exc.IntegrityError: + db.session.rollback() + return jsonify(error="Failed to create account"), 400 + + +@bp.put("/<int:account_id>") +@jwt_required() +def update_account(account_id): + """Update an existing account.""" + uid = int(get_jwt_identity()) + account = Account.query.filter( + Account.id == account_id, + Account.user_id == uid + ).first() + + if not account: + return jsonify(error="Account not found"), 404 + + data = request.get_json() + if not data: + return jsonify(error="Request body required"), 400 + + if "name" in data: + name = data["name"].strip() + if not name: + return jsonify(error="Account name cannot be empty"), 400 + account.name = name + + if "account_type" in data: + try: + account.account_type = AccountType(data["account_type"].upper()) + except ValueError: + return jsonify(error=f"Invalid account type.
Valid types: {[t.value for t in AccountType]}"), 400 + + if "balance" in data: + try: + account.balance = float(data["balance"]) + except (TypeError, ValueError): + return jsonify(error="Balance must be a valid number"), 400 + + if "currency" in data: + account.currency = data["currency"].strip().upper()[:10] + + if "is_active" in data: + account.is_active = bool(data["is_active"]) + + try: + db.session.commit() + return jsonify(account.to_dict()), 200 + except exc.IntegrityError: + db.session.rollback() + return jsonify(error="Failed to update account"), 400 + + +@bp.delete("/<int:account_id>") +@jwt_required() +def delete_account(account_id): + """Delete an account (soft delete by setting is_active=False).""" + uid = int(get_jwt_identity()) + account = Account.query.filter( + Account.id == account_id, + Account.user_id == uid + ).first() + + if not account: + return jsonify(error="Account not found"), 404 + + # Soft delete + account.is_active = False + db.session.commit() + + return jsonify(message="Account deactivated successfully"), 200 + + +@bp.delete("/<int:account_id>/hard") +@jwt_required() +def hard_delete_account(account_id): + """Permanently delete an account.""" + uid = int(get_jwt_identity()) + account = Account.query.filter( + Account.id == account_id, + Account.user_id == uid + ).first() + + if not account: + return jsonify(error="Account not found"), 404 + + db.session.delete(account) + db.session.commit() + + return jsonify(message="Account deleted permanently"), 200 \ No newline at end of file diff --git a/packages/backend/app/routes/analytics.py b/packages/backend/app/routes/analytics.py new file mode 100644 index 00000000..01dc88a3 --- /dev/null +++ b/packages/backend/app/routes/analytics.py @@ -0,0 +1,265 @@ +from datetime import date, timedelta +from decimal import Decimal +from flask import Blueprint, jsonify, request +from flask_jwt_extended import jwt_required, get_jwt_identity +from sqlalchemy import func, extract +from ..extensions import db +from ..models import
Expense, Category +from ..services.cache import cache_get, cache_set +import logging + +bp = Blueprint("analytics", __name__) +logger = logging.getLogger("finmind.analytics") + + +def _heatmap_cache_key(user_id: int, year: int, aggregation: str, category_id: int | None) -> str: + cat_part = str(category_id) if category_id else "all" + return f"user:{user_id}:heatmap:{year}:{aggregation}:{cat_part}" + + +@bp.get("/heatmap") +@jwt_required() +def spending_heatmap(): + """Generate day-by-day spending data suitable for heatmap visualization.""" + uid = int(get_jwt_identity()) + + try: + year = int(request.args.get("year", date.today().year)) + except ValueError: + return jsonify(error="invalid year"), 400 + + category_id = request.args.get("category") + if category_id: + try: + category_id = int(category_id) + except ValueError: + return jsonify(error="invalid category id"), 400 + else: + category_id = None + + aggregation = request.args.get("aggregation", "daily").lower() + if aggregation not in ("daily", "weekly", "monthly"): + return jsonify(error="aggregation must be daily, weekly, or monthly"), 400 + + cache_key = _heatmap_cache_key(uid, year, aggregation, category_id) + cached = cache_get(cache_key) + if cached: + logger.info("Heatmap cache hit user=%s year=%s", uid, year) + return jsonify(cached) + + year_start = date(year, 1, 1) + year_end = date(year, 12, 31) + prev_year_start = date(year - 1, 1, 1) + prev_year_end = date(year - 1, 12, 31) + + try: + query = ( + db.session.query( + Expense.spent_at, + Expense.amount, + Expense.category_id, + ) + .filter( + Expense.user_id == uid, + Expense.spent_at >= year_start, + Expense.spent_at <= year_end, + Expense.expense_type != "INCOME", + ) + ) + + if category_id: + query = query.filter(Expense.category_id == category_id) + + expenses = query.all() + + prev_query = ( + db.session.query( + Expense.spent_at, + Expense.amount, + ) + .filter( + Expense.user_id == uid, + Expense.spent_at >= prev_year_start, + 
Expense.spent_at <= prev_year_end, + Expense.expense_type != "INCOME", + ) + ) + + if category_id: + prev_query = prev_query.filter(Expense.category_id == category_id) + + prev_expenses = prev_query.all() + + if aggregation == "daily": + heatmap_data = _aggregate_daily(expenses, year) + elif aggregation == "weekly": + heatmap_data = _aggregate_weekly(expenses, year) + else: + heatmap_data = _aggregate_monthly(expenses, year) + + current_total = sum(float(e.amount) for e in expenses) + prev_total = sum(float(e.amount) for e in prev_expenses) + + if prev_total > 0: + change_pct = round(((current_total - prev_total) / prev_total) * 100, 2) + else: + change_pct = 0.0 if current_total == 0 else 100.0 + + comparison = { + "current_period_total": round(current_total, 2), + "previous_period_total": round(prev_total, 2), + "change_amount": round(current_total - prev_total, 2), + "change_percent": change_pct, + "period_type": "year_over_year", + } + + response = { + "data": heatmap_data, + "comparison": comparison, + "metadata": { + "year": year, + "category_id": category_id, + "aggregation": aggregation, + "total_transactions": len(expenses), + "date_range": { + "start": year_start.isoformat(), + "end": year_end.isoformat(), + }, + }, + } + + cache_set(cache_key, response, ttl_seconds=300) + logger.info("Heatmap generated user=%s year=%s aggregation=%s", uid, year, aggregation) + + return jsonify(response) + + except Exception as e: + logger.exception("Heatmap generation failed user=%s", uid) + return jsonify(error="failed to generate heatmap", details=str(e)), 500 + + +def _aggregate_daily(expenses, year): + from collections import defaultdict + daily_data = defaultdict(lambda: {"amount": Decimal(0), "count": 0, "categories": defaultdict(Decimal)}) + + for exp in expenses: + day_key = exp.spent_at.isoformat() + daily_data[day_key]["amount"] += exp.amount + daily_data[day_key]["count"] += 1 + if exp.category_id: + daily_data[day_key]["categories"][exp.category_id] += 
exp.amount + + category_ids = set() + for day_data in daily_data.values(): + category_ids.update(day_data["categories"].keys()) + + category_names = {} + if category_ids: + cat_rows = db.session.query(Category.id, Category.name).filter(Category.id.in_(category_ids)).all() + category_names = {c.id: c.name for c in cat_rows} + + result = [] + for day_key in sorted(daily_data.keys()): + data = daily_data[day_key] + top_cat_id = None + top_cat_amount = Decimal(0) + for cat_id, amt in data["categories"].items(): + if amt > top_cat_amount: + top_cat_amount = amt + top_cat_id = cat_id + + result.append({ + "date": day_key, + "amount": round(float(data["amount"]), 2), + "transaction_count": data["count"], + "top_category": category_names.get(top_cat_id) if top_cat_id else None, + }) + + return result + + +def _aggregate_weekly(expenses, year): + from collections import defaultdict + weekly_data = defaultdict(lambda: {"amount": Decimal(0), "count": 0, "categories": defaultdict(Decimal)}) + + for exp in expenses: + iso_cal = exp.spent_at.isocalendar() + week_key = f"{iso_cal[0]}-W{iso_cal[1]:02d}" + weekly_data[week_key]["amount"] += exp.amount + weekly_data[week_key]["count"] += 1 + if exp.category_id: + weekly_data[week_key]["categories"][exp.category_id] += exp.amount + + category_ids = set() + for week_data in weekly_data.values(): + category_ids.update(week_data["categories"].keys()) + + category_names = {} + if category_ids: + cat_rows = db.session.query(Category.id, Category.name).filter(Category.id.in_(category_ids)).all() + category_names = {c.id: c.name for c in cat_rows} + + result = [] + for week_key in sorted(weekly_data.keys()): + data = weekly_data[week_key] + top_cat_id = None + top_cat_amount = Decimal(0) + for cat_id, amt in data["categories"].items(): + if amt > top_cat_amount: + top_cat_amount = amt + top_cat_id = cat_id + + year_part, week_part = map(int, week_key.split("-W")) + week_num = int(week_part) + jan4 = date(year_part, 1, 4) + week_start = jan4 
+ timedelta(days=(week_num - 1) * 7 - jan4.weekday()) + + result.append({ + "date": week_key, + "week_start": week_start.isoformat(), + "amount": round(float(data["amount"]), 2), + "transaction_count": data["count"], + "top_category": category_names.get(top_cat_id) if top_cat_id else None, + }) + + return result + + +def _aggregate_monthly(expenses, year): + from collections import defaultdict + monthly_data = defaultdict(lambda: {"amount": Decimal(0), "count": 0, "categories": defaultdict(Decimal)}) + + for exp in expenses: + month_key = exp.spent_at.strftime("%Y-%m") + monthly_data[month_key]["amount"] += exp.amount + monthly_data[month_key]["count"] += 1 + if exp.category_id: + monthly_data[month_key]["categories"][exp.category_id] += exp.amount + + category_ids = set() + for month_data in monthly_data.values(): + category_ids.update(month_data["categories"].keys()) + + category_names = {} + if category_ids: + cat_rows = db.session.query(Category.id, Category.name).filter(Category.id.in_(category_ids)).all() + category_names = {c.id: c.name for c in cat_rows} + + result = [] + for month_key in sorted(monthly_data.keys()): + data = monthly_data[month_key] + top_cat_id = None + top_cat_amount = Decimal(0) + for cat_id, amt in data["categories"].items(): + if amt > top_cat_amount: + top_cat_amount = amt + top_cat_id = cat_id + + result.append({ + "date": month_key, + "amount": round(float(data["amount"]), 2), + "transaction_count": data["count"], + "top_category": category_names.get(top_cat_id) if top_cat_id else None, + }) + + return result diff --git a/packages/backend/app/routes/dashboard.py b/packages/backend/app/routes/dashboard.py index c3106110..9cae1f3c 100644 --- a/packages/backend/app/routes/dashboard.py +++ b/packages/backend/app/routes/dashboard.py @@ -4,7 +4,7 @@ from flask_jwt_extended import jwt_required, get_jwt_identity from ..extensions import db -from ..models import Bill, Expense, Category +from ..models import Bill, Expense, Category, Account 
from ..services.cache import cache_get, cache_set, dashboard_summary_key bp = Blueprint("dashboard", __name__) @@ -176,3 +176,115 @@ def _is_valid_month(ym: str) -> bool: return False m = int(month) return 1 <= m <= 12 + + +@bp.get("/overview") +@jwt_required() +def multi_account_overview(): + """Get financial overview across all accounts.""" + uid = int(get_jwt_identity()) + + payload = { + "net_worth": { + "total_balance": 0.0, + "by_currency": {}, + }, + "accounts": [], + "recent_transactions": [], + "summary": { + "total_accounts": 0, + "active_accounts": 0, + "total_income_this_month": 0.0, + "total_expenses_this_month": 0.0, + }, + "errors": [], + } + + today = date.today() + year, month = today.year, today.month + + # Get all accounts with their balances + try: + accounts = Account.query.filter( + Account.user_id == uid, + Account.is_active.is_(True) + ).order_by(Account.name).all() + + total_balance = 0.0 + by_currency = {} + + for acc in accounts: + balance = float(acc.balance or 0) + total_balance += balance + curr = acc.currency or "INR" + by_currency[curr] = by_currency.get(curr, 0.0) + balance + + payload["accounts"].append({ + "id": acc.id, + "name": acc.name, + "account_type": acc.account_type.value if acc.account_type else "CHECKING", + "balance": balance, + "currency": acc.currency, + "is_active": acc.is_active, + }) + + payload["net_worth"]["total_balance"] = round(total_balance, 2) + payload["net_worth"]["by_currency"] = {k: round(v, 2) for k, v in by_currency.items()} + payload["summary"]["total_accounts"] = len(accounts) + payload["summary"]["active_accounts"] = sum(1 for a in accounts if a.is_active) + except Exception: + payload["errors"].append("accounts_unavailable") + + # Get recent transactions with account info + try: + rows = ( + db.session.query(Expense) + .filter(Expense.user_id == uid) + .order_by(Expense.spent_at.desc(), Expense.id.desc()) + .limit(10) + .all() + ) + payload["recent_transactions"] = [ + { + "id": e.id, + 
"description": e.notes or "Transaction", + "amount": float(e.amount), + "date": e.spent_at.isoformat(), + "type": e.expense_type, + "category_id": e.category_id, + "account_id": e.account_id, + "currency": e.currency, + } + for e in rows + ] + except Exception: + payload["errors"].append("recent_transactions_unavailable") + + # Get monthly income and expenses + try: + income = ( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == uid, + extract("year", Expense.spent_at) == year, + extract("month", Expense.spent_at) == month, + Expense.expense_type == "INCOME", + ) + .scalar() + ) + expenses = ( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == uid, + extract("year", Expense.spent_at) == year, + extract("month", Expense.spent_at) == month, + Expense.expense_type != "INCOME", + ) + .scalar() + ) + payload["summary"]["total_income_this_month"] = float(income or 0) + payload["summary"]["total_expenses_this_month"] = float(expenses or 0) + except Exception: + payload["errors"].append("monthly_summary_unavailable") + + return jsonify(payload), 200 diff --git a/packages/backend/app/routes/expenses.py b/packages/backend/app/routes/expenses.py index 1376d46f..def59625 100644 --- a/packages/backend/app/routes/expenses.py +++ b/packages/backend/app/routes/expenses.py @@ -75,6 +75,13 @@ def create_expense(): spent_at=date.fromisoformat(raw_date) if raw_date else date.today(), ) db.session.add(e) + # Auto-apply categorization rules if no category specified + if not e.category_id: + try: + from .rules import apply_rules + apply_rules(e, uid) + except Exception as ex: + logger.warning('Failed to auto-apply rules: %s', ex) db.session.commit() logger.info("Created expense id=%s user=%s amount=%s", e.id, uid, e.amount) # Invalidate caches diff --git a/packages/backend/app/routes/rules.py b/packages/backend/app/routes/rules.py new file mode 100644 index 00000000..f2fa83ba --- /dev/null +++ 
b/packages/backend/app/routes/rules.py @@ -0,0 +1,177 @@ +import logging +import re +from decimal import Decimal, InvalidOperation +from flask import Blueprint, jsonify, request +from flask_jwt_extended import jwt_required, get_jwt_identity +from ..extensions import db +from ..models import CategorizationRule, RuleField, RuleOperator, ConditionType, RuleCondition, Category + +bp = Blueprint('rules', __name__) +logger = logging.getLogger('finmind.rules') + +@bp.get('') +@jwt_required() +def list_rules(): + uid = int(get_jwt_identity()) + rules = db.session.query(CategorizationRule).filter_by(user_id=uid).order_by(CategorizationRule.priority.desc()).all() + return jsonify([r.to_dict() for r in rules]) + +@bp.get('/<int:rule_id>') +@jwt_required() +def get_rule(rule_id): + uid = int(get_jwt_identity()) + rule = db.session.get(CategorizationRule, rule_id) + if not rule or rule.user_id != uid: + return jsonify(error='not found'), 404 + return jsonify(rule.to_dict()) + +@bp.post('') +@jwt_required() +def create_rule(): + uid = int(get_jwt_identity()) + data = request.get_json() or {} + name = (data.get('name') or '').strip() + if not name: return jsonify(error='name required'), 400 + field = _parse_field(data.get('field')) + if not field: return jsonify(error='valid field required'), 400 + operator = _parse_operator(data.get('operator')) + if not operator: return jsonify(error='valid operator required'), 400 + value = (data.get('value') or '').strip() + if not value: return jsonify(error='value required'), 400 + if operator == RuleOperator.REGEX: + try: re.compile(value) + except re.error as e: return jsonify(error='invalid regex: ' + str(e)), 400 + category_id = data.get('category_id') + if category_id: + cat = db.session.get(Category, category_id) + if not cat or cat.user_id != uid: return jsonify(error='category not found'), 404 + try: priority = int(data.get('priority', 0)) + except: return jsonify(error='invalid priority'), 400 + condition_type = 
_parse_condition_type(data.get('condition_type')) + rule = CategorizationRule(user_id=uid, name=name, field=field, operator=operator, value=value, category_id=category_id, tag=data.get('tag'), priority=priority, condition_type=condition_type, active=bool(data.get('active', True))) + db.session.add(rule) + db.session.commit() + logger.info('Created rule id=%s user=%s', rule.id, uid) + conditions = data.get('conditions', []) + if conditions: + for cd in conditions: + cf, co, cv = _parse_field(cd.get('field')), _parse_operator(cd.get('operator')), (cd.get('value') or '').strip() + if cf and co and cv: + if co == RuleOperator.REGEX: + try: + re.compile(cv) + except: + continue + db.session.add(RuleCondition(rule_id=rule.id, field=cf, operator=co, value=cv)) + db.session.commit() + return jsonify(rule.to_dict()), 201 + + +@bp.patch('/<int:rule_id>') +@jwt_required() +def update_rule(rule_id): + uid = int(get_jwt_identity()) + rule = db.session.get(CategorizationRule, rule_id) + if not rule or rule.user_id != uid: return jsonify(error='not found'), 404 + data = request.get_json() or {} + if 'name' in data: rule.name = (data.get('name') or '').strip() or rule.name + if 'field' in data: rule.field = _parse_field(data.get('field')) or rule.field + if 'operator' in data: rule.operator = _parse_operator(data.get('operator')) or rule.operator + if 'value' in data: rule.value = (data.get('value') or '').strip() or rule.value + if 'category_id' in data: rule.category_id = data.get('category_id') + if 'tag' in data: rule.tag = data.get('tag') + if 'priority' in data: + try: + rule.priority = int(data.get('priority', 0)) + except: + pass + if 'active' in data: rule.active = bool(data.get('active')) + db.session.commit() + return jsonify(rule.to_dict()) + +@bp.delete('/<int:rule_id>') +@jwt_required() +def delete_rule(rule_id): + uid = int(get_jwt_identity()) + rule = db.session.get(CategorizationRule, rule_id) + if not rule or rule.user_id != uid: return jsonify(error='not found'), 404 + 
db.session.delete(rule) + db.session.commit() + return jsonify(message='deleted') + +@bp.post('/<int:rule_id>/conditions') +@jwt_required() +def add_condition(rule_id): + uid = int(get_jwt_identity()) + rule = db.session.get(CategorizationRule, rule_id) + if not rule or rule.user_id != uid: return jsonify(error='not found'), 404 + data = request.get_json() or {} + field = _parse_field(data.get('field')) + operator = _parse_operator(data.get('operator')) + value = (data.get('value') or '').strip() + if not field or not operator or not value: return jsonify(error='field, operator, value required'), 400 + cond = RuleCondition(rule_id=rule_id, field=field, operator=operator, value=value) + db.session.add(cond) + db.session.commit() + return jsonify(cond.to_dict()), 201 + +@bp.post('/apply/<int:expense_id>') +@jwt_required() +def apply_rules_to_expense(expense_id): + from ..models import Expense + uid = int(get_jwt_identity()) + exp = db.session.get(Expense, expense_id) + if not exp or exp.user_id != uid: return jsonify(error='expense not found'), 404 + result = apply_rules(exp, uid) + return jsonify(result) + + +def _parse_field(raw): return RuleField(str(raw).lower().strip()) if raw else None +def _parse_operator(raw): return RuleOperator(str(raw).lower().strip()) if raw else None +def _parse_condition_type(raw): return ConditionType(str(raw).upper().strip()) if raw else ConditionType.AND + +def apply_rules(expense, user_id): + rules = db.session.query(CategorizationRule).filter_by(user_id=user_id, active=True).order_by(CategorizationRule.priority.desc()).all() + applied = [] + cat_id = None + tag = None + for rule in rules: + if _evaluate_rule(expense, rule): + applied.append(rule.to_dict()) + if rule.category_id and not cat_id: cat_id = rule.category_id; expense.category_id = cat_id + if rule.tag and not tag: tag = rule.tag + if not db.session.query(RuleCondition).filter_by(rule_id=rule.id).first(): break + if applied: db.session.commit() + return {'expense_id': expense.id, 'category_id': 
cat_id, 'tag': tag, 'applied_rules': applied} + +def _evaluate_rule(expense, rule): + conds = db.session.query(RuleCondition).filter_by(rule_id=rule.id).all() + if not conds: return _evaluate_condition(expense, rule.field, rule.operator, rule.value) + results = [_evaluate_condition(expense, rule.field, rule.operator, rule.value)] + for c in conds: results.append(_evaluate_condition(expense, c.field, c.operator, c.value)) + return all(results) if rule.condition_type == ConditionType.AND else any(results) + +def _evaluate_condition(expense, field, operator, value): + fv = str(expense.amount) if field == RuleField.AMOUNT else expense.notes or '' + if operator == RuleOperator.CONTAINS: return value.lower() in fv.lower() + elif operator == RuleOperator.EQUALS: return value.lower() == fv.lower() + elif operator == RuleOperator.STARTSWITH: return fv.lower().startswith(value.lower()) + elif operator == RuleOperator.ENDSWITH: return fv.lower().endswith(value.lower()) + elif operator == RuleOperator.REGEX: + + try: + + return bool(re.search(value, fv, re.I)) + + except: + + return False + elif operator in (RuleOperator.GT, RuleOperator.LT, RuleOperator.GTE, RuleOperator.LTE): + try: + fn, vn = Decimal(fv), Decimal(value) + if operator == RuleOperator.GT: return fn > vn + elif operator == RuleOperator.LT: return fn < vn + elif operator == RuleOperator.GTE: return fn >= vn + elif operator == RuleOperator.LTE: return fn <= vn + except: return False + return False diff --git a/packages/backend/tests/test_accounts.py b/packages/backend/tests/test_accounts.py new file mode 100644 index 00000000..77dc533a --- /dev/null +++ b/packages/backend/tests/test_accounts.py @@ -0,0 +1,316 @@ +"""Tests for multi-account CRUD and dashboard overview.""" +import pytest +from flask import Flask +from flask_jwt_extended import create_access_token + +from app.extensions import db +from app.models import User, Account, AccountType, Expense +from app.routes import register_routes + + +@pytest.fixture 
+def app(): + """Create test app with in-memory database.""" + app = Flask(__name__) + app.config.update( + SQLALCHEMY_DATABASE_URI="sqlite:///:memory:", + SQLALCHEMY_TRACK_MODIFICATIONS=False, + JWT_SECRET_KEY="test-secret-key", + TESTING=True, + ) + db.init_app(app) + + from flask_jwt_extended import JWTManager + JWTManager(app) + + register_routes(app) + + with app.app_context(): + db.create_all() + yield app + + +@pytest.fixture +def client(app): + """Create test client.""" + return app.test_client() + + +@pytest.fixture +def test_user(app): + """Create test user.""" + with app.app_context(): + user = User( + email="test@example.com", + password_hash="hashed_password", + preferred_currency="INR", + ) + db.session.add(user) + db.session.commit() + return user.id + + +@pytest.fixture +def auth_header(app, test_user): + """Create JWT auth header.""" + with app.app_context(): + token = create_access_token(identity=str(test_user)) + return {"Authorization": f"Bearer {token}"} + + +class TestAccountCRUD: + """Tests for account CRUD operations.""" + + def test_list_accounts_empty(self, client, auth_header): + """Test listing accounts when none exist.""" + response = client.get("/accounts/", headers=auth_header) + assert response.status_code == 200 + data = response.get_json() + assert data["accounts"] == [] + + def test_create_account(self, client, auth_header): + """Test creating a new account.""" + response = client.post( + "/accounts/", + headers=auth_header, + json={ + "name": "Main Checking", + "account_type": "CHECKING", + "balance": 1000.50, + "currency": "INR", + }, + ) + assert response.status_code == 201 + data = response.get_json() + assert data["name"] == "Main Checking" + assert data["account_type"] == "CHECKING" + assert data["balance"] == 1000.50 + assert data["currency"] == "INR" + assert data["is_active"] is True + + def test_create_account_invalid_type(self, client, auth_header): + 
"""Test creating account with invalid type.""" + response = client.post( + "/accounts/", + headers=auth_header, + json={ + "name": "Test Account", + "account_type": "INVALID_TYPE", + "balance": 100, + }, + ) + assert response.status_code == 400 + data = response.get_json() + assert "Invalid account type" in data["error"] + + def test_create_account_missing_name(self, client, auth_header): + """Test creating account without name.""" + response = client.post( + "/accounts/", + headers=auth_header, + json={"balance": 100}, + ) + assert response.status_code == 400 + data = response.get_json() + assert "name is required" in data["error"] + + def test_get_account(self, client, auth_header): + """Test getting a specific account.""" + # Create account first + create_response = client.post( + "/accounts/", + headers=auth_header, + json={"name": "Savings Account", "account_type": "SAVINGS", "balance": 5000}, + ) + account_id = create_response.get_json()["id"] + + # Get the account + response = client.get(f"/accounts/{account_id}", headers=auth_header) + assert response.status_code == 200 + data = response.get_json() + assert data["name"] == "Savings Account" + assert data["account_type"] == "SAVINGS" + + def test_get_account_not_found(self, client, auth_header): + """Test getting non-existent account.""" + response = client.get("/accounts/999", headers=auth_header) + assert response.status_code == 404 + + def test_update_account(self, client, auth_header): + """Test updating an account.""" + # Create account + create_response = client.post( + "/accounts/", + headers=auth_header, + json={"name": "Old Name", "balance": 100}, + ) + account_id = create_response.get_json()["id"] + + # Update account + response = client.put( + f"/accounts/{account_id}", + headers=auth_header, + json={"name": "New Name", "balance": 500}, + ) + assert response.status_code == 200 + data = response.get_json() + assert data["name"] == "New Name" + assert data["balance"] == 500 + + def 
test_delete_account_soft(self, client, auth_header): + """Test soft deleting an account.""" + # Create account + create_response = client.post( + "/accounts/", + headers=auth_header, + json={"name": "To Delete", "balance": 100}, + ) + account_id = create_response.get_json()["id"] + + # Soft delete + response = client.delete(f"/accounts/{account_id}", headers=auth_header) + assert response.status_code == 200 + + # Verify it's inactive + get_response = client.get(f"/accounts/{account_id}", headers=auth_header) + assert get_response.get_json()["is_active"] is False + + def test_list_accounts_exclude_inactive(self, client, auth_header): + """Test that inactive accounts are excluded by default.""" + # Create active account + client.post( + "/accounts/", + headers=auth_header, + json={"name": "Active Account", "balance": 100}, + ) + + # Create and deactivate another account + create_response = client.post( + "/accounts/", + headers=auth_header, + json={"name": "Inactive Account", "balance": 200}, + ) + account_id = create_response.get_json()["id"] + client.delete(f"/accounts/{account_id}", headers=auth_header) + + # List accounts (should only show active) + response = client.get("/accounts/", headers=auth_header) + data = response.get_json() + assert len(data["accounts"]) == 1 + assert data["accounts"][0]["name"] == "Active Account" + + def test_list_accounts_include_inactive(self, client, auth_header): + """Test including inactive accounts in list.""" + # Create and deactivate an account + create_response = client.post( + "/accounts/", + headers=auth_header, + json={"name": "Inactive Account", "balance": 200}, + ) + account_id = create_response.get_json()["id"] + client.delete(f"/accounts/{account_id}", headers=auth_header) + + # List with include_inactive + response = client.get("/accounts/?include_inactive=true", headers=auth_header) + data = response.get_json() + assert len(data["accounts"]) >= 1 + + +class TestDashboardOverview: + """Tests for multi-account 
dashboard overview.""" + + def test_overview_no_accounts(self, client, auth_header): + """Test overview when no accounts exist.""" + response = client.get("/dashboard/overview", headers=auth_header) + assert response.status_code == 200 + data = response.get_json() + assert data["net_worth"]["total_balance"] == 0.0 + assert data["accounts"] == [] + assert data["summary"]["total_accounts"] == 0 + + def test_overview_single_account(self, client, auth_header): + """Test overview with single account.""" + # Create account + client.post( + "/accounts/", + headers=auth_header, + json={"name": "Checking", "account_type": "CHECKING", "balance": 1000}, + ) + + response = client.get("/dashboard/overview", headers=auth_header) + assert response.status_code == 200 + data = response.get_json() + assert data["net_worth"]["total_balance"] == 1000.0 + assert len(data["accounts"]) == 1 + assert data["summary"]["total_accounts"] == 1 + + def test_overview_multiple_accounts(self, client, auth_header): + """Test overview with multiple accounts.""" + # Create multiple accounts + client.post( + "/accounts/", + headers=auth_header, + json={"name": "Checking", "account_type": "CHECKING", "balance": 2000}, + ) + client.post( + "/accounts/", + headers=auth_header, + json={"name": "Savings", "account_type": "SAVINGS", "balance": 5000}, + ) + client.post( + "/accounts/", + headers=auth_header, + json={"name": "Credit Card", "account_type": "CREDIT", "balance": -500}, + ) + + response = client.get("/dashboard/overview", headers=auth_header) + assert response.status_code == 200 + data = response.get_json() + # Net worth = 2000 + 5000 - 500 = 6500 + assert data["net_worth"]["total_balance"] == 6500.0 + assert len(data["accounts"]) == 3 + assert data["summary"]["total_accounts"] == 3 + + def test_overview_multi_currency(self, client, auth_header): + """Test overview with multiple currencies.""" + client.post( + "/accounts/", + headers=auth_header, + json={"name": "INR Account", "balance": 1000, 
"currency": "INR"}, + ) + client.post( + "/accounts/", + headers=auth_header, + json={"name": "USD Account", "balance": 500, "currency": "USD"}, + ) + + response = client.get("/dashboard/overview", headers=auth_header) + data = response.get_json() + assert "INR" in data["net_worth"]["by_currency"] + assert "USD" in data["net_worth"]["by_currency"] + assert data["net_worth"]["by_currency"]["INR"] == 1000.0 + assert data["net_worth"]["by_currency"]["USD"] == 500.0 + + def test_overview_unauthorized(self, client): + """Test overview without authentication.""" + response = client.get("/dashboard/overview") + assert response.status_code == 401 + + +class TestAccountTypes: + """Test all account types are valid.""" + + def test_all_account_types(self, client, auth_header): + """Test creating accounts with all valid types.""" + valid_types = ["CHECKING", "SAVINGS", "CREDIT", "INVESTMENT", "CASH", "OTHER"] + + for acc_type in valid_types: + response = client.post( + "/accounts/", + headers=auth_header, + json={"name": f"{acc_type} Account", "account_type": acc_type, "balance": 100}, + ) + assert response.status_code == 201 + data = response.get_json() + assert data["account_type"] == acc_type \ No newline at end of file diff --git a/packages/backend/tests/test_analytics.py b/packages/backend/tests/test_analytics.py new file mode 100644 index 00000000..c534874d --- /dev/null +++ b/packages/backend/tests/test_analytics.py @@ -0,0 +1,260 @@ +import pytest + + +def test_heatmap_daily_aggregation(client, auth_header): + # Create some expenses + for i in range(5): + client.post( + "/expenses", + json={ + "amount": 10.0 * (i + 1), + "description": f"Test expense {i}", + "date": f"2026-01-{10 + i:02d}", + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026&aggregation=daily", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + assert "data" in data + assert "comparison" in data + assert "metadata" in data + + assert 
data["metadata"]["year"] == 2026 + assert data["metadata"]["aggregation"] == "daily" + + # Check that we have daily entries + assert len(data["data"]) >= 5 + + # Check structure of first entry + entry = data["data"][0] + assert "date" in entry + assert "amount" in entry + assert "transaction_count" in entry + assert "top_category" in entry + + +def test_heatmap_weekly_aggregation(client, auth_header): + # Create expenses in different weeks + dates = ["2026-01-05", "2026-01-06", "2026-01-12", "2026-01-15"] + for i, d in enumerate(dates): + client.post( + "/expenses", + json={ + "amount": 20.0 + i, + "description": f"Weekly test {i}", + "date": d, + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026&aggregation=weekly", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + assert data["metadata"]["aggregation"] == "weekly" + + # Check weekly structure + if data["data"]: + entry = data["data"][0] + assert "week_start" in entry + assert entry["date"].startswith("2026-W") + + +def test_heatmap_monthly_aggregation(client, auth_header): + # Create expenses in different months + dates = ["2026-01-15", "2026-01-20", "2026-02-10", "2026-03-05"] + for i, d in enumerate(dates): + client.post( + "/expenses", + json={ + "amount": 30.0 + i, + "description": f"Monthly test {i}", + "date": d, + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026&aggregation=monthly", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + assert data["metadata"]["aggregation"] == "monthly" + + # Should have entries for months with expenses + months = [entry["date"] for entry in data["data"]] + assert "2026-01" in months + + +def test_heatmap_with_category_filter(client, auth_header): + # Create a category + r = client.post("/categories", json={"name": "Food"}, headers=auth_header) + assert r.status_code in (201, 409) + r = client.get("/categories", headers=auth_header) + cat_id = r.get_json()[0]["id"] 
+ + # Create expenses with and without category + client.post( + "/expenses", + json={ + "amount": 50.0, + "description": "Categorized expense", + "date": "2026-02-10", + "category_id": cat_id, + }, + headers=auth_header, + ) + client.post( + "/expenses", + json={ + "amount": 100.0, + "description": "Uncategorized expense", + "date": "2026-02-11", + }, + headers=auth_header, + ) + + r = client.get(f"/analytics/heatmap?year=2026&category={cat_id}", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + # Only categorized expense should be included + assert data["metadata"]["category_id"] == cat_id + total = sum(entry["amount"] for entry in data["data"]) + assert total == 50.0 + + +def test_heatmap_comparison(client, auth_header): + # Create expenses in current year + for i in range(3): + client.post( + "/expenses", + json={ + "amount": 50.0, + "description": f"Current year {i}", + "date": f"2026-03-{10 + i:02d}", + }, + headers=auth_header, + ) + + # Create expenses in previous year + for i in range(2): + client.post( + "/expenses", + json={ + "amount": 40.0, + "description": f"Previous year {i}", + "date": f"2025-03-{10 + i:02d}", + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + comparison = data["comparison"] + assert "current_period_total" in comparison + assert "previous_period_total" in comparison + assert "change_amount" in comparison + assert "change_percent" in comparison + assert comparison["period_type"] == "year_over_year" + + assert comparison["current_period_total"] == 150.0 + assert comparison["previous_period_total"] == 80.0 + + +def test_heatmap_invalid_year(client, auth_header): + r = client.get("/analytics/heatmap?year=invalid", headers=auth_header) + assert r.status_code == 400 + assert "error" in r.get_json() + + +def test_heatmap_invalid_aggregation(client, auth_header): + r = 
client.get("/analytics/heatmap?year=2026&aggregation=invalid", headers=auth_header) + assert r.status_code == 400 + assert "error" in r.get_json() + + +def test_heatmap_excludes_income(client, auth_header): + # Create expense + client.post( + "/expenses", + json={ + "amount": 100.0, + "description": "Expense", + "date": "2026-04-01", + "expense_type": "EXPENSE", + }, + headers=auth_header, + ) + + # Create income (should be excluded) + client.post( + "/expenses", + json={ + "amount": 1000.0, + "description": "Income", + "date": "2026-04-02", + "expense_type": "INCOME", + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026&aggregation=daily", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + total = sum(entry["amount"] for entry in data["data"]) + # Income should not be counted + assert total == 100.0 + + +def test_heatmap_top_category(client, auth_header): + # Create categories + r = client.post("/categories", json={"name": "Groceries"}, headers=auth_header) + r = client.get("/categories", headers=auth_header) + groceries_id = [c["id"] for c in r.get_json() if c["name"] == "Groceries"][0] + + r = client.post("/categories", json={"name": "Transport"}, headers=auth_header) + r = client.get("/categories", headers=auth_header) + transport_id = [c["id"] for c in r.get_json() if c["name"] == "Transport"][0] + + # Create multiple expenses on same day with different categories + client.post( + "/expenses", + json={ + "amount": 100.0, + "description": "Groceries", + "date": "2026-05-01", + "category_id": groceries_id, + }, + headers=auth_header, + ) + client.post( + "/expenses", + json={ + "amount": 30.0, + "description": "Transport", + "date": "2026-05-01", + "category_id": transport_id, + }, + headers=auth_header, + ) + + r = client.get("/analytics/heatmap?year=2026&aggregation=daily", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + + # Find the entry for 2026-05-01 + may01_entry = next((e 
for e in data["data"] if e["date"] == "2026-05-01"), None) + assert may01_entry is not None + assert may01_entry["top_category"] == "Groceries" + assert may01_entry["transaction_count"] == 2 + + +def test_heatmap_unauthenticated(client): + r = client.get("/analytics/heatmap?year=2026") + assert r.status_code == 401 diff --git a/packages/backend/tests/test_rules.py b/packages/backend/tests/test_rules.py new file mode 100644 index 00000000..e4772582 --- /dev/null +++ b/packages/backend/tests/test_rules.py @@ -0,0 +1,35 @@ + +def test_update_rule(client, auth_header): + r = client.post('/rules', json={'name': 'Test Rule', 'field': 'description', 'operator': 'contains', 'value': 'test'}, headers=auth_header) + assert r.status_code == 201 + rule = r.get_json() + r = client.patch('/rules/' + str(rule['id']), json={'name': 'Updated Rule', 'value': 'updated', 'priority': 5}, headers=auth_header) + assert r.status_code == 200 + updated = r.get_json() + assert updated['name'] == 'Updated Rule' + +def test_delete_rule(client, auth_header): + r = client.post('/rules', json={'name': 'To Delete', 'field': 'description', 'operator': 'contains', 'value': 'delete'}, headers=auth_header) + assert r.status_code == 201 + rule = r.get_json() + r = client.delete('/rules/' + str(rule['id']), headers=auth_header) + assert r.status_code == 200 + +def test_rule_applies_to_expense(client, auth_header): + r = client.post('/categories', json={'name': 'Shopping'}, headers=auth_header) + cat = r.get_json() + r = client.post('/rules', json={'name': 'Amazon', 'field': 'description', 'operator': 'contains', 'value': 'amazon', 'category_id': cat['id'], 'priority': 10}, headers=auth_header) + r = client.post('/expenses', json={'amount': 50.00, 'description': 'Amazon purchase', 'date': '2026-01-15'}, headers=auth_header) + expense = r.get_json() + assert expense['category_id'] == cat['id'] + +def test_priority_ordering(client, auth_header): + r = client.post('/categories', json={'name': 'High'}, 
headers=auth_header) + cat_high = r.get_json() + r = client.post('/categories', json={'name': 'Low'}, headers=auth_header) + cat_low = r.get_json() + r = client.post('/rules', json={'name': 'Low', 'field': 'description', 'operator': 'contains', 'value': 'test', 'category_id': cat_low['id'], 'priority': 1}, headers=auth_header) + r = client.post('/rules', json={'name': 'High', 'field': 'description', 'operator': 'contains', 'value': 'test', 'category_id': cat_high['id'], 'priority': 10}, headers=auth_header) + r = client.post('/expenses', json={'amount': 50.00, 'description': 'test expense', 'date': '2026-01-15'}, headers=auth_header) + expense = r.get_json() + assert expense['category_id'] == cat_high['id']