diff --git a/.importlinter b/.importlinter index 582700d..1e31c0f 100644 --- a/.importlinter +++ b/.importlinter @@ -42,16 +42,17 @@ forbidden_modules = adapters domain -# Domain must not import services, adapters, or lib +# Domain must not import services or adapters. lib is allowed: lib hosts +# layer-agnostic utilities (e.g. ISO timestamp parsing) shared by both +# domain and services. [importlinter:contract:domain-independence] -name = Domain must not import services, adapters, or lib +name = Domain must not import services or adapters type = forbidden source_modules = domain forbidden_modules = services adapters - lib # Models must not import services, adapters, domain, or lib [importlinter:contract:models-independence] diff --git a/main.py b/main.py index a5f11b0..ee32864 100755 --- a/main.py +++ b/main.py @@ -675,10 +675,6 @@ async def sync_rom_saves(self, rom_id): async def get_save_slots(self, rom_id): return await self._save_sync_service.get_save_slots(rom_id) - @migration_blocked - async def set_game_slot(self, rom_id, slot): - return self._save_sync_service.set_game_slot(rom_id, slot) - async def get_slot_saves(self, rom_id, slot): return await self._save_sync_service.get_slot_saves(rom_id, slot) diff --git a/py_modules/adapters/romm/romm_api.py b/py_modules/adapters/romm/romm_api.py index 026cf67..52f7135 100644 --- a/py_modules/adapters/romm/romm_api.py +++ b/py_modules/adapters/romm/romm_api.py @@ -113,9 +113,9 @@ def list_saves( ) -> list[dict]: query = f"/api/saves?rom_id={rom_id}" if device_id is not None: - query += f"&device_id={device_id}" + query += f"&device_id={urllib.parse.quote(device_id, safe='')}" if slot is not None: - query += f"&slot={slot}" + query += f"&slot={urllib.parse.quote(slot, safe='')}" result = self._client.request(query) return result if isinstance(result, list) else [] @@ -130,11 +130,11 @@ def upload_save( slot: str | None = None, overwrite: bool = False, ) -> dict: - params = 
f"rom_id={rom_id}&emulator={urllib.parse.quote(emulator)}" + params = f"rom_id={rom_id}&emulator={urllib.parse.quote(emulator, safe='')}" if device_id is not None: - params += f"&device_id={device_id}" + params += f"&device_id={urllib.parse.quote(device_id, safe='')}" if slot is not None: - params += f"&slot={slot}" + params += f"&slot={urllib.parse.quote(slot, safe='')}" if overwrite: params += "&overwrite=true" if save_id is not None: @@ -155,7 +155,7 @@ def download_save_content( path = f"/api/saves/{save_id}/content" if device_id is not None: opt = "true" if optimistic else "false" - path += f"?device_id={device_id}&optimistic={opt}" + path += f"?device_id={urllib.parse.quote(device_id, safe='')}&optimistic={opt}" self._client.download(path, dest_path) def confirm_download(self, save_id: int, device_id: str) -> dict: @@ -170,7 +170,7 @@ def get_save_metadata(self, save_id: int) -> dict: def get_save_summary(self, rom_id: int, device_id: str | None = None) -> dict: query = f"/api/saves/summary?rom_id={rom_id}" if device_id is not None: - query += f"&device_id={device_id}" + query += f"&device_id={urllib.parse.quote(device_id, safe='')}" return self._client.request(query) def delete_server_saves(self, save_ids: list[int]) -> dict: @@ -195,7 +195,7 @@ def list_devices(self) -> list[dict]: def update_device(self, device_id: str, **fields) -> dict: payload = {k: v for k, v in fields.items() if v is not None} - return self._client.put_json(f"/api/devices/{device_id}", payload) + return self._client.put_json(f"/api/devices/{urllib.parse.quote(device_id, safe='')}", payload) # ── Notes / Playtime ────────────────────────────────────────────── diff --git a/py_modules/domain/save_conflicts.py b/py_modules/domain/save_conflicts.py deleted file mode 100644 index e9064e5..0000000 --- a/py_modules/domain/save_conflicts.py +++ /dev/null @@ -1,163 +0,0 @@ -"""Pure save-file conflict detection and resolution logic. - -No I/O, no service/adapter/lib imports. 
All functions are stateless and -operate only on the values passed to them. -""" - -from __future__ import annotations - -from datetime import UTC, datetime - -from models.saves import SaveConflict - - -def check_local_changes(local_hash: str | None, last_sync_hash: str) -> bool: - """Return True if the local file has changed since the last sync. - - Parameters - ---------- - local_hash: - MD5 hex digest of the current local file (may be empty/None if missing). - last_sync_hash: - MD5 hex digest recorded at the last successful sync. - """ - return local_hash != last_sync_hash - - -def check_server_changes_fast( - file_state: dict, - server_save: dict, -) -> bool | None: - """Fast-path server-change detection using stored timestamp and size. - - Returns - ------- - False - Server is definitely unchanged (timestamp+size match AND last_sync_hash - is present so we have a valid baseline to compare against). - True - Server has definitely changed (size differs on a timestamp-matched save). - None - Indeterminate — timestamp changed (or no stored timestamp); the caller - must fall back to a slow-path hash comparison. - """ - stored_updated_at = file_state.get("last_sync_server_updated_at") - stored_size = file_state.get("last_sync_server_size") - server_updated_at = server_save.get("updated_at", "") - server_size = server_save.get("file_size_bytes") - - # Fast path: timestamp unchanged - if stored_updated_at and server_updated_at == stored_updated_at: - return stored_size is not None and server_size is not None and server_size != stored_size - - # Timestamp changed or no stored timestamp — indeterminate without hash - return None - - -def determine_action(local_changed: bool, server_changed: bool) -> str: - """Decide the sync action from local and server change flags. - - Returns - ------- - str - One of ``"skip"``, ``"upload"``, ``"download"``, or ``"conflict"``. 
- """ - if not local_changed and not server_changed: - return "skip" - if not local_changed: - return "download" - if not server_changed: - return "upload" - return "conflict" - - -def resolve_conflict_by_mode( - mode: str, - local_mtime: float, - server_save: dict, - tolerance: float = 60.0, -) -> str: - """Apply a conflict resolution mode. - - Parameters - ---------- - mode: - One of ``"ask_me"``, ``"always_upload"``, ``"always_download"``, - or ``"newest_wins"``. - local_mtime: - Modification time of the local save file (seconds since epoch). - server_save: - Server save metadata dict (needs ``"updated_at"`` ISO-8601 string). - tolerance: - Clock-skew tolerance in seconds used by ``"newest_wins"`` mode. - Defaults to ``60``. - - Returns - ------- - str - One of ``"upload"``, ``"download"``, or ``"ask"``. - """ - if mode == "always_upload": - return "upload" - if mode == "always_download": - return "download" - if mode == "ask_me": - return "ask" - - # newest_wins (default fallback for unrecognised modes too) - server_updated = server_save.get("updated_at", "") - try: - server_dt = datetime.fromisoformat(server_updated.replace("Z", "+00:00")) - local_dt = datetime.fromtimestamp(local_mtime, tz=UTC) - diff = abs((local_dt - server_dt).total_seconds()) - if diff <= tolerance: - return "ask" - return "upload" if local_dt > server_dt else "download" - except (ValueError, TypeError): - return "ask" - - -def build_conflict_dict( - rom_id: int, - filename: str, - local_info: dict | None, - local_hash: str | None, - server_save: dict, -) -> SaveConflict: - """Build a conflict descriptor for the frontend. - - Parameters - ---------- - rom_id: - Integer ROM identifier. - filename: - Save filename (e.g. ``"pokemon.srm"``). - local_info: - Pre-computed local file metadata with keys ``"path"``, ``"mtime"`` - (float seconds since epoch, or ``None``), and ``"size"`` (int, or - ``None``). Pass ``None`` if no local file exists. 
- local_hash: - MD5 hex digest of the local file, or ``None``. - server_save: - Server save metadata dict. - - Returns - ------- - SaveConflict - Conflict descriptor ready for the frontend. - """ - local_mtime_val: float | None = local_info.get("mtime") if local_info else None - return SaveConflict( - rom_id=rom_id, - filename=filename, - local_path=local_info["path"] if local_info else None, - local_hash=local_hash, - local_mtime=( - datetime.fromtimestamp(local_mtime_val, tz=UTC).isoformat() if local_mtime_val is not None else None - ), - local_size=local_info.get("size") if local_info else None, - server_save_id=server_save.get("id"), - server_updated_at=server_save.get("updated_at", ""), - server_size=server_save.get("file_size_bytes"), - created_at=datetime.now(UTC).isoformat(), - ) diff --git a/py_modules/domain/save_status.py b/py_modules/domain/save_status.py index 9ebd219..4b54b01 100644 --- a/py_modules/domain/save_status.py +++ b/py_modules/domain/save_status.py @@ -7,23 +7,24 @@ from datetime import UTC, datetime +from lib.iso_time import parse_iso + def _format_time_ago(iso_timestamp: str) -> str | None: """Format an ISO timestamp as a human-readable time-ago label, or None on error.""" - try: - check_dt = datetime.fromisoformat(iso_timestamp) - if check_dt.tzinfo is None: - check_dt = check_dt.replace(tzinfo=UTC) - diff_min = int((datetime.now(UTC) - check_dt).total_seconds() // 60) - if diff_min < 1: - return "Just now" - if diff_min < 60: - return f"{diff_min}m ago" - if diff_min < 1440: - return f"{diff_min // 60}h ago" - return f"{diff_min // 1440}d ago" - except (ValueError, TypeError): + check_dt = parse_iso(iso_timestamp) + if check_dt is None: return None + if check_dt.tzinfo is None: + check_dt = check_dt.replace(tzinfo=UTC) + diff_min = int((datetime.now(UTC) - check_dt).total_seconds() // 60) + if diff_min < 1: + return "Just now" + if diff_min < 60: + return f"{diff_min}m ago" + if diff_min < 1440: + return f"{diff_min // 60}h ago" + 
return f"{diff_min // 1440}d ago" def compute_save_sync_display( diff --git a/py_modules/domain/sync_action.py b/py_modules/domain/sync_action.py index 360ecdc..165bdbe 100644 --- a/py_modules/domain/sync_action.py +++ b/py_modules/domain/sync_action.py @@ -40,7 +40,8 @@ from __future__ import annotations from dataclasses import dataclass -from datetime import datetime + +from lib.iso_time import parse_iso_to_epoch # --------------------------------------------------------------------------- # SyncAction variants @@ -94,22 +95,6 @@ class Conflict: # --------------------------------------------------------------------------- -def _parse_iso_to_epoch(value: str) -> float | None: - """Parse an ISO-8601 timestamp to epoch seconds. - - Handles a trailing "Z" defensively (older datetime.fromisoformat versions - reject it). Returns None on any parse failure — the caller decides how - to interpret that. - """ - if not value: - return None - try: - normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value - return datetime.fromisoformat(normalized).timestamp() - except (ValueError, TypeError): - return None - - def _local_mtime_ge_server_updated_at(local_file: dict, server: dict) -> bool: """Return True iff local mtime is at-or-after the server save's updated_at. @@ -120,7 +105,7 @@ def _local_mtime_ge_server_updated_at(local_file: dict, server: dict) -> bool: local_mtime = local_file.get("mtime") if not isinstance(local_mtime, int | float): return False - server_epoch = _parse_iso_to_epoch(server.get("updated_at", "")) + server_epoch = parse_iso_to_epoch(server.get("updated_at", "")) if server_epoch is None: return False return local_mtime >= server_epoch @@ -195,8 +180,12 @@ def compute_sync_action( return Upload(target_save_id=None) return Skip(reason="nothing_to_sync") - # 2. Pick newest server save by updated_at. - server = max(server_saves_in_slot, key=lambda s: s.get("updated_at", "")) + # 2. 
Pick newest server save by updated_at (epoch-keyed; unparseable + # timestamps sort to the bottom so they can't beat a parseable one). + server = max( + server_saves_in_slot, + key=lambda s: parse_iso_to_epoch(s.get("updated_at")) or 0.0, + ) # 3. Find our device's entry on the chosen save and branch on it. device_syncs = server.get("device_syncs") or [] diff --git a/py_modules/lib/iso_time.py b/py_modules/lib/iso_time.py new file mode 100644 index 0000000..0a77f06 --- /dev/null +++ b/py_modules/lib/iso_time.py @@ -0,0 +1,30 @@ +"""ISO-8601 timestamp parsing helpers. + +Layer-agnostic utilities — domain and services may both import from here. +""" + +from __future__ import annotations + +from datetime import datetime + + +def parse_iso(value: str | None) -> datetime | None: + """Parse an ISO-8601 timestamp to an aware datetime, or None on failure. + + Handles a trailing "Z" defensively (older datetime.fromisoformat versions + reject it). Returns None for empty/None input or any parse failure — the + caller decides how to interpret that. 
+ """ + if not value: + return None + try: + normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value + return datetime.fromisoformat(normalized) + except (ValueError, TypeError): + return None + + +def parse_iso_to_epoch(value: str | None) -> float | None: + """Parse an ISO-8601 timestamp to epoch seconds (UTC), or None on failure.""" + dt = parse_iso(value) + return dt.timestamp() if dt is not None else None diff --git a/py_modules/services/playtime.py b/py_modules/services/playtime.py index 5537aa0..20b3ae6 100644 --- a/py_modules/services/playtime.py +++ b/py_modules/services/playtime.py @@ -11,6 +11,7 @@ from datetime import UTC, datetime from typing import TYPE_CHECKING +from lib.iso_time import parse_iso from services.protocols import RetryStrategy, RommApiProtocol, StatePersister if TYPE_CHECKING: @@ -206,7 +207,9 @@ async def record_session_end(self, rom_id: int) -> dict: return {"success": False, "message": "No active session"} try: - start = datetime.fromisoformat(entry["last_session_start"]) + start = parse_iso(entry["last_session_start"]) + if start is None: + return {"success": False, "message": "Failed to calculate session duration"} now = datetime.now(UTC) duration = (now - start).total_seconds() diff --git a/py_modules/services/saves.py b/py_modules/services/saves.py index 5c4bc70..2849cbc 100644 --- a/py_modules/services/saves.py +++ b/py_modules/services/saves.py @@ -32,6 +32,7 @@ compute_sync_action, ) from lib.errors import RommApiError, classify_error +from lib.iso_time import parse_iso_to_epoch from services.protocols import ( CoreNameProviderFn, CoreResolverFn, @@ -1135,7 +1136,7 @@ def _resolve_chosen_server(action: object, candidates: list[dict]) -> dict | Non return None if not candidates: return None - return max(candidates, key=lambda s: s.get("updated_at", "")) + return max(candidates, key=lambda s: parse_iso_to_epoch(s.get("updated_at")) or 0.0) def _status_entry_for_local_file( self, @@ -1206,7 +1207,7 @@ def 
_status_entry_for_server_only( ) -> dict: """Build the ready-to-download status entry when no local file exists but the slot has server saves. Picks newest by ``updated_at``.""" - newest = max(server_in_slot, key=lambda s: s.get("updated_at", "")) + newest = max(server_in_slot, key=lambda s: parse_iso_to_epoch(s.get("updated_at")) or 0.0) return self._build_file_status( self._local_save_target(newest, rom_name), local_path=None, @@ -2244,7 +2245,7 @@ async def resolve_sync_conflict( server_in_slot = self._filter_server_saves_to_slot(server_saves, active_slot) if not server_in_slot: return {"success": False, "message": "No server save in active slot"} - server = max(server_in_slot, key=lambda s: s.get("updated_at", "")) + server = max(server_in_slot, key=lambda s: parse_iso_to_epoch(s.get("updated_at")) or 0.0) try: if action == "use_server": diff --git a/src/api/backend.ts b/src/api/backend.ts index 8d14598..0502cf7 100755 --- a/src/api/backend.ts +++ b/src/api/backend.ts @@ -156,7 +156,6 @@ export const recordSessionEnd = callable<[number], { success: boolean; duration_ export const getSaveSyncSettings = callable<[], SaveSyncSettings>("get_save_sync_settings"); export const updateSaveSyncSettings = callable<[SaveSyncSettings], { success: boolean }>("update_save_sync_settings"); export const getSaveSlots = callable<[number], { success: boolean; slots: SaveSlotSummary[]; active_slot: string }>("get_save_slots"); -export const setGameSlot = callable<[number, string], { success: boolean; active_slot?: string; message?: string }>("set_game_slot"); export const getSlotSaves = callable<[number, string], SlotSavesResponse>("get_slot_saves"); export const switchSlot = callable<[number, string], SwitchSlotResponse>("switch_slot"); diff --git a/src/components/SavesTab.tsx b/src/components/SavesTab.tsx index 3c77c17..46dc193 100644 --- a/src/components/SavesTab.tsx +++ b/src/components/SavesTab.tsx @@ -728,8 +728,6 @@ const SlotPanel: FC = ({ msg = "Sync your saves first — 
local changes haven't been uploaded"; } else if (result.reason === "server_unreachable") { msg = "Can't switch — RomM server is not reachable"; - } else if (result.reason === "unresolved_conflicts") { - msg = "Resolve conflicts before switching slots"; } setSwitchError(msg); if (switchErrorTimerRef.current) clearTimeout(switchErrorTimerRef.current); diff --git a/src/types/index.ts b/src/types/index.ts index 1b222f7..ada56a8 100755 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -343,7 +343,7 @@ export interface SlotSavesResponse { export interface SwitchSlotResponse { success: boolean; - reason?: "pending_uploads" | "unresolved_conflicts" | "server_unreachable" | "sync_disabled" | "not_installed"; + reason?: "pending_uploads" | "server_unreachable" | "sync_disabled" | "not_installed"; files?: string[]; save_status?: SaveStatus; } diff --git a/tests/adapters/romm/test_romm_api.py b/tests/adapters/romm/test_romm_api.py index e0a1a80..edcf686 100644 --- a/tests/adapters/romm/test_romm_api.py +++ b/tests/adapters/romm/test_romm_api.py @@ -103,6 +103,44 @@ def test_with_slot(self): path = client.upload_multipart.call_args[0][0] assert "slot=default" in path + def test_url_encodes_slot_with_special_chars(self): + """Slot with reserved URL characters (&, =, /, ?, +, space) is percent-encoded.""" + api, client = _make_api() + client.upload_multipart.return_value = {"id": 1} + api.upload_save(42, "/tmp/save.srm", "retroarch-mgba", slot="Mom & Dad=draft+1?/x") + path = client.upload_multipart.call_args[0][0] + # raw special chars must NOT appear in the value segment + assert "slot=Mom%20%26%20Dad%3Ddraft%2B1%3F%2Fx" in path + assert "slot=Mom & Dad=draft+1?/x" not in path + + def test_url_encodes_slot_empty_string(self): + """Empty string slot is encoded but still present (caller asked for it).""" + api, client = _make_api() + client.upload_multipart.return_value = {"id": 1} + api.upload_save(42, "/tmp/save.srm", "retroarch-mgba", slot="") + path = 
client.upload_multipart.call_args[0][0] + # empty string serializes as "slot=" — important: still on the URL + assert "slot=" in path + + def test_url_encodes_slot_non_ascii(self): + """Non-ASCII slot (e.g. Japanese) is percent-encoded as UTF-8.""" + api, client = _make_api() + client.upload_multipart.return_value = {"id": 1} + api.upload_save(42, "/tmp/save.srm", "retroarch-mgba", slot="スロット") + path = client.upload_multipart.call_args[0][0] + # UTF-8 of スロット = E382B9 E383AD E38383 E38388 + assert "slot=%E3%82%B9%E3%83%AD%E3%83%83%E3%83%88" in path + assert "スロット" not in path + + def test_url_encodes_device_id_with_special_chars(self): + """device_id is encoded defensively even though it's normally a UUID.""" + api, client = _make_api() + client.upload_multipart.return_value = {"id": 1} + api.upload_save(42, "/tmp/save.srm", "retroarch-mgba", device_id="abc&xyz=1") + path = client.upload_multipart.call_args[0][0] + assert "device_id=abc%26xyz%3D1" in path + assert "device_id=abc&xyz=1" not in path + def test_with_overwrite_true(self): api, client = _make_api() client.upload_multipart.return_value = {"id": 1} @@ -119,11 +157,13 @@ def test_overwrite_false_not_in_query(self): assert "overwrite" not in path def test_encodes_emulator(self): + """Slash in emulator name is encoded (safe="" — house style).""" api, client = _make_api() client.upload_multipart.return_value = {"id": 1} api.upload_save(42, "/tmp/save.srm", "retro arch/core") path = client.upload_multipart.call_args[0][0] - assert "emulator=retro%20arch/core" in path + assert "emulator=retro%20arch%2Fcore" in path + assert "retro arch/core" not in path def test_409_raises_conflict_error(self): """409 from server propagates as RommConflictError.""" @@ -162,6 +202,21 @@ def test_without_device_id_no_query_params(self): api.download_save_content(42, "/tmp/save.srm") client.download.assert_called_once_with("/api/saves/42/content", "/tmp/save.srm") + def test_url_encodes_device_id_ascii_round_trip(self): + 
"""ASCII device_id (UUID-like) survives encoding unchanged.""" + api, client = _make_api() + api.download_save_content(99, "/tmp/save.srm", device_id="abc-123") + url = client.download.call_args[0][0] + assert "device_id=abc-123" in url + + def test_url_encodes_device_id_with_special_chars(self): + """device_id with reserved URL characters is percent-encoded.""" + api, client = _make_api() + api.download_save_content(99, "/tmp/save.srm", device_id="abc&xyz/1") + url = client.download.call_args[0][0] + assert "device_id=abc%26xyz%2F1" in url + assert "device_id=abc&xyz/1" not in url + class TestListCollections: def test_returns_list(self): @@ -260,6 +315,23 @@ def test_url_contains_device_id(self): url = client.put_json.call_args[0][0] assert "xyz-999" in url + def test_url_encodes_device_id_ascii_round_trip(self): + """ASCII device_id (UUID-like) survives encoding unchanged.""" + api, client = _make_api() + client.put_json.return_value = {"id": "abc-123"} + api.update_device("abc-123", client_version="1.0.0") + url = client.put_json.call_args[0][0] + assert url == "/api/devices/abc-123" + + def test_url_encodes_device_id_with_special_chars(self): + """device_id with reserved URL characters is percent-encoded.""" + api, client = _make_api() + client.put_json.return_value = {"id": "abc&xyz/1"} + api.update_device("abc&xyz/1", client_version="1.0.0") + url = client.put_json.call_args[0][0] + assert url == "/api/devices/abc%26xyz%2F1" + assert "abc&xyz/1" not in url + class TestSetVersion: def test_stores_version(self): @@ -300,6 +372,40 @@ def test_non_list_returns_empty(self): client.request.return_value = {"error": "bad"} assert api.list_saves(42, device_id="abc") == [] + def test_url_encodes_slot_with_special_chars(self): + """Slot with reserved URL characters is percent-encoded.""" + api, client = _make_api() + client.request.return_value = [] + api.list_saves(42, slot="Mom & Dad=draft+1?/x") + url = client.request.call_args[0][0] + assert 
"slot=Mom%20%26%20Dad%3Ddraft%2B1%3F%2Fx" in url + assert "slot=Mom & Dad=draft+1?/x" not in url + + def test_url_encodes_slot_ascii_safe_round_trips(self): + """Plain ASCII slot like 'Desktop' is unchanged.""" + api, client = _make_api() + client.request.return_value = [] + api.list_saves(42, slot="Desktop") + url = client.request.call_args[0][0] + assert "slot=Desktop" in url + + def test_url_encodes_slot_non_ascii(self): + """Non-ASCII slot is percent-encoded as UTF-8.""" + api, client = _make_api() + client.request.return_value = [] + api.list_saves(42, slot="スロット") + url = client.request.call_args[0][0] + assert "slot=%E3%82%B9%E3%83%AD%E3%83%83%E3%83%88" in url + + def test_url_encodes_device_id_with_special_chars(self): + """device_id is encoded defensively even though it's normally a UUID.""" + api, client = _make_api() + client.request.return_value = [] + api.list_saves(42, device_id="abc&xyz=1") + url = client.request.call_args[0][0] + assert "device_id=abc%26xyz%3D1" in url + assert "device_id=abc&xyz=1" not in url + class TestGetCurrentUser: def test_calls_users_me_endpoint(self): @@ -477,6 +583,15 @@ def test_with_device_id(self): api.get_save_summary(42, device_id="abc-123") client.request.assert_called_once_with("/api/saves/summary?rom_id=42&device_id=abc-123") + def test_url_encodes_device_id_with_special_chars(self): + """device_id is encoded defensively even though it's normally a UUID.""" + api, client = _make_api() + client.request.return_value = {"total": 0} + api.get_save_summary(42, device_id="abc&xyz=1") + url = client.request.call_args[0][0] + assert "device_id=abc%26xyz%3D1" in url + assert "device_id=abc&xyz=1" not in url + class TestDeleteServerSaves: def test_posts_save_ids(self): diff --git a/tests/domain/test_save_conflicts.py b/tests/domain/test_save_conflicts.py deleted file mode 100644 index dfc1178..0000000 --- a/tests/domain/test_save_conflicts.py +++ /dev/null @@ -1,243 +0,0 @@ -"""Unit tests for domain.save_conflicts — pure 
conflict detection functions.""" - -from __future__ import annotations - -from datetime import UTC, datetime - -from models.saves import SaveConflict - -from domain.save_conflicts import ( - build_conflict_dict, - check_local_changes, - check_server_changes_fast, - determine_action, - resolve_conflict_by_mode, -) - -# --------------------------------------------------------------------------- -# TestCheckLocalChanges -# --------------------------------------------------------------------------- - - -class TestCheckLocalChanges: - def test_same_hash_returns_false(self): - assert check_local_changes("abc123", "abc123") is False - - def test_different_hash_returns_true(self): - assert check_local_changes("abc123", "def456") is True - - def test_empty_local_hash_differs_from_baseline(self): - assert check_local_changes("", "abc123") is True - - def test_both_empty_returns_false(self): - assert check_local_changes("", "") is False - - def test_none_local_hash_differs_from_baseline(self): - assert check_local_changes(None, "abc123") is True - - def test_none_local_hash_matches_none_baseline(self): - # Unusual edge case — both None means equal - assert check_local_changes(None, None) is False # type: ignore[arg-type] - - -# --------------------------------------------------------------------------- -# TestCheckServerChangesFast -# --------------------------------------------------------------------------- - - -class TestCheckServerChangesFast: - def _file_state(self, updated_at="2026-02-17T06:00:00Z", size=1024): - return { - "last_sync_server_updated_at": updated_at, - "last_sync_server_size": size, - } - - def _server_save(self, updated_at="2026-02-17T06:00:00Z", size=1024): - return {"updated_at": updated_at, "file_size_bytes": size} - - def test_timestamp_and_size_match_returns_false(self): - file_state = self._file_state() - server_save = self._server_save() - result = check_server_changes_fast(file_state, server_save) - assert result is False - - def 
test_timestamp_matches_size_differs_returns_true(self): - file_state = self._file_state(size=1024) - server_save = self._server_save(size=2048) - result = check_server_changes_fast(file_state, server_save) - assert result is True - - def test_timestamp_changed_returns_none(self): - file_state = self._file_state(updated_at="2026-02-17T06:00:00Z") - server_save = self._server_save(updated_at="2026-02-17T12:00:00Z") - result = check_server_changes_fast(file_state, server_save) - assert result is None - - def test_no_stored_timestamp_returns_none(self): - file_state = {"last_sync_server_size": 1024} # no last_sync_server_updated_at - server_save = self._server_save() - result = check_server_changes_fast(file_state, server_save) - assert result is None - - def test_empty_file_state_returns_none(self): - result = check_server_changes_fast({}, self._server_save()) - assert result is None - - def test_stored_size_none_timestamp_matches_returns_false(self): - """If stored_size is None (legacy), size check is skipped when timestamp matches.""" - file_state = {"last_sync_server_updated_at": "2026-02-17T06:00:00Z", "last_sync_server_size": None} - server_save = self._server_save(size=2048) - result = check_server_changes_fast(file_state, server_save) - # stored_size is None — condition `stored_size is not None and ...` is False -> unchanged - assert result is False - - def test_server_size_none_timestamp_matches_returns_false(self): - """If server returns no size, size check is skipped.""" - file_state = self._file_state(size=1024) - server_save = {"updated_at": "2026-02-17T06:00:00Z", "file_size_bytes": None} - result = check_server_changes_fast(file_state, server_save) - assert result is False - - -# --------------------------------------------------------------------------- -# TestDetermineAction -# --------------------------------------------------------------------------- - - -class TestDetermineAction: - def test_neither_changed_skips(self): - assert 
determine_action(False, False) == "skip" - - def test_only_server_changed_downloads(self): - assert determine_action(False, True) == "download" - - def test_only_local_changed_uploads(self): - assert determine_action(True, False) == "upload" - - def test_both_changed_conflicts(self): - assert determine_action(True, True) == "conflict" - - -# --------------------------------------------------------------------------- -# TestResolveConflictByMode -# --------------------------------------------------------------------------- - - -class TestResolveConflictByMode: - def _server_save(self, updated_at="2026-02-17T06:00:00Z"): - return {"updated_at": updated_at} - - def test_ask_me_returns_ask(self): - result = resolve_conflict_by_mode("ask_me", 1000.0, self._server_save()) - assert result == "ask" - - def test_always_upload_returns_upload(self): - result = resolve_conflict_by_mode("always_upload", 1000.0, self._server_save()) - assert result == "upload" - - def test_always_download_returns_download(self): - result = resolve_conflict_by_mode("always_download", 1000.0, self._server_save()) - assert result == "download" - - def test_newest_wins_local_newer_uploads(self): - # Server updated at 2026-02-17T06:00:00Z - # local_mtime is after that - server_dt = datetime(2026, 2, 17, 6, 0, 0, tzinfo=UTC) - local_mtime = server_dt.timestamp() + 3600 # 1 hour later - result = resolve_conflict_by_mode("newest_wins", local_mtime, self._server_save(), tolerance=60) - assert result == "upload" - - def test_newest_wins_server_newer_downloads(self): - server_dt = datetime(2026, 2, 17, 6, 0, 0, tzinfo=UTC) - local_mtime = server_dt.timestamp() - 3600 # 1 hour earlier - result = resolve_conflict_by_mode("newest_wins", local_mtime, self._server_save(), tolerance=60) - assert result == "download" - - def test_newest_wins_within_tolerance_asks(self): - server_dt = datetime(2026, 2, 17, 6, 0, 0, tzinfo=UTC) - local_mtime = server_dt.timestamp() + 30 # 30s later, within 60s tolerance - result = 
resolve_conflict_by_mode("newest_wins", local_mtime, self._server_save(), tolerance=60) - assert result == "ask" - - def test_newest_wins_invalid_server_date_asks(self): - result = resolve_conflict_by_mode("newest_wins", 1000.0, {"updated_at": "not-a-date"}, tolerance=60) - assert result == "ask" - - def test_newest_wins_missing_server_date_asks(self): - result = resolve_conflict_by_mode("newest_wins", 1000.0, {}, tolerance=60) - assert result == "ask" - - def test_unknown_mode_falls_through_to_newest_wins(self): - """Unrecognised mode falls through to newest_wins logic.""" - server_dt = datetime(2026, 2, 17, 6, 0, 0, tzinfo=UTC) - local_mtime = server_dt.timestamp() + 3600 - result = resolve_conflict_by_mode("some_future_mode", local_mtime, self._server_save(), tolerance=60) - assert result == "upload" - - -# --------------------------------------------------------------------------- -# TestBuildConflictDict -# --------------------------------------------------------------------------- - - -class TestBuildConflictDict: - def _server_save(self): - return { - "id": 100, - "updated_at": "2026-02-17T06:00:00Z", - "file_size_bytes": 1024, - } - - def test_full_dict_structure(self): - local_mtime = datetime(2026, 2, 17, 5, 0, 0, tzinfo=UTC).timestamp() - local_info = {"path": "/saves/pokemon.srm", "mtime": local_mtime, "size": 1024} - result = build_conflict_dict(42, "pokemon.srm", local_info, "abc123", self._server_save()) - - assert isinstance(result, SaveConflict) - assert result.rom_id == 42 - assert result.filename == "pokemon.srm" - assert result.local_path == "/saves/pokemon.srm" - assert result.local_hash == "abc123" - assert result.local_mtime == "2026-02-17T05:00:00+00:00" - assert result.local_size == 1024 - assert result.server_save_id == 100 - assert result.server_updated_at == "2026-02-17T06:00:00Z" - assert result.server_size == 1024 - assert result.created_at is not None - - def test_no_local_info(self): - result = build_conflict_dict(42, "pokemon.srm", 
None, None, self._server_save()) - assert isinstance(result, SaveConflict) - assert result.local_path is None - assert result.local_hash is None - assert result.local_mtime is None - assert result.local_size is None - - def test_local_mtime_none(self): - local_info = {"path": "/saves/pokemon.srm", "mtime": None, "size": 1024} - result = build_conflict_dict(42, "pokemon.srm", local_info, "abc123", self._server_save()) - assert isinstance(result, SaveConflict) - assert result.local_mtime is None - - def test_missing_local_mtime_key(self): - """local_info without mtime key — treated as None.""" - local_info = {"path": "/saves/pokemon.srm", "size": 1024} - result = build_conflict_dict(42, "pokemon.srm", local_info, "abc123", self._server_save()) - assert isinstance(result, SaveConflict) - assert result.local_mtime is None - - def test_server_missing_optional_fields(self): - server = {"id": 99} - local_info = {"path": "/saves/pokemon.srm", "mtime": None, "size": 0} - result = build_conflict_dict(42, "pokemon.srm", local_info, "hash", server) - assert isinstance(result, SaveConflict) - assert result.server_save_id == 99 - assert result.server_updated_at == "" - assert result.server_size is None - - def test_created_at_is_utc_iso(self): - local_info = {"path": "/saves/pokemon.srm", "mtime": None, "size": 0} - result = build_conflict_dict(1, "f.srm", local_info, None, self._server_save()) - assert isinstance(result, SaveConflict) - # Should parse without error - datetime.fromisoformat(result.created_at) diff --git a/tests/domain/test_save_status.py b/tests/domain/test_save_status.py index 6a53d2a..14b644a 100644 --- a/tests/domain/test_save_status.py +++ b/tests/domain/test_save_status.py @@ -65,3 +65,19 @@ def test_local_path_present_counts_as_local(self): files = [{"status": "skip", "local_path": "/saves/test.srm"}] result = compute_save_sync_display(files, None) assert result == {"status": "synced", "label": "Not synced"} + + def 
test_unparseable_last_check_falls_back_to_not_synced(self): + """Malformed timestamp -> _format_time_ago returns None -> 'Not synced' fallback.""" + files = [{"status": "synced", "local_path": "/saves/test.srm"}] + result = compute_save_sync_display(files, "not-a-date") + assert result == {"status": "synced", "label": "Not synced"} + + def test_naive_timestamp_treated_as_utc(self): + """A naive ISO timestamp (no tzinfo) is treated as UTC, not crashed on.""" + # Recent enough that we get a sane label rather than something like '20000d ago'. + recent_naive = (datetime.now(UTC) - timedelta(minutes=5)).replace(tzinfo=None).isoformat() + files = [{"status": "synced", "local_path": "/saves/test.srm"}] + result = compute_save_sync_display(files, recent_naive) + assert result["status"] == "synced" + # Allow a small tolerance band — clock-edge flake protection. + assert result["label"] in {"4m ago", "5m ago", "6m ago"} diff --git a/tests/lib/test_iso_time.py b/tests/lib/test_iso_time.py new file mode 100644 index 0000000..cbf287d --- /dev/null +++ b/tests/lib/test_iso_time.py @@ -0,0 +1,138 @@ +"""Unit tests for lib.iso_time — ISO-8601 timestamp helpers.""" + +from __future__ import annotations + +from datetime import UTC, datetime, timedelta, timezone + +from lib.iso_time import parse_iso, parse_iso_to_epoch + + +class TestParseIso: + def test_parses_z_suffix(self): + dt = parse_iso("2024-01-15T12:00:00Z") + assert dt is not None + assert dt == datetime(2024, 1, 15, 12, 0, 0, tzinfo=UTC) + assert dt.tzinfo is not None + + def test_parses_explicit_utc_offset(self): + dt = parse_iso("2024-01-15T12:00:00+00:00") + assert dt is not None + assert dt == datetime(2024, 1, 15, 12, 0, 0, tzinfo=UTC) + + def test_parses_microseconds(self): + dt = parse_iso("2024-01-15T12:00:00.123456+00:00") + assert dt is not None + assert dt == datetime(2024, 1, 15, 12, 0, 0, 123456, tzinfo=UTC) + + def test_parses_non_utc_offset(self): + dt = parse_iso("2024-01-15T12:00:00+02:00") + assert dt is 
not None
+        # Same instant in UTC = 10:00
+        assert dt.utcoffset() == timedelta(hours=2)
+        assert dt.astimezone(UTC) == datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC)
+
+    def test_parses_negative_offset(self):
+        dt = parse_iso("2024-01-15T12:00:00-05:00")
+        assert dt is not None
+        assert dt.utcoffset() == timedelta(hours=-5)
+
+    def test_empty_string_returns_none(self):
+        assert parse_iso("") is None
+
+    def test_none_returns_none(self):
+        assert parse_iso(None) is None
+
+    def test_garbage_returns_none(self):
+        assert parse_iso("not-a-date") is None
+
+    def test_invalid_components_return_none(self):
+        # Month 13, day 99, hour 99 etc — all out of range
+        assert parse_iso("2024-13-99T99:99:99Z") is None
+
+    def test_naive_iso_returns_naive_datetime(self):
+        # Documented behavior: helper does NOT force a timezone.
+        # datetime.fromisoformat("2024-01-15T12:00:00") returns naive dt;
+        # callers that need an aware dt must replace tzinfo themselves.
+        dt = parse_iso("2024-01-15T12:00:00")
+        assert dt is not None
+        assert dt.tzinfo is None
+        assert dt == datetime(2024, 1, 15, 12, 0, 0)
+
+    def test_only_z_suffix_normalisation(self):
+        """Trailing Z is normalised to +00:00. 
Lowercase 'z' is not + a documented input format; we test only uppercase Z (the contract).""" + dt = parse_iso("2024-12-31T23:59:59Z") + assert dt is not None + assert dt.tzinfo is not None + + +class TestParseIsoToEpoch: + def test_z_suffix_epoch(self): + # 2024-01-15T12:00:00Z = 1705320000 + assert parse_iso_to_epoch("2024-01-15T12:00:00Z") == 1705320000.0 + + def test_with_offset(self): + # 2024-01-15T14:00:00+02:00 == 12:00 UTC == 1705320000 + assert parse_iso_to_epoch("2024-01-15T14:00:00+02:00") == 1705320000.0 + + def test_microseconds_included_in_epoch(self): + v = parse_iso_to_epoch("2024-01-15T12:00:00.500000+00:00") + assert v is not None + assert abs(v - 1705320000.5) < 1e-6 + + def test_empty_returns_none(self): + assert parse_iso_to_epoch("") is None + + def test_none_returns_none(self): + assert parse_iso_to_epoch(None) is None + + def test_garbage_returns_none(self): + assert parse_iso_to_epoch("not-a-date") is None + + def test_consistency_with_parse_iso(self): + """parse_iso_to_epoch(x) must equal parse_iso(x).timestamp() when not None.""" + for value in ( + "2024-01-15T12:00:00Z", + "2024-01-15T12:00:00+00:00", + "2024-01-15T12:00:00.123456+00:00", + "2024-06-30T08:30:45-07:00", + ): + dt = parse_iso(value) + epoch = parse_iso_to_epoch(value) + assert dt is not None + assert epoch is not None + assert epoch == dt.timestamp() + + def test_naive_iso_uses_local_time(self): + """Naive ISO -> naive datetime; .timestamp() interprets it as local time. 
+ + We don't pin a specific epoch (CI tz varies); we just assert it + round-trips through parse_iso consistently.""" + v_naive = "2024-01-15T12:00:00" + epoch = parse_iso_to_epoch(v_naive) + dt = parse_iso(v_naive) + assert epoch is not None + assert dt is not None + assert epoch == dt.timestamp() + + def test_failure_chain_returns_none(self): + """If parse_iso returns None, parse_iso_to_epoch must too.""" + for bad in ("", None, "garbage", "2024-13-01T00:00:00Z"): + assert parse_iso_to_epoch(bad) is None + assert parse_iso(bad) is None + + +class TestEdgeOffsets: + def test_far_future_utc(self): + dt = parse_iso("2099-12-31T23:59:59+00:00") + assert dt is not None + assert dt.tzinfo == UTC + + def test_unusual_offset(self): + # +05:45 (Nepal) — not a multiple-of-hour offset + dt = parse_iso("2024-01-15T12:00:00+05:45") + assert dt is not None + assert dt.utcoffset() == timedelta(hours=5, minutes=45) + # Compare with a manual aware dt for equality + expected = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone(timedelta(hours=5, minutes=45))) + assert dt == expected diff --git a/tests/services/test_playtime.py b/tests/services/test_playtime.py index 1ce6015..f9592b2 100644 --- a/tests/services/test_playtime.py +++ b/tests/services/test_playtime.py @@ -86,6 +86,21 @@ async def test_end_without_start(self): result = await svc.record_session_end(42) assert result["success"] is False + @pytest.mark.asyncio + async def test_end_with_unparseable_start(self): + """Malformed last_session_start -> parse_iso returns None -> failure.""" + svc, _, state, _ = make_service() + state["playtime"]["42"] = { + "total_seconds": 0, + "session_count": 0, + "last_session_start": "not-a-date", + "last_session_duration_sec": None, + "offline_deltas": [], + } + result = await svc.record_session_end(42) + assert result["success"] is False + assert "Failed to calculate session duration" in result["message"] + @pytest.mark.asyncio async def test_multiple_sessions_accumulate(self): svc, _, state, 
_ = make_service()