From 5b055308517ed816dc1f3a748a20beb7056c9d84 Mon Sep 17 00:00:00 2001 From: mrjarnould Date: Sun, 22 Mar 2026 22:56:51 +0100 Subject: [PATCH 1/3] Add CloudKit-backed reminders and notes services --- .gitignore | 8 + README.md | 759 ++++- buf.yaml | 13 + example_reminders.py | 1380 ++++++++ example_reminders_delta.py | 420 +++ examples/notes_cli.py | 425 +++ pyicloud/base.py | 38 +- pyicloud/common/__init__.py | 5 + pyicloud/common/cloudkit/__init__.py | 69 + pyicloud/common/cloudkit/base.py | 57 + pyicloud/common/cloudkit/models.py | 940 ++++++ pyicloud/common/models.py | 37 + .../models/services/account/account_models.py | 420 +++ .../services/calendar/missing_operations.md | 128 + .../hidemyemail/hidemyemail_models.py | 661 ++++ .../services/hidemyemail/hme_list_test.py | 52 + pyicloud/services/__init__.py | 2 + pyicloud/services/notes/__init__.py | 15 + pyicloud/services/notes/client.py | 471 +++ pyicloud/services/notes/decoding.py | 93 + pyicloud/services/notes/domain.py | 18 + pyicloud/services/notes/models/__init__.py | 13 + pyicloud/services/notes/models/_ck_base.py | 53 + pyicloud/services/notes/models/cloudkit.py | 810 +++++ pyicloud/services/notes/models/constants.py | 29 + pyicloud/services/notes/models/dto.py | 78 + pyicloud/services/notes/protobuf/__init__.py | 0 pyicloud/services/notes/protobuf/notes.proto | 239 ++ pyicloud/services/notes/protobuf/notes_pb2.py | 99 + .../services/notes/protobuf/notes_pb2.pyi | 539 ++++ pyicloud/services/notes/rendering/__init__.py | 7 + .../services/notes/rendering/attachments.py | 362 +++ .../services/notes/rendering/ck_datasource.py | 319 ++ .../services/notes/rendering/debug_tools.py | 162 + pyicloud/services/notes/rendering/exporter.py | 615 ++++ pyicloud/services/notes/rendering/options.py | 76 + pyicloud/services/notes/rendering/renderer.py | 923 ++++++ .../notes/rendering/renderer_iface.py | 43 + .../services/notes/rendering/table_builder.py | 284 ++ pyicloud/services/notes/service.py | 962 ++++++ 
pyicloud/services/reminders.py | 128 - pyicloud/services/reminders/__init__.py | 19 + pyicloud/services/reminders/_constants.py | 9 + pyicloud/services/reminders/_mappers.py | 345 ++ pyicloud/services/reminders/_protocol.py | 211 ++ pyicloud/services/reminders/_reads.py | 469 +++ pyicloud/services/reminders/_support.py | 71 + pyicloud/services/reminders/_writes.py | 1093 +++++++ pyicloud/services/reminders/client.py | 235 ++ .../services/reminders/models/__init__.py | 32 + pyicloud/services/reminders/models/domain.py | 151 + pyicloud/services/reminders/models/results.py | 29 + .../services/reminders/protobuf/__init__.py | 1 + .../reminders/protobuf/reminders.proto | 159 + .../reminders/protobuf/reminders_pb2.py | 69 + .../services/reminders/protobuf/typedef.json | 76 + .../services/reminders/protobuf/typedef.py | 13 + .../protobuf/versioned_document.proto | 24 + .../protobuf/versioned_document_pb2.py | 35 + pyicloud/services/reminders/service.py | 437 +++ pyproject.toml | 11 + requirements.txt | 3 + requirements_test.txt | 1 + tests/fixtures/note_fixture.json | 590 ++++ tests/services/test_reminders.py | 194 +- tests/services/test_reminders_cloudkit.py | 2856 +++++++++++++++++ tests/test_base.py | 110 + tests/test_example_reminders_delta.py | 70 + tests/test_notes.py | 523 +++ tests/test_notes_cli.py | 193 ++ tests/test_notes_rendering.py | 578 ++++ 71 files changed, 20104 insertions(+), 255 deletions(-) create mode 100644 buf.yaml create mode 100644 example_reminders.py create mode 100644 example_reminders_delta.py create mode 100644 examples/notes_cli.py create mode 100644 pyicloud/common/__init__.py create mode 100644 pyicloud/common/cloudkit/__init__.py create mode 100644 pyicloud/common/cloudkit/base.py create mode 100644 pyicloud/common/cloudkit/models.py create mode 100644 pyicloud/common/models.py create mode 100644 pyicloud/models/services/account/account_models.py create mode 100644 pyicloud/models/services/calendar/missing_operations.md create mode 
100644 pyicloud/models/services/hidemyemail/hidemyemail_models.py create mode 100644 pyicloud/models/services/hidemyemail/hme_list_test.py create mode 100644 pyicloud/services/notes/__init__.py create mode 100644 pyicloud/services/notes/client.py create mode 100644 pyicloud/services/notes/decoding.py create mode 100644 pyicloud/services/notes/domain.py create mode 100644 pyicloud/services/notes/models/__init__.py create mode 100644 pyicloud/services/notes/models/_ck_base.py create mode 100644 pyicloud/services/notes/models/cloudkit.py create mode 100644 pyicloud/services/notes/models/constants.py create mode 100644 pyicloud/services/notes/models/dto.py create mode 100644 pyicloud/services/notes/protobuf/__init__.py create mode 100644 pyicloud/services/notes/protobuf/notes.proto create mode 100644 pyicloud/services/notes/protobuf/notes_pb2.py create mode 100644 pyicloud/services/notes/protobuf/notes_pb2.pyi create mode 100644 pyicloud/services/notes/rendering/__init__.py create mode 100644 pyicloud/services/notes/rendering/attachments.py create mode 100644 pyicloud/services/notes/rendering/ck_datasource.py create mode 100644 pyicloud/services/notes/rendering/debug_tools.py create mode 100644 pyicloud/services/notes/rendering/exporter.py create mode 100644 pyicloud/services/notes/rendering/options.py create mode 100644 pyicloud/services/notes/rendering/renderer.py create mode 100644 pyicloud/services/notes/rendering/renderer_iface.py create mode 100644 pyicloud/services/notes/rendering/table_builder.py create mode 100644 pyicloud/services/notes/service.py delete mode 100644 pyicloud/services/reminders.py create mode 100644 pyicloud/services/reminders/__init__.py create mode 100644 pyicloud/services/reminders/_constants.py create mode 100644 pyicloud/services/reminders/_mappers.py create mode 100644 pyicloud/services/reminders/_protocol.py create mode 100644 pyicloud/services/reminders/_reads.py create mode 100644 pyicloud/services/reminders/_support.py create mode 
100644 pyicloud/services/reminders/_writes.py create mode 100644 pyicloud/services/reminders/client.py create mode 100644 pyicloud/services/reminders/models/__init__.py create mode 100644 pyicloud/services/reminders/models/domain.py create mode 100644 pyicloud/services/reminders/models/results.py create mode 100644 pyicloud/services/reminders/protobuf/__init__.py create mode 100644 pyicloud/services/reminders/protobuf/reminders.proto create mode 100644 pyicloud/services/reminders/protobuf/reminders_pb2.py create mode 100644 pyicloud/services/reminders/protobuf/typedef.json create mode 100644 pyicloud/services/reminders/protobuf/typedef.py create mode 100644 pyicloud/services/reminders/protobuf/versioned_document.proto create mode 100644 pyicloud/services/reminders/protobuf/versioned_document_pb2.py create mode 100644 pyicloud/services/reminders/service.py create mode 100644 tests/fixtures/note_fixture.json create mode 100644 tests/services/test_reminders_cloudkit.py create mode 100644 tests/test_example_reminders_delta.py create mode 100644 tests/test_notes.py create mode 100644 tests/test_notes_cli.py create mode 100644 tests/test_notes_rendering.py diff --git a/.gitignore b/.gitignore index c63c729a..93697ee2 100644 --- a/.gitignore +++ b/.gitignore @@ -78,3 +78,11 @@ uv.lock fetch_devices_*.py *.jpg /test*.py + +# Workspace +/workspace/ + +# Security excludes +*_examples/ +sample_*.json +test_real_data.py diff --git a/README.md b/README.md index 67f41f07..206b5cbc 100644 --- a/README.md +++ b/README.md @@ -921,7 +921,762 @@ deleted = api.hidemyemail.delete(anonymous_id) print(f"Deleted alias: {deleted}") ``` +## Reminders + +You can access your iCloud Reminders through the `reminders` property: + +```python +reminders = api.reminders +``` + +The high-level Reminders service exposes typed list, reminder, alarm, hashtag, +attachment, and recurrence-rule models for both snapshot reads and mutations. 
+ +_List reminder lists:_ + +```python +for lst in api.reminders.lists(): + print(lst.id, lst.title, lst.color, lst.count) +``` + +_List reminders globally or within one list:_ + +```python +reminders = api.reminders + +target_list = next(iter(reminders.lists()), None) +if target_list: + for reminder in reminders.reminders(list_id=target_list.id): + print(reminder.id, reminder.title, reminder.completed) + +for reminder in reminders.reminders(): + print(reminder.title) +``` + +_Fetch one reminder by ID:_ + +```python +reminder_id = "YOUR_REMINDER_ID" +reminder = api.reminders.get(reminder_id) + +print(reminder.title) +print(reminder.desc) +print(reminder.due_date) +``` + +_Create, update, and delete a reminder:_ + +```python +from datetime import datetime, timedelta, timezone + +reminders = api.reminders +target_list = next(iter(reminders.lists()), None) +if target_list is None: + raise RuntimeError("No reminder lists found") + +created = reminders.create( + list_id=target_list.id, + title="Buy milk", + desc="2 percent", + due_date=datetime.now(timezone.utc) + timedelta(days=1), + priority=1, + flagged=True, +) + +created.desc = "2 percent organic" +created.completed = True +reminders.update(created) + +fresh = reminders.get(created.id) +reminders.delete(fresh) +``` + +`priority` uses Apple's numeric values. Common values are `0` (none), `1` +(high), `5` (medium), and `9` (low). 
+ +_Work with a compound list snapshot:_ + +```python +reminders = api.reminders +target_list = next(iter(reminders.lists()), None) +if target_list is None: + raise RuntimeError("No reminder lists found") + +result = api.reminders.list_reminders( + list_id=target_list.id, + include_completed=True, + results_limit=200, +) + +print(len(result.reminders)) +print(result.alarms.keys()) +print(result.attachments.keys()) +print(result.hashtags.keys()) +``` + +`list_reminders()` returns a `ListRemindersResult` containing: + +- `reminders` +- `alarms` +- `triggers` +- `attachments` +- `hashtags` +- `recurrence_rules` + +_Track incremental changes:_ + +```python +reminders = api.reminders + +# Earlier run: capture and persist a cursor somewhere durable. +cursor = reminders.sync_cursor() +# save cursor to disk / database here + +# Later run: reload the previously saved cursor from disk / database. +loaded_cursor = stored_cursor_value +for event in reminders.iter_changes(since=loaded_cursor): + print(event.type, event.reminder_id) + if event.reminder is not None: + print(event.reminder.title) + +# After processing, persist the new high-water mark for the next run. +next_cursor = reminders.sync_cursor() +``` + +`iter_changes(since=...)` yields `ReminderChangeEvent` objects. Updated +reminders include a hydrated `reminder` payload. Deleted events may still carry +`event.reminder` for soft-deleted records; only true tombstones guarantee +`event.reminder is None`, in which case you should rely on `event.reminder_id`. 
+ +_Add location triggers and inspect alarms:_ + +```python +from pyicloud.services.reminders.models import Proximity + +reminders = api.reminders +reminder = next(iter(reminders.reminders()), None) +if reminder is None: + raise RuntimeError("No reminders found") + +alarm, trigger = reminders.add_location_trigger( + reminder, + title="Office", + address="1 Infinite Loop, Cupertino, CA", + latitude=37.3318, + longitude=-122.0312, + radius=150.0, + proximity=Proximity.ARRIVING, +) + +for row in reminders.alarms_for(reminder): + print(row.alarm.id, row.trigger.id if row.trigger else None) +``` + +_Add hashtags, URL attachments, and recurrence rules:_ + +```python +from pyicloud.services.reminders.models import RecurrenceFrequency + +reminders = api.reminders +reminder = next(iter(reminders.reminders()), None) +if reminder is None: + raise RuntimeError("No reminders found") + +hashtag = reminders.create_hashtag(reminder, "errands") +attachment = reminders.create_url_attachment( + reminder, + url="https://example.com/checklist", +) +rule = reminders.create_recurrence_rule( + reminder, + frequency=RecurrenceFrequency.WEEKLY, + interval=1, +) + +print(reminders.tags_for(reminder)) +print(reminders.attachments_for(reminder)) +print(reminders.recurrence_rules_for(reminder)) +``` + +You can also update and delete related records: + +```python +reminders.update_attachment(attachment, url="https://example.org/checklist") +reminders.update_recurrence_rule(rule, interval=2) +reminders.delete_hashtag(reminder, hashtag) +reminders.delete_attachment(reminder, attachment) +reminders.delete_recurrence_rule(reminder, rule) +``` + +Reminders caveats: + +- Reminder mutations operate on typed models. The normal pattern is to fetch a + reminder, mutate fields locally, then call `update(reminder)`. +- Naive `datetime` values passed to `create()` are interpreted as UTC by the + service. 
+- `update_hashtag()` exists, but the iCloud Reminders web app currently treats + hashtag names as effectively read-only in some live flows, so rename behavior + may not be reflected consistently outside the API. + +### Reminders Example Scripts + +[`example_reminders.py`](example_reminders.py) is a comprehensive live +integration validator for the Reminders service. It exercises list discovery, +read paths, write paths, location triggers, hashtags, attachments, recurrence +rules, and delete flows against a real iCloud account. + +[`example_reminders_delta.py`](example_reminders_delta.py) is a smaller live +validator focused on `sync_cursor()` and `iter_changes(since=...)`. + +## Notes + +You can access your iCloud Notes through the `notes` property: + +```python +notes = api.notes +``` + +The high-level Notes service exposes typed note, folder, and attachment models +for common workflows such as recent-note listings, full-note retrieval, HTML +rendering, and on-disk exports. Prefer `api.notes` for normal use and treat +`api.notes.raw` as an advanced/debug escape hatch when you need direct access to +the underlying CloudKit client. + +_List recent notes:_ + +```python +notes = api.notes + +for summary in notes.recents(limit=10): + print(summary.id, summary.title, summary.modified_at) +``` + +_Iterate folders and list notes in one folder:_ + +```python +notes = api.notes + +folder = next(iter(notes.folders()), None) +if folder: + print(folder.id, folder.name, folder.has_subfolders) + for summary in notes.in_folder(folder.id, limit=5): + print(summary.title) +``` + +_Iterate all notes or capture a sync cursor for later incremental work:_ + +```python +notes = api.notes + +for summary in notes.iter_all(): + print(summary.id, summary.title) + +cursor = notes.sync_cursor() +print(cursor) +``` + +Persist the sync cursor from `sync_cursor()` and pass it back to +`iter_all(since=...)` or `iter_changes(since=...)` on a later run to enumerate +only newer changes. 
+ +_Fetch a full note with attachment metadata:_ + +```python +note_id = "YOUR_NOTE_ID" +note = api.notes.get(note_id, with_attachments=True) + +print(note.title) +print(note.text) + +for attachment in note.attachments or []: + print(attachment.id, attachment.filename, attachment.uti, attachment.size) +``` + +_Render a note to an HTML fragment:_ + +```python +html_fragment = api.notes.render_note( + note_id, + preview_appearance="light", + pdf_object_height=600, +) + +print(html_fragment[:200]) +``` + +`render_note()` returns an HTML fragment string and does not download assets or +write files to disk. + +_Export a note to HTML on disk:_ + +```python +path = api.notes.export_note( + note_id, + "./exports/notes_html", + export_mode="archival", + assets_dir="./exports/assets", + full_page=True, +) + +print(path) +``` + +`export_note()` accepts `ExportConfig` keyword arguments such as +`export_mode`, `assets_dir`, `full_page`, `preview_appearance`, and +`pdf_object_height`. + +- `export_mode="archival"` downloads assets locally and rewrites the HTML to + use local file references for stable, offline-friendly output. +- `export_mode="lightweight"` skips local downloads and keeps remote/preview + asset references for quick inspection. + +_Save or stream an attachment:_ + +```python +note = api.notes.get(note_id, with_attachments=True) +attachment = next(iter(note.attachments or []), None) + +if attachment: + saved_path = attachment.save_to("./exports/notes_attachments", service=api.notes) + print(saved_path) + + with open("./attachment-copy.bin", "wb") as file_out: + for chunk in attachment.stream(service=api.notes): + file_out.write(chunk) +``` + +Notes caveats: + +- `get()` raises `NoteLockedError` for passphrase-locked notes whose content + cannot be read. +- `get()`, `render_note()`, and `export_note()` raise `NoteNotFound` when the + note ID does not exist. +- `api.notes.raw` is available for advanced/debug workflows, but it is not the + primary Notes API surface. 
+ +### Notes CLI Example + +[`examples/notes_cli.py`](examples/notes_cli.py) is a local developer utility +built on top of `api.notes`. It is useful for searching notes, inspecting the +rendering pipeline, and exporting HTML, but its selection heuristics and debug +output are convenience behavior rather than part of the Notes service contract. + +_Archival export (downloads local assets):_ + +```bash +uv run python examples/notes_cli.py \ + --username you@example.com \ + --title "My Note" \ + --max 1 \ + --output-dir ./exports/notes_html \ + --assets-dir ./exports/assets \ + --export-mode archival \ + --full-page +``` + +_Lightweight export (skips local asset downloads):_ + +```bash +uv run python examples/notes_cli.py \ + --username you@example.com \ + --title-contains "meeting" \ + --max 3 \ + --output-dir ./exports/notes_html \ + --export-mode lightweight +``` + +Important CLI flags: + +- `--title` filters by exact note title. +- `--title-contains` filters by case-insensitive title substring. +- `--max` limits how many matching notes are exported. +- `--output-dir` selects the directory for saved HTML output. +- `--export-mode archival|lightweight` controls whether assets are downloaded + locally (`archival`) or left as remote/preview references (`lightweight`). +- `--assets-dir` selects the base directory for downloaded assets in archival + mode. +- `--full-page` wraps saved output in a complete HTML page. If omitted, the CLI + saves an HTML fragment. +- `--notes-debug` enables verbose Notes/export debugging. +- `--dump-runs` prints attribute runs and writes an annotated mapping under + `workspace/notes_runs`. +- `--preview-appearance light|dark` selects the preferred preview variant when + multiple appearances are available. +- `--pdf-height` sets the pixel height for embedded PDF `` elements. + +`--download-assets` is no longer supported in the example CLI. Use +`--export-mode` to choose between archival and lightweight export behavior. 
+ +## Reminders + +You can access your iCloud Reminders through the `reminders` property: + +```python +reminders = api.reminders +``` + +The high-level Reminders service exposes typed list, reminder, alarm, hashtag, +attachment, and recurrence-rule models for both snapshot reads and mutations. + +_List reminder lists:_ + +```python +for lst in api.reminders.lists(): + print(lst.id, lst.title, lst.color, lst.count) +``` + +_List reminders globally or within one list:_ + +```python +reminders = api.reminders + +target_list = next(iter(reminders.lists()), None) +if target_list: + for reminder in reminders.reminders(list_id=target_list.id): + print(reminder.id, reminder.title, reminder.completed) + +for reminder in reminders.reminders(): + print(reminder.title) +``` + +_Fetch one reminder by ID:_ + +```python +reminder_id = "YOUR_REMINDER_ID" +reminder = api.reminders.get(reminder_id) + +print(reminder.title) +print(reminder.desc) +print(reminder.due_date) +``` + +_Create, update, and delete a reminder:_ + +```python +from datetime import datetime, timedelta, timezone + +reminders = api.reminders +target_list = next(iter(reminders.lists())) + +created = reminders.create( + list_id=target_list.id, + title="Buy milk", + desc="2 percent", + due_date=datetime.now(timezone.utc) + timedelta(days=1), + priority=1, + flagged=True, +) + +created.desc = "2 percent organic" +created.completed = True +reminders.update(created) + +fresh = reminders.get(created.id) +reminders.delete(fresh) +``` + +`priority` uses Apple's numeric values. Common values are `0` (none), `1` +(high), `5` (medium), and `9` (low). 
+ +_Work with a compound list snapshot:_ + +```python +reminders = api.reminders +target_list = next(iter(reminders.lists())) + +result = api.reminders.list_reminders( + list_id=target_list.id, + include_completed=True, + results_limit=200, +) + +print(len(result.reminders)) +print(result.alarms.keys()) +print(result.attachments.keys()) +print(result.hashtags.keys()) +``` + +`list_reminders()` returns a `ListRemindersResult` containing: + +- `reminders` +- `alarms` +- `triggers` +- `attachments` +- `hashtags` +- `recurrence_rules` + +_Track incremental changes:_ + +```python +reminders = api.reminders + +cursor = reminders.sync_cursor() + +for event in reminders.iter_changes(since=cursor): + print(event.type, event.reminder_id) + if event.reminder is not None: + print(event.reminder.title) +``` + +`iter_changes(since=...)` yields `ReminderChangeEvent` objects. Updated +reminders include a hydrated `reminder` payload; deleted reminders only include +the `reminder_id`. + +_Add location triggers and inspect alarms:_ + +```python +from pyicloud.services.reminders.models import Proximity + +reminders = api.reminders +reminder = next(iter(reminders.reminders())) + +alarm, trigger = reminders.add_location_trigger( + reminder, + title="Office", + address="1 Infinite Loop, Cupertino, CA", + latitude=37.3318, + longitude=-122.0312, + radius=150.0, + proximity=Proximity.ARRIVING, +) + +for row in reminders.alarms_for(reminder): + print(row.alarm.id, row.trigger.id if row.trigger else None) +``` + +_Add hashtags, URL attachments, and recurrence rules:_ + +```python +from pyicloud.services.reminders.models import RecurrenceFrequency + +reminders = api.reminders +reminder = next(iter(reminders.reminders())) + +hashtag = reminders.create_hashtag(reminder, "errands") +attachment = reminders.create_url_attachment( + reminder, + url="https://example.com/checklist", +) +rule = reminders.create_recurrence_rule( + reminder, + frequency=RecurrenceFrequency.WEEKLY, + interval=1, +) + 
+print(reminders.tags_for(reminder)) +print(reminders.attachments_for(reminder)) +print(reminders.recurrence_rules_for(reminder)) +``` + +You can also update and delete related records: + +```python +reminders.update_attachment(attachment, url="https://example.org/checklist") +reminders.update_recurrence_rule(rule, interval=2) +reminders.delete_hashtag(reminder, hashtag) +reminders.delete_attachment(reminder, attachment) +reminders.delete_recurrence_rule(reminder, rule) +``` + +Reminders caveats: + +- Reminder mutations operate on typed models. The normal pattern is to fetch a + reminder, mutate fields locally, then call `update(reminder)`. +- Naive `datetime` values passed to `create()` are interpreted as UTC by the + service. +- `update_hashtag()` exists, but the iCloud Reminders web app currently treats + hashtag names as effectively read-only in some live flows, so rename behavior + may not be reflected consistently outside the API. + +### Reminders Example Scripts + +[`example_reminders.py`](example_reminders.py) is a comprehensive live +integration validator for the Reminders service. It exercises list discovery, +read paths, write paths, location triggers, hashtags, attachments, recurrence +rules, and delete flows against a real iCloud account. + +[`example_reminders_delta.py`](example_reminders_delta.py) is a smaller live +validator focused on `sync_cursor()` and `iter_changes(since=...)`. + +## Notes + +You can access your iCloud Notes through the `notes` property: + +```python +notes = api.notes +``` + +The high-level Notes service exposes typed note, folder, and attachment models +for common workflows such as recent-note listings, full-note retrieval, HTML +rendering, and on-disk exports. Prefer `api.notes` for normal use and treat +`api.notes.raw` as an advanced/debug escape hatch when you need direct access to +the underlying CloudKit client. 
+ +_List recent notes:_ + +```python +notes = api.notes + +for summary in notes.recents(limit=10): + print(summary.id, summary.title, summary.modified_at) +``` + +_Iterate folders and list notes in one folder:_ + +```python +notes = api.notes + +folder = next(iter(notes.folders()), None) +if folder: + print(folder.id, folder.name, folder.has_subfolders) + for summary in notes.in_folder(folder.id, limit=5): + print(summary.title) +``` + +_Iterate all notes or capture a sync cursor for later incremental work:_ + +```python +notes = api.notes + +for summary in notes.iter_all(): + print(summary.id, summary.title) + +cursor = notes.sync_cursor() +print(cursor) +``` + +Persist the sync cursor from `sync_cursor()` and pass it back to +`iter_all(since=...)` or `iter_changes(since=...)` on a later run to enumerate +only newer changes. + +_Fetch a full note with attachment metadata:_ + +```python +note_id = "YOUR_NOTE_ID" +note = api.notes.get(note_id, with_attachments=True) + +print(note.title) +print(note.text) + +for attachment in note.attachments or []: + print(attachment.id, attachment.filename, attachment.uti, attachment.size) +``` + +_Render a note to an HTML fragment:_ + +```python +html_fragment = api.notes.render_note( + note_id, + preview_appearance="light", + pdf_object_height=600, +) + +print(html_fragment[:200]) +``` + +`render_note()` returns an HTML fragment string and does not download assets or +write files to disk. + +_Export a note to HTML on disk:_ + +```python +path = api.notes.export_note( + note_id, + "./exports/notes_html", + export_mode="archival", + assets_dir="./exports/assets", + full_page=True, +) + +print(path) +``` + +`export_note()` accepts `ExportConfig` keyword arguments such as +`export_mode`, `assets_dir`, `full_page`, `preview_appearance`, and +`pdf_object_height`. + +- `export_mode="archival"` downloads assets locally and rewrites the HTML to + use local file references for stable, offline-friendly output. 
+- `export_mode="lightweight"` skips local downloads and keeps remote/preview + asset references for quick inspection. + +_Save or stream an attachment:_ + +```python +note = api.notes.get(note_id, with_attachments=True) +attachment = next(iter(note.attachments or []), None) + +if attachment: + saved_path = attachment.save_to("./exports/notes_attachments", service=api.notes) + print(saved_path) + + with open("./attachment-copy.bin", "wb") as file_out: + for chunk in attachment.stream(service=api.notes): + file_out.write(chunk) +``` + +Notes caveats: + +- `get()` raises `NoteLockedError` for passphrase-locked notes whose content + cannot be read. +- `get()`, `render_note()`, and `export_note()` raise `NoteNotFound` when the + note ID does not exist. +- `api.notes.raw` is available for advanced/debug workflows, but it is not the + primary Notes API surface. + +### Notes CLI Example + +[`examples/notes_cli.py`](examples/notes_cli.py) is a local developer utility +built on top of `api.notes`. It is useful for searching notes, inspecting the +rendering pipeline, and exporting HTML, but its selection heuristics and debug +output are convenience behavior rather than part of the Notes service contract. + +_Archival export (downloads local assets):_ + +```bash +uv run python examples/notes_cli.py \ + --username you@example.com \ + --title "My Note" \ + --max 1 \ + --output-dir ./exports/notes_html \ + --assets-dir ./exports/assets \ + --export-mode archival \ + --full-page +``` + +_Lightweight export (skips local asset downloads):_ + +```bash +uv run python examples/notes_cli.py \ + --username you@example.com \ + --title-contains "meeting" \ + --max 3 \ + --output-dir ./exports/notes_html \ + --export-mode lightweight +``` + +Important CLI flags: + +- `--title` filters by exact note title. +- `--title-contains` filters by case-insensitive title substring. +- `--max` limits how many matching notes are exported. +- `--output-dir` selects the directory for saved HTML output. 
+- `--export-mode archival|lightweight` controls whether assets are downloaded + locally (`archival`) or left as remote/preview references (`lightweight`). +- `--assets-dir` selects the base directory for downloaded assets in archival + mode. +- `--full-page` wraps saved output in a complete HTML page. If omitted, the CLI + saves an HTML fragment. +- `--notes-debug` enables verbose Notes/export debugging. +- `--dump-runs` prints attribute runs and writes an annotated mapping under + `workspace/notes_runs`. +- `--preview-appearance light|dark` selects the preferred preview variant when + multiple appearances are available. +- `--pdf-height` sets the pixel height for embedded PDF `` elements. + +`--download-assets` is no longer supported in the example CLI. Use +`--export-mode` to choose between archival and lightweight export behavior. + ## Examples -If you want to see some code samples, see the [examples](/examples.py). -` +If you want to see some code samples, see the [examples](examples.py). diff --git a/buf.yaml b/buf.yaml new file mode 100644 index 00000000..9158dd3a --- /dev/null +++ b/buf.yaml @@ -0,0 +1,13 @@ +version: v1 + +lint: + use: + - DEFAULT + ignore_only: + PACKAGE_DIRECTORY_MATCH: + # Intentional: package name mirrors Apple Notes wire format (`notes`). + - pyicloud/services/notes/protobuf/notes.proto + # Intentional: package name mirrors Apple Reminders wire format (`topotext`). + - pyicloud/services/reminders/protobuf/reminders.proto + # Intentional: package name mirrors Apple Reminders wire format (`versioned_document`). + - pyicloud/services/reminders/protobuf/versioned_document.proto diff --git a/example_reminders.py b/example_reminders.py new file mode 100644 index 00000000..ba2bd360 --- /dev/null +++ b/example_reminders.py @@ -0,0 +1,1380 @@ +"""Comprehensive integration validator for the pyicloud Reminders service. 
+ +This script exercises the snapshot/read/write surface of +`pyicloud.services.reminders.service.RemindersService` and validates +round-trip behavior against iCloud. + +Validated API surface: +- lists() +- reminders(list_id=...) and reminders() +- get(reminder_id) +- create(...) across supported field combinations +- update(reminder) +- add_location_trigger(...) +- create_hashtag(...) / delete_hashtag(...) +- create_url_attachment(...) / update_attachment(...) / delete_attachment(...) +- create_recurrence_rule(...) / update_recurrence_rule(...) / delete_recurrence_rule(...) +- alarms_for(reminder) +- tags_for(reminder) +- attachments_for(reminder) +- recurrence_rules_for(reminder) +- list_reminders(list_id, include_completed=...) +- delete(reminder) + +Notes: +- The script writes real reminders into your iCloud account. +- Use `--cleanup` to soft-delete generated reminders at the end. +- Delta APIs are validated separately by `example_reminders_delta.py`. +""" + +from __future__ import annotations + +import argparse +import os +import sys +import traceback +from dataclasses import dataclass, field +from datetime import datetime, timedelta, timezone +from getpass import getpass +from time import monotonic, sleep +from typing import Any, Callable, Dict, Iterable, Optional, Sequence + +from pyicloud import PyiCloudService +from pyicloud.services.reminders.models.domain import ( + Proximity, + RecurrenceFrequency, + Reminder, + RemindersList, +) + +PRIORITY_NONE = 0 +PRIORITY_HIGH = 1 +PRIORITY_MEDIUM = 5 +PRIORITY_LOW = 9 + + +@dataclass +class ValidationTracker: + checks: int = 0 + failures: list[str] = field(default_factory=list) + + def expect(self, condition: bool, label: str, detail: str = "") -> None: + self.checks += 1 + if condition: + print(f" [PASS] {label}") + return + + message = label if not detail else f"{label}: {detail}" + self.failures.append(message) + print(f" [FAIL] {message}") + + +@dataclass +class RunState: + created: Dict[str, Reminder] = 
field(default_factory=dict) + deleted_ids: set[str] = field(default_factory=set) + + +def banner(title: str) -> None: + print(f"\n{'=' * 78}") + print(title) + print(f"{'=' * 78}") + + +def parse_args() -> argparse.Namespace: + now = datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S") + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--username", + default=os.getenv("PYICLOUD_USERNAME"), + help="Apple ID email. Defaults to interactive prompt.", + ) + parser.add_argument( + "--password", + default=os.getenv("PYICLOUD_PASSWORD"), + help="Apple ID password. Defaults to keyring or interactive prompt.", + ) + parser.add_argument( + "--list-name", + default="pyicloud testing", + help="Existing reminders list title to use.", + ) + parser.add_argument( + "--prefix", + default=f"pyicloud-reminders-validation-{now}", + help="Prefix added to created reminder titles.", + ) + parser.add_argument( + "--results-limit", + type=int, + default=500, + help="Result limit for compound list_reminders query.", + ) + parser.add_argument( + "--consistency-timeout", + type=float, + default=20.0, + help="Seconds to wait for eventual consistency checks.", + ) + parser.add_argument( + "--poll-interval", + type=float, + default=1.0, + help="Polling interval in seconds for eventual consistency checks.", + ) + parser.add_argument( + "--sleep-seconds", + type=float, + default=0.2, + help="Short delay between write-heavy operations.", + ) + parser.add_argument( + "--cleanup", + action="store_true", + help="Soft-delete generated reminders at the end.", + ) + parser.add_argument( + "--debug", + action="store_true", + help="Print traceback on unexpected errors.", + ) + return parser.parse_args() + + +def resolve_credentials(args: argparse.Namespace) -> tuple[str, Optional[str]]: + username = args.username or input("Apple ID: ").strip() + if not username: + raise ValueError("Apple ID username is required.") + + # Keep None so keyring/session-based auth can work. 
+ password = args.password + if password == "": + password = None + + # If no password was provided via args/env, optionally prompt only when stdin + # is interactive and the caller chooses to provide one. + if password is None and sys.stdin.isatty(): + answer = input("Password not provided. Prompt now? [y/N]: ").strip().lower() + if answer in {"y", "yes"}: + password = getpass("Apple ID password: ") + + return username, password + + +def _prompt_selection( + prompt: str, options: Sequence[Any], default_index: int = 0 +) -> int: + if not options: + raise ValueError("Cannot select from an empty option list.") + selected_index = default_index + if len(options) > 1: + raw_index = input(f"{prompt} [{default_index}]: ").strip() + if raw_index: + selected_index = int(raw_index) + if selected_index < 0 or selected_index >= len(options): + raise RuntimeError("Invalid selection.") + return selected_index + + +def _trusted_device_label(device: dict[str, Any]) -> str: + if device.get("phoneNumber"): + return "SMS trusted device" + if device.get("deviceName") or device.get("id"): + return "Trusted device" + return "Unknown trusted device" + + +def _raw_token(value: str) -> str: + if "/" not in value: + return value + return value.split("/", 1)[1] + + +def authenticate(args: argparse.Namespace) -> PyiCloudService: + username, password = resolve_credentials(args) + print("Authenticating with iCloud...") + api = PyiCloudService(apple_id=username, password=password) + + if api.requires_2fa: + fido2_devices = list(api.fido2_devices) + if fido2_devices: + print("Security key verification required.") + for index, _device in enumerate(fido2_devices): + print(f" {index}: Security key {index}") + selected_index = _prompt_selection( + "Select security key", + fido2_devices, + ) + selected_device = fido2_devices[selected_index] + print("Touch the selected security key to continue.") + try: + api.confirm_security_key(selected_device) + except Exception as exc: # pragma: no cover - live 
integration path + raise RuntimeError("Security key verification failed.") from exc + else: + code = input("Enter 2FA code: ").strip() + if not api.validate_2fa_code(code): + raise RuntimeError("Invalid 2FA code.") + if not api.is_trusted_session: + print("Session is not trusted. Requesting trust...") + api.trust_session() + + elif api.requires_2sa: + devices = api.trusted_devices + if not devices: + raise RuntimeError("2SA required but no trusted devices were returned.") + + print("Trusted devices:") + for index, device in enumerate(devices): + print(f" {index}: {_trusted_device_label(device)}") + + selected_index = _prompt_selection( + "Select trusted device", + devices, + ) + device = devices[selected_index] + if not api.send_verification_code(device): + raise RuntimeError("Failed to send 2SA verification code.") + + code = input("Enter 2SA verification code: ").strip() + if not api.validate_verification_code(device, code): + raise RuntimeError("Invalid 2SA verification code.") + + return api + + +def pick_target_list(lists: Iterable[RemindersList], list_name: str) -> RemindersList: + all_lists = list(lists) + if not all_lists: + raise RuntimeError("No reminders lists found in iCloud account.") + + print("Available lists:") + for lst in all_lists: + print(f" - {lst.title} ({lst.id})") + + for lst in all_lists: + if lst.title == list_name: + print(f"\nUsing list: {lst.title} ({lst.id})") + return lst + + raise RuntimeError( + f"List '{list_name}' not found. " + f"Please create it first or pass --list-name with an existing list." 
+ ) + + +def approximately_same_time( + left: Optional[datetime], right: Optional[datetime], tolerance_seconds: int = 1 +) -> bool: + if left is None or right is None: + return left is right + + if left.tzinfo is None: + left = left.replace(tzinfo=timezone.utc) + if right.tzinfo is None: + right = right.replace(tzinfo=timezone.utc) + + return abs(left.timestamp() - right.timestamp()) <= tolerance_seconds + + +def wait_until( + description: str, + predicate, + timeout_seconds: float, + poll_interval: float, +) -> bool: + deadline = monotonic() + timeout_seconds + while monotonic() < deadline: + if predicate(): + return True + sleep(poll_interval) + print(f" [WARN] Timed out while waiting for: {description}") + return False + + +def cleanup_generated(api: PyiCloudService, state: RunState) -> None: + banner("Cleanup") + for case_name, reminder in state.created.items(): + if reminder.id in state.deleted_ids: + print(f" [SKIP] {case_name}: already deleted ({reminder.id})") + continue + + try: + fresh = api.reminders.get(reminder.id) + except LookupError: + print(f" [SKIP] {case_name}: not found ({reminder.id})") + continue + + try: + api.reminders.delete(fresh) + state.deleted_ids.add(reminder.id) + print(f" [OK] Deleted {case_name}: {reminder.id}") + except Exception as exc: # pragma: no cover - live integration path + print(f" [WARN] Failed deleting {case_name} ({reminder.id}): {exc}") + + +def main() -> int: + args = parse_args() + tracker = ValidationTracker() + state = RunState() + api: Optional[PyiCloudService] = None + + try: + api = authenticate(args) + reminders_api = api.reminders + + banner("1) Discover Lists") + target_list = pick_target_list(reminders_api.lists(), args.list_name) + + banner("2) Baseline Reads") + baseline_list_items = list(reminders_api.reminders(list_id=target_list.id)) + baseline_global_items = list(reminders_api.reminders()) + baseline_compound = reminders_api.list_reminders( + target_list.id, + include_completed=True, + 
results_limit=args.results_limit, + ) + print(f" Baseline list-scoped reminders: {len(baseline_list_items)}") + print(f" Baseline global reminders: {len(baseline_global_items)}") + print( + " Baseline compound counts: " + f"reminders={len(baseline_compound.reminders)}, " + f"alarms={len(baseline_compound.alarms)}, " + f"triggers={len(baseline_compound.triggers)}, " + f"attachments={len(baseline_compound.attachments)}, " + f"hashtags={len(baseline_compound.hashtags)}, " + f"recurrence_rules={len(baseline_compound.recurrence_rules)}" + ) + + def create_case( + case_name: str, + suffix: str, + *, + desc: str, + completed: bool = False, + due_date: Optional[datetime] = None, + priority: int = PRIORITY_NONE, + flagged: bool = False, + all_day: bool = False, + time_zone_name: Optional[str] = None, + parent_reminder_id: Optional[str] = None, + ) -> Reminder: + title = f"{args.prefix} | {suffix}" + reminder = reminders_api.create( + target_list.id, + title=title, + desc=desc, + completed=completed, + due_date=due_date, + priority=priority, + flagged=flagged, + all_day=all_day, + time_zone=time_zone_name, + parent_reminder_id=parent_reminder_id, + ) + state.created[case_name] = reminder + print(f" [CREATE] {case_name}: {reminder.id}") + if args.sleep_seconds > 0: + sleep(args.sleep_seconds) + return reminder + + def assert_round_trip( + case_name: str, + reminder_id: str, + *, + expected_title: Optional[str] = None, + expected_desc: Optional[str] = None, + expected_completed: Optional[bool] = None, + expected_due_date: Optional[datetime] = None, + expected_priority: Optional[int] = None, + expected_flagged: Optional[bool] = None, + expected_all_day: Optional[bool] = None, + expected_time_zone: Optional[str] = None, + expected_parent_reminder_id: Optional[str] = None, + ) -> Reminder: + matched: dict[str, Optional[Reminder]] = {"reminder": None} + + def _matches_expectations(fresh: Reminder) -> bool: + if expected_title is not None and fresh.title != expected_title: + return 
False + if expected_desc is not None and fresh.desc != expected_desc: + return False + if ( + expected_completed is not None + and fresh.completed != expected_completed + ): + return False + if expected_due_date is not None and not approximately_same_time( + fresh.due_date, + expected_due_date, + ): + return False + if ( + expected_priority is not None + and fresh.priority != expected_priority + ): + return False + if expected_flagged is not None and fresh.flagged != expected_flagged: + return False + if expected_all_day is not None and fresh.all_day != expected_all_day: + return False + if ( + expected_time_zone is not None + and fresh.time_zone != expected_time_zone + ): + return False + if ( + expected_parent_reminder_id is not None + and fresh.parent_reminder_id != expected_parent_reminder_id + ): + return False + return True + + def _poll_round_trip() -> bool: + try: + fresh = reminders_api.get(reminder_id) + except LookupError: + return False + if not _matches_expectations(fresh): + return False + matched["reminder"] = fresh + return True + + wait_until( + f"{case_name} round-trip consistency", + _poll_round_trip, + args.consistency_timeout, + args.poll_interval, + ) + + fresh = matched["reminder"] + if fresh is None: + fresh = reminders_api.get(reminder_id) + + if expected_title is not None: + tracker.expect( + fresh.title == expected_title, + f"{case_name}: title round-trip", + f"expected={expected_title!r}, got={fresh.title!r}", + ) + + if expected_desc is not None: + tracker.expect( + fresh.desc == expected_desc, + f"{case_name}: desc round-trip", + f"expected={expected_desc!r}, got={fresh.desc!r}", + ) + + if expected_completed is not None: + tracker.expect( + fresh.completed == expected_completed, + f"{case_name}: completed round-trip", + f"expected={expected_completed}, got={fresh.completed}", + ) + + if expected_due_date is not None: + tracker.expect( + approximately_same_time(fresh.due_date, expected_due_date), + f"{case_name}: due_date round-trip", 
+ f"expected={expected_due_date}, got={fresh.due_date}", + ) + + if expected_priority is not None: + tracker.expect( + fresh.priority == expected_priority, + f"{case_name}: priority round-trip", + f"expected={expected_priority}, got={fresh.priority}", + ) + + if expected_flagged is not None: + tracker.expect( + fresh.flagged == expected_flagged, + f"{case_name}: flagged round-trip", + f"expected={expected_flagged}, got={fresh.flagged}", + ) + + if expected_all_day is not None: + tracker.expect( + fresh.all_day == expected_all_day, + f"{case_name}: all_day round-trip", + f"expected={expected_all_day}, got={fresh.all_day}", + ) + + if expected_time_zone is not None: + tracker.expect( + fresh.time_zone == expected_time_zone, + f"{case_name}: time_zone round-trip", + f"expected={expected_time_zone!r}, got={fresh.time_zone!r}", + ) + + if expected_parent_reminder_id is not None: + tracker.expect( + fresh.parent_reminder_id == expected_parent_reminder_id, + f"{case_name}: parent reminder round-trip", + ( + f"expected={expected_parent_reminder_id!r}, " + f"got={fresh.parent_reminder_id!r}" + ), + ) + + return fresh + + def wait_for_reminder( + description: str, + reminder_id: str, + predicate: Callable[[Reminder], bool], + *, + allow_missing: bool = False, + ) -> tuple[Optional[Reminder], bool]: + matched: dict[str, Optional[Reminder] | bool] = { + "reminder": None, + "missing": False, + } + + def _poll() -> bool: + try: + fresh = reminders_api.get(reminder_id) + except LookupError: + matched["reminder"] = None + matched["missing"] = True + return allow_missing + + matched["missing"] = False + if not predicate(fresh): + return False + matched["reminder"] = fresh + return True + + wait_until( + description, + _poll, + args.consistency_timeout, + args.poll_interval, + ) + return matched["reminder"], bool(matched["missing"]) + + def wait_for_linked_id( + description: str, + reminder_id: str, + attr_name: str, + linked_id: str, + *, + present: bool, + ) -> Reminder: + 
expected_raw_id = _raw_token(linked_id) + fresh, _ = wait_for_reminder( + description, + reminder_id, + lambda reminder: ( + any( + _raw_token(item) == expected_raw_id + for item in getattr(reminder, attr_name) + ) + if present + else all( + _raw_token(item) != expected_raw_id + for item in getattr(reminder, attr_name) + ) + ), + ) + if fresh is None: + fresh = reminders_api.get(reminder_id) + return fresh + + def wait_for_relationship_rows( + description: str, + reminder_id: str, + fetch_rows: Callable[[Reminder], list[Any]], + predicate: Callable[[list[Any]], bool], + ) -> tuple[Reminder, list[Any]]: + matched: dict[str, Any] = {"reminder": None, "rows": None} + + def _poll() -> bool: + try: + fresh = reminders_api.get(reminder_id) + except LookupError: + return False + rows = fetch_rows(fresh) + if not predicate(rows): + return False + matched["reminder"] = fresh + matched["rows"] = rows + return True + + wait_until( + description, + _poll, + args.consistency_timeout, + args.poll_interval, + ) + + fresh = matched["reminder"] + if fresh is None: + fresh = reminders_api.get(reminder_id) + rows = matched["rows"] + if rows is None: + rows = fetch_rows(fresh) + return fresh, rows + + banner("3) Create Matrix (All Supported create() Configurations)") + due_aware = (datetime.now(tz=timezone.utc) + timedelta(days=1)).replace( + hour=9, minute=0, second=0, microsecond=0 + ) + due_naive = (datetime.utcnow() + timedelta(days=2)).replace( + hour=11, minute=15, second=0, microsecond=0 + ) + due_naive_expected = due_naive.replace(tzinfo=timezone.utc) + all_day_due = (datetime.now(tz=timezone.utc) + timedelta(days=3)).replace( + hour=0, minute=0, second=0, microsecond=0 + ) + paris_due = (datetime.now(tz=timezone.utc) + timedelta(days=4)).replace( + hour=14, minute=30, second=0, microsecond=0 + ) + + basic = create_case( + "basic", + "basic", + desc="Basic reminder with title and notes.", + ) + assert_round_trip( + "basic", + basic.id, + expected_title=f"{args.prefix} | 
basic", + expected_desc="Basic reminder with title and notes.", + expected_priority=PRIORITY_NONE, + expected_flagged=False, + expected_all_day=False, + ) + + child_case = create_case( + "child_reminder", + "child reminder", + desc="Child reminder linked to the basic reminder.", + parent_reminder_id=basic.id, + ) + assert_round_trip( + "child_reminder", + child_case.id, + expected_parent_reminder_id=basic.id, + ) + + completed = create_case( + "completed_on_create", + "completed on create", + desc="Created with completed=True.", + completed=True, + ) + assert_round_trip( + "completed_on_create", + completed.id, + expected_completed=True, + ) + + due_aware_case = create_case( + "due_aware", + "due aware", + desc="Timezone-aware due date.", + due_date=due_aware, + ) + assert_round_trip( + "due_aware", + due_aware_case.id, + expected_due_date=due_aware, + ) + + due_naive_case = create_case( + "due_naive", + "due naive", + desc="Naive due date should be interpreted as UTC by service code.", + due_date=due_naive, + ) + assert_round_trip( + "due_naive", + due_naive_case.id, + expected_due_date=due_naive_expected, + ) + + all_day_case = create_case( + "all_day", + "all day", + desc="All-day reminder.", + due_date=all_day_due, + all_day=True, + ) + assert_round_trip( + "all_day", + all_day_case.id, + expected_due_date=all_day_due, + expected_all_day=True, + ) + + high_case = create_case( + "priority_high_flagged", + "priority high flagged", + desc="High priority and flagged.", + priority=PRIORITY_HIGH, + flagged=True, + ) + assert_round_trip( + "priority_high_flagged", + high_case.id, + expected_priority=PRIORITY_HIGH, + expected_flagged=True, + ) + + medium_case = create_case( + "priority_medium", + "priority medium", + desc="Medium priority.", + priority=PRIORITY_MEDIUM, + ) + assert_round_trip( + "priority_medium", + medium_case.id, + expected_priority=PRIORITY_MEDIUM, + ) + + low_case = create_case( + "priority_low", + "priority low", + desc="Low priority.", + 
priority=PRIORITY_LOW, + ) + assert_round_trip( + "priority_low", + low_case.id, + expected_priority=PRIORITY_LOW, + ) + + tz_due_case = create_case( + "timezone_due", + "timezone due", + desc="Reminder with explicit time_zone and due date.", + due_date=paris_due, + time_zone_name="Europe/Paris", + ) + assert_round_trip( + "timezone_due", + tz_due_case.id, + expected_due_date=paris_due, + expected_time_zone="Europe/Paris", + ) + + tz_only_case = create_case( + "timezone_only", + "timezone only", + desc="Reminder with time_zone only.", + time_zone_name="UTC", + ) + assert_round_trip( + "timezone_only", + tz_only_case.id, + expected_time_zone="UTC", + ) + + full_case = create_case( + "full_combo", + "full combo", + desc="Create with due date, priority, flagged, and time zone.", + due_date=due_aware + timedelta(days=7), + priority=PRIORITY_HIGH, + flagged=True, + all_day=False, + time_zone_name="UTC", + ) + assert_round_trip( + "full_combo", + full_case.id, + expected_priority=PRIORITY_HIGH, + expected_flagged=True, + expected_all_day=False, + expected_time_zone="UTC", + ) + + location_arrive = create_case( + "location_arriving", + "location arriving", + desc="Location trigger with arriving proximity.", + ) + location_leave = create_case( + "location_leaving", + "location leaving", + desc="Location trigger with leaving proximity.", + ) + linked_case = create_case( + "linked_records", + "linked records", + desc="Hashtag + attachment + recurrence write validation case.", + ) + + delete_candidate = create_case( + "delete_candidate", + "delete candidate", + desc="Will be deleted to validate delete path.", + ) + + banner("4) update() Round-Trip") + updated_basic = reminders_api.get(basic.id) + updated_basic.title = f"{args.prefix} | basic updated" + updated_basic.desc = "Updated via update() validation path." 
+ updated_basic.completed = True + reminders_api.update(updated_basic) + + post_update = assert_round_trip( + "update_basic_step1", + basic.id, + expected_title=f"{args.prefix} | basic updated", + expected_desc="Updated via update() validation path.", + expected_completed=True, + ) + + post_update.completed = False + reminders_api.update(post_update) + assert_round_trip( + "update_basic_step2", + basic.id, + expected_completed=False, + ) + + banner("5) add_location_trigger() + alarms_for()") + pre_alarm_basic = reminders_api.alarms_for(reminders_api.get(medium_case.id)) + tracker.expect( + pre_alarm_basic == [], + "alarms_for() returns empty list for reminders without alarms", + f"got={pre_alarm_basic}", + ) + + arrive_alarm, arrive_trigger = reminders_api.add_location_trigger( + reminders_api.get(location_arrive.id), + title="Eiffel Tower", + address="Champ de Mars, 5 Av. Anatole France, 75007 Paris, France", + latitude=48.8584, + longitude=2.2945, + radius=150.0, + proximity=Proximity.ARRIVING, + ) + tracker.expect( + arrive_trigger.proximity == Proximity.ARRIVING, + "add_location_trigger() returns ARRIVING trigger", + ) + + leave_alarm, leave_trigger = reminders_api.add_location_trigger( + reminders_api.get(location_leave.id), + title="Gare de Luxembourg", + address="Place de la Gare, 1616 Luxembourg", + latitude=49.6004, + longitude=6.1345, + radius=200.0, + proximity=Proximity.LEAVING, + ) + tracker.expect( + leave_trigger.proximity == Proximity.LEAVING, + "add_location_trigger() returns LEAVING trigger", + ) + + location_arrive_fresh, arrive_alarm_rows = wait_for_relationship_rows( + "ARRIVING location trigger to round-trip", + location_arrive.id, + reminders_api.alarms_for, + lambda rows: any( + row.alarm.id == arrive_alarm.id + and row.trigger is not None + and row.trigger.id == arrive_trigger.id + for row in rows + ), + ) + location_leave_fresh, leave_alarm_rows = wait_for_relationship_rows( + "LEAVING location trigger to round-trip", + location_leave.id, 
+ reminders_api.alarms_for, + lambda rows: any( + row.alarm.id == leave_alarm.id + and row.trigger is not None + and row.trigger.id == leave_trigger.id + for row in rows + ), + ) + + tracker.expect( + len(location_arrive_fresh.alarm_ids) >= 1, + "ARRIVING reminder has alarm_ids after trigger creation", + f"alarm_ids={location_arrive_fresh.alarm_ids}", + ) + tracker.expect( + len(location_leave_fresh.alarm_ids) >= 1, + "LEAVING reminder has alarm_ids after trigger creation", + f"alarm_ids={location_leave_fresh.alarm_ids}", + ) + + arrive_match = next( + (row for row in arrive_alarm_rows if row.alarm.id == arrive_alarm.id), + None, + ) + leave_match = next( + (row for row in leave_alarm_rows if row.alarm.id == leave_alarm.id), + None, + ) + + tracker.expect( + arrive_match is not None, + "alarms_for() returns created ARRIVING alarm", + f"alarm_id={arrive_alarm.id}", + ) + tracker.expect( + leave_match is not None, + "alarms_for() returns created LEAVING alarm", + f"alarm_id={leave_alarm.id}", + ) + + if arrive_match is not None and arrive_match.trigger is not None: + tracker.expect( + arrive_match.trigger.id == arrive_trigger.id, + "alarms_for() returns matching ARRIVING trigger", + ) + + if leave_match is not None and leave_match.trigger is not None: + tracker.expect( + leave_match.trigger.id == leave_trigger.id, + "alarms_for() returns matching LEAVING trigger", + ) + + banner("6) tags_for() / attachments_for() / recurrence_rules_for()") + arrive_tags = reminders_api.tags_for(location_arrive_fresh) + arrive_attachments = reminders_api.attachments_for(location_arrive_fresh) + arrive_recurrence_rules = reminders_api.recurrence_rules_for( + location_arrive_fresh + ) + tracker.expect( + isinstance(arrive_tags, list), + "tags_for() returns a list", + ) + tracker.expect( + isinstance(arrive_attachments, list), + "attachments_for() returns a list", + ) + tracker.expect( + isinstance(arrive_recurrence_rules, list), + "recurrence_rules_for() returns a list", + ) + 
tracker.expect( + len(arrive_tags) == 0, + "tags_for() default is empty on new reminder", + f"got={arrive_tags}", + ) + tracker.expect( + len(arrive_attachments) == 0, + "attachments_for() default is empty on new reminder", + f"got={arrive_attachments}", + ) + tracker.expect( + len(arrive_recurrence_rules) == 0, + "recurrence_rules_for() default is empty on new reminder", + f"got={arrive_recurrence_rules}", + ) + + banner("7) Extended Write APIs (hashtags/attachments/recurrence)") + linked_fresh = reminders_api.get(linked_case.id) + + hashtag_created = reminders_api.create_hashtag(linked_fresh, "pyicloud") + linked_fresh = wait_for_linked_id( + "created hashtag ID to appear on linked reminder", + linked_case.id, + "hashtag_ids", + hashtag_created.id, + present=True, + ) + tracker.expect( + any( + hid == hashtag_created.id.split("/", 1)[1] + for hid in linked_fresh.hashtag_ids + ), + "create_hashtag() links hashtag ID on reminder", + f"hashtag_ids={linked_fresh.hashtag_ids}", + ) + linked_fresh, fetched_tags = wait_for_relationship_rows( + "created hashtag to appear in tags_for()", + linked_case.id, + reminders_api.tags_for, + lambda rows: any(tag.id == hashtag_created.id for tag in rows), + ) + fetched_tag = next( + (tag for tag in fetched_tags if tag.id == hashtag_created.id), None + ) + tracker.expect( + fetched_tag is not None, + "tags_for() returns created hashtag", + f"hashtag_id={hashtag_created.id}", + ) + if fetched_tag is not None: + print( + " [INFO] Skipping update_hashtag(): " + "Hashtag.Name is read-only in the iCloud Reminders web app" + ) + + attachment_created = reminders_api.create_url_attachment( + linked_fresh, + url="https://example.com/reminders", + uti="public.url", + ) + linked_fresh = wait_for_linked_id( + "created attachment ID to appear on linked reminder", + linked_case.id, + "attachment_ids", + attachment_created.id, + present=True, + ) + tracker.expect( + any( + aid == attachment_created.id.split("/", 1)[1] + for aid in 
linked_fresh.attachment_ids + ), + "create_url_attachment() links attachment ID on reminder", + f"attachment_ids={linked_fresh.attachment_ids}", + ) + linked_fresh, fetched_attachments = wait_for_relationship_rows( + "created attachment to appear in attachments_for()", + linked_case.id, + reminders_api.attachments_for, + lambda rows: any(att.id == attachment_created.id for att in rows), + ) + fetched_attachment = next( + (att for att in fetched_attachments if att.id == attachment_created.id), + None, + ) + tracker.expect( + fetched_attachment is not None, + "attachments_for() returns created URL attachment", + f"attachment_id={attachment_created.id}", + ) + if fetched_attachment is not None: + reminders_api.update_attachment( + fetched_attachment, + url="https://example.org/reminders", + ) + linked_fresh, updated_attachments = wait_for_relationship_rows( + "updated URL attachment to round-trip", + linked_case.id, + reminders_api.attachments_for, + lambda rows: any( + att.id == fetched_attachment.id + and getattr(att, "url", None) == "https://example.org/reminders" + for att in rows + ), + ) + tracker.expect( + any( + att.id == fetched_attachment.id + and getattr(att, "url", None) == "https://example.org/reminders" + for att in updated_attachments + ), + "update_attachment() updates URL attachment", + ) + attachment_created = next( + (att for att in updated_attachments if att.id == fetched_attachment.id), + fetched_attachment, + ) + + recurrence_created = reminders_api.create_recurrence_rule( + linked_fresh, + frequency=RecurrenceFrequency.WEEKLY, + interval=2, + occurrence_count=0, + first_day_of_week=1, + ) + linked_fresh = wait_for_linked_id( + "created recurrence rule ID to appear on linked reminder", + linked_case.id, + "recurrence_rule_ids", + recurrence_created.id, + present=True, + ) + tracker.expect( + any( + rid == recurrence_created.id.split("/", 1)[1] + for rid in linked_fresh.recurrence_rule_ids + ), + "create_recurrence_rule() links recurrence ID on 
reminder", + f"recurrence_rule_ids={linked_fresh.recurrence_rule_ids}", + ) + linked_fresh, fetched_rules = wait_for_relationship_rows( + "created recurrence rule to appear in recurrence_rules_for()", + linked_case.id, + reminders_api.recurrence_rules_for, + lambda rows: any(rule.id == recurrence_created.id for rule in rows), + ) + fetched_rule = next( + (rule for rule in fetched_rules if rule.id == recurrence_created.id), + None, + ) + tracker.expect( + fetched_rule is not None, + "recurrence_rules_for() returns created recurrence rule", + f"rule_id={recurrence_created.id}", + ) + if fetched_rule is not None: + reminders_api.update_recurrence_rule( + fetched_rule, + interval=3, + occurrence_count=5, + ) + linked_fresh, updated_rules = wait_for_relationship_rows( + "updated recurrence rule to round-trip", + linked_case.id, + reminders_api.recurrence_rules_for, + lambda rows: any( + rule.id == fetched_rule.id + and rule.interval == 3 + and rule.occurrence_count == 5 + for rule in rows + ), + ) + tracker.expect( + any( + rule.id == fetched_rule.id + and rule.interval == 3 + and rule.occurrence_count == 5 + for rule in updated_rules + ), + "update_recurrence_rule() updates recurrence fields", + ) + recurrence_created = next( + (rule for rule in updated_rules if rule.id == fetched_rule.id), + fetched_rule, + ) + + banner("8) reminders() + list_reminders() Query Paths") + expected_created_ids = {r.id for r in state.created.values()} + + visible_in_list = wait_until( + "created reminders to appear in reminders(list_id=...) output", + lambda: expected_created_ids.issubset( + {r.id for r in reminders_api.reminders(list_id=target_list.id)} + ), + timeout_seconds=args.consistency_timeout, + poll_interval=args.poll_interval, + ) + tracker.expect( + visible_in_list, + "reminders(list_id=...) 
contains all created reminder IDs", + ) + + visible_globally = wait_until( + "created reminders to appear in reminders() output", + lambda: expected_created_ids.issubset( + {r.id for r in reminders_api.reminders()} + ), + timeout_seconds=args.consistency_timeout, + poll_interval=args.poll_interval, + ) + tracker.expect( + visible_globally, + "reminders() contains all created reminder IDs", + ) + + compound_open = reminders_api.list_reminders( + target_list.id, + include_completed=False, + results_limit=args.results_limit, + ) + compound_all = reminders_api.list_reminders( + target_list.id, + include_completed=True, + results_limit=args.results_limit, + ) + + for key in [ + "reminders", + "alarms", + "triggers", + "attachments", + "hashtags", + "recurrence_rules", + ]: + tracker.expect( + hasattr(compound_open, key), + f"list_reminders(include_completed=False) returns key '{key}'", + ) + tracker.expect( + hasattr(compound_all, key), + f"list_reminders(include_completed=True) returns key '{key}'", + ) + + all_ids_from_compound = {r.id for r in compound_all.reminders} + tracker.expect( + expected_created_ids.issubset(all_ids_from_compound), + "list_reminders(include_completed=True) contains all created reminders", + f"missing={sorted(expected_created_ids - all_ids_from_compound)}", + ) + + tracker.expect( + len(compound_all.reminders) >= len(compound_open.reminders), + "include_completed=True returns at least as many reminders as include_completed=False", + f"false={len(compound_open.reminders)}, true={len(compound_all.reminders)}", + ) + + tracker.expect( + arrive_alarm.id in compound_all.alarms, + "Compound query exposes ARRIVING alarm", + f"alarm_id={arrive_alarm.id}", + ) + tracker.expect( + leave_alarm.id in compound_all.alarms, + "Compound query exposes LEAVING alarm", + f"alarm_id={leave_alarm.id}", + ) + tracker.expect( + arrive_trigger.id in compound_all.triggers, + "Compound query exposes ARRIVING trigger", + f"trigger_id={arrive_trigger.id}", + ) + 
tracker.expect( + leave_trigger.id in compound_all.triggers, + "Compound query exposes LEAVING trigger", + f"trigger_id={leave_trigger.id}", + ) + tracker.expect( + hashtag_created.id in compound_all.hashtags, + "Compound query exposes created hashtag", + f"hashtag_id={hashtag_created.id}", + ) + tracker.expect( + attachment_created.id in compound_all.attachments, + "Compound query exposes created attachment", + f"attachment_id={attachment_created.id}", + ) + tracker.expect( + recurrence_created.id in compound_all.recurrence_rules, + "Compound query exposes created recurrence rule", + f"recurrence_rule_id={recurrence_created.id}", + ) + + banner("9) delete() Verification") + reminders_api.delete(reminders_api.get(delete_candidate.id)) + state.deleted_ids.add(delete_candidate.id) + + deleted_state, delete_missing = wait_for_reminder( + "deleted reminder to disappear or report deleted=True", + delete_candidate.id, + lambda fresh: fresh.deleted is True, + allow_missing=True, + ) + if delete_missing: + tracker.expect( + True, + "delete() made reminder non-retrievable via get()", + ) + else: + if deleted_state is None: + deleted_state = reminders_api.get(delete_candidate.id) + tracker.expect( + deleted_state.deleted is True, + "delete() marks reminder as deleted when record remains queryable", + f"deleted={deleted_state.deleted}", + ) + + linked_fresh = reminders_api.get(linked_case.id) + reminders_api.delete_hashtag(linked_fresh, hashtag_created) + linked_fresh = wait_for_linked_id( + "deleted hashtag ID to disappear from linked reminder", + linked_case.id, + "hashtag_ids", + hashtag_created.id, + present=False, + ) + tracker.expect( + all( + hid != hashtag_created.id.split("/", 1)[1] + for hid in linked_fresh.hashtag_ids + ), + "delete_hashtag() removes hashtag ID from reminder", + f"hashtag_ids={linked_fresh.hashtag_ids}", + ) + + reminders_api.delete_attachment(linked_fresh, attachment_created) + linked_fresh = wait_for_linked_id( + "deleted attachment ID to 
disappear from linked reminder", + linked_case.id, + "attachment_ids", + attachment_created.id, + present=False, + ) + tracker.expect( + all( + aid != attachment_created.id.split("/", 1)[1] + for aid in linked_fresh.attachment_ids + ), + "delete_attachment() removes attachment ID from reminder", + f"attachment_ids={linked_fresh.attachment_ids}", + ) + + reminders_api.delete_recurrence_rule(linked_fresh, recurrence_created) + linked_fresh = wait_for_linked_id( + "deleted recurrence rule ID to disappear from linked reminder", + linked_case.id, + "recurrence_rule_ids", + recurrence_created.id, + present=False, + ) + tracker.expect( + all( + rid != recurrence_created.id.split("/", 1)[1] + for rid in linked_fresh.recurrence_rule_ids + ), + "delete_recurrence_rule() removes recurrence ID from reminder", + f"recurrence_rule_ids={linked_fresh.recurrence_rule_ids}", + ) + + banner("Coverage Notes") + print( + "Validated snapshot/read/write capabilities in current service implementation:" + ) + print(" - CRUD for reminders (create/get/update/delete)") + print(" - Alarm triggers (Location ARRIVING/LEAVING)") + print(" - Hashtag create/delete") + print( + " update_hashtag() is not live-validated because Hashtag.Name is read-only" + ) + print(" - URL attachment create/update/delete") + print(" - Recurrence rule create/update/delete") + print(" - Query APIs (lists, reminders, list_reminders)") + print(" - Delta APIs are validated separately by example_reminders_delta.py") + print( + " - Linked fetch helpers " + "(alarms_for, tags_for, attachments_for, recurrence_rules_for)" + ) + + except Exception as exc: # pragma: no cover - live integration path + banner("Fatal Error") + print(str(exc)) + if args.debug: + traceback.print_exc() + return 1 + + finally: + # Cleanup always runs if requested, even after failures. 
+ if args.cleanup and api is not None: + try: + cleanup_generated(api, state) + except Exception as cleanup_exc: # pragma: no cover + print(f"[WARN] Cleanup failed: {cleanup_exc}") + + banner("Validation Summary") + print(f"Checks executed: {tracker.checks}") + print(f"Failures: {len(tracker.failures)}") + + if tracker.failures: + print("\nFailure details:") + for failure in tracker.failures: + print(f" - {failure}") + return 2 + + print("All validations passed.") + print( + "Generated reminders kept in iCloud. " + "Use --cleanup on the next run if you want auto-deletion." + ) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/example_reminders_delta.py b/example_reminders_delta.py new file mode 100644 index 00000000..7fba10cd --- /dev/null +++ b/example_reminders_delta.py @@ -0,0 +1,420 @@ +"""Dedicated integration validator for Reminders delta-sync APIs. + +This script validates the currently implemented delta APIs in +`pyicloud.services.reminders.service.RemindersService`: + +- sync_cursor() +- iter_changes(since=...) + +The validation intentionally checks update and delete in separate cursor windows, +because CloudKit zone changes are a delta-state feed rather than an append-only +event log for individual records. 
+""" + +from __future__ import annotations + +import argparse +import os +import sys +import traceback +from dataclasses import dataclass, field +from datetime import datetime, timezone +from getpass import getpass +from time import monotonic, sleep +from typing import Any, Iterable, Optional, Sequence + +from pyicloud import PyiCloudService +from pyicloud.services.reminders.models.domain import Reminder, RemindersList + + +@dataclass +class ValidationTracker: + checks: int = 0 + failures: list[str] = field(default_factory=list) + + def expect(self, condition: bool, label: str, detail: str = "") -> None: + self.checks += 1 + if condition: + print(f" [PASS] {label}") + return + + message = label if not detail else f"{label}: {detail}" + self.failures.append(message) + print(f" [FAIL] {message}") + + +@dataclass +class RunState: + created: Optional[Reminder] = None + deleted: bool = False + + +def banner(title: str) -> None: + print(f"\n{'=' * 78}") + print(title) + print(f"{'=' * 78}") + + +def parse_args() -> argparse.Namespace: + now = datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S") + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--username", + default=os.getenv("PYICLOUD_USERNAME"), + help="Apple ID email. Defaults to interactive prompt.", + ) + parser.add_argument( + "--password", + default=os.getenv("PYICLOUD_PASSWORD"), + help="Apple ID password. 
Defaults to keyring or interactive prompt.", + ) + parser.add_argument( + "--list-name", + default="pyicloud testing", + help="Existing reminders list title to use.", + ) + parser.add_argument( + "--prefix", + default=f"pyicloud-reminders-delta-{now}", + help="Prefix added to the dedicated delta reminder title.", + ) + parser.add_argument( + "--consistency-timeout", + type=float, + default=20.0, + help="Seconds to wait for eventual consistency checks.", + ) + parser.add_argument( + "--poll-interval", + type=float, + default=1.0, + help="Polling interval in seconds for eventual consistency checks.", + ) + parser.add_argument( + "--cleanup", + action="store_true", + help="Delete the generated reminder if the script fails before the delete phase.", + ) + parser.add_argument( + "--debug", + action="store_true", + help="Print traceback on unexpected errors.", + ) + return parser.parse_args() + + +def resolve_credentials(args: argparse.Namespace) -> tuple[str, Optional[str]]: + username = args.username or input("Apple ID: ").strip() + if not username: + raise ValueError("Apple ID username is required.") + + password = args.password + if password == "": + password = None + + if password is None and sys.stdin.isatty(): + answer = input("Password not provided. Prompt now? 
[y/N]: ").strip().lower() + if answer in {"y", "yes"}: + password = getpass("Apple ID password: ") + + return username, password + + +def _prompt_selection( + prompt: str, options: Sequence[Any], default_index: int = 0 +) -> int: + if not options: + raise ValueError("Cannot select from an empty option list.") + selected_index = default_index + if len(options) > 1: + raw_index = input(f"{prompt} [{default_index}]: ").strip() + if raw_index: + selected_index = int(raw_index) + if selected_index < 0 or selected_index >= len(options): + raise RuntimeError("Invalid selection.") + return selected_index + + +def _trusted_device_label(device: dict[str, Any]) -> str: + if device.get("phoneNumber"): + return "SMS trusted device" + if device.get("deviceName") or device.get("id"): + return "Trusted device" + return "Unknown trusted device" + + +def authenticate(args: argparse.Namespace) -> PyiCloudService: + username, password = resolve_credentials(args) + print("Authenticating with iCloud...") + api = PyiCloudService(apple_id=username, password=password) + + if api.requires_2fa: + fido2_devices = list(api.fido2_devices) + if fido2_devices: + print("Security key verification required.") + for index, _device in enumerate(fido2_devices): + print(f" {index}: Security key {index}") + selected_index = _prompt_selection( + "Select security key", + fido2_devices, + ) + selected_device = fido2_devices[selected_index] + print("Touch the selected security key to continue.") + try: + api.confirm_security_key(selected_device) + except Exception as exc: # pragma: no cover - live integration path + raise RuntimeError("Security key verification failed.") from exc + else: + code = input("Enter 2FA code: ").strip() + if not api.validate_2fa_code(code): + raise RuntimeError("Invalid 2FA code.") + if not api.is_trusted_session: + print("Session is not trusted. 
Requesting trust...") + api.trust_session() + elif api.requires_2sa: + devices = api.trusted_devices + if not devices: + raise RuntimeError("2SA required but no trusted devices were returned.") + + print("Trusted devices:") + for index, device in enumerate(devices): + print(f" {index}: {_trusted_device_label(device)}") + + selected_index = _prompt_selection( + "Select trusted device", + devices, + ) + device = devices[selected_index] + if not api.send_verification_code(device): + raise RuntimeError("Failed to send 2SA verification code.") + + code = input("Enter 2SA verification code: ").strip() + if not api.validate_verification_code(device, code): + raise RuntimeError("Invalid 2SA verification code.") + + return api + + +def pick_target_list(lists: Iterable[RemindersList], list_name: str) -> RemindersList: + all_lists = list(lists) + if not all_lists: + raise RuntimeError("No reminders lists found in iCloud account.") + + print("Available lists:") + for lst in all_lists: + print(f" - {lst.title} ({lst.id})") + + for lst in all_lists: + if lst.title == list_name: + print(f"\nUsing list: {lst.title} ({lst.id})") + return lst + + raise RuntimeError( + f"List '{list_name}' not found. " + f"Please create it first or pass --list-name with an existing list." 
+ ) + + +def wait_until( + description: str, + predicate, + timeout_seconds: float, + poll_interval: float, +) -> bool: + deadline = monotonic() + timeout_seconds + while monotonic() < deadline: + if predicate(): + return True + sleep(poll_interval) + print(f" [WARN] Timed out while waiting for: {description}") + return False + + +def cleanup_generated(api: PyiCloudService, state: RunState) -> None: + if state.created is None or state.deleted: + return + + banner("Cleanup") + try: + fresh = api.reminders.get(state.created.id) + except LookupError: + print(f" [SKIP] Not found ({state.created.id})") + return + + try: + api.reminders.delete(fresh) + state.deleted = True + print(f" [OK] Deleted {state.created.id}") + except Exception as exc: # pragma: no cover - live integration path + print(f" [WARN] Cleanup failed for {state.created.id}: {exc}") + + +def main() -> int: + args = parse_args() + tracker = ValidationTracker() + state = RunState() + api: Optional[PyiCloudService] = None + + try: + api = authenticate(args) + reminders_api = api.reminders + + banner("1) Discover Lists") + target_list = pick_target_list(reminders_api.lists(), args.list_name) + + banner("2) sync_cursor() + create delta") + create_cursor = reminders_api.sync_cursor() + tracker.expect( + isinstance(create_cursor, str) and bool(create_cursor), + "sync_cursor() returns a non-empty cursor before create", + f"cursor={create_cursor!r}", + ) + + created = reminders_api.create( + list_id=target_list.id, + title=f"{args.prefix} | create", + desc="Dedicated reminder for delta-sync validation.", + ) + state.created = created + print(f" [CREATE] delta reminder: {created.id}") + + create_events = [] + + def create_visible() -> bool: + nonlocal create_events + create_events = list(reminders_api.iter_changes(since=create_cursor)) + return any( + event.type == "updated" + and event.reminder_id == created.id + and event.reminder is not None + and event.reminder.title == created.title + for event in 
create_events + ) + + tracker.expect( + wait_until( + "delta create to appear in iter_changes() output", + create_visible, + timeout_seconds=args.consistency_timeout, + poll_interval=args.poll_interval, + ), + "iter_changes(since=...) returns an updated event after create", + f"event_count={len(create_events)}", + ) + tracker.expect( + all( + hasattr(event, "type") and hasattr(event, "reminder_id") + for event in create_events + ), + "iter_changes() returns structured change events after create", + ) + + banner("3) sync_cursor() + update delta") + update_cursor = reminders_api.sync_cursor() + tracker.expect( + isinstance(update_cursor, str) and bool(update_cursor), + "sync_cursor() returns a non-empty cursor before update", + f"cursor={update_cursor!r}", + ) + + updated_title = f"{args.prefix} | updated" + updated_desc = "Updated delta-sync body." + updated = reminders_api.get(created.id) + updated.title = updated_title + updated.desc = updated_desc + reminders_api.update(updated) + + update_events = [] + + def update_visible() -> bool: + nonlocal update_events + update_events = list(reminders_api.iter_changes(since=update_cursor)) + return any( + event.type == "updated" + and event.reminder_id == created.id + and event.reminder is not None + and event.reminder.title == updated_title + and event.reminder.desc == updated_desc + for event in update_events + ) + + tracker.expect( + wait_until( + "delta update to appear in iter_changes() output", + update_visible, + timeout_seconds=args.consistency_timeout, + poll_interval=args.poll_interval, + ), + "iter_changes(since=...) 
returns an updated event after update", + f"event_count={len(update_events)}", + ) + + banner("4) sync_cursor() + delete delta") + delete_cursor = reminders_api.sync_cursor() + tracker.expect( + isinstance(delete_cursor, str) and bool(delete_cursor), + "sync_cursor() returns a non-empty cursor before delete", + f"cursor={delete_cursor!r}", + ) + + reminders_api.delete(reminders_api.get(created.id)) + state.deleted = True + + delete_events = [] + + def delete_visible() -> bool: + nonlocal delete_events + delete_events = list(reminders_api.iter_changes(since=delete_cursor)) + return any( + event.type == "deleted" and event.reminder_id == created.id + for event in delete_events + ) + + tracker.expect( + wait_until( + "delta delete to appear in iter_changes() output", + delete_visible, + timeout_seconds=args.consistency_timeout, + poll_interval=args.poll_interval, + ), + "iter_changes(since=...) returns a deleted event after delete", + f"event_count={len(delete_events)}", + ) + + banner("Coverage Notes") + print("Validated delta capabilities in current service implementation:") + print(" - sync_cursor()") + print(" - iter_changes(since=...) after create") + print(" - iter_changes(since=...) after update") + print(" - iter_changes(since=...) 
after delete") + + except Exception as exc: # pragma: no cover - live integration path + banner("Fatal Error") + print(str(exc)) + if args.debug: + traceback.print_exc() + return 1 + + finally: + if args.cleanup and api is not None: + try: + cleanup_generated(api, state) + except Exception as cleanup_exc: # pragma: no cover + print(f"[WARN] Cleanup failed: {cleanup_exc}") + + banner("Validation Summary") + print(f"Checks executed: {tracker.checks}") + print(f"Failures: {len(tracker.failures)}") + + if tracker.failures: + print("\nFailure details:") + for failure in tracker.failures: + print(f" - {failure}") + return 2 + + print("All validations passed.") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/examples/notes_cli.py b/examples/notes_cli.py new file mode 100644 index 00000000..4dae7868 --- /dev/null +++ b/examples/notes_cli.py @@ -0,0 +1,425 @@ +"""Developer utility for searching, inspecting, and exporting iCloud Notes. + +Run: + uv run python examples/notes_cli.py --username you@example.com ... + +This script is built on top of ``api.notes`` for local exploration and export +workflows. It is useful for debugging note selection, rendering, and HTML +exports, but it is not the primary public API for the Notes service. +""" + +from __future__ import annotations + +import argparse +import logging +import os +import re +import sys +from typing import Any, List, Optional + +# Ensure pyicloud can be imported when running from examples/ directly. 
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +from rich.console import Console +from rich.logging import RichHandler + +from pyicloud import PyiCloudService +from pyicloud.common.cloudkit import CKRecord +from pyicloud.exceptions import PyiCloudServiceUnavailable +from pyicloud.services.notes.rendering.exporter import decode_and_parse_note +from pyicloud.services.notes.rendering.options import ExportConfig +from pyicloud.utils import get_password + +console = Console() +logger = logging.getLogger("notes.explore") + + +def parse_args() -> argparse.Namespace: + p = argparse.ArgumentParser( + description="Developer utility for exploring and exporting iCloud Notes" + ) + p.add_argument("--username", dest="username", required=True, help="Apple ID") + p.add_argument( + "--verbose", + dest="verbose", + action="store_true", + default=False, + help="Enable verbose logs and detailed output", + ) + p.add_argument( + "--cookie-dir", + dest="cookie_dir", + default="", + help="Directory to store session cookies", + ) + p.add_argument( + "--china-mainland", + action="store_true", + dest="china_mainland", + default=False, + help="Set if Apple ID region is China mainland", + ) + p.add_argument( + "--max", + dest="max_items", + type=int, + default=20, + help="How many most recent notes to render (default: 20)", + ) + p.add_argument( + "--title", + dest="title", + default="", + help="Only render notes whose title exactly matches this string", + ) + p.add_argument( + "--title-contains", + dest="title_contains", + default="", + help="Only render notes whose title contains this substring (case-insensitive)", + ) + p.add_argument( + "--output-dir", + dest="output_dir", + default=os.path.join("workspace", "notes_html"), + help="Directory to write rendered HTML output (default: workspace/notes_html)", + ) + p.add_argument( + "--full-page", + dest="full_page", + action="store_true", + default=False, + help="Wrap saved output in a full HTML page; if 
omitted, save an HTML fragment", + ) + p.add_argument( + "--dump-runs", + dest="dump_runs", + action="store_true", + default=False, + help="Dump attribute runs and write an annotated mapping under workspace/notes_runs", + ) + p.add_argument( + "--assets-dir", + dest="assets_dir", + default=os.path.join("exports", "assets"), + help="Directory to store downloaded assets in archival export mode (default: exports/assets)", + ) + p.add_argument( + "--export-mode", + dest="export_mode", + choices=["archival", "lightweight"], + default="archival", + help="Export intent: 'archival' downloads assets for stable, offline HTML (default); 'lightweight' skips downloads for quick previews", + ) + p.add_argument( + "--notes-debug", + dest="notes_debug", + action="store_true", + default=False, + help="Enable verbose Notes/export debug output (datasource, attachments, and rendering)", + ) + p.add_argument( + "--preview-appearance", + dest="preview_appearance", + choices=["light", "dark"], + default="light", + help="Select which preview appearance to prefer for image previews (light/dark)", + ) + p.add_argument( + "--pdf-height", + dest="pdf_height", + type=int, + default=600, + help="Height in pixels for embedded PDF objects (default: 600)", + ) + return p.parse_args() + + +def ensure_auth(api: PyiCloudService) -> None: + if api.requires_2fa: + fido2_devices = list(api.fido2_devices) + if fido2_devices: + logger.info("Security key verification required.") + for index, _device in enumerate(fido2_devices): + logger.info(" %d: Security key %d", index, index) + sel = input("Select security key index [0]: ").strip() + try: + idx = int(sel) if sel else 0 + except ValueError: + idx = 0 + if idx < 0 or idx >= len(fido2_devices): + logger.warning("Invalid selection; defaulting to security key 0") + idx = 0 + logger.info("Touch the selected security key to continue.") + try: + api.confirm_security_key(fido2_devices[idx]) + except Exception as exc: # pragma: no cover - live auth path + raise 
RuntimeError("Security key verification failed") from exc + else: + logger.info("Two-factor authentication required.") + code = input("Enter the 2FA code: ") + if not api.validate_2fa_code(code): + raise RuntimeError("Failed to verify 2FA code") + if not api.is_trusted_session: + api.trust_session() + elif api.requires_2sa: + logger.info("Two-step authentication required.") + devices: List[dict[str, Any]] = api.trusted_devices + if not devices: + raise RuntimeError("No trusted devices available for 2SA") + for i, device in enumerate(devices): + label = ( + "SMS trusted device" if device.get("phoneNumber") else "Trusted device" + ) + logger.info(" %d: %s", i, label) + sel = input("Select device index [0]: ").strip() + try: + idx = int(sel) if sel else 0 + except Exception: + idx = 0 + if idx < 0 or idx >= len(devices): + logger.warning("Invalid selection; defaulting to device 0") + idx = 0 + device = devices[idx] + if not api.send_verification_code(device): + raise RuntimeError("Failed to send verification code") + code = input("Enter verification code: ") + if not api.validate_verification_code(device, code): + raise RuntimeError("Failed to verify code") + + +def main() -> None: + logging.basicConfig( + level=logging.INFO, + format="%(message)s", + handlers=[ + RichHandler( + rich_tracebacks=True, + markup=True, + show_time=True, + log_time_format="%H:%M:%S", + ) + ], + ) + + args = parse_args() + + import time + + t0 = time.perf_counter() + + def phase(msg: str) -> None: + try: + dt = time.perf_counter() - t0 + logger.info("[+%.3fs] %s", dt, msg) + except Exception: + logger.info(msg) + + if args.verbose: + logging.getLogger("pyicloud.services.notes.service").setLevel(logging.DEBUG) + logging.getLogger("pyicloud.services.notes.client").setLevel(logging.DEBUG) + + debug_dir = os.path.join("workspace", "notes_debug") + if os.getenv("PYICLOUD_CK_EXTRA") == "forbid": + logger.info( + "[yellow]Strict CloudKit validation is enabled[/yellow].\n" + "Errors and raw 
payloads may be easier to diagnose under: [bold]%s[/bold]", + debug_dir, + ) + + phase("bootstrap: starting authentication") + pw = get_password(args.username) + api = PyiCloudService( + apple_id=args.username, + password=pw, + china_mainland=args.china_mainland, + cookie_directory=args.cookie_dir or None, + ) + ensure_auth(api) + phase("bootstrap: authentication complete") + + try: + phase("service: initializing NotesService") + notes = api.notes + phase("service: NotesService ready") + except PyiCloudServiceUnavailable as exc: + logger.error("Notes service not available: %s", exc) + return + + max_items = max(1, int(args.max_items)) + out_dir = args.output_dir + try: + os.makedirs(out_dir, exist_ok=True) + except Exception as exc: + logger.error("Failed to create output directory '%s': %s", out_dir, exc) + return + + def _safe_name(s: Optional[str]) -> str: + if not s: + return "untitled" + s = re.sub(r"\s+", " ", s).strip() + s = re.sub(r"[^\w\- ]+", "-", s) + return s[:60] or "untitled" + + def _match_title(title: Optional[str]) -> bool: + if not title: + return False + if args.title and title == args.title: + return True + if args.title_contains and args.title_contains.lower() in title.lower(): + return True + return False + + candidates = [] + if args.title or args.title_contains: + logger.info("[bold]\nSearching notes by title[/bold]") + phase( + "selection: recents-first title search (exact='%s' contains='%s')" + % (args.title, args.title_contains) + ) + try: + window = max(500, max_items * 50) + seen: set[str] = set() + for note in notes.recents(limit=window): + if _match_title(note.title or ""): + if note.id not in seen: + candidates.append(note) + seen.add(note.id) + if len(candidates) >= max_items: + break + phase( + f"selection: recents matched {len(candidates)} candidate(s) in window={window}" + ) + + if len(candidates) < max_items: + phase("selection: fallback to full feed scan (iter_all)") + for note in notes.iter_all(): + if _match_title(note.title 
or "") and note.id not in seen: + candidates.append(note) + seen.add(note.id) + if len(candidates) >= max_items: + break + phase(f"selection: total matched {len(candidates)} candidate(s)") + + try: + from datetime import datetime, timezone + + epoch = datetime(1970, 1, 1, tzinfo=timezone.utc) + candidates.sort(key=lambda x: x.modified_at or epoch, reverse=True) + except Exception: + pass + except Exception as exc: + logger.error("Title search failed, falling back to recents: %s", exc) + + if not candidates: + logger.info("[bold]\nMost Recent Notes (HTML)[/bold]") + phase(f"selection: loading {max_items} most recent notes") + for note in notes.recents(limit=max_items): + candidates.append(note) + phase(f"selection: using {len(candidates)} recent note(s)") + + for idx, item in enumerate(candidates): + phase(f"note[{idx}]: start '{(item.title or 'untitled')}'") + if args.verbose or args.notes_debug: + console.rule(f"idx: {idx}") + console.print(item, end="\n\n") + + ck = notes.raw + phase(f"note[{idx}]: ck.lookup(TextDataEncrypted,Attachments,TitleEncrypted)") + resp = ck.lookup( + [item.id], + desired_keys=["TextDataEncrypted", "Attachments", "TitleEncrypted"], + ) + note_rec = None + for record in resp.records: + if isinstance(record, CKRecord) and record.recordName == item.id: + note_rec = record + break + if note_rec is None: + console.print(f"[red]Note lookup returned no CKRecord for {item.id}[/red]") + continue + + phase(f"note[{idx}]: decode+parse start") + proto_note = decode_and_parse_note(note_rec) + phase(f"note[{idx}]: decode+parse ok") + if args.notes_debug: + console.print("proto_note:") + console.print(proto_note, end="\n\n") + + from pyicloud.services.notes.rendering.exporter import NoteExporter + + phase(f"note[{idx}]: exporter init") + config = ExportConfig( + debug=bool(args.notes_debug), + export_mode=str(args.export_mode).strip().lower(), + assets_dir=args.assets_dir or None, + full_page=bool(args.full_page), + 
preview_appearance=str(args.preview_appearance).strip().lower(), + pdf_object_height=int(args.pdf_height or 600), + ) + exporter = NoteExporter(ck, config=config) + phase(f"note[{idx}]: export start") + + title = item.title or "Apple Note" + safe = _safe_name(title) + short_id = (item.id or "note")[:8] + filename = f"{idx:02d}_{safe}_{short_id}.html" + + try: + path = exporter.export(note_rec, output_dir=out_dir, filename=filename) + phase(f"note[{idx}]: export done -> {path}") + if path: + console.print(f"[green]Saved:[/green] {path}") + else: + console.print("[red]Export returned None (skipped?)[/red]") + except Exception as exc: + phase(f"note[{idx}]: export failed: {exc}") + console.print(f"[red]Export failed:[/red] {exc}") + + if args.dump_runs: + try: + from pyicloud.services.notes.rendering.debug_tools import ( + annotate_note_runs_html, + dump_runs_text, + map_merged_runs, + ) + + console.rule("attribute runs (utf16 mapping)") + console.print(dump_runs_text(proto_note)) + + merged = map_merged_runs(proto_note) + console.rule("merged runs (post-merge)") + lines = [] + for row in merged: + raw = str(row.get("text", "")) + pretty = ( + raw.replace("\n", "⏎\n") + .replace("\u2028", "⤶\n") + .replace("\x00", "␀") + .replace("\ufffc", "{OBJ}") + ) + lines.append( + f"[{row['index']:03d}] off={row['utf16_start']:<5} len={row['utf16_len']:<4} text=“{pretty}”" + ) + console.print("\n".join(lines)) + + runs_dir = os.path.join("workspace", "notes_runs") + os.makedirs(runs_dir, exist_ok=True) + runs_name = f"{idx:02d}_{_safe_name(item.title)}_{(item.id or 'note')[:8]}_runs.html" + runs_path = os.path.join(runs_dir, runs_name) + with open(runs_path, "w", encoding="utf-8") as handle: + handle.write(annotate_note_runs_html(proto_note)) + console.print(f"[cyan]Saved runs map:[/cyan] {runs_path}") + except Exception as exc: + console.print(f"[red]Failed to dump runs:[/red] {exc}") + + try: + import time as _time + + logger.info("[+%.3fs] completed", _time.perf_counter() - 
t0) + except Exception: + logger.info("completed") + + +if __name__ == "__main__": + main() diff --git a/pyicloud/base.py b/pyicloud/base.py index 51f86efe..47897c70 100644 --- a/pyicloud/base.py +++ b/pyicloud/base.py @@ -23,6 +23,7 @@ from requests import HTTPError from requests.models import Response +from pyicloud.common.cloudkit.base import CloudKitExtraMode from pyicloud.const import ACCOUNT_NAME, CONTENT_TYPE_JSON, CONTENT_TYPE_TEXT from pyicloud.exceptions import ( PyiCloud2FARequiredException, @@ -46,6 +47,7 @@ RemindersService, UbiquityService, ) +from pyicloud.services.notes import NotesService from pyicloud.session import PyiCloudSession from pyicloud.srp_password import SrpPassword, SrpProtocolType from pyicloud.utils import ( @@ -153,6 +155,7 @@ def __init__( refresh_interval: float | None = None, *, authenticate: bool = True, + cloudkit_validation_extra: Optional[CloudKitExtraMode] = None, ) -> None: self._is_china_mainland: bool = ( environ.get("icloud_china", "0") == "1" @@ -176,6 +179,7 @@ def __init__( self.params: dict[str, Any] = {} self._client_id: str = client_id or str(uuid1()).lower() self._with_family: bool = with_family + self._cloudkit_validation_extra = cloudkit_validation_extra _cookie_directory: str = self._setup_cookie_directory(cookie_directory) _headers: dict[str, str] = _HEADERS.copy() @@ -215,6 +219,7 @@ def __init__( self._hidemyemail: Optional[HideMyEmailService] = None self._photos: Optional[PhotosService] = None self._reminders: Optional[RemindersService] = None + self._notes: Optional[NotesService] = None self._requires_mfa: bool = False @@ -1043,12 +1048,18 @@ def contacts(self) -> ContactsService: def reminders(self) -> RemindersService: """Gets the 'Reminders' service.""" if not self._reminders: - service_root: str = self.get_webservice_url("reminders") try: + service_root: str = self.get_webservice_url("ckdatabasews") self._reminders = RemindersService( - service_root=service_root, session=self.session, 
params=self.params + service_root=service_root, + session=self.session, + params=self.params, + cloudkit_validation_extra=self._cloudkit_validation_extra, ) - except (PyiCloudAPIResponseException,) as error: + except ( + PyiCloudAPIResponseException, + PyiCloudServiceNotActivatedException, + ) as error: raise PyiCloudServiceUnavailable( "Reminders service not available" ) from error @@ -1073,6 +1084,27 @@ def drive(self) -> DriveService: ) from error return self._drive + @property + def notes(self) -> NotesService: + """Gets the 'Notes' service.""" + if not self._notes: + try: + service_root: str = self.get_webservice_url("ckdatabasews") + self._notes = NotesService( + service_root=service_root, + session=self.session, + params=self.params, + cloudkit_validation_extra=self._cloudkit_validation_extra, + ) + except ( + PyiCloudAPIResponseException, + PyiCloudServiceNotActivatedException, + ) as error: + raise PyiCloudServiceUnavailable( + "Notes service not available" + ) from error + return self._notes + @property def account_name(self) -> str: """Retrieves the account name associated with the Apple ID.""" diff --git a/pyicloud/common/__init__.py b/pyicloud/common/__init__.py new file mode 100644 index 00000000..f7eb1290 --- /dev/null +++ b/pyicloud/common/__init__.py @@ -0,0 +1,5 @@ +"""Shared model bases and helpers for pyicloud.""" + +from .models import FrozenServiceModel, MutableServiceModel, ServiceModel + +__all__ = ["ServiceModel", "FrozenServiceModel", "MutableServiceModel"] diff --git a/pyicloud/common/cloudkit/__init__.py b/pyicloud/common/cloudkit/__init__.py new file mode 100644 index 00000000..a57696b7 --- /dev/null +++ b/pyicloud/common/cloudkit/__init__.py @@ -0,0 +1,69 @@ +"""Shared CloudKit models and utilities.""" + +from .base import CKModel, CloudKitExtraMode, resolve_cloudkit_validation_extra +from .models import ( + CKErrorItem, + CKFieldOpen, + CKFVInt64, + CKFVReference, + CKFVString, + CKLookupDescriptor, + CKLookupRequest, + 
CKLookupResponse, + CKModifyOperation, + CKModifyRequest, + CKModifyResponse, + CKQueryFilterBy, + CKQueryObject, + CKQueryRequest, + CKQueryResponse, + CKQuerySortBy, + CKRecord, + CKReference, + CKTombstoneRecord, + CKWriteFields, + CKWriteParent, + CKWriteRecord, + CKZoneChangesRequest, + CKZoneChangesResponse, + CKZoneChangesZone, + CKZoneChangesZoneReq, + CKZoneID, + CKZoneIDReq, + KnownCKField, +) + +__all__ = [ + "CKModel", + "CloudKitExtraMode", + "CKErrorItem", + "CKFieldOpen", + "CKFVInt64", + "CKFVReference", + "CKFVString", + "CKLookupDescriptor", + "CKLookupRequest", + "CKLookupResponse", + "CKModifyOperation", + "CKModifyRequest", + "CKModifyResponse", + "CKQueryFilterBy", + "CKQueryObject", + "CKQueryRequest", + "CKQueryResponse", + "CKQuerySortBy", + "CKRecord", + "CKReference", + "CKTombstoneRecord", + "CKWriteFields", + "CKWriteParent", + "CKWriteRecord", + "CKZoneChangesRequest", + "CKZoneChangesResponse", + "CKZoneChangesZone", + "CKZoneChangesZoneReq", + "CKZoneID", + "CKZoneIDReq", + "KnownCKField", + "resolve_cloudkit_validation_extra", +] diff --git a/pyicloud/common/cloudkit/base.py b/pyicloud/common/cloudkit/base.py new file mode 100644 index 00000000..3aa1ae4d --- /dev/null +++ b/pyicloud/common/cloudkit/base.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import os +from typing import Literal, cast + +from pydantic import BaseModel, ConfigDict + +CloudKitExtraMode = Literal["allow", "ignore", "forbid"] + + +def resolve_cloudkit_validation_extra( + explicit: CloudKitExtraMode | None = None, + *, + default: CloudKitExtraMode = "allow", +) -> CloudKitExtraMode: + """ + Resolve the validation mode for CloudKit wire models. + + ``PYICLOUD_CK_EXTRA`` accepts ``allow``, ``ignore``, or ``forbid``. + Convenience booleans remain supported for local debugging: + - ``true/1/on/strict`` -> ``forbid`` + - ``false/0/off/lenient`` -> ``allow`` + + ``explicit`` takes precedence over the environment. 
+ """ + if explicit is not None: + return explicit + + raw = (os.getenv("PYICLOUD_CK_EXTRA") or default).strip().lower() + + if raw in {"allow", "forbid", "ignore"}: + return cast(CloudKitExtraMode, raw) + + if raw in {"1", "true", "yes", "on", "strict"}: + return "forbid" + if raw in {"0", "false", "no", "off", "lenient"}: + return "allow" + + return default + + +class CKModel(BaseModel): + """ + Shared base model for CloudKit wire payloads. + + Wire models stay permissive by default so unexpected Apple fields are + preserved. Strict reverse-engineering mode is applied at validation call + sites via ``model_validate(..., extra="forbid")``. + """ + + model_config = ConfigDict( + extra="allow", + arbitrary_types_allowed=True, + ) + + +__all__ = ["CKModel", "CloudKitExtraMode", "resolve_cloudkit_validation_extra"] diff --git a/pyicloud/common/cloudkit/models.py b/pyicloud/common/cloudkit/models.py new file mode 100644 index 00000000..285b3455 --- /dev/null +++ b/pyicloud/common/cloudkit/models.py @@ -0,0 +1,940 @@ +""" +CloudKit “wire” models for /records/query requests & responses. +- Response models (records) + refined request models (query payloads). +""" + +from __future__ import annotations + +from datetime import datetime, timezone +from enum import Enum +from typing import Annotated, Dict, List, Literal, Optional, Union + +from pydantic import ( + Base64Bytes, + BeforeValidator, + Field, + JsonValue, + PlainSerializer, + RootModel, + TypeAdapter, + WithJsonSchema, + field_validator, + model_validator, +) + +from .base import CKModel + +# --------------------------------------------------------------------------- +# Shared helpers +# --------------------------------------------------------------------------- + +# Year-zero/ancient timestamp handling: normalize to None for Optional[datetime] fields. +# Python datetime supports years 1..9999 (no year 0). Some APIs use ancient ms values +# as "not set" sentinels; we treat those as None during parsing. 
# Canonical "not set" handling for millisecond timestamps.
CANONICAL_MIN_MS = -62135596800000  # 0001-01-01T00:00:00Z as ms since the Unix epoch
# Known "zero" sentinels observed on the wire. Kept for documentation: every
# member is <= CANONICAL_MIN_MS, so the parser below needs only one comparison.
SENTINEL_ZERO_MS: set[int] = {
    CANONICAL_MIN_MS,
    -62135769600000,  # observed in CloudKit responses (approx 2 days earlier)
}


def _from_millis_or_none(v):
    """Parse milliseconds-since-epoch into an aware UTC datetime, or ``None``.

    Accepts int/float or signed numeric strings. "Year 1" sentinels, values at
    or below ``CANONICAL_MIN_MS``, unparseable strings, and out-of-range
    timestamps all map to ``None`` so one malformed record does not fail an
    entire page. Non-numeric, non-string input raises ``ValueError``.
    """
    if isinstance(v, (int, float)):
        iv = int(v)
    elif isinstance(v, str):
        sv = v.strip()
        if sv.startswith("0001-01-01"):
            # ISO-like sentinel for year 1 -> treat as None
            return None
        try:
            iv = int(sv)
        except (TypeError, ValueError, OverflowError):
            return None
    else:
        raise ValueError("Expected milliseconds since epoch as int or numeric string")
    # The <= comparison subsumes every known sentinel in SENTINEL_ZERO_MS,
    # so no separate set-membership test is needed.
    if iv <= CANONICAL_MIN_MS:
        return None
    try:
        return datetime.fromtimestamp(iv / 1000.0, tz=timezone.utc)
    except (ValueError, OSError, OverflowError):
        # Some Reminders records contain out-of-range TIMESTAMP values
        # (e.g. malformed/legacy DueDate). Treat these as unset so one bad
        # record does not fail the entire page.
        return None


def _to_millis(dt: datetime) -> int:
    """Serialize a datetime to whole milliseconds since the Unix epoch.

    Naive datetimes are coerced to UTC rather than raising.
    """
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)


MillisDateTime = Annotated[
    datetime,
    BeforeValidator(_from_millis_or_none),
    PlainSerializer(_to_millis, return_type=int, when_used="json"),
    WithJsonSchema({"type": "integer", "description": "milliseconds since Unix epoch"}),
]

# Nullable variant used for wrappers that can legitimately carry "no timestamp".
MillisDateTimeOrNone = Annotated[
    Optional[datetime],
    BeforeValidator(lambda v: None if v is None else _from_millis_or_none(v)),
    PlainSerializer(
        lambda v: None if v is None else _to_millis(v),
        return_type=int,
        when_used="json",
    ),
    WithJsonSchema(
        {
            "type": ["integer", "null"],
            "description": "milliseconds since Unix epoch or null sentinel",
        }
    ),
]


def _from_secs_or_millis(v):
    """Parse epoch seconds (or, defensively, milliseconds) into a UTC datetime.

    Some top-level properties (e.g. CKRecord.expirationTime) arrive as
    seconds-since-epoch in this API; millisecond values are tolerated too in
    case Apple changes shape. Unparseable or out-of-range input maps to None.
    """
    if isinstance(v, (int, float)):
        iv = int(v)
    elif isinstance(v, str):
        try:
            iv = int(v.strip())
        except (TypeError, ValueError, OverflowError):
            return None
    else:
        raise ValueError("Expected seconds or milliseconds since epoch as int/str")
    try:
        # Heuristic: values < 1e11 are seconds (covers dates up to ~5138 CE)
        if abs(iv) < 100_000_000_000:
            return datetime.fromtimestamp(iv, tz=timezone.utc)
        # Otherwise treat as milliseconds
        return datetime.fromtimestamp(iv / 1000.0, tz=timezone.utc)
    except (ValueError, OSError, OverflowError):
        return None


def _to_secs(dt: datetime) -> int:
    """Serialize a datetime to whole seconds since the Unix epoch (naive -> UTC)."""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return int(dt.timestamp())


SecsOrMillisDateTime = Annotated[
    datetime,
    BeforeValidator(_from_secs_or_millis),
    PlainSerializer(_to_secs, return_type=int, when_used="json"),
    WithJsonSchema({"type": "integer", "description": "seconds since Unix epoch"}),
]
# ---------------------------------------------------------------------------
# CloudKit primitives shared by request & response
# ---------------------------------------------------------------------------


class CKZoneID(CKModel):
    """Zone identifier as it appears in both requests and responses."""

    zoneName: str
    ownerRecordName: Optional[str] = None
    zoneType: Optional[str] = None


class CKAuditInfo(CKModel):
    """
    Appears as `created` / `modified` at the record level (response).
    """

    timestamp: MillisDateTime
    userRecordName: Optional[str] = None
    deviceID: Optional[str] = None


class CKParent(CKModel):
    """Parent reference embedded under a record (response side)."""

    recordName: str


class CKStableUrl(CKModel):
    """Stable-URL envelope surfaced on shared records."""

    routingKey: Optional[str] = None
    shortTokenHash: Optional[str] = None
    protectedFullToken: Optional[str] = None
    encryptedPublicSharingKey: Optional[str] = None
    displayedHostname: Optional[str] = None


class CKChainProtectionInfo(CKModel):
    """End-to-end-encryption protection envelope."""

    bytes: Optional[Base64Bytes] = None  # base64 string as seen on wire
    pcsChangeTag: Optional[str] = None


# ---------------------------------------------------------------------------
# Share-surface models
# ---------------------------------------------------------------------------


class CKShare(CKModel):
    """
    Minimal share reference as seen embedded under a record's top-level `share` key.
    Observed nested payloads only surfaced `recordName` and `zoneID` here.
    Keep this coarse for now; we can expand with shortGUID/shortTokenHash, etc.,
    if they appear nested here in future payloads.
    """

    recordName: Optional[str] = None
    zoneID: Optional[CKZoneID] = None


class CKNameComponents(CKModel):
    """Split personal-name components for a share participant."""

    givenName: Optional[str] = None
    familyName: Optional[str] = None


class CKLookupInfo(CKModel):
    """Contact handle used to look up a share participant."""

    emailAddress: Optional[str] = None
    phoneNumber: Optional[str] = None


class CKUserIdentity(CKModel):
    """Identity block attached to a share participant."""

    userRecordName: Optional[str] = None
    nameComponents: Optional[CKNameComponents] = None
    lookupInfo: Optional[CKLookupInfo] = None


class CKParticipantProtectionInfo(CKChainProtectionInfo):
    """Participant-scoped protectionInfo envelope."""


class CKParticipant(CKModel):
    """One participant entry in shared-record metadata."""

    participantId: Optional[str] = None
    userIdentity: Optional[CKUserIdentity] = None
    type: Optional[str] = None
    acceptanceStatus: Optional[str] = None
    permission: Optional[str] = None
    customRole: Optional[str] = None
    isApprovedRequester: Optional[bool] = None
    orgUser: Optional[bool] = None
    publicKeyVersion: Optional[int] = None
    outOfNetworkPrivateKey: Optional[str] = None
    outOfNetworkKeyType: Optional[int] = None
    protectionInfo: Optional[CKParticipantProtectionInfo] = None


class CKPCSInfo(CKChainProtectionInfo):
    """Top-level PCS envelope used by shared-record metadata."""


class CKReference(CKModel):
    """
    Value inside REFERENCE / REFERENCE_LIST typed fields (both request & response).
    """

    recordName: str
    action: Optional[str] = None  # e.g., "VALIDATE"
    zoneID: Optional[CKZoneID] = None


# ---------------------------------------------------------------------------
# Response-side: typed field wrappers under record.fields
# ---------------------------------------------------------------------------


class _CKFieldBase(CKModel):
    # Every field wrapper carries a 'type' discriminator and a 'value'.
    # Subclasses declare type: Literal[...] for the discriminator.
    pass  # no shared fields; each subclass adds its 'type' tag and 'value'


class CKTimestampField(_CKFieldBase):
    """TIMESTAMP wrapper; "zero"-millisecond sentinels decode to ``None``."""

    type: Literal["TIMESTAMP"]
    value: (
        MillisDateTimeOrNone  # Apple sometimes sends a "zero" ms sentinel; map to None
    )


class CKInt64Field(_CKFieldBase):
    """INT64 wrapper."""

    type: Literal["INT64"]
    value: int


class CKEncryptedBytesField(_CKFieldBase):
    """ENCRYPTED_BYTES wrapper (base64 on the wire)."""

    type: Literal["ENCRYPTED_BYTES"]
    value: Base64Bytes


class CKReferenceField(_CKFieldBase):
    """REFERENCE wrapper."""

    type: Literal["REFERENCE"]
    value: Optional[CKReference]


class CKReferenceListField(_CKFieldBase):
    """REFERENCE_LIST wrapper."""

    type: Literal["REFERENCE_LIST"]
    value: List[CKReference]


# Occasionally CloudKit also uses STRING-typed wrappers at the `fields` level;
# keep support here for completeness.
class CKStringField(_CKFieldBase):
    type: Literal["STRING"]
    value: Optional[str]
    isEncrypted: Optional[bool] = None  # seen on some STRING wrappers (lookup)


class CKStringListField(_CKFieldBase):
    type: Literal["STRING_LIST"]
    value: List[str]


# Asset thumbnails / tokens (e.g. FirstAttachmentThumbnail)
class CKAssetToken(CKModel):
    # Keep as str to preserve exact wire representation.
    fileChecksum: Optional[str] = None
    referenceChecksum: Optional[str] = None
    wrappingKey: Optional[str] = None
    downloadURL: Optional[str] = None
    downloadedData: Optional[Base64Bytes] = None
    size: Optional[int] = None


class CKAssetIDField(_CKFieldBase):
    type: Literal["ASSETID"]
    value: CKAssetToken


# Optional but seen in other CK APIs
class CKAssetField(_CKFieldBase):
    type: Literal["ASSET"]
    value: CKAssetToken


class CKDoubleField(_CKFieldBase):
    type: Literal["DOUBLE"]
    value: float
    # AlarmTrigger latitude/longitude in Reminders can be encrypted doubles.
    isEncrypted: Optional[bool] = None


class CKBytesField(_CKFieldBase):
    # Raw bytes seen on wire (e.g., LastViewedTimestamp, CryptoPassphraseVerifier)
    type: Literal["BYTES"]
    value: Base64Bytes


class CKDoubleListField(_CKFieldBase):
    type: Literal["DOUBLE_LIST"]
    value: List[float]


class CKInt64ListField(_CKFieldBase):
    type: Literal["INT64_LIST"]
    value: List[int]


class CKAssetIDListField(_CKFieldBase):
    # e.g., PreviewImages, PaperAssets (most cases)
    type: Literal["ASSETID_LIST"]
    value: List[CKAssetToken]


class CKUnknownListField(_CKFieldBase):
    # Extremely rare: observed on some PaperAssets payloads as UNKNOWN_LIST.
    type: Literal["UNKNOWN_LIST"]
    value: List[JsonValue]  # keep generic to be future-proof


class CKPassthroughField(_CKFieldBase):
    # Open fallback for wrappers whose 'type' tag is not modeled above.
    type: str
    value: JsonValue


# One source of truth for known CloudKit field 'type' tags.
KNOWN_TAGS: frozenset[str] = frozenset(
    {
        "TIMESTAMP",
        "INT64",
        "ENCRYPTED_BYTES",
        "REFERENCE",
        "REFERENCE_LIST",
        "STRING",
        "STRING_LIST",
        "ASSETID",
        "ASSET",
        "DOUBLE",
        "BYTES",
        "DOUBLE_LIST",
        "INT64_LIST",
        "ASSETID_LIST",
        "UNKNOWN_LIST",
    }
)


# Discriminated union over all known field wrapper types we saw/anticipate.
# Split into (a) a known, literal-tagged union and (b) an open wrapper that
# gracefully falls back to CKPassthroughField for unknown tags.
KnownCKField = Annotated[
    Union[
        CKTimestampField,
        CKInt64Field,
        CKEncryptedBytesField,
        CKReferenceField,
        CKReferenceListField,
        CKStringField,
        CKStringListField,
        CKAssetIDField,
        CKAssetField,
        CKDoubleField,
        CKBytesField,
        CKDoubleListField,
        CKInt64ListField,
        CKAssetIDListField,
        CKUnknownListField,
    ],
    Field(discriminator="type"),
]

# Tag -> model map used for explicit dispatch in CKFieldOpen._dispatch_before.
_KNOWN_CK_FIELD_MODELS: dict[str, type[_CKFieldBase]] = {
    "TIMESTAMP": CKTimestampField,
    "INT64": CKInt64Field,
    "ENCRYPTED_BYTES": CKEncryptedBytesField,
    "REFERENCE": CKReferenceField,
    "REFERENCE_LIST": CKReferenceListField,
    "STRING": CKStringField,
    "STRING_LIST": CKStringListField,
    "ASSETID": CKAssetIDField,
    "ASSET": CKAssetField,
    "DOUBLE": CKDoubleField,
    "BYTES": CKBytesField,
    "DOUBLE_LIST": CKDoubleListField,
    "INT64_LIST": CKInt64ListField,
    "ASSETID_LIST": CKAssetIDListField,
    "UNKNOWN_LIST": CKUnknownListField,
}


# ---------------------------------------------------------------------------
# Record and response
# ---------------------------------------------------------------------------


class CKFieldOpen(RootModel[Union[KnownCKField, CKPassthroughField]]):
    """
    Public API for field wrappers:
    - `.value` (preferred): the decoded inner `value` of the CK wrapper
    - `.type_tag`: the CloudKit `type` string (e.g., "TIMESTAMP", "INT64_LIST")
    - `.unwrap()`: return the inner typed wrapper instance (e.g., `CKTimestampField`)

    Implementation detail: `.root` is internal — avoid relying on it outside this class.
    """

    # v2 root models name the inner value "root"
    root: Union[KnownCKField, CKPassthroughField]

    @property
    def value(self):
        # unified way to read the inner 'value' without touching .root
        return getattr(self.root, "value", None)

    @property
    def type_tag(self) -> Optional[str]:
        # useful when inspecting unknown/passthrough fields
        return getattr(self.root, "type", None)

    def unwrap(self):
        """Return the inner typed wrapper (e.g., CKTimestampField).

        Public escape hatch; prefer `.value` for most use-cases.
        """
        return self.root

    @model_validator(mode="before")
    @classmethod
    def _dispatch_before(cls, obj):
        """
        Ensure nested contexts (e.g., values inside Dict[str, CKFieldOpen]) use the same
        discriminator-based dispatch as our explicit model_validate(...) call.

        IMPORTANT: For RootModel, 'before' must return the *underlying* value, not
        a dict like {'root': ...}; returning a dict breaks the discriminated union
        when this model is nested.
        """
        t = obj.get("type") if isinstance(obj, dict) else None

        # Already an instance of one of our wrapper models -> keep as-is.
        if isinstance(obj, _CKFieldBase):
            return obj

        # Known wrappers: coerce to the exact discriminated member instance
        if isinstance(obj, dict) and t in KNOWN_TAGS:
            return _KNOWN_CK_FIELD_MODELS[t].model_validate(obj)

        # Explicit but unknown wrapper -> passthrough instance
        if isinstance(obj, dict) and "type" in obj and "value" in obj:
            return CKPassthroughField.model_validate(obj)

        # Fallback: wrap whatever came in as passthrough
        return CKPassthroughField(type=str(t) if t else "UNKNOWN", value=obj)


# Reusable adapter so per-item validation does not rebuild the core schema.
_CK_FIELD_OPEN_ADAPTER = TypeAdapter(CKFieldOpen)


class CKFields(dict[str, CKFieldOpen]):
    """
    Dict-like container that also allows attribute access, e.g.:
        rec.fields.ModificationDate.value
    Falls back to normal dict behavior for [] access.

    Public surface:
    - Attribute access: `rec.fields.<FieldName>.value`
    - Mapping access: `rec.fields["FieldName"].value`
    - Helpers: `get_field()`, `get_value()`

    Implementation detail: `.root` is **internal**; client code should not use it.
    Use `.unwrap()` if you need the inner typed wrapper for `isinstance` checks.
    """

    def __getattr__(self, name: str) -> CKFieldOpen:
        # Only consulted when normal attribute lookup fails; surface missing
        # field names as AttributeError so getattr()/hasattr() behave normally.
        try:
            return dict.__getitem__(self, name)
        except KeyError as e:
            raise AttributeError(name) from e

    def __dir__(self):
        # Expose field names to dir()/autocomplete alongside dict attributes.
        base = set(super().__dir__())
        return sorted(base | set(self.keys()))

    def get_field(self, key: str):
        """Return the inner typed wrapper for ``key``, or None if absent."""
        f = self.get(key)
        if f is None:
            return None
        # Use public API; avoid touching `.root` here.
        return f.unwrap() if hasattr(f, "unwrap") else f

    def get_value(self, key: str):
        """Return the decoded inner value for ``key``, or None if absent."""
        f = self.get_field(key)
        return None if f is None else getattr(f, "value", None)


def _coerce_field_mapping(v, mapping_cls):
    """Validate a raw field mapping into the requested CK field container.

    Non-dict, non-container input is returned unchanged so pydantic can
    report the type error itself.
    """
    if isinstance(v, mapping_cls):
        return v
    if isinstance(v, dict):
        return mapping_cls(
            {k: _CK_FIELD_OPEN_ADAPTER.validate_python(val) for k, val in v.items()}
        )
    return v


# ---------------------------------------------------------------------------
# Record and response
# ---------------------------------------------------------------------------


class CKRecord(CKModel):
    """
    A CloudKit record as returned in /records/query (or other endpoints).

    The 'fields' map contains app-level fields by **PascalCase** names
    (e.g., TitleEncrypted, ModificationDate, Deleted, Folder, Folders, ...),
    each wrapped in a CKField type above.
+ """ + + recordName: str + recordType: str # allow unknown record types + + # App-level fields (typed wrappers) + fields: CKFields = Field(default_factory=CKFields) + + @field_validator("fields", mode="before") + @classmethod + def _coerce_fields(cls, v): + """ + Ensure the mapping is validated item-by-item to CKFieldOpen + and wrapped in CKFields to enable attribute access DX. + """ + return _coerce_field_mapping(v, CKFields) + + @model_validator(mode="after") + def _validate_encrypted_fields(self): + """Validate encrypted-field wrappers against observed CloudKit shapes. + + Most `*Encrypted` fields use ENCRYPTED_BYTES, but shared CloudKit + records can legitimately carry STRING wrappers with `isEncrypted=true`. + """ + for key, wrapper in self.fields.items(): + if not isinstance(key, str) or not key.endswith("Encrypted"): + continue + + inner = wrapper.unwrap() if hasattr(wrapper, "unwrap") else wrapper + tag = getattr(inner, "type", None) + + if tag == "ENCRYPTED_BYTES": + continue + + if tag == "STRING" and getattr(inner, "isEncrypted", False) is True: + continue + + raise ValueError( + f"Field '{key}' on recordType {self.recordType!r} must be " + "ENCRYPTED_BYTES or STRING with isEncrypted=true, " + f"got {tag!r}" + ) + + return self + + # Often present, often empty object + pluginFields: Dict[str, JsonValue] = Field(default_factory=dict) + + # Record metadata + recordChangeTag: Optional[str] = None + created: Optional[CKAuditInfo] = None + modified: Optional[CKAuditInfo] = None + deleted: Optional[bool] = None + + zoneID: Optional[CKZoneID] = None + parent: Optional[CKParent] = None + + # Sharing/identity/exposure + displayedHostname: Optional[str] = None + stableUrl: Optional[CKStableUrl] = None + shortGUID: Optional[str] = None + + # Share-surface (top-level shared metadata) + share: Optional[CKShare] = None + publicPermission: Optional[str] = None + participants: Optional[List[CKParticipant]] = None + requesters: Optional[List[CKParticipant]] = None + 
blocked: Optional[List[CKParticipant]] = None + denyAccessRequests: Optional[bool] = None + owner: Optional[CKParticipant] = None + currentUserParticipant: Optional[CKParticipant] = None + invitedPCS: Optional[CKPCSInfo] = None + selfAddedPCS: Optional[CKPCSInfo] = None + shortTokenHash: Optional[str] = None + + # End-to-end encryption metadata (optional) + chainProtectionInfo: Optional[CKChainProtectionInfo] = None + chainParentKey: Optional[str] = None + chainPrivateKey: Optional[str] = None + + # Observed on InlineAttachment records as numeric seconds since epoch + expirationTime: Optional[SecsOrMillisDateTime] = None + + +class CKWriteParent(CKModel): + """Write-side parent reference embedded under a record.""" + + recordName: str + + +class CKWriteFields(CKFields): + """Write-side field mapping for modify requests.""" + + +class CKWriteRecord(CKModel): + """ + CloudKit record shape used in modify requests. + + Keep this narrower than CKRecord: only request-side fields that are + actually serialized on writes belong here. + """ + + recordName: str + recordType: str + fields: CKWriteFields = Field(default_factory=CKWriteFields) + pluginFields: Dict[str, JsonValue] = Field(default_factory=dict) + recordChangeTag: Optional[str] = None + parent: Optional[CKWriteParent] = None + zoneID: Optional[CKZoneID] = None + + @field_validator("fields", mode="before") + @classmethod + def _coerce_fields(cls, v): + return _coerce_field_mapping(v, CKWriteFields) + + +# --------------------------------------------------------------------------- +# Error items mixed into records[] on failure +# --------------------------------------------------------------------------- +class CKErrorItem(CKModel): + """ + Error item possibly present inside `records[]` when a per-record operation fails. + Additional server-provided properties will be preserved via CKModel(extra="allow"). 
+ """ + + serverErrorCode: str + reason: Optional[str] = None + recordName: Optional[str] = None + + +# --------------------------------------------------------------------------- +# Tombstone record for deleted entries +# --------------------------------------------------------------------------- +class CKTombstoneRecord(CKModel): + """ + A 'tombstone' entry returned by CloudKit to indicate a deleted record. + Tombstones intentionally omit `recordType` and `fields` — they only assert + that a record with `recordName` existed but has since been deleted. + Additional server-provided properties will be preserved via CKModel(extra="allow"). + """ + + recordName: str + deleted: Literal[True] + zoneID: Optional[CKZoneID] = None + + +class CKQueryResponse(CKModel): + """ + Top-level response from /records/query: + - records: list of CKRecord + - continuationMarker: optional paging token (present if more results exist) + """ + + records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] = Field( + default_factory=list + ) + continuationMarker: Optional[str] = None + # When getCurrentSyncToken=true is passed, server also returns a top-level syncToken + # Include it for strict validation; clients can ignore if not needed. + syncToken: Optional[str] = None + + +# --------------------------------------------------------------------------- +# Request-side: /records/query payloads (refined) +# --------------------------------------------------------------------------- + + +# Comparators seen on the wire. Keep Union[str, Enum] to be forward-compatible. 
class CKComparator(str, Enum):
    """Filter comparators observed in /records/query payloads."""

    EQUALS = "EQUALS"
    IN_ = "IN"  # underscore suffix avoids confusion with the lowercase 'in' keyword ("IN" itself would be a legal name)
    CONTAINS_ANY = "CONTAINS_ANY"
    LESS_THAN = "LESS_THAN"
    LESS_THAN_OR_EQUALS = "LESS_THAN_OR_EQUALS"
    GREATER_THAN = "GREATER_THAN"
    GREATER_THAN_OR_EQUALS = "GREATER_THAN_OR_EQUALS"
    BEGINS_WITH = "BEGINS_WITH"
    # Add more as you encounter them


# FieldValue typed wrappers (request side) — discriminated by 'type'
class _CKFilterValueBase(CKModel):
    pass  # Subclasses declare type: Literal[...] for the discriminator.


class CKFVString(_CKFilterValueBase):
    type: Literal["STRING"]
    value: str


class CKFVInt64(_CKFilterValueBase):
    type: Literal["INT64"]
    value: int


class CKFVStringList(_CKFilterValueBase):
    type: Literal["STRING_LIST"]
    value: List[str]


class CKFVReference(_CKFilterValueBase):
    type: Literal["REFERENCE"]
    value: CKReference  # zoneID is optional in observed payloads


class CKFVReferenceList(_CKFilterValueBase):
    type: Literal["REFERENCE_LIST"]
    value: List[CKReference]


# Discriminated union of request-side filter values.
CKFilterValue = Annotated[
    Union[
        CKFVString,
        CKFVInt64,
        CKFVStringList,
        CKFVReference,
        CKFVReferenceList,
    ],
    Field(discriminator="type"),
]


class CKQuerySortBy(CKModel):
    """
    Sort directive. Example:
        {"fieldName": "modTime", "ascending": false}
    """

    fieldName: str
    ascending: Optional[bool] = None


class CKQueryFilterBy(CKModel):
    """
    Filter clause. Examples:

    STRING equality:
        {"comparator": "EQUALS",
         "fieldName": "indexName",
         "fieldValue": {"value": "recents", "type": "STRING"}}

    REFERENCE equality:
        {"comparator": "EQUALS",
         "fieldName": "reference",
         "fieldValue": {"value": {"recordName": "...", "action": "VALIDATE"},
                        "type": "REFERENCE"}}
    """

    comparator: Union[CKComparator, str]  # str fallback keeps unknown comparators parseable
    fieldName: str
    fieldValue: CKFilterValue


class CKQueryObject(CKModel):
    """
    The 'query' object inside the request.

    recordType can be an app-defined pseudo type like "SearchIndexes" or "pinned"
    or a real record type.
    """

    recordType: str
    filterBy: Optional[List[CKQueryFilterBy]] = None
    sortBy: Optional[List[CKQuerySortBy]] = None


# Request side (only what you actually send on the wire)
class CKZoneIDReq(CKModel):
    """Zone identifier as serialized in request payloads."""

    zoneName: str
    zoneType: Optional[str] = None
    ownerRecordName: Optional[str] = None


class CKQueryRequest(CKModel):
    """
    Top-level /records/query request payload.
    """

    query: CKQueryObject
    zoneID: CKZoneIDReq
    desiredKeys: Optional[List[str]] = None  # can include duplicates; keep order
    resultsLimit: Optional[int] = None
    # Observed as a base64-like string on the wire; keep as str for strictness
    continuationMarker: Optional[str] = None


class CKLookupDescriptor(CKModel):
    """Single record descriptor inside a /records/lookup request."""

    recordName: str


# ---------------------------------------------------------------------------
# Request-side: /records/lookup payloads
# ---------------------------------------------------------------------------


class CKLookupRequest(CKModel):
    """Top-level /records/lookup request payload."""

    records: List[CKLookupDescriptor]
    zoneID: CKZoneIDReq
    desiredKeys: Optional[List[str]] = None


class CKLookupResponse(CKModel):
    """Top-level /records/lookup response envelope."""

    records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]]
    # Server returns a top-level syncToken when getCurrentSyncToken=true
    syncToken: Optional[str] = None


# ---------------------------------------------------------------------------
# Response-side: /changes/zone responses (delta sync)
# ---------------------------------------------------------------------------


class CKZoneChangesZone(CKModel):
    """
    One zone entry inside the /changes/zone response.

    Observed shape:
    - Always has: records[], zoneID, syncToken
    - moreComing is present but sometimes null (treat as Optional[bool])
    """

    records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] = Field(
        default_factory=list
    )
    moreComing: Optional[bool] = None
    syncToken: str
    zoneID: CKZoneID


class CKZoneChangesResponse(CKModel):
    """
    Top-level envelope for /private/changes/zone (and /shared/changes/zone) responses.
    """

    zones: List[CKZoneChangesZone] = Field(default_factory=list)


# ---------------------------------------------------------------------------
# Request-side: /changes/zone payloads
# ---------------------------------------------------------------------------


class CKZoneChangesZoneReq(CKModel):
    """
    One zone request entry for /changes/zone.

    Observed keys:
    - zoneID: includes zoneName (e.g., "Notes" or "Reminders"), sometimes zoneType and ownerRecordName (for shared)
    - desiredKeys: list of field names to project (duplicates allowed, order preserved)
    - desiredRecordTypes: list of record types to include
    - syncToken: optional paging token (base64-like string)
    - reverse: optional bool
    """

    zoneID: CKZoneID  # allow ownerRecordName/zoneType when present
    desiredKeys: Optional[List[str]] = None
    desiredRecordTypes: Optional[List[str]] = None
    # Observed as a base64-like string on the wire; keep as str for strictness
    syncToken: Optional[str] = None
    reverse: Optional[bool] = None


class CKZoneChangesRequest(CKModel):
    """Top-level /changes/zone request payload."""

    zones: List[CKZoneChangesZoneReq]
    resultsLimit: Optional[int] = None


# ---------------------------------------------------------------------------
# Request-side: /records/modify payloads
# ---------------------------------------------------------------------------


class CKModifyOperation(CKModel):
    """One operation entry in a /records/modify request."""

    operationType: Literal[
        "create",
        "update",
        "forceUpdate",
        "replace",
        "forceReplace",
        "delete",
        "forceDelete",
    ]
    record: CKWriteRecord
+ +class CKModifyRequest(CKModel): + operations: List[CKModifyOperation] + zoneID: CKZoneIDReq + atomic: Optional[bool] = None + + +class CKModifyResponse(CKModel): + records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] = Field( + default_factory=list + ) + syncToken: Optional[str] = None diff --git a/pyicloud/common/models.py b/pyicloud/common/models.py new file mode 100644 index 00000000..9affa6da --- /dev/null +++ b/pyicloud/common/models.py @@ -0,0 +1,37 @@ +"""Shared strict base models for application-facing service/domain objects.""" + +from __future__ import annotations + +from pydantic import BaseModel, ConfigDict + + +class ServiceModel(BaseModel): + """Strict base for pyicloud's public service/domain models.""" + + model_config = ConfigDict( + extra="forbid", + arbitrary_types_allowed=False, + ) + + +class FrozenServiceModel(ServiceModel): + """Strict immutable base for public read-only models.""" + + model_config = ConfigDict( + extra="forbid", + arbitrary_types_allowed=False, + frozen=True, + ) + + +class MutableServiceModel(ServiceModel): + """Strict mutable base that validates assignments after construction.""" + + model_config = ConfigDict( + extra="forbid", + arbitrary_types_allowed=False, + validate_assignment=True, + ) + + +__all__ = ["FrozenServiceModel", "MutableServiceModel", "ServiceModel"] diff --git a/pyicloud/models/services/account/account_models.py b/pyicloud/models/services/account/account_models.py new file mode 100644 index 00000000..129e6eef --- /dev/null +++ b/pyicloud/models/services/account/account_models.py @@ -0,0 +1,420 @@ +""" +Pydantic models for the Account service. 
+ +Models for these operations: + - {self.service_root}/setup/web/device/getDevices + - {self.service_root}/setup/web/family/getFamilyDetails + - {self.service_root}/setup/ws/1/storageUsageInfo + - {self._gateway_root}/v1/accounts/{dsid}/plans/icloud/pricing + - {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features/cloud.storage/plan-summary + - {self._gateway_root}/v1/accounts/{dsid}/plans/next-larger-plan + - {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features + - {self._gateway_root}/v4/accounts/{dsid}/subscriptions/features + - +""" + +from datetime import datetime +from typing import List, Literal, Optional + +from dateutil.parser import isoparse +from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator + +from pyicloud.utils import underscore_to_camelcase + + +# ─── Base and Shared Config ────────────────────────────────────────────────── +class ConfigModel(BaseModel): + """Base class providing camel-case aliases, population by name, and allowing extra fields.""" + + model_config = ConfigDict( + alias_generator=underscore_to_camelcase, populate_by_name=True, extra="allow" + ) + + +# ─── Constants ─────────────────────────────────────────────────────────────── + +# Example constants (anonymized) +EXAMPLE_SERIAL_MAC = "●●●●●XXXXX" +EXAMPLE_DEVICE_NAME_MAC = "User's MacBook Pro" + +# Device specification constants +EXAMPLE_MAC_OS_VERSION = "OSX;15.5" +EXAMPLE_MAC_MODEL = "MacBookPro18,4" +EXAMPLE_MAC_DISPLAY_NAME = 'MacBook Pro 14"' + + +# --- {self.service_root}/setup/web/device/getDevices ─────────────────────── + + +class AccountDevice(ConfigModel): + """Model for any account device.""" + + # Fields that are ALWAYS present (from sample data) + serial_number: str + """Device serial number (privacy-masked)""" + os_version: str + """Operating system and version (format: 'OS;version')""" + name: str + """User-assigned device name""" + model: str + """Apple's internal model identifier""" + udid: str + """Universally unique 
device identifier""" + model_display_name: str + """Human-readable model name""" + + # Device images (always present) - Keep manual aliases because uses "URL" not "Url" + model_large_photo_url1x: HttpUrl = Field(alias="modelLargePhotoURL1x") + """URL of large photo (1x)""" + model_large_photo_url2x: HttpUrl = Field(alias="modelLargePhotoURL2x") + """URL of large photo (2x)""" + model_small_photo_url1x: HttpUrl = Field(alias="modelSmallPhotoURL1x") + """URL of small photo (1x)""" + model_small_photo_url2x: HttpUrl = Field(alias="modelSmallPhotoURL2x") + """URL of small photo (2x)""" + + # Fields that MIGHT be present (observed in sample) + imei: Optional[str] = None + """International Mobile Equipment Identity (privacy-masked)""" + latest_backup: Optional[datetime] = None + """ISO timestamp of most recent backup""" + payment_methods: Optional[List[str]] = None + """List of payment method IDs associated with this device""" + + @field_validator("latest_backup", mode="before") + @classmethod + def _parse_latest_backup(cls, v): + """Parse ISO 8601 datetime string to datetime object.""" + if isinstance(v, str): + # Use dateutil for proper ISO 8601 parsing (handles "Z" suffix in Python 3.10+) + return isoparse(v) + return v + + # extra="allow" handles any other device-specific fields automatically + + +class AccountPaymentMethod(ConfigModel): + """Model for an account payment method.""" + + last_four_digits: str + """Last four digits of card/account number""" + balance_status: Literal["UNAVAILABLE", "NOTAPPLICABLE", "AVAILABLE"] + """Current balance status of the payment method""" + suspension_reason: Literal["ACTIVE", "SUSPENDED", "INACTIVE"] + """Current suspension status""" + id: str + """Unique payment method identifier""" + type: str + """Descriptive name of payment method""" + is_car_key: bool + """Whether this method can be used as a car key""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "lastFourDigits": "XXXX", 
+ "balanceStatus": "UNAVAILABLE", + "suspensionReason": "ACTIVE", + "id": "redacted", + "type": "Revolut Mastercard", + "isCarKey": False, + } + } + ) + + +class GetDevicesResponse(ConfigModel): + """Response model for the Get Devices operation.""" + + devices: List[AccountDevice] + """List of devices associated with the account""" + payment_methods: List[AccountPaymentMethod] + """List of payment methods associated with the account""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "devices": [ + { + "serialNumber": EXAMPLE_SERIAL_MAC, + "osVersion": EXAMPLE_MAC_OS_VERSION, + "name": EXAMPLE_DEVICE_NAME_MAC, + "model": EXAMPLE_MAC_MODEL, + "modelDisplayName": EXAMPLE_MAC_DISPLAY_NAME, + } + ], + "paymentMethods": [ + { + "lastFourDigits": "XXXX", + "balanceStatus": "UNAVAILABLE", + "suspensionReason": "ACTIVE", + "id": "redacted", + "type": "Revolut Mastercard", + "isCarKey": False, + } + ], + } + } + ) + + +# ─── {self.service_root}/setup/web/family/getFamilyDetails ────────────────────────────────────────────────────── + + +class FamilyMember(ConfigModel): + """Model for a family member.""" + + last_name: str + """Family member's last name""" + dsid: str + """Apple ID Directory Services identifier""" + original_invitation_email: str + """Email address used for the original family invitation""" + full_name: str + """Complete name of the family member""" + age_classification: Literal["ADULT", "CHILD", "TEEN"] + """Age classification category""" + apple_id_for_purchases: str + """Apple ID used for purchases""" + apple_id: str + """Primary Apple ID""" + family_id: str + """Identifier of the family group""" + first_name: str + """Family member's first name""" + has_parental_privileges: bool + """Whether this member has parental control privileges""" + has_screen_time_enabled: bool + """Whether Screen Time is enabled for this member""" + has_ask_to_buy_enabled: bool + """Whether Ask to Buy is enabled for this member""" 
+ has_share_purchases_enabled: bool + """Whether purchase sharing is enabled""" + has_share_my_location_enabled: bool + """Whether location sharing is enabled""" + dsid_for_purchases: str + """Directory Services ID used for purchases""" + + # Optional field - only appears for some family members + share_my_location_enabled_family_members: Optional[List[str]] = None + """List of family member DSIDs for whom location sharing is enabled""" + + +class Family(ConfigModel): + """Model for family group information.""" + + family_id: str + """Unique identifier for the family group""" + transfer_requests: List[str] + """List of pending transfer requests""" + invitations: List[str] + """List of pending family invitations""" + organizer: str + """DSID of the family organizer""" + members: List[str] + """List of family member DSIDs""" + outgoing_transfer_requests: List[str] + """List of outgoing transfer requests""" + etag: str + """Entity tag for caching/versioning""" + + +class GetFamilyDetailsResponse(ConfigModel): + """Response model for the Get Family Details operation.""" + + status_message: str = Field(alias="status-message") + """Human-readable status message""" + family_invitations: List[str] + """List of pending family invitations""" + outgoing_transfer_requests: List[str] + """List of outgoing transfer requests""" + is_member_of_family: bool + """Whether the current user is a family member""" + family: Family + """Family group information""" + family_members: List[FamilyMember] + """List of all family members""" + status: int + """Numeric status code""" + show_add_member_button: bool + """Whether to show the add member button in UI""" + + +# ─── {self.service_root}/setup/ws/1/storageUsageInfo ────────────────────────────────────────────────── + + +class StorageUsageByMedia(ConfigModel): + """Model for storage usage by media type.""" + + media_key: str + """Media type identifier (e.g., 'photos', 'backup', 'docs')""" + display_label: str + """Human-readable label for 
the media type""" + display_color: str + """Hex color code for UI display (without #)""" + usage_in_bytes: int + """Storage used by this media type in bytes""" + + +class StorageUsageInfo(ConfigModel): + """Model for overall storage usage information.""" + + comp_storage_in_bytes: int + """Complementary storage in bytes""" + used_storage_in_bytes: int + """Total used storage in bytes""" + total_storage_in_bytes: int + """Total available storage in bytes""" + commerce_storage_in_bytes: int + """Commercial storage allocation in bytes""" + + +class QuotaStatus(ConfigModel): + """Model for storage quota status information.""" + + over_quota: bool + """Whether the user is over their storage quota""" + have_max_quota_tier: bool + """Whether the user has the maximum quota tier""" + almost_full: bool = Field(alias="almost-full") + """Whether the storage is almost full""" + paid_quota: bool + """Whether the user has a paid storage quota""" + + +class StorageUsageInfoResponse(ConfigModel): + """Response model for the Get Storage Usage Info operation.""" + + storage_usage_by_media: List[StorageUsageByMedia] + """Breakdown of storage usage by media type""" + storage_usage_info: StorageUsageInfo + """Overall storage usage statistics""" + quota_status: QuotaStatus + """Storage quota status information""" + + +# --- {self._gateway_root}/v1/accounts/{dsid}/plans/icloud/pricing ────────────────────────────────────────────────────────── + + +class PricingPlansResponse(ConfigModel): + """Response model for the Get Pricing Plans operation.""" + + paid_plan: bool + """Whether this is a paid plan""" + price_for_display: str + """Formatted price string for display (e.g., '$9.99')""" + renewal_period: Literal["MONTHLY", "YEARLY"] + """Billing cycle frequency""" + + +# --- {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features/cloud.storage/plan-summary ────────────────────────────────────────────────────────── + + +class PlanInclusion(ConfigModel): + """Model for plan inclusion 
information.""" + + included_in_plan: bool + """Whether the feature is included in this plan""" + limit: Optional[int] = None + """Storage limit amount (if applicable)""" + limit_units: Optional[str] = None + """Storage limit units (e.g., 'TIB', 'GIB')""" + + +class PlanSummaryResponse(ConfigModel): + """Response model for the Get Plan Summary operation.""" + + feature_key: str + """Feature identifier (e.g., 'cloud.storage')""" + summary: PlanInclusion + """Main plan summary information""" + included_with_account_purchased_plan: PlanInclusion + """Inclusion details for account purchased plan""" + included_with_apple_one_plan: PlanInclusion + """Inclusion details for Apple One plan""" + included_with_shared_plan: PlanInclusion + """Inclusion details for shared plan""" + included_with_comped_plan: PlanInclusion + """Inclusion details for complimentary plan""" + included_with_managed_plan: PlanInclusion + """Inclusion details for managed plan""" + + +# --- {self._gateway_root}/v1/accounts/{dsid}/plans/next-larger-plan ────────────────────────────────────────────────────────── + + +class NextLargerPlanResponse(ConfigModel): + """Response model for the Get Next Larger Plan operation.""" + + parameters: str + """URL-encoded parameters for the plan purchase""" + interrupted_buy_error_codes: str + """JSON-encoded array of error codes as string""" + price_for_display: str + """Formatted price string for display (e.g., '$29.99')""" + plan_size_in_bytes: int + """Storage plan size in bytes""" + plan_name: str + """Human-readable plan name (e.g., '6 TB')""" + highest_tier_plan_name: str + """Name of the highest available tier plan""" + user_eligible_for_offer: bool + """Whether the user is eligible for this offer""" + + +# --- {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features ────────────────────────────────────────────────────────── + + +class SubscriptionV3Feature(ConfigModel): + """Model for an individual subscription feature.""" + + feature_key: str + 
"""Feature identifier (e.g., 'cloud.storage', 'home.cameras')""" + can_use: bool + """Whether the user can use this feature""" + cache_till: datetime + """ISO timestamp when this feature data expires from cache""" + limit: Optional[int] = None + """Feature limit amount (if applicable)""" + limit_units: Optional[str] = None + """Feature limit units (e.g., 'TIB', 'GIB')""" + + @field_validator("cache_till", mode="before") + @classmethod + def _parse_cache_till(cls, v): + """Parse ISO 8601 datetime string to datetime object.""" + if isinstance(v, str): + return isoparse(v) + return v + + +# Type alias for the subscription features response (array of features) +SubscriptionFeaturesResponse = List[SubscriptionV3Feature] + + +# --- {self._gateway_root}/v4/accounts/{dsid}/subscriptions/features ────────────────────────────────────────────────────────── + + +class SubscriptionV4Feature(ConfigModel): + """Model for version 4 subscription features.""" + + feature_key: str + """Feature identifier (e.g., 'apps.rsvp.create-event')""" + can_use: bool + """Whether the user can use this feature""" + cache_till: datetime + """ISO timestamp when this feature data expires from cache""" + access_token: str + """JWT access token for this feature""" + + @field_validator("cache_till", mode="before") + @classmethod + def _parse_cache_till(cls, v): + """Parse ISO 8601 datetime string to datetime object.""" + if isinstance(v, str): + return isoparse(v) + return v + + +# Type alias for the v4 subscription features response (array of features) +SubscriptionV4FeaturesResponse = List[SubscriptionV4Feature] diff --git a/pyicloud/models/services/calendar/missing_operations.md b/pyicloud/models/services/calendar/missing_operations.md new file mode 100644 index 00000000..c2659af5 --- /dev/null +++ b/pyicloud/models/services/calendar/missing_operations.md @@ -0,0 +1,128 @@ +# HTTP Operations not yet implemented in calendar.py service + 
update_calendar
alarmtriggers +RESPONSE: #TODO: determine response format for alarmtriggers + +================================================================================ + +State + +================================================================================ + +PURPOSE: Unknown purpose +METHOD: GET +URL: {service_root}/ca/state +PATH PARAMS: - #TODO: determine path params for State +QUERY PARAMS: - #TODO: determine query params for State +PAYLOAD: #TODO: determine payload format for State +RESPONSE: #TODO: determine response format for State + +================================================================================ + +serverpreferences + +================================================================================ + +PURPOSE: Unknown purpose +METHOD: POST +URL: {service_root}/ca/serverpreferences +PATH PARAMS: - #TODO: determine path params for serverpreferences +QUERY PARAMS: - #TODO: determine query params for serverpreferences +PAYLOAD: #TODO: determine payload format for serverpreferences +RESPONSE: #TODO: determine response format for serverpreferences + +================================================================================ + +Remove all events from a recurring event + +================================================================================ + +PURPOSE: Removes all events from a recurring event +METHOD: POST +URL: {service_root}/ca/events/{event.pguid}/{event.guid}\_\_20250802T100000Z/all +PATH PARAMS: - #TODO: determine path params for remove all events from a recurring event +QUERY PARAMS: - #TODO: determine query params for remove all events from a recurring event +PAYLOAD: #TODO: determine payload format for remove all events from a recurring event +RESPONSE: #TODO: determine response format for remove all events from a recurring event + +================================================================================ + +attachment + +================================================================================ + +PURPOSE: 
attach a file to an event +METHOD: POST +URL: {service_root}/ca/attachment/{event.pguid}/{event.guid} +PATH PARAMS: - #TODO: determine path params for attachment +QUERY PARAMS: - #TODO: determine query params for attachment - example: +[ +{'X-name': 'folo_logo.png'}, +{'X-type': 'image%2Fpng'}, +{'ctag': 'HwoQEgwAAQPbkyhd0AAAAAAYAxgAIhUIzZ7FhsqY69QyEPbZ8rWG86q0pAEoAEgA'}, +{'lang': 'en-US'}, +{'usertz': 'Europe%2FParis'}, +{'requestID': '132'}, +{'ifMatch': 'mdp8vzll'}, +{'startDate': '2025-07-26'}, +{'endDate': '2025-09-06'}, +{'clientBuildNumber': '2526Project38'}, +{'clientMasteringNumber': '2526B20'}, +{'clientId': '93cf465f-eb5a-4f4a-8043-f7bcbd9b57ac'}, +{'dsid': '10927495723'} +] +PAYLOAD: #TODO: determine payload format for attachment +RESPONSE: #TODO: determine response format for attachment diff --git a/pyicloud/models/services/hidemyemail/hidemyemail_models.py b/pyicloud/models/services/hidemyemail/hidemyemail_models.py new file mode 100644 index 00000000..f2739f32 --- /dev/null +++ b/pyicloud/models/services/hidemyemail/hidemyemail_models.py @@ -0,0 +1,661 @@ +# Start of Selection +""" +Pydantic models for the HideMyEmail service. 
+ +Models for these operations: + - Generate new email aliases + - Reserve specific aliases + - List all existing aliases + - Get alias details by ID + - Update alias metadata (label, note) + - Delete aliases + - Deactivate aliases + - Reactivate aliases +""" + +from datetime import datetime, timezone +from typing import Annotated, Literal, Union + +from pydantic import BaseModel, ConfigDict, EmailStr, Field, field_validator + +from pyicloud.utils import underscore_to_camelcase + + +# ─── Base and Shared Config ────────────────────────────────────────────────── +class ConfigModel(BaseModel): + """Base class providing camel-case aliases, population by name, and allowing extra fields.""" + + model_config = ConfigDict( + alias_generator=underscore_to_camelcase, + populate_by_name=True, + extra="allow", + json_encoders={ + datetime: lambda dt: int(dt.replace(tzinfo=timezone.utc).timestamp()) + }, + ) + + +# Example constants (anonymized) +EXAMPLE_FORWARD_EMAIL = "user@example.com" +EXAMPLE_ALIAS_ON_DEMAND = "alias-example-1a@icloud.com" +EXAMPLE_ALIAS_IN_APP = "alias-inapp-2b@icloud.com" +EXAMPLE_ALIAS_GENERATED = "alias-generated-3c@icloud.com" +EXAMPLE_ALIAS_RESERVE = "alias-reserve-4d@icloud.com" +EXAMPLE_LABEL = "Project Signup" +EXAMPLE_RESERVED_LABEL = "Reserved Label" + + +# ─── Shared building-blocks ───────────────────────────────────────────────── +class HideMyEmailByIdRequest(ConfigModel): + """Request payload for single-alias operations by anonymousId.""" + + anonymous_id: str = Field(..., alias="anonymousId") + """Anonymous ID of the alias.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={"example": {"anonymousId": "abc123anonymous"}} + ) + + +class MessageResult(ConfigModel): + """Generic result payload containing only a `message` field.""" + + message: str = Field(...) 
+ """Result message, e.g., 'success'.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={"example": {"message": "success"}} + ) + + +# ─── Alias models ─────────────────────────────────────────────────────────── +class HideMyEmailBase(ConfigModel): + """Common fields for Hide My Email entries.""" + + origin: Literal["ON_DEMAND", "IN_APP"] + """The origin of the alias, either "ON_DEMAND" or "IN_APP".""" + + anonymous_id: str = Field(..., alias="anonymousId") + """Anonymous ID of the alias.""" + + domain: str + """The domain associated with the alias.""" + + forward_to_email: EmailStr = Field(..., alias="forwardToEmail") + """The email address to which emails are forwarded.""" + + hme: str + """The Hide My Email address.""" + + label: str + """The label for the alias.""" + + note: str + """The note for the alias.""" + + create_timestamp: datetime = Field(..., alias="createTimestamp") + """Creation timestamp as a datetime object.""" + + is_active: bool = Field(..., alias="isActive") + """Whether the alias is active.""" + + recipient_mail_id: str = Field(..., alias="recipientMailId") + """The recipient mail ID.""" + + @field_validator("create_timestamp", mode="before") + @classmethod + def _parse_create_timestamp(cls, v): + # API returns milliseconds since epoch + if isinstance(v, int): + return datetime.fromtimestamp(v / 1000, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "forwardToEmail": EXAMPLE_FORWARD_EMAIL, + "hme": EXAMPLE_ALIAS_ON_DEMAND, + "label": EXAMPLE_LABEL, + "note": "", + "createTimestamp": 1700000000000, + "isActive": True, + "recipientMailId": "", + } + } + ) + + +class HideMyEmailOnDemand(HideMyEmailBase): + """Alias created on demand via iCloud settings.""" + + origin: Literal["ON_DEMAND"] + """The origin of the alias, always "ON_DEMAND".""" + + model_config = 
ConfigModel.model_config + + +class HideMyEmailInApp(HideMyEmailBase): + """Alias created within a third-party app supporting Hide My Email.""" + + origin: Literal["IN_APP"] + """The origin of the alias, always "IN_APP".""" + + origin_app_name: str = Field(..., alias="originAppName") + """The name of the originating app.""" + + app_bundle_id: str = Field(..., alias="appBundleId") + """The bundle ID of the originating app.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "origin": "IN_APP", + "anonymousId": "uvw111anon", + "domain": "com.example.app", + "forwardToEmail": EXAMPLE_FORWARD_EMAIL, + "hme": EXAMPLE_ALIAS_IN_APP, + "label": "App Feature", + "note": "Generated by App", + "createTimestamp": 1700000001234, + "isActive": True, + "recipientMailId": "", + "originAppName": "ExampleApp", + "appBundleId": "com.example.app", + } + } + ) + + +HideMyEmail = Annotated[ + Union[HideMyEmailOnDemand, HideMyEmailInApp], + Field(discriminator="origin"), +] + + +# ─── List endpoint ────────────────────────────────────────────────────────── +class HideMyEmailListResult(ConfigModel): + """Container for the result of a Hide My Email list operation.""" + + forward_to_emails: list[EmailStr] = Field( + default_factory=list, alias="forwardToEmails" + ) + """List of email addresses to which emails are forwarded.""" + + hme_emails: list[HideMyEmail] = Field(default_factory=list, alias="hmeEmails") + """List of Hide My Email aliases.""" + + selected_forward_to: EmailStr = Field(..., alias="selectedForwardTo") + """The currently selected forward-to email address.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "forwardToEmails": [EXAMPLE_FORWARD_EMAIL], + "hmeEmails": [ + { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "forwardToEmail": EXAMPLE_FORWARD_EMAIL, + "hme": EXAMPLE_ALIAS_ON_DEMAND, + "label": EXAMPLE_LABEL, + "note": "", + "createTimestamp": 
1700000000000, + "isActive": True, + "recipientMailId": "", + } + ], + "selectedForwardTo": EXAMPLE_FORWARD_EMAIL, + } + } + ) + + +class HideMyEmailListResponse(ConfigModel): + """Full response model for the Hide My Email 'list' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: HideMyEmailListResult + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000000, + "result": { + "forwardToEmails": [EXAMPLE_FORWARD_EMAIL], + "hmeEmails": [ + { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "forwardToEmail": EXAMPLE_FORWARD_EMAIL, + "hme": EXAMPLE_ALIAS_ON_DEMAND, + "label": EXAMPLE_LABEL, + "note": "", + "createTimestamp": 1700000000000, + "isActive": True, + "recipientMailId": "", + } + ], + "selectedForwardTo": EXAMPLE_FORWARD_EMAIL, + }, + } + } + ) + + +# ─── Generate endpoint ───────────────────────────────────────────────────── +class HideMyEmailGenerateRequest(ConfigModel): + """Request payload for generating a new Hide My Email address.""" + + lang_code: str = Field(..., alias="langCode") + """Language code for the request.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={"example": {"langCode": "en-us"}} + ) + + +class HideMyEmailGenerateResult(ConfigModel): + """Result payload containing the newly generated Hide My Email address.""" + + hme: str + """The newly generated Hide My Email address.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={"example": {"hme": EXAMPLE_ALIAS_GENERATED}} + ) + + +class HideMyEmailGenerateResponse(ConfigModel): + """Full response model for 
the Hide My Email 'generate' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: HideMyEmailGenerateResult = Field(...) + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000100, + "result": {"hme": EXAMPLE_ALIAS_GENERATED}, + } + } + ) + + +# ─── Reserve endpoint ──────────────────────────────────────────────────────── + + +class HideMyEmailReserveRequest(ConfigModel): + """Request payload for reserving an existing Hide My Email alias.""" + + hme: str + """The Hide My Email address to reserve.""" + + label: str + """The label for the reserved alias.""" + + note: str + """The note for the reserved alias.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "hme": EXAMPLE_ALIAS_RESERVE, + "label": EXAMPLE_RESERVED_LABEL, + "note": "", + } + } + ) + + +class HideMyEmailReserveOnly(ConfigModel): + """Slim alias model for the 'reserve' operation (no forwardToEmail).""" + + origin: Literal["ON_DEMAND"] + """The origin of the alias, always "ON_DEMAND".""" + + anonymous_id: str = Field(..., alias="anonymousId") + """Anonymous ID of the alias.""" + + domain: str + """The domain associated with the alias.""" + + hme: str + """The Hide My Email address.""" + + label: str + """The label for the alias.""" + + note: str + """The note for the alias.""" + + create_timestamp: datetime = Field(..., alias="createTimestamp") + """Creation timestamp as a datetime object.""" + + is_active: bool = Field(..., alias="isActive") + """Whether the alias is active.""" + + recipient_mail_id: str = Field(..., alias="recipientMailId") + """The recipient 
mail ID.""" + + @field_validator("create_timestamp", mode="before") + @classmethod + def _parse_create_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v / 1000, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "hme": { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "hme": EXAMPLE_ALIAS_RESERVE, + "label": EXAMPLE_RESERVED_LABEL, + "note": "", + "createTimestamp": 1700000200, + "isActive": True, + "recipientMailId": "", + } + } + } + ) + + +class HideMyEmailReserveResponse(ConfigModel): + """Full response model for the Hide My Email 'reserve' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: HideMyEmailReserveOnly = Field(...) + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000200, + "result": { + "hme": { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "hme": EXAMPLE_ALIAS_RESERVE, + "label": EXAMPLE_RESERVED_LABEL, + "note": "", + "createTimestamp": 1700000200, + "isActive": True, + "recipientMailId": "", + } + }, + } + } + ) + + +# ─── Get endpoint ────────────────────────────────────────────────────────── + + +class HideMyEmailGetRequest(HideMyEmailByIdRequest): + """Request model for the Hide My Email 'get' API operation.""" + + pass + + +class HideMyEmailGetResponse(ConfigModel): + """Response model for the Hide My Email 'get' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: HideMyEmailBase = 
Field(...) + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000300, + "result": { + "origin": "ON_DEMAND", + "anonymousId": "xyz000anon", + "domain": "", + "forwardToEmail": EXAMPLE_FORWARD_EMAIL, + "hme": EXAMPLE_ALIAS_ON_DEMAND, + "label": EXAMPLE_LABEL, + "note": "", + "createTimestamp": 1700000000000, + "isActive": True, + "recipientMailId": "", + }, + } + } + ) + + +# ─── Update endpoint ──────────────────────────────────────────────────────── + + +class HideMyEmailUpdateRequest(HideMyEmailByIdRequest): + """Request model for the Hide My Email 'update' API operation.""" + + label: str + """The new label for the alias.""" + + note: str + """The new note for the alias.""" + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "anonymousId": "abc123anonymous", + "label": EXAMPLE_LABEL, + "note": "Updated note", + } + } + ) + + +class HideMyEmailUpdateResponse(ConfigModel): + """Response model for the Hide My Email 'update' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: MessageResult = Field(...) 
+ """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000400, + "result": {"message": "success"}, + } + } + ) + + +# ─── Delete endpoint ───────────────────────────────────────────────────────── + + +class HideMyEmailDeleteRequest(HideMyEmailByIdRequest): + """Request model for the Hide My Email 'delete' API operation.""" + + pass + + +class HideMyEmailDeleteResponse(ConfigModel): + """Response model for the Hide My Email 'delete' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: MessageResult = Field(...) + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000500, + "result": {"message": "success"}, + } + } + ) + + +# ─── Deactivate endpoint ───────────────────────────────────────────────────── + + +class HideMyEmailDeactivateRequest(HideMyEmailByIdRequest): + """Request model for the Hide My Email 'deactivate' API operation.""" + + pass + + +class HideMyEmailDeactivateResponse(ConfigModel): + """Response model for the Hide My Email 'deactivate' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: MessageResult = Field(...) 
+ """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000600, + "result": {"message": "success"}, + } + } + ) + + +# ─── Reactivate endpoint ──────────────────────────────────────────────────── + + +class HideMyEmailReactivateRequest(HideMyEmailByIdRequest): + """Request model for the Hide My Email 'reactivate' API operation.""" + + pass + + +class HideMyEmailReactivateResponse(ConfigModel): + """Response model for the Hide My Email 'reactivate' API operation.""" + + success: bool + """Whether the API call was successful.""" + + timestamp: datetime + """Server timestamp as datetime object.""" + + result: MessageResult = Field(...) + """The result payload.""" + + @field_validator("timestamp", mode="before") + @classmethod + def _parse_timestamp(cls, v): + if isinstance(v, int): + return datetime.fromtimestamp(v, tz=timezone.utc) + return v + + model_config = ConfigModel.model_config | ConfigDict( + json_schema_extra={ + "example": { + "success": True, + "timestamp": 1700000700, + "result": {"message": "success"}, + } + } + ) + + +# End of Selection diff --git a/pyicloud/models/services/hidemyemail/hme_list_test.py b/pyicloud/models/services/hidemyemail/hme_list_test.py new file mode 100644 index 00000000..2bc679ec --- /dev/null +++ b/pyicloud/models/services/hidemyemail/hme_list_test.py @@ -0,0 +1,52 @@ +""" +Demo script to load a JSON file containing a Hide My Email "list" endpoint response +and demonstrate datetime parsing for timestamps and create_timestamps. 
+ +Usage: + python load_list_response_demo.py path/to/list_response.json +""" + +import argparse +import json + +from rich import pretty +from rich.console import Console +from rich.traceback import install + +# Import the Pydantic model (ensure your models are on PYTHONPATH or adjust import) +from pyicloud.models.services.hidemyemail.hidemyemail_models import ( + HideMyEmailListResponse, +) + +install(show_locals=True) +pretty.install() + +console = Console() + + +def main(): + """ + Demo script to load a JSON file containing a Hide My Email "list" endpoint response + """ + parser = argparse.ArgumentParser( + description="Load and validate a Hide My Email list response, printing datetime fields." + ) + parser.add_argument( + "json_path", + help="Path to the JSON file with the 'list' endpoint response", + ) + args = parser.parse_args() + + # Load raw JSON + with open(args.json_path, "r", encoding="utf-8") as f: + data = json.load(f) + + # Validate and parse into Pydantic model + response = HideMyEmailListResponse.model_validate(data) + + console.rule("Response") + console.print(response) + + +if __name__ == "__main__": + main() diff --git a/pyicloud/services/__init__.py b/pyicloud/services/__init__.py index 7befeb93..7518be86 100644 --- a/pyicloud/services/__init__.py +++ b/pyicloud/services/__init__.py @@ -6,6 +6,7 @@ from pyicloud.services.drive import DriveService from pyicloud.services.findmyiphone import AppleDevice, FindMyiPhoneServiceManager from pyicloud.services.hidemyemail import HideMyEmailService +from pyicloud.services.notes import NotesService from pyicloud.services.photos import PhotosService from pyicloud.services.reminders import RemindersService from pyicloud.services.ubiquity import UbiquityService @@ -18,6 +19,7 @@ "DriveService", "FindMyiPhoneServiceManager", "HideMyEmailService", + "NotesService", "PhotosService", "RemindersService", "UbiquityService", diff --git a/pyicloud/services/notes/__init__.py b/pyicloud/services/notes/__init__.py new 
# ------------------------------- Errors --------------------------------------


class NotesError(Exception):
    """Root of the Notes transport error hierarchy."""


class NotesAuthError(NotesError):
    """Raised for authentication problems (HTTP 401/403, PCS/cookie issues)."""


class NotesRateLimited(NotesError):
    """Raised on HTTP 429; carries the server's Retry-After hint when present."""

    def __init__(self, message: str, retry_after: Optional[float] = None):
        # Stash the hint first, then delegate the message to Exception.
        self.retry_after = retry_after
        super().__init__(message)
class NotesApiError(NotesError):
    """Catch-all API error.

    `payload` carries the captured server response body (JSON or text),
    when available, to aid debugging.
    """

    def __init__(self, message: str, payload: Optional[object] = None):
        super().__init__(message)
        self.payload = payload


# ------------------------------- Transport -----------------------------------


class _CloudKitClient:
    """
    Minimal HTTP transport:
    - JSON requests via `json=payload`
    - Lowercase boolean query params
    - Bounded debug dumps (PYICLOUD_DEBUG_MAX_BYTES)
    """

    def __init__(
        self,
        base_url: str,
        session,
        base_params: Dict[str, object],
        *,
        timeout: tuple[float, float] = DEFAULT_TIMEOUT,
    ):
        # `session` is assumed to be a requests-like authenticated session
        # (provides .post/.get) — TODO confirm against callers.
        self._base_url = base_url.rstrip("/")
        self._session = session
        self._params = self._normalize_params(base_params or {})
        self._timeout = timeout
        LOGGER.debug("Initialized _CloudKitClient with base_url: %s", self._base_url)

    @staticmethod
    def _normalize_params(params: Dict[str, object]) -> Dict[str, str]:
        # Booleans are special-cased because str(True) would yield "True",
        # while the query string needs lowercase "true"/"false".
        out: Dict[str, str] = {}
        for k, v in params.items():
            if isinstance(v, bool):
                out[k] = "true" if v else "false"
            else:
                out[k] = str(v)
        return out

    def _build_url(self, path: str) -> str:
        # Full request URL: base + path + encoded base params (if any).
        from urllib.parse import urlencode

        q = urlencode(self._params)
        return f"{self._base_url}{path}" + (f"?{q}" if q else "")

    @staticmethod
    def _redact_url(url: str) -> str:
        # Drop query string and fragment before logging — they can carry
        # token-like parameters.
        parts = urlsplit(url)
        return urlunsplit((parts.scheme, parts.netloc, parts.path, "", ""))

    def post(self, path: str, payload: Dict) -> Dict:
        """POST `payload` as JSON to `path` and return the decoded JSON body.

        Raises:
            NotesAuthError: on HTTP 401/403.
            NotesRateLimited: on HTTP 429 (with Retry-After when parseable).
            NotesApiError: on any other HTTP >= 400 or a non-JSON body.
        """
        url = self._build_url(path)
        redacted_url = self._redact_url(url)
        LOGGER.info("POST to %s", redacted_url)
        resp = self._session.post(url, json=payload, timeout=self._timeout)
        code = getattr(resp, "status_code", 0)
        LOGGER.debug("POST to %s returned status %d", redacted_url, code)
        if code >= 400:
            self._dump_http_debug(path.strip("/"), url, payload, resp)
            if code in (401, 403):
                LOGGER.error(
                    "POST to %s failed with auth error: %d", redacted_url, code
                )
                raise NotesAuthError(f"HTTP {code}: unauthorized")
            if code == 429:
                retry_after = None
                try:
                    hdr = resp.headers.get("Retry-After")
                    if hdr:
                        retry_after = float(hdr)
                except Exception:
                    # Retry-After may be absent or an HTTP date; any parse
                    # failure means "no hint".
                    retry_after = None
                LOGGER.warning(
                    "POST to %s was rate-limited. Retry after: %s",
                    redacted_url,
                    retry_after,
                )
                raise NotesRateLimited(
                    "HTTP 429: rate limited", retry_after=retry_after
                )
            # Try to include server json error if possible
            try:
                body = resp.json()
            except Exception:
                body = getattr(resp, "text", None)
            LOGGER.error("POST to %s failed with code %d", redacted_url, code)
            raise NotesApiError(f"HTTP {code}", payload=body)
        try:
            json_response = resp.json()
            LOGGER.debug("Successfully parsed JSON response from %s", redacted_url)
            return json_response
        except Exception:
            self._dump_http_debug(path.strip("/"), url, payload, resp)
            LOGGER.error("Failed to parse JSON response from %s", redacted_url)
            raise NotesApiError(
                "Invalid JSON response", payload=getattr(resp, "text", None)
            )

    @staticmethod
    def _dump_http_debug(op: str, url: str, payload: Dict, resp) -> None:
        """Best-effort dump of a failed request/response pair to disk.

        Active only when PYICLOUD_NOTES_DEBUG is set.  Every filesystem
        error is deliberately swallowed so debugging can never break a
        request in flight.
        """
        if not os.getenv("PYICLOUD_NOTES_DEBUG"):
            return
        ts = __import__("time").strftime("%Y%m%d-%H%M%S")
        out_dir = os.path.join("workspace", "notes_debug")
        try:
            os.makedirs(out_dir, exist_ok=True)
        except Exception:
            return
        # Request
        req_path = os.path.join(out_dir, f"{ts}_{op}_http_request.json")
        try:
            with open(req_path, "w", encoding="utf-8") as f:
                json.dump(
                    {"url": url, "payload": payload}, f, ensure_ascii=False, indent=2
                )
        except Exception:
            pass
        # Response
        res_path = os.path.join(out_dir, f"{ts}_{op}_http_response.txt")
        try:
            status = getattr(resp, "status_code", None)
            headers = getattr(resp, "headers", {})
            body_text = None
            try:
                body_text = resp.text
            except Exception:
                body_text = None
            with open(res_path, "w", encoding="utf-8") as f:
                f.write(f"status={status}\nurl={url}\nheaders={dict(headers)}\n\n")
                if body_text:
                    # Bound the dump so huge bodies cannot fill the disk.
                    max_bytes = int(os.getenv("PYICLOUD_DEBUG_MAX_BYTES", "524288"))
                    if len(body_text) > max_bytes:
                        f.write(body_text[:max_bytes] + "\n[truncated]\n")
                    else:
                        f.write(body_text)
        except Exception:
            pass

    # Simple helpers for assets (streaming GET)
    def get_stream(self, url: str, chunk_size: int = 65536) -> Iterator[bytes]:
        """Stream a GET response body in `chunk_size` pieces.

        Raises NotesApiError on HTTP >= 400; empty chunks are skipped.
        """
        redacted_url = self._redact_url(url)
        LOGGER.info("GET stream from %s", redacted_url)
        resp = self._session.get(url, stream=True, timeout=self._timeout)
        code = getattr(resp, "status_code", 0)
        if code >= 400:
            self._dump_http_debug("asset_get", url, {}, resp)
            LOGGER.error("GET stream from %s failed with code %d", redacted_url, code)
            raise NotesApiError(
                f"HTTP {code} on asset GET", payload=getattr(resp, "text", None)
            )
        for chunk in resp.iter_content(chunk_size=chunk_size):
            if chunk:
                yield chunk
class CloudKitNotesClient:
    """
    Raw CloudKit service for the Notes container.

    Methods map 1:1 to CloudKit endpoints:
    - /records/query
    - /records/lookup
    - /changes/zone
    """

    def __init__(
        self,
        base_url: str,
        session,
        base_params: Dict[str, object],
        *,
        validation_extra: CloudKitExtraMode | None = None,
        timeout: tuple[float, float] = DEFAULT_TIMEOUT,
    ):
        # HTTP mechanics (auth errors, rate limiting, debug dumps) live in
        # the private transport; this class shapes payloads and validates
        # responses into typed models.
        self._http = _CloudKitClient(
            base_url,
            session,
            base_params,
            timeout=timeout,
        )
        self._validation_extra = validation_extra
        LOGGER.info("CloudKitNotesClient initialized.")

    def _validate_response(
        self, model_cls: type[_ResponseModelT], data: Dict
    ) -> _ResponseModelT:
        # Single choke point so the extra-fields policy is applied
        # uniformly across all endpoints.
        return model_cls.model_validate(
            data,
            extra=resolve_cloudkit_validation_extra(self._validation_extra),
        )

    # ----- Query -----

    def query(
        self,
        *,
        query: CKQueryObject,
        zone_id: CKZoneIDReq,
        desired_keys: Optional[List[str]] = None,
        results_limit: Optional[int] = None,
        continuation: Optional[str] = None,
    ) -> CKQueryResponse:
        """POST /records/query and return the validated response.

        Raises NotesApiError when the response fails model validation.
        """
        LOGGER.info("Executing query for recordType: %s", query.recordType)
        payload = CKQueryRequest(
            query=query,
            zoneID=zone_id,
            desiredKeys=desired_keys,
            resultsLimit=results_limit,
            continuationMarker=continuation,
        ).model_dump(exclude_none=True)
        data = self._http.post("/records/query", payload)
        try:
            resp = self._validate_response(CKQueryResponse, data)
            LOGGER.info("Query returned %d records.", len(resp.records))
            return resp
        except ValidationError as e:
            self._log_validation("records.query", data, e)
            LOGGER.error("Query response validation failed.")
            raise NotesApiError("Query response validation failed", payload=data) from e

    # ----- Lookup -----

    def lookup(
        self,
        record_names: Iterable[str],
        *,
        desired_keys: Optional[List[str]] = None,
    ) -> CKLookupResponse:
        """POST /records/lookup for the given record names (zone 'Notes')."""
        record_names_list = list(record_names)
        LOGGER.info("Executing lookup for %d records.", len(record_names_list))
        req = CKLookupRequest(
            records=[
                CKLookupDescriptor(recordName=str(rn)) for rn in record_names_list
            ],
            zoneID=CKZoneIDReq(zoneName="Notes"),
            desiredKeys=desired_keys,
        )
        payload = req.model_dump(exclude_none=True)
        data = self._http.post("/records/lookup", payload)
        try:
            resp = self._validate_response(CKLookupResponse, data)
            LOGGER.info("Lookup returned %d records.", len(resp.records))
            return resp
        except ValidationError as e:
            self._log_validation("records.lookup", data, e)
            LOGGER.error("Lookup response validation failed.")
            raise NotesApiError(
                "Lookup response validation failed", payload=data
            ) from e

    # ----- Changes (paged generator) -----

    def changes(
        self,
        *,
        zone_req: CKZoneChangesZoneReq,
    ) -> Iterator[CKZoneChangesZone]:
        """Yield one CKZoneChangesZone per page until moreComing is false.

        The request's sync token is advanced in place between pages, so the
        caller only needs to consume the generator.
        """
        req = CKZoneChangesRequest(zones=[zone_req])
        LOGGER.info("Start fetching changes for zone: %s", zone_req.zoneID.zoneName)
        page_num = 1
        while True:
            payload = req.model_dump(exclude_none=True)
            LOGGER.debug("Fetching changes page %d", page_num)
            data = self._http.post("/changes/zone", payload)
            try:
                envelope = self._validate_response(CKZoneChangesResponse, data)
            except ValidationError as e:
                self._log_validation("changes.zone", data, e)
                LOGGER.error("Changes response validation failed.")
                raise NotesApiError(
                    "Changes response validation failed", payload=data
                ) from e
            zone = envelope.zones[0] if envelope.zones else None
            if not zone:
                LOGGER.info("No more changes available.")
                return

            LOGGER.info(
                "Changes page %d returned %d records.", page_num, len(zone.records)
            )
            yield zone

            if not zone.moreComing:
                LOGGER.info("All changes fetched.")
                return
            # advance sync token
            LOGGER.debug("More changes to come, advancing sync token.")
            req.zones[0].syncToken = zone.syncToken
            page_num += 1

    # ----- Asset helpers -----

    def download_asset_stream(
        self,
        url: str,
        *,
        chunk_size: int = 65536,
    ) -> Iterator[bytes]:
        """Stream an asset download in `chunk_size` byte chunks."""
        yield from self._http.get_stream(url, chunk_size=chunk_size)

    def download_asset_to(self, url: str, directory: str) -> str:
        """Download an asset into `directory` under a random file name.

        Returns the path of the written file.
        """
        import os
        import uuid

        LOGGER.info(
            "Downloading asset from %s to directory %s",
            self._http._redact_url(url),
            directory,
        )
        os.makedirs(directory, exist_ok=True)
        fname = f"icloud-asset-{uuid.uuid4().hex}"
        path = os.path.join(directory, fname)
        with open(path, "wb") as f:
            for chunk in self.download_asset_stream(url):
                f.write(chunk)
        LOGGER.info("Finished downloading asset to %s", path)
        return path

    # ----- Sync token convenience -----

    def current_sync_token(self, *, zone_name: str) -> str:
        """
        Fetch a current sync token cheaply by issuing a zero-limit query that
        requests getCurrentSyncToken=true (already in params) and reading the top-level token.

        Some deployments place the token in CKQueryResponse.syncToken; if absent,
        we fall back to a one-shot changes call (no records) to harvest a token.
        """
        LOGGER.info("Fetching current sync token for zone: %s", zone_name)
        # Approach 1: /records/query on SearchIndexes with limit 1
        LOGGER.debug("Attempting to get sync token via cheap query.")
        q = CKQueryObject(
            recordType="SearchIndexes",
            filterBy=[
                CKQueryFilterBy(
                    comparator="EQUALS",
                    fieldName="indexName",
                    fieldValue=CKFVString(type="STRING", value="recents"),
                )
            ],
        )
        payload = CKQueryRequest(
            query=q,
            zoneID=CKZoneIDReq(zoneName=zone_name),
            resultsLimit=1,
        ).model_dump(exclude_none=True)
        try:
            data = self._http.post("/records/query", payload)
            resp = self._validate_response(CKQueryResponse, data)
            if getattr(resp, "syncToken", None):
                LOGGER.info("Successfully obtained sync token via query.")
                return str(resp.syncToken)
        except Exception as e:
            LOGGER.warning(
                "Failed to get sync token via query, falling back. Error: %s", e
            )
            # ignore and fall back
            pass

        # Approach 2: one empty /changes/zone call to get initial token
        LOGGER.debug("Falling back to get sync token via changes call.")
        req = CKZoneChangesRequest(
            zones=[
                CKZoneChangesZoneReq(
                    zoneID={"zoneName": zone_name, "zoneType": "REGULAR_CUSTOM_ZONE"},  # type: ignore[dict-item]
                    desiredRecordTypes=[],
                    desiredKeys=[],
                    reverse=False,
                )
            ]
        )
        data = self._http.post("/changes/zone", req.model_dump(exclude_none=True))
        env = self._validate_response(CKZoneChangesResponse, data)
        z = env.zones[0] if env.zones else None
        if z and getattr(z, "syncToken", None):
            LOGGER.info("Successfully obtained sync token via changes call.")
            return str(z.syncToken)

        LOGGER.error("Failed to obtain sync token for zone: %s", zone_name)
        raise NotesApiError("Unable to obtain sync token")

    # ----- Debug -----

    @staticmethod
    def _log_validation(op: str, data: Dict, err: ValidationError) -> None:
        # Dump validation failures to disk when PYICLOUD_NOTES_DEBUG is set;
        # filesystem errors are swallowed on purpose (best-effort debugging).
        if not os.getenv("PYICLOUD_NOTES_DEBUG"):
            return
        ts = __import__("time").strftime("%Y%m%d-%H%M%S")
        out_dir = os.path.join("workspace", "notes_debug")
        try:
            os.makedirs(out_dir, exist_ok=True)
        except Exception:
            return
        path = os.path.join(out_dir, f"{ts}_{op}_validation.json")
        try:
            with open(path, "w", encoding="utf-8") as f:
                json.dump(
                    {"op": op, "errors": err.errors(), "data": data},
                    f,
                    ensure_ascii=False,
                    indent=2,
                )
        except Exception:
            pass
bytearray]]) -> Optional[bytes]: + """Accepts base64 string OR raw bytes and returns raw bytes.""" + if val is None: + return None + if isinstance(val, (bytes, bytearray)): + LOGGER.debug("notes.decoder.input_bytes len=%d", len(val)) + return bytes(val) + if isinstance(val, str): + try: + out = base64.b64decode(val, validate=True) + LOGGER.debug( + "notes.decoder.input_b64 len=%d -> bytes=%d", len(val), len(out) + ) + return out + except binascii.Error: + # Not valid base64; treat as plain text and encode best-effort + LOGGER.debug("notes.decoder.input_str_nonb64 len=%d", len(val)) + return val.encode("utf-8", errors="replace") + + +def _decompress(blob: bytes) -> bytes: + if len(blob) >= 2 and blob[0] == 0x1F and blob[1] == 0x8B: + return gzip.decompress(blob) + try: + return zlib.decompress(blob) + except zlib.error: + return zlib.decompress(blob, -zlib.MAX_WBITS) + + +class BodyDecoder: + """Decode TextDataEncrypted (base64, compressed) to NoteBody.""" + + def decode( + self, text_data_encrypted_b64: Optional[Union[str, bytes, bytearray]] + ) -> Optional[NoteBody]: + if text_data_encrypted_b64 is None: + return None + raw = _b64_to_bytes(text_data_encrypted_b64) + if not raw: + return None + try: + doc = _decompress(raw) + except Exception as e: + LOGGER.debug("notes.decoder.decompress_fail %s", e) + return None + + try: + msg = notes_pb2.NoteStoreProto() + msg.ParseFromString(doc) + note = getattr(getattr(msg, "document", None), "note", None) + text = getattr(note, "note_text", None) if note else None + + ids: List[AttachmentId] = [] + if note: + seen = set() + for run in getattr(note, "attribute_run", []): + ai = getattr(run, "attachment_info", None) + if ai and ( + getattr(ai, "attachment_identifier", None) + or getattr(ai, "type_uti", None) + ): + ident = getattr(ai, "attachment_identifier", "") or "" + type_uti = getattr(ai, "type_uti", None) or None + key = (ident, type_uti) + if key in seen: + continue + seen.add(key) + 
class AttachmentId(FrozenServiceModel):
    """Identity of a note attachment as referenced from the note body."""

    # Attachment identifier taken from the protobuf attribute run.
    identifier: str
    # Uniform Type Identifier of the attachment, when present on the wire.
    type_uti: Optional[str] = None


class NoteBody(FrozenServiceModel):
    """Decoded note body: raw document bytes plus extracted metadata."""

    # Decompressed protobuf document bytes.
    # NOTE(review): the field name shadows the `bytes` builtin; renaming
    # would break the public model interface, so it is kept as-is.
    bytes: bytes
    # Plain-text content extracted from the document, if any.
    text: Optional[str] = None
    # Attachment references discovered in the note's attribute runs.
    attachment_ids: List[AttachmentId] = Field(default_factory=list)
+ + PYICLOUD_NOTES_EXTRA: allow|forbid|ignore + (fallback) PYICLOUD_EXTRA: allow|forbid|ignore + Convenience booleans: "true/1/on" -> forbid (strict), "false/0/off" -> allow + """ + raw = ( + (os.getenv("PYICLOUD_NOTES_EXTRA") or os.getenv("PYICLOUD_EXTRA") or default) + .strip() + .lower() + ) + + if raw in {"allow", "forbid", "ignore"}: + return raw + + # convenience switches people naturally try + if raw in {"1", "true", "yes", "on", "strict"}: + return "forbid" + if raw in {"0", "false", "no", "off", "lenient"}: + return "allow" + + return default # fall back to strict during development + + +_EXTRA = _env_extra_mode() + + +class CKModel(BaseModel): + """ + Project-wide base model. + + Default is extra='forbid' (strict) for development; switch at runtime by + setting an env var before import: + export PYICLOUD_NOTES_EXTRA=allow # or forbid/ignore + """ + + model_config = ConfigDict( + extra=_EXTRA, # 'forbid' | 'allow' | 'ignore' + arbitrary_types_allowed=True, # keep whatever you already relied upon + ) + + +# Public API of this module +__all__ = ["CKModel", "_env_extra_mode"] diff --git a/pyicloud/services/notes/models/cloudkit.py b/pyicloud/services/notes/models/cloudkit.py new file mode 100644 index 00000000..283864b2 --- /dev/null +++ b/pyicloud/services/notes/models/cloudkit.py @@ -0,0 +1,810 @@ +""" +CloudKit “wire” models for /records/query requests & responses (Notes container). +- Response models (records) + refined request models (query payloads). 
+""" + +from __future__ import annotations + +from datetime import datetime, timezone +from enum import Enum +from typing import Annotated, Dict, List, Literal, Optional, Union + +from pydantic import ( + Base64Bytes, + BeforeValidator, + Field, + JsonValue, + PlainSerializer, + RootModel, + TypeAdapter, + WithJsonSchema, + field_validator, + model_validator, +) + +from ._ck_base import CKModel + +# --------------------------------------------------------------------------- +# Shared helpers +# --------------------------------------------------------------------------- + +# Year-zero/ancient timestamp handling: normalize to None for Optional[datetime] fields. +# Python datetime supports years 1..9999 (no year 0). Some APIs use ancient ms values +# as "not set" sentinels; we treat those as None during parsing. +CANONICAL_MIN_MS = -62135596800000 # 0001-01-01T00:00:00Z +SENTINEL_ZERO_MS: set[int] = { + CANONICAL_MIN_MS, + -62135769600000, # observed in captures (approx 2 days earlier) +} + + +def _from_millis_or_none(v): + # Accept int/float or digit-only str; be strict about being milliseconds. + if isinstance(v, (int, float)): + iv = int(v) + elif isinstance(v, str) and v.isdigit(): + iv = int(v) + elif isinstance(v, str) and v.startswith("0001-01-01"): + # ISO-like sentinel for year 1 -> treat as None + return None + else: + raise TypeError("Expected milliseconds since epoch as int or digit string") + # Coerce sentinels and anything older than canonical MIN to None + if iv in SENTINEL_ZERO_MS or iv <= CANONICAL_MIN_MS: + return None + return datetime.fromtimestamp(iv / 1000.0, tz=timezone.utc) + + +def _to_millis(dt: datetime) -> int: + if dt.tzinfo is None: + # If you prefer, raise instead of coercing. 
+ dt = dt.replace(tzinfo=timezone.utc) + return int(dt.timestamp() * 1000) + + +MillisDateTime = Annotated[ + datetime, + BeforeValidator(_from_millis_or_none), + PlainSerializer(_to_millis, return_type=int, when_used="json"), + WithJsonSchema({"type": "integer", "description": "milliseconds since Unix epoch"}), +] + +# Nullable variant used for wrappers that can legitimately carry "no timestamp" +MillisDateTimeOrNone = Annotated[ + Optional[datetime], + BeforeValidator(lambda v: None if v is None else _from_millis_or_none(v)), + PlainSerializer( + lambda v: None if v is None else _to_millis(v), + return_type=int, + when_used="json", + ), + WithJsonSchema( + { + "type": ["integer", "null"], + "description": "milliseconds since Unix epoch or null sentinel", + } + ), +] + + +# Some top-level properties (e.g., CKRecord.expirationTime) arrive as seconds-since-epoch +# in this API. Be tolerant and also accept millisecond values if Apple changes shape. +def _from_secs_or_millis(v): + if isinstance(v, (int, float)): + iv = int(v) + elif isinstance(v, str) and v.isdigit(): + iv = int(v) + else: + raise TypeError("Expected seconds or milliseconds since epoch as int/str") + # Heuristic: values < 1e11 are seconds (covers dates up to ~5138 CE) + if abs(iv) < 100_000_000_000: + return datetime.fromtimestamp(iv, tz=timezone.utc) + # Otherwise treat as milliseconds + return datetime.fromtimestamp(iv / 1000.0, tz=timezone.utc) + + +def _to_secs(dt: datetime) -> int: + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return int(dt.timestamp()) + + +SecsOrMillisDateTime = Annotated[ + datetime, + BeforeValidator(_from_secs_or_millis), + PlainSerializer(_to_secs, return_type=int, when_used="json"), + WithJsonSchema({"type": "integer", "description": "seconds since Unix epoch"}), +] + + +# --------------------------------------------------------------------------- +# CloudKit primitives shared by request & response +# 
# ---------------------------------------------------------------------------
# CloudKit primitives shared by request & response
# ---------------------------------------------------------------------------


class CKZoneID(CKModel):
    """CloudKit zone identifier as seen on the wire (e.g. zoneName='Notes')."""

    zoneName: str
    ownerRecordName: Optional[str] = None
    zoneType: Optional[str] = None


class CKAuditInfo(CKModel):
    """
    Appears as `created` / `modified` at the record level (response).
    """

    timestamp: MillisDateTime
    userRecordName: Optional[str] = None
    deviceID: Optional[str] = None


class CKParent(CKModel):
    """Reference to a record's parent record."""

    recordName: str


class CKStableUrl(CKModel):
    """Sharing-URL metadata attached to shared records."""

    routingKey: Optional[str] = None
    shortTokenHash: Optional[str] = None
    protectedFullToken: Optional[str] = None
    encryptedPublicSharingKey: Optional[str] = None
    displayedHostname: Optional[str] = None


class CKChainProtectionInfo(CKModel):
    """End-to-end encryption chain metadata (optional on records)."""

    bytes: Optional[Base64Bytes] = None  # base64 string as seen on wire
    pcsChangeTag: Optional[str] = None


# ---------------------------------------------------------------------------
# Share-surface models (coarse first)
# ---------------------------------------------------------------------------


class CKShare(CKModel):
    """
    Minimal share reference as seen embedded under a record's top-level `share` key.
    Your audit samples only surfaced `recordName` and `zoneID` inside this object.
    Keep this coarse for now; we can expand with shortGUID/shortTokenHash, etc.,
    if they appear nested here in future captures.
    """

    recordName: Optional[str] = None
    zoneID: Optional[CKZoneID] = None


class CKReference(CKModel):
    """
    Value inside REFERENCE / REFERENCE_LIST typed fields (both request & response).
    """

    recordName: str
    action: Optional[str] = None  # e.g., "VALIDATE"
    zoneID: Optional[CKZoneID] = None


# ---------------------------------------------------------------------------
# Response-side: typed field wrappers under record.fields
# ---------------------------------------------------------------------------


class _CKFieldBase(CKModel):
    # Every field wrapper has a 'type' discriminator and a 'value'
    type: str


class CKTimestampField(_CKFieldBase):
    type: Literal["TIMESTAMP"]
    value: (
        MillisDateTimeOrNone  # Apple sometimes sends a "zero" ms sentinel; map to None
    )


class CKInt64Field(_CKFieldBase):
    type: Literal["INT64"]
    value: int


class CKEncryptedBytesField(_CKFieldBase):
    type: Literal["ENCRYPTED_BYTES"]
    value: Base64Bytes


class CKReferenceField(_CKFieldBase):
    type: Literal["REFERENCE"]
    value: CKReference


class CKReferenceListField(_CKFieldBase):
    type: Literal["REFERENCE_LIST"]
    value: List[CKReference]


# Occasionally CloudKit also uses STRING-typed wrappers; not present in your
# three responses at the 'fields' level, but kept for completeness.
class CKStringField(_CKFieldBase):
    type: Literal["STRING"]
    value: str
    isEncrypted: Optional[bool] = None  # seen on some STRING wrappers (lookup)


# Asset thumbnails / tokens (e.g. FirstAttachmentThumbnail)
class CKAssetToken(CKModel):
    # Keep as str to preserve exact wire representation.
    fileChecksum: Optional[str] = None
    referenceChecksum: Optional[str] = None
    wrappingKey: Optional[str] = None
    downloadURL: Optional[str] = None
    size: Optional[int] = None


class CKAssetIDField(_CKFieldBase):
    type: Literal["ASSETID"]
    value: CKAssetToken


# Optional but seen in other CK APIs
class CKAssetField(_CKFieldBase):
    type: Literal["ASSET"]
    value: CKAssetToken


class CKDoubleField(_CKFieldBase):
    type: Literal["DOUBLE"]
    value: float


class CKBytesField(_CKFieldBase):
    # Raw bytes seen on wire (e.g., LastViewedTimestamp, CryptoPassphraseVerifier)
    type: Literal["BYTES"]
    value: Base64Bytes


class CKDoubleListField(_CKFieldBase):
    type: Literal["DOUBLE_LIST"]
    value: List[float]


class CKInt64ListField(_CKFieldBase):
    type: Literal["INT64_LIST"]
    value: List[int]


class CKAssetIDListField(_CKFieldBase):
    # e.g., PreviewImages, PaperAssets (most cases)
    type: Literal["ASSETID_LIST"]
    value: List[CKAssetToken]


class CKUnknownListField(_CKFieldBase):
    # extremely rare: seen on PaperAssets as UNKNOWN_LIST in your samples
    type: Literal["UNKNOWN_LIST"]
    value: List[JsonValue]  # keep generic to be future-proof


class CKPassthroughField(_CKFieldBase):
    # Fallback wrapper for 'type' tags not modeled above; preserves payload.
    type: str
    value: JsonValue


# One source of truth for known CloudKit field 'type' tags.
KNOWN_TAGS: frozenset[str] = frozenset(
    {
        "TIMESTAMP",
        "INT64",
        "ENCRYPTED_BYTES",
        "REFERENCE",
        "REFERENCE_LIST",
        "STRING",
        "ASSETID",
        "ASSET",
        "DOUBLE",
        "BYTES",
        "DOUBLE_LIST",
        "INT64_LIST",
        "ASSETID_LIST",
        "UNKNOWN_LIST",
    }
)


# Discriminated union over all known field wrapper types we saw/anticipate.
# Split into (a) a known, literal-tagged union and (b) an open wrapper that
# gracefully falls back to CKPassthroughField for unknown tags.
KnownCKField = Annotated[
    Union[
        CKTimestampField,
        CKInt64Field,
        CKEncryptedBytesField,
        CKReferenceField,
        CKReferenceListField,
        CKStringField,
        CKAssetIDField,
        CKAssetField,
        CKDoubleField,
        CKBytesField,
        CKDoubleListField,
        CKInt64ListField,
        CKAssetIDListField,
        CKUnknownListField,
    ],
    Field(discriminator="type"),
]


# ---------------------------------------------------------------------------
# Record and response
# ---------------------------------------------------------------------------


class CKFieldOpen(RootModel[Union[KnownCKField, CKPassthroughField]]):
    """
    Public API for field wrappers:
    - `.value` (preferred): the decoded inner `value` of the CK wrapper
    - `.type_tag`: the CloudKit `type` string (e.g., "TIMESTAMP", "INT64_LIST")
    - `.unwrap()`: return the inner typed wrapper instance (e.g., `CKTimestampField`)

    Implementation detail: `.root` is internal — avoid relying on it outside this class.
    """

    # v2 root models name the inner value "root"
    root: Union[KnownCKField, CKPassthroughField]

    @property
    def value(self):
        # unified way to read the inner 'value' without touching .root
        return getattr(self.root, "value", None)

    @property
    def type_tag(self) -> Optional[str]:
        # useful when inspecting unknown/passthrough fields
        return getattr(self.root, "type", None)

    def unwrap(self):
        """Return the inner typed wrapper (e.g., CKTimestampField).
        Public escape hatch; prefer `.value` for most use-cases.
        """
        return self.root

    @model_validator(mode="before")
    @classmethod
    def _dispatch_before(cls, obj):
        """
        Ensure nested contexts (e.g., values inside Dict[str, CKFieldOpen]) use the same
        discriminator-based dispatch as our explicit model_validate(...) call.

        IMPORTANT: For RootModel, 'before' must return the *underlying* value, not
        a dict like {'root': ...}; returning a dict breaks the discriminated union
        when this model is nested.
        """
        t = obj.get("type") if isinstance(obj, dict) else None

        # Known wrappers: coerce to the exact discriminated member instance
        if t in KNOWN_TAGS:
            return TypeAdapter(KnownCKField).validate_python(obj)

        # Already an instance of one of our wrapper models -> keep as-is
        # (covers CKEncryptedBytesField, CKTimestampField, CKPassthroughField, etc.)
        if isinstance(obj, _CKFieldBase):
            return obj

        # Explicit but unknown wrapper -> passthrough instance
        if isinstance(obj, dict) and "type" in obj and "value" in obj:
            return CKPassthroughField(**obj)

        # Fallback: wrap whatever came in as passthrough
        return CKPassthroughField(type=str(t) if t else "UNKNOWN", value=obj)


# Insert CKFields class here (after CKFieldOpen, before record/response section)
class CKFields(dict[str, CKFieldOpen]):
    """
    Dict-like container that also allows attribute access, e.g.:
        rec.fields.ModificationDate.value
    Falls back to normal dict behavior for [] access.

    Public surface:
    - Attribute access: `rec.fields.<FieldName>.value`
    - Mapping access: `rec.fields["FieldName"].value`
    - Helpers: `get_field()`, `get_value()`

    Implementation detail: `.root` is **internal**; client code should not use it.
    Use `.unwrap()` if you need the inner typed wrapper for `isinstance` checks.
    """

    def __getattr__(self, name: str) -> CKFieldOpen:
        # Only called for attributes not found normally; map missing keys
        # to AttributeError so getattr()/hasattr() semantics stay sane.
        try:
            return dict.__getitem__(self, name)
        except KeyError as e:
            raise AttributeError(name) from e

    def __dir__(self):
        # Surface the field names for tab-completion alongside dict methods.
        base = set(super().__dir__())
        return sorted(base | set(self.keys()))

    def get_field(self, key: str):
        """Return the inner typed wrapper for `key`, or None if absent."""
        f = self.get(key)
        if f is None:
            return None
        # Use public API; avoid touching `.root` here.
        return f.unwrap() if hasattr(f, "unwrap") else f

    def get_value(self, key: str):
        """Return the decoded `value` for `key`, or None if absent."""
        f = self.get_field(key)
        return None if f is None else getattr(f, "value", None)
+ return f.unwrap() if hasattr(f, "unwrap") else f + + def get_value(self, key: str): + f = self.get_field(key) + return None if f is None else getattr(f, "value", None) + + +# --------------------------------------------------------------------------- +# Record and response +# --------------------------------------------------------------------------- + + +class CKRecordType(str, Enum): + Note = "Note" + Folder = "Folder" + PasswordProtectedNote = "PasswordProtectedNote" + + +class CKRecord(CKModel): + """ + A CloudKit record as returned in /records/query for Notes. + + The 'fields' map contains app-level fields by **PascalCase** names + (e.g., TitleEncrypted, ModificationDate, Deleted, Folder, Folders, ...), + each wrapped in a CKField type above. + """ + + recordName: str + recordType: Union[CKRecordType, str] # allow unknown record types + + # App-level fields (typed wrappers) + fields: CKFields = Field(default_factory=CKFields) + + @field_validator("fields", mode="before") + @classmethod + def _coerce_fields(cls, v): + """ + Ensure the mapping is validated item-by-item to CKFieldOpen + and wrapped in CKFields to enable attribute access DX. + """ + if isinstance(v, CKFields): + return v + if isinstance(v, dict): + adapter = TypeAdapter(CKFieldOpen) + return CKFields({k: adapter.validate_python(val) for k, val in v.items()}) + return v + + @field_validator("fields") + @classmethod + def _validate_encrypted_bytes(cls, v: CKFields) -> CKFields: + """Enforce a strict invariant: any field ending with 'Encrypted' must be + represented as ENCRYPTED_BYTES. This guarantees downstream code can + assume a single decoding path (bytes) for encrypted payloads. + + If a server variant ever sends a different wrapper (e.g., STRING), this + validator will fail fast with a clear error during model validation. 
+ """ + try: + for key, wrapper in v.items(): + if not isinstance(key, str) or not key.endswith("Encrypted"): + continue + # CKFieldOpen unwrap -> typed wrapper (e.g., CKEncryptedBytesField) + inner = wrapper.unwrap() if hasattr(wrapper, "unwrap") else wrapper + tag = getattr(inner, "type", None) + if tag != "ENCRYPTED_BYTES": + # Keep the message explicit to aid debugging if the server flips shape + raise TypeError( + f"Field '{key}' must be ENCRYPTED_BYTES, got {tag!r}" + ) + except Exception as e: + # Re-raise to integrate with Pydantic's error surfacing + raise e + return v + + # Often present, often empty object + pluginFields: Dict[str, JsonValue] = Field(default_factory=dict) + + # Record metadata + recordChangeTag: Optional[str] = None + created: Optional[CKAuditInfo] = None + modified: Optional[CKAuditInfo] = None + deleted: Optional[bool] = None + + zoneID: Optional[CKZoneID] = None + parent: Optional[CKParent] = None + + # Sharing/identity/exposure + displayedHostname: Optional[str] = None + stableUrl: Optional[CKStableUrl] = None + shortGUID: Optional[str] = None + + # Share-surface (top-level, coarse types) + share: Optional[CKShare] = None + publicPermission: Optional[str] = None + participants: Optional[List[Dict[str, JsonValue]]] = None + requesters: Optional[List[Dict[str, JsonValue]]] = None + blocked: Optional[List[Dict[str, JsonValue]]] = None + denyAccessRequests: Optional[bool] = None + owner: Optional[Dict[str, JsonValue]] = None + currentUserParticipant: Optional[Dict[str, JsonValue]] = None + invitedPCS: Optional[Dict[str, JsonValue]] = None + selfAddedPCS: Optional[Dict[str, JsonValue]] = None + shortTokenHash: Optional[str] = None + + # End-to-end encryption metadata (optional) + chainProtectionInfo: Optional[CKChainProtectionInfo] = None + chainParentKey: Optional[str] = None + chainPrivateKey: Optional[str] = None + + # Observed on InlineAttachment records as numeric seconds since epoch + expirationTime: Optional[SecsOrMillisDateTime] = 
None + + +# --------------------------------------------------------------------------- +# Error items mixed into records[] on failure +# --------------------------------------------------------------------------- +class CKErrorItem(CKModel): + """ + Error item possibly present inside `records[]` when a per-record operation fails. + Strict during modeling: unknown keys will raise (inherits extra="forbid"). + """ + + serverErrorCode: str + reason: Optional[str] = None + recordName: Optional[str] = None + + +# --------------------------------------------------------------------------- +# Tombstone record for deleted entries +# --------------------------------------------------------------------------- +class CKTombstoneRecord(CKModel): + """ + A 'tombstone' entry returned by CloudKit to indicate a deleted record. + Tombstones intentionally omit `recordType` and `fields` — they only assert + that a record with `recordName` existed but has since been deleted. + Additional server-provided properties will be preserved via CKModel(extra="allow"). + """ + + recordName: str + deleted: Literal[True] + zoneID: Optional[CKZoneID] = None + + +class CKQueryResponse(CKModel): + """ + Top-level response from /records/query: + - records: list of CKRecord + - continuationMarker: optional paging token (present if more results exist) + """ + + records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] = Field( + default_factory=list + ) + continuationMarker: Optional[str] = None + # When getCurrentSyncToken=true is passed, server also returns a top-level syncToken + # Include it for strict validation; clients can ignore if not needed. + syncToken: Optional[str] = None + + +# --------------------------------------------------------------------------- +# Request-side: /records/query payloads (refined) +# --------------------------------------------------------------------------- + + +# Comparators seen on the wire. Keep Union[str, Enum] to be forward-compatible. 
+class CKComparator(str, Enum):
+    EQUALS = "EQUALS"
+    IN_ = "IN"  # trailing underscore avoids confusion with the Python keyword 'in' ('IN' itself is not reserved)
+    CONTAINS_ANY = "CONTAINS_ANY"
+    LESS_THAN = "LESS_THAN"
+    LESS_THAN_OR_EQUALS = "LESS_THAN_OR_EQUALS"
+    GREATER_THAN = "GREATER_THAN"
+    GREATER_THAN_OR_EQUALS = "GREATER_THAN_OR_EQUALS"
+    BEGINS_WITH = "BEGINS_WITH"
+    # Add more as you encounter them
+
+
+# FieldValue typed wrappers (request side) — discriminated by 'type'
+class _CKFilterValueBase(CKModel):
+    type: str
+
+
+class CKFVString(_CKFilterValueBase):
+    type: Literal["STRING"]
+    value: str
+
+
+class CKFVInt64(_CKFilterValueBase):
+    type: Literal["INT64"]
+    value: int
+
+
+class CKFVStringList(_CKFilterValueBase):
+    type: Literal["STRING_LIST"]
+    value: List[str]
+
+
+class CKFVReference(_CKFilterValueBase):
+    type: Literal["REFERENCE"]
+    value: CKReference  # zoneID is optional in observed samples
+
+
+class CKFVReferenceList(_CKFilterValueBase):
+    type: Literal["REFERENCE_LIST"]
+    value: List[CKReference]
+
+
+CKFilterValue = Annotated[
+    Union[
+        CKFVString,
+        CKFVInt64,
+        CKFVStringList,
+        CKFVReference,
+        CKFVReferenceList,
+    ],
+    Field(discriminator="type"),
+]
+
+
+class CKQuerySortBy(CKModel):
+    """
+    Sort directive. Example:
+        {"fieldName": "modTime", "ascending": false}
+    """
+
+    fieldName: str
+    ascending: Optional[bool] = None
+
+
+class CKQueryFilterBy(CKModel):
+    """
+    Filter clause. Examples:
+
+    STRING equality:
+        {"comparator": "EQUALS",
+         "fieldName": "indexName",
+         "fieldValue": {"value": "recents", "type": "STRING"}}
+
+    REFERENCE equality:
+        {"comparator": "EQUALS",
+         "fieldName": "reference",
+         "fieldValue": {"value": {"recordName": "...", "action": "VALIDATE"},
+                        "type": "REFERENCE"}}
+    """
+
+    comparator: Union[CKComparator, str]
+    fieldName: str
+    fieldValue: CKFilterValue
+
+
+class CKQueryObject(CKModel):
+    """
+    The 'query' object inside the request.
+ + recordType can be an app-defined pseudo type like "SearchIndexes" or "pinned" + (your samples), or a real record type. + """ + + recordType: str + filterBy: Optional[List[CKQueryFilterBy]] = None + sortBy: Optional[List[CKQuerySortBy]] = None + + +class CKDesiredKey(str, Enum): + """ + Enum for common desired keys in CloudKit queries. + """ + + TITLE_ENCRYPTED = "TitleEncrypted" + SNIPPET_ENCRYPTED = "SnippetEncrypted" + FIRST_ATTACHMENT_UTI_ENCRYPTED = "FirstAttachmentUTIEncrypted" + FIRST_ATTACHMENT_THUMBNAIL = "FirstAttachmentThumbnail" + FIRST_ATTACHMENT_THUMBNAIL_ORIENTATION = "FirstAttachmentThumbnailOrientation" + MODIFICATION_DATE = "ModificationDate" + DELETED = "Deleted" + FOLDERS = "Folders" + FOLDER = "Folder" + ATTACHMENTS = "Attachments" + PARENT_FOLDER = "ParentFolder" + NOTE = "Note" + LAST_VIEWED_MODIFICATION_DATE = "LastViewedModificationDate" + MINIMUM_SUPPORTED_NOTES_VERSION = "MinimumSupportedNotesVersion" + IS_PINNED = "IsPinned" + + +# Request side (only what you actually send on the wire) +class CKZoneIDReq(CKModel): + zoneName: Literal["Notes"] + + +class CKQueryRequest(CKModel): + """ + Top-level /records/query request payload. 
+ """ + + query: CKQueryObject + zoneID: CKZoneIDReq + desiredKeys: Optional[List[Union[CKDesiredKey, str]]] = ( + None # can include duplicates; keep order + ) + resultsLimit: Optional[int] = None + # Observed as a base64-like string on the wire; keep as str for strictness + continuationMarker: Optional[str] = None + + +class CKLookupDescriptor(CKModel): + recordName: str + + +# --------------------------------------------------------------------------- +# Request-side: /records/lookup payloads +# --------------------------------------------------------------------------- + + +class CKLookupRequest(CKModel): + records: List[CKLookupDescriptor] + zoneID: CKZoneIDReq + + +class CKLookupResponse(CKModel): + records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] + # Server returns a top-level syncToken when getCurrentSyncToken=true + syncToken: Optional[str] = None + + +# --------------------------------------------------------------------------- +# Response-side: /changes/zone responses (delta sync) +# --------------------------------------------------------------------------- + + +class CKZoneChangesZone(CKModel): + """ + One zone entry inside the /changes/zone response. + + Based on your corpus: + - Always has: records[], zoneID, syncToken + - moreComing is present but sometimes null (treat as Optional[bool]) + """ + + records: List[Union[CKRecord, CKTombstoneRecord, CKErrorItem]] = Field( + default_factory=list + ) + moreComing: Optional[bool] = None + syncToken: str + zoneID: CKZoneID + + +class CKZoneChangesResponse(CKModel): + """ + Top-level envelope for /private/changes/zone (and /shared/changes/zone) responses. 
+ """ + + zones: List[CKZoneChangesZone] = Field(default_factory=list) + + +# --------------------------------------------------------------------------- +# Request-side: /changes/zone payloads +# --------------------------------------------------------------------------- + + +class CKZoneChangesZoneReq(CKModel): + """ + One zone request entry for /changes/zone. + + Observed keys in corpus: + - zoneID: includes zoneName (always "Notes" here), sometimes zoneType and ownerRecordName (for shared) + - desiredKeys: list of field names to project (duplicates allowed, order preserved) + - desiredRecordTypes: list of record types to include + - syncToken: optional paging token (base64-like string) + - reverse: optional bool + """ + + zoneID: CKZoneID # allow ownerRecordName/zoneType when present + desiredKeys: Optional[List[Union[CKDesiredKey, str]]] = None + desiredRecordTypes: Optional[List[str]] = None + # Observed as a base64-like string on the wire; keep as str for strictness + syncToken: Optional[str] = None + reverse: Optional[bool] = None + + +class CKZoneChangesRequest(CKModel): + zones: List[CKZoneChangesZoneReq] diff --git a/pyicloud/services/notes/models/constants.py b/pyicloud/services/notes/models/constants.py new file mode 100644 index 00000000..e01dd814 --- /dev/null +++ b/pyicloud/services/notes/models/constants.py @@ -0,0 +1,29 @@ +from enum import Enum + + +class NotesRecordType(str, Enum): + Note = "Note" + Folder = "Folder" + PasswordProtectedNote = "PasswordProtectedNote" + + +class NotesDesiredKey(str, Enum): + """ + Enum for common desired keys in CloudKit queries for Notes. 
+ """ + + TITLE_ENCRYPTED = "TitleEncrypted" + SNIPPET_ENCRYPTED = "SnippetEncrypted" + FIRST_ATTACHMENT_UTI_ENCRYPTED = "FirstAttachmentUTIEncrypted" + FIRST_ATTACHMENT_THUMBNAIL = "FirstAttachmentThumbnail" + FIRST_ATTACHMENT_THUMBNAIL_ORIENTATION = "FirstAttachmentThumbnailOrientation" + MODIFICATION_DATE = "ModificationDate" + DELETED = "Deleted" + FOLDERS = "Folders" + FOLDER = "Folder" + ATTACHMENTS = "Attachments" + PARENT_FOLDER = "ParentFolder" + NOTE = "Note" + LAST_VIEWED_MODIFICATION_DATE = "LastViewedModificationDate" + MINIMUM_SUPPORTED_NOTES_VERSION = "MinimumSupportedNotesVersion" + IS_PINNED = "IsPinned" diff --git a/pyicloud/services/notes/models/dto.py b/pyicloud/services/notes/models/dto.py new file mode 100644 index 00000000..4755dafe --- /dev/null +++ b/pyicloud/services/notes/models/dto.py @@ -0,0 +1,78 @@ +"""High-level Notes data transfer objects.""" + +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING, Iterator, List, Literal, Optional + +from pydantic import computed_field + +from pyicloud.common.models import FrozenServiceModel + +if TYPE_CHECKING: # pragma: no cover - import for type checking only + from ..service import NotesService + + +class NoteSummary(FrozenServiceModel): + """Lightweight metadata returned by list/search APIs.""" + + id: str + title: Optional[str] + snippet: Optional[str] + modified_at: Optional[datetime] + folder_id: Optional[str] + folder_name: Optional[str] + is_deleted: bool + is_locked: bool + + +class Attachment(FrozenServiceModel): + """Metadata for a note attachment.""" + + id: str + filename: Optional[str] + uti: Optional[str] + size: Optional[int] + download_url: Optional[str] + preview_url: Optional[str] + thumbnail_url: Optional[str] + + def save_to(self, directory: str, *, service: "NotesService") -> str: + """Download the attachment to ``directory`` using the provided service.""" + + return service._download_attachment_to(self, directory) + + def 
stream( + self, *, service: "NotesService", chunk_size: int = 65_536 + ) -> Iterator[bytes]: + """Yield the attachment bytes in chunks using the provided service.""" + + yield from service._stream_attachment(self, chunk_size=chunk_size) + + +class Note(NoteSummary): + """Full note payload returned by ``NotesService.get``.""" + + text: Optional[str] + html: Optional[str] = None + attachments: Optional[List[Attachment]] + + @computed_field + @property + def has_attachments(self) -> Optional[bool]: + """Return ``True``/``False`` when attachments were loaded, otherwise ``None``.""" + if self.attachments is None: + return None + return bool(self.attachments) + + +class NoteFolder(FrozenServiceModel): + id: str + name: Optional[str] + has_subfolders: Optional[bool] + count: Optional[int] # not always available + + +class ChangeEvent(FrozenServiceModel): + type: Literal["updated", "deleted"] + note: NoteSummary diff --git a/pyicloud/services/notes/protobuf/__init__.py b/pyicloud/services/notes/protobuf/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pyicloud/services/notes/protobuf/notes.proto b/pyicloud/services/notes/protobuf/notes.proto new file mode 100644 index 00000000..8eef8724 --- /dev/null +++ b/pyicloud/services/notes/protobuf/notes.proto @@ -0,0 +1,239 @@ +syntax = "proto3"; + +package notes; + +// =============================== +// Common classes used across types +// =============================== + +message Color { + float red = 1; + float green = 2; + float blue = 3; + float alpha = 4; +} + +message AttachmentInfo { + optional string attachment_identifier = 1; // preserve HasField + optional string type_uti = 2; // preserve HasField +} + +message Font { + // Not used in presence checks, may remain non-optional + string font_name = 1; + float point_size = 2; + int32 font_hints = 3; +} + +// Observed highlight palette used by Apple Notes +enum Highlight { + HIGHLIGHT_UNKNOWN = 0; + HIGHLIGHT_PURPLE = 1; + HIGHLIGHT_PINK = 2; + 
HIGHLIGHT_ORANGE = 3; + HIGHLIGHT_MINT = 4; + HIGHLIGHT_BLUE = 5; +} + +enum StyleType { + STYLE_TYPE_TITLE = 0; + STYLE_TYPE_HEADING = 1; + STYLE_TYPE_SUBHEADING = 2; + STYLE_TYPE_MONOSPACED = 4; + STYLE_TYPE_BULLET_LIST_ITEM = 100; + STYLE_TYPE_DASHED_LIST_ITEM = 101; + STYLE_TYPE_NUMBERED_LIST_ITEM = 102; + STYLE_TYPE_CHECKLIST_ITEM = 103; +} + +enum WritingDirection { + WRITING_DIRECTION_DEFAULT = 0; + WRITING_DIRECTION_LTR = 1; + WRITING_DIRECTION_RTL = 2; +} + +enum Alignment { + ALIGNMENT_DEFAULT = 0; + ALIGNMENT_CENTER = 1; + ALIGNMENT_RIGHT = 2; + ALIGNMENT_JUSTIFY = 3; +} + +// Styles a "Paragraph" (any run of characters in an AttributeRun) +message ParagraphStyle { + optional StyleType style_type = 1; // was [default = -1] in proto2; use presence instead + optional Alignment alignment = 2; + optional WritingDirection writing_direction_paragraph = 3; // 1/3 LTR, 2/4 RTL (observed) + optional int32 indent_amount = 4; + optional Checklist checklist = 5; // message presence works in proto3 + optional int32 starting_list_item_number = 7; // for ordered lists + optional int32 block_quote = 8; + optional bytes paragraph_uuid = 9; + +} + + +// Represents a checklist item +message Checklist { + optional bytes uuid = 1; // presence is checked in Python + optional int32 done = 2; // presence is checked in Python +} + +// Represents an object that has pointers to a key and a value +message DictionaryElement { + ObjectID key = 1; + ObjectID value = 2; +} + +message Dictionary { + repeated DictionaryElement element = 1; +} + +// ObjectIDs are used to identify objects within the protobuf, offsets in an array, or a string. +message ObjectID { + uint64 unsigned_integer_value = 2; + string string_value = 4; + int32 object_index = 6; +} + +// Register Latest is used to identify the most recent version +message RegisterLatest { + ObjectID contents = 2; +} + +// MapEntries have a key that maps to a key-item array index and a value that points to an object. 
+message MapItem { + int32 key = 1; + ObjectID value = 2; +} + +enum FontWeight { + FONT_WEIGHT_UNKNOWN = 0; + FONT_WEIGHT_BOLD = 1; + FONT_WEIGHT_ITALIC = 2; + FONT_WEIGHT_BOLD_ITALIC = 3; +} + +// A "run" of characters that need to be styled/displayed/etc +message AttributeRun { + int32 length = 1; + + optional ParagraphStyle paragraph_style = 2; // message presence + + // Inline styling + optional Font font = 3; // message presence (not used today) + optional FontWeight font_weight = 5; + optional int32 underlined = 6; + optional int32 strikethrough = 7; + optional int32 superscript = 8; // sign indicates super/sub + optional string link = 9; + optional Color color = 10; // message presence + optional WritingDirection writing_direction_selection = 11; // may also appear here + optional AttachmentInfo attachment_info = 12; // message presence + optional int32 timestamp = 13; + optional int32 emphasis_style = 14; + optional Highlight highlight_color = 15; // preferred highlight palette (when present) +} + +// =============================== +// Overall Note protobufs +// =============================== + +message NoteStoreProto { + Document document = 2; +} + +message Document { + int32 version = 2; + Note note = 3; +} + +message Note { + string note_text = 2; + repeated AttributeRun attribute_run = 5; +} + +// =============================== +// Embedded objects (mergeable data) +// =============================== + +message MergableDataProto { + MergableDataObject mergable_data_object = 2; +} + +message MergableDataObject { + int32 version = 2; + MergeableDataObjectData mergeable_data_object_data = 3; +} + +message MergeableDataObjectData { + repeated MergeableDataObjectRow mergeable_data_object_entry = 3; + repeated string mergeable_data_object_key_item = 4; + repeated string mergeable_data_object_type_item = 5; + repeated bytes mergeable_data_object_uuid_item = 6; +} + +message MergeableDataObjectRow { + RegisterLatest register_latest = 1; + optional List list 
= 5; + optional Dictionary dictionary = 6; + optional UnknownMergeableDataObjectEntryMessage unknown_message = 9; + optional Note note = 10; + optional MergeableDataObjectMap custom_map = 13; + optional OrderedSet ordered_set = 16; +} + +message UnknownMergeableDataObjectEntryMessage { + optional UnknownMergeableDataObjectEntryMessageEntry unknown_entry = 1; +} + +message UnknownMergeableDataObjectEntryMessageEntry { + optional int32 unknown_int1 = 1; + optional int64 unknown_int2 = 2; +} + +message MergeableDataObjectMap { + int32 type = 1; + repeated MapItem map_entry = 3; +} + +message OrderedSet { + OrderedSetOrdering ordering = 1; + Dictionary elements = 2; +} + +message OrderedSetOrdering { + OrderedSetOrderingArray array = 1; + Dictionary contents = 2; +} + +message OrderedSetOrderingArray { + Note contents = 1; + repeated OrderedSetOrderingArrayAttachment attachment = 2; +} + +message OrderedSetOrderingArrayAttachment { + int32 index = 1; + bytes uuid = 2; +} + +// List structures (not used yet by renderer, left structurally consistent) +message List { + repeated ListItem list_entry = 1; +} + +message ListItem { + ObjectID id = 2; + optional ListEntryDetails details = 3; + ListEntryDetails additional_details = 4; +} + +message ListEntryDetails { + optional ListEntryDetailsKey list_entry_details_key = 1; + optional ObjectID id = 2; +} + +message ListEntryDetailsKey { + int32 list_entry_details_type_index = 1; + int32 list_entry_details_key = 2; +} diff --git a/pyicloud/services/notes/protobuf/notes_pb2.py b/pyicloud/services/notes/protobuf/notes_pb2.py new file mode 100644 index 00000000..fcdcb5d5 --- /dev/null +++ b/pyicloud/services/notes/protobuf/notes_pb2.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: notes.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, 6, 31, 1, "", "notes.proto" +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0bnotes.proto\x12\x05notes"@\n\x05\x43olor\x12\x0b\n\x03red\x18\x01 \x01(\x02\x12\r\n\x05green\x18\x02 \x01(\x02\x12\x0c\n\x04\x62lue\x18\x03 \x01(\x02\x12\r\n\x05\x61lpha\x18\x04 \x01(\x02"r\n\x0e\x41ttachmentInfo\x12"\n\x15\x61ttachment_identifier\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08type_uti\x18\x02 \x01(\tH\x01\x88\x01\x01\x42\x18\n\x16_attachment_identifierB\x0b\n\t_type_uti"A\n\x04\x46ont\x12\x11\n\tfont_name\x18\x01 \x01(\t\x12\x12\n\npoint_size\x18\x02 \x01(\x02\x12\x12\n\nfont_hints\x18\x03 \x01(\x05"\xeb\x03\n\x0eParagraphStyle\x12)\n\nstyle_type\x18\x01 \x01(\x0e\x32\x10.notes.StyleTypeH\x00\x88\x01\x01\x12(\n\talignment\x18\x02 \x01(\x0e\x32\x10.notes.AlignmentH\x01\x88\x01\x01\x12\x41\n\x1bwriting_direction_paragraph\x18\x03 \x01(\x0e\x32\x17.notes.WritingDirectionH\x02\x88\x01\x01\x12\x1a\n\rindent_amount\x18\x04 \x01(\x05H\x03\x88\x01\x01\x12(\n\tchecklist\x18\x05 \x01(\x0b\x32\x10.notes.ChecklistH\x04\x88\x01\x01\x12&\n\x19starting_list_item_number\x18\x07 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0b\x62lock_quote\x18\x08 \x01(\x05H\x06\x88\x01\x01\x12\x1b\n\x0eparagraph_uuid\x18\t 
\x01(\x0cH\x07\x88\x01\x01\x42\r\n\x0b_style_typeB\x0c\n\n_alignmentB\x1e\n\x1c_writing_direction_paragraphB\x10\n\x0e_indent_amountB\x0c\n\n_checklistB\x1c\n\x1a_starting_list_item_numberB\x0e\n\x0c_block_quoteB\x11\n\x0f_paragraph_uuid"C\n\tChecklist\x12\x11\n\x04uuid\x18\x01 \x01(\x0cH\x00\x88\x01\x01\x12\x11\n\x04\x64one\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x07\n\x05_uuidB\x07\n\x05_done"Q\n\x11\x44ictionaryElement\x12\x1c\n\x03key\x18\x01 \x01(\x0b\x32\x0f.notes.ObjectID\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.notes.ObjectID"7\n\nDictionary\x12)\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x18.notes.DictionaryElement"V\n\x08ObjectID\x12\x1e\n\x16unsigned_integer_value\x18\x02 \x01(\x04\x12\x14\n\x0cstring_value\x18\x04 \x01(\t\x12\x14\n\x0cobject_index\x18\x06 \x01(\x05"3\n\x0eRegisterLatest\x12!\n\x08\x63ontents\x18\x02 \x01(\x0b\x32\x0f.notes.ObjectID"6\n\x07MapItem\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.notes.ObjectID"\xdb\x05\n\x0c\x41ttributeRun\x12\x0e\n\x06length\x18\x01 \x01(\x05\x12\x33\n\x0fparagraph_style\x18\x02 \x01(\x0b\x32\x15.notes.ParagraphStyleH\x00\x88\x01\x01\x12\x1e\n\x04\x66ont\x18\x03 \x01(\x0b\x32\x0b.notes.FontH\x01\x88\x01\x01\x12+\n\x0b\x66ont_weight\x18\x05 \x01(\x0e\x32\x11.notes.FontWeightH\x02\x88\x01\x01\x12\x17\n\nunderlined\x18\x06 \x01(\x05H\x03\x88\x01\x01\x12\x1a\n\rstrikethrough\x18\x07 \x01(\x05H\x04\x88\x01\x01\x12\x18\n\x0bsuperscript\x18\x08 \x01(\x05H\x05\x88\x01\x01\x12\x11\n\x04link\x18\t \x01(\tH\x06\x88\x01\x01\x12 \n\x05\x63olor\x18\n \x01(\x0b\x32\x0c.notes.ColorH\x07\x88\x01\x01\x12\x41\n\x1bwriting_direction_selection\x18\x0b \x01(\x0e\x32\x17.notes.WritingDirectionH\x08\x88\x01\x01\x12\x33\n\x0f\x61ttachment_info\x18\x0c \x01(\x0b\x32\x15.notes.AttachmentInfoH\t\x88\x01\x01\x12\x16\n\ttimestamp\x18\r \x01(\x05H\n\x88\x01\x01\x12\x1b\n\x0e\x65mphasis_style\x18\x0e \x01(\x05H\x0b\x88\x01\x01\x12.\n\x0fhighlight_color\x18\x0f 
\x01(\x0e\x32\x10.notes.HighlightH\x0c\x88\x01\x01\x42\x12\n\x10_paragraph_styleB\x07\n\x05_fontB\x0e\n\x0c_font_weightB\r\n\x0b_underlinedB\x10\n\x0e_strikethroughB\x0e\n\x0c_superscriptB\x07\n\x05_linkB\x08\n\x06_colorB\x1e\n\x1c_writing_direction_selectionB\x12\n\x10_attachment_infoB\x0c\n\n_timestampB\x11\n\x0f_emphasis_styleB\x12\n\x10_highlight_color"3\n\x0eNoteStoreProto\x12!\n\x08\x64ocument\x18\x02 \x01(\x0b\x32\x0f.notes.Document"6\n\x08\x44ocument\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x19\n\x04note\x18\x03 \x01(\x0b\x32\x0b.notes.Note"E\n\x04Note\x12\x11\n\tnote_text\x18\x02 \x01(\t\x12*\n\rattribute_run\x18\x05 \x03(\x0b\x32\x13.notes.AttributeRun"L\n\x11MergableDataProto\x12\x37\n\x14mergable_data_object\x18\x02 \x01(\x0b\x32\x19.notes.MergableDataObject"i\n\x12MergableDataObject\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x42\n\x1amergeable_data_object_data\x18\x03 \x01(\x0b\x32\x1e.notes.MergeableDataObjectData"\xd7\x01\n\x17MergeableDataObjectData\x12\x42\n\x1bmergeable_data_object_entry\x18\x03 \x03(\x0b\x32\x1d.notes.MergeableDataObjectRow\x12&\n\x1emergeable_data_object_key_item\x18\x04 \x03(\t\x12\'\n\x1fmergeable_data_object_type_item\x18\x05 \x03(\t\x12\'\n\x1fmergeable_data_object_uuid_item\x18\x06 \x03(\x0c"\xba\x03\n\x16MergeableDataObjectRow\x12.\n\x0fregister_latest\x18\x01 \x01(\x0b\x32\x15.notes.RegisterLatest\x12\x1e\n\x04list\x18\x05 \x01(\x0b\x32\x0b.notes.ListH\x00\x88\x01\x01\x12*\n\ndictionary\x18\x06 \x01(\x0b\x32\x11.notes.DictionaryH\x01\x88\x01\x01\x12K\n\x0funknown_message\x18\t \x01(\x0b\x32-.notes.UnknownMergeableDataObjectEntryMessageH\x02\x88\x01\x01\x12\x1e\n\x04note\x18\n \x01(\x0b\x32\x0b.notes.NoteH\x03\x88\x01\x01\x12\x36\n\ncustom_map\x18\r \x01(\x0b\x32\x1d.notes.MergeableDataObjectMapH\x04\x88\x01\x01\x12+\n\x0bordered_set\x18\x10 
\x01(\x0b\x32\x11.notes.OrderedSetH\x05\x88\x01\x01\x42\x07\n\x05_listB\r\n\x0b_dictionaryB\x12\n\x10_unknown_messageB\x07\n\x05_noteB\r\n\x0b_custom_mapB\x0e\n\x0c_ordered_set"\x8a\x01\n&UnknownMergeableDataObjectEntryMessage\x12N\n\runknown_entry\x18\x01 \x01(\x0b\x32\x32.notes.UnknownMergeableDataObjectEntryMessageEntryH\x00\x88\x01\x01\x42\x10\n\x0e_unknown_entry"\x85\x01\n+UnknownMergeableDataObjectEntryMessageEntry\x12\x19\n\x0cunknown_int1\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x19\n\x0cunknown_int2\x18\x02 \x01(\x03H\x01\x88\x01\x01\x42\x0f\n\r_unknown_int1B\x0f\n\r_unknown_int2"I\n\x16MergeableDataObjectMap\x12\x0c\n\x04type\x18\x01 \x01(\x05\x12!\n\tmap_entry\x18\x03 \x03(\x0b\x32\x0e.notes.MapItem"^\n\nOrderedSet\x12+\n\x08ordering\x18\x01 \x01(\x0b\x32\x19.notes.OrderedSetOrdering\x12#\n\x08\x65lements\x18\x02 \x01(\x0b\x32\x11.notes.Dictionary"h\n\x12OrderedSetOrdering\x12-\n\x05\x61rray\x18\x01 \x01(\x0b\x32\x1e.notes.OrderedSetOrderingArray\x12#\n\x08\x63ontents\x18\x02 \x01(\x0b\x32\x11.notes.Dictionary"v\n\x17OrderedSetOrderingArray\x12\x1d\n\x08\x63ontents\x18\x01 \x01(\x0b\x32\x0b.notes.Note\x12<\n\nattachment\x18\x02 \x03(\x0b\x32(.notes.OrderedSetOrderingArrayAttachment"@\n!OrderedSetOrderingArrayAttachment\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x0c\n\x04uuid\x18\x02 \x01(\x0c"+\n\x04List\x12#\n\nlist_entry\x18\x01 \x03(\x0b\x32\x0f.notes.ListItem"\x97\x01\n\x08ListItem\x12\x1b\n\x02id\x18\x02 \x01(\x0b\x32\x0f.notes.ObjectID\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x17.notes.ListEntryDetailsH\x00\x88\x01\x01\x12\x33\n\x12\x61\x64\x64itional_details\x18\x04 \x01(\x0b\x32\x17.notes.ListEntryDetailsB\n\n\x08_details"\x97\x01\n\x10ListEntryDetails\x12?\n\x16list_entry_details_key\x18\x01 \x01(\x0b\x32\x1a.notes.ListEntryDetailsKeyH\x00\x88\x01\x01\x12 \n\x02id\x18\x02 \x01(\x0b\x32\x0f.notes.ObjectIDH\x01\x88\x01\x01\x42\x19\n\x17_list_entry_details_keyB\x05\n\x03_id"\\\n\x13ListEntryDetailsKey\x12%\n\x1dlist_entry_details_type_index\x18\x01 
\x01(\x05\x12\x1e\n\x16list_entry_details_key\x18\x02 \x01(\x05*\x8a\x01\n\tHighlight\x12\x15\n\x11HIGHLIGHT_UNKNOWN\x10\x00\x12\x14\n\x10HIGHLIGHT_PURPLE\x10\x01\x12\x12\n\x0eHIGHLIGHT_PINK\x10\x02\x12\x14\n\x10HIGHLIGHT_ORANGE\x10\x03\x12\x12\n\x0eHIGHLIGHT_MINT\x10\x04\x12\x12\n\x0eHIGHLIGHT_BLUE\x10\x05*\xf3\x01\n\tStyleType\x12\x14\n\x10STYLE_TYPE_TITLE\x10\x00\x12\x16\n\x12STYLE_TYPE_HEADING\x10\x01\x12\x19\n\x15STYLE_TYPE_SUBHEADING\x10\x02\x12\x19\n\x15STYLE_TYPE_MONOSPACED\x10\x04\x12\x1f\n\x1bSTYLE_TYPE_BULLET_LIST_ITEM\x10\x64\x12\x1f\n\x1bSTYLE_TYPE_DASHED_LIST_ITEM\x10\x65\x12!\n\x1dSTYLE_TYPE_NUMBERED_LIST_ITEM\x10\x66\x12\x1d\n\x19STYLE_TYPE_CHECKLIST_ITEM\x10g*g\n\x10WritingDirection\x12\x1d\n\x19WRITING_DIRECTION_DEFAULT\x10\x00\x12\x19\n\x15WRITING_DIRECTION_LTR\x10\x01\x12\x19\n\x15WRITING_DIRECTION_RTL\x10\x02*d\n\tAlignment\x12\x15\n\x11\x41LIGNMENT_DEFAULT\x10\x00\x12\x14\n\x10\x41LIGNMENT_CENTER\x10\x01\x12\x13\n\x0f\x41LIGNMENT_RIGHT\x10\x02\x12\x15\n\x11\x41LIGNMENT_JUSTIFY\x10\x03*p\n\nFontWeight\x12\x17\n\x13\x46ONT_WEIGHT_UNKNOWN\x10\x00\x12\x14\n\x10\x46ONT_WEIGHT_BOLD\x10\x01\x12\x16\n\x12\x46ONT_WEIGHT_ITALIC\x10\x02\x12\x1b\n\x17\x46ONT_WEIGHT_BOLD_ITALIC\x10\x03\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "notes_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_HIGHLIGHT"]._serialized_start = 4121 + _globals["_HIGHLIGHT"]._serialized_end = 4259 + _globals["_STYLETYPE"]._serialized_start = 4262 + _globals["_STYLETYPE"]._serialized_end = 4505 + _globals["_WRITINGDIRECTION"]._serialized_start = 4507 + _globals["_WRITINGDIRECTION"]._serialized_end = 4610 + _globals["_ALIGNMENT"]._serialized_start = 4612 + _globals["_ALIGNMENT"]._serialized_end = 4712 + _globals["_FONTWEIGHT"]._serialized_start = 4714 + _globals["_FONTWEIGHT"]._serialized_end = 4826 + 
_globals["_COLOR"]._serialized_start = 22 + _globals["_COLOR"]._serialized_end = 86 + _globals["_ATTACHMENTINFO"]._serialized_start = 88 + _globals["_ATTACHMENTINFO"]._serialized_end = 202 + _globals["_FONT"]._serialized_start = 204 + _globals["_FONT"]._serialized_end = 269 + _globals["_PARAGRAPHSTYLE"]._serialized_start = 272 + _globals["_PARAGRAPHSTYLE"]._serialized_end = 763 + _globals["_CHECKLIST"]._serialized_start = 765 + _globals["_CHECKLIST"]._serialized_end = 832 + _globals["_DICTIONARYELEMENT"]._serialized_start = 834 + _globals["_DICTIONARYELEMENT"]._serialized_end = 915 + _globals["_DICTIONARY"]._serialized_start = 917 + _globals["_DICTIONARY"]._serialized_end = 972 + _globals["_OBJECTID"]._serialized_start = 974 + _globals["_OBJECTID"]._serialized_end = 1060 + _globals["_REGISTERLATEST"]._serialized_start = 1062 + _globals["_REGISTERLATEST"]._serialized_end = 1113 + _globals["_MAPITEM"]._serialized_start = 1115 + _globals["_MAPITEM"]._serialized_end = 1169 + _globals["_ATTRIBUTERUN"]._serialized_start = 1172 + _globals["_ATTRIBUTERUN"]._serialized_end = 1903 + _globals["_NOTESTOREPROTO"]._serialized_start = 1905 + _globals["_NOTESTOREPROTO"]._serialized_end = 1956 + _globals["_DOCUMENT"]._serialized_start = 1958 + _globals["_DOCUMENT"]._serialized_end = 2012 + _globals["_NOTE"]._serialized_start = 2014 + _globals["_NOTE"]._serialized_end = 2083 + _globals["_MERGABLEDATAPROTO"]._serialized_start = 2085 + _globals["_MERGABLEDATAPROTO"]._serialized_end = 2161 + _globals["_MERGABLEDATAOBJECT"]._serialized_start = 2163 + _globals["_MERGABLEDATAOBJECT"]._serialized_end = 2268 + _globals["_MERGEABLEDATAOBJECTDATA"]._serialized_start = 2271 + _globals["_MERGEABLEDATAOBJECTDATA"]._serialized_end = 2486 + _globals["_MERGEABLEDATAOBJECTROW"]._serialized_start = 2489 + _globals["_MERGEABLEDATAOBJECTROW"]._serialized_end = 2931 + _globals["_UNKNOWNMERGEABLEDATAOBJECTENTRYMESSAGE"]._serialized_start = 2934 + 
_globals["_UNKNOWNMERGEABLEDATAOBJECTENTRYMESSAGE"]._serialized_end = 3072 + _globals["_UNKNOWNMERGEABLEDATAOBJECTENTRYMESSAGEENTRY"]._serialized_start = 3075 + _globals["_UNKNOWNMERGEABLEDATAOBJECTENTRYMESSAGEENTRY"]._serialized_end = 3208 + _globals["_MERGEABLEDATAOBJECTMAP"]._serialized_start = 3210 + _globals["_MERGEABLEDATAOBJECTMAP"]._serialized_end = 3283 + _globals["_ORDEREDSET"]._serialized_start = 3285 + _globals["_ORDEREDSET"]._serialized_end = 3379 + _globals["_ORDEREDSETORDERING"]._serialized_start = 3381 + _globals["_ORDEREDSETORDERING"]._serialized_end = 3485 + _globals["_ORDEREDSETORDERINGARRAY"]._serialized_start = 3487 + _globals["_ORDEREDSETORDERINGARRAY"]._serialized_end = 3605 + _globals["_ORDEREDSETORDERINGARRAYATTACHMENT"]._serialized_start = 3607 + _globals["_ORDEREDSETORDERINGARRAYATTACHMENT"]._serialized_end = 3671 + _globals["_LIST"]._serialized_start = 3673 + _globals["_LIST"]._serialized_end = 3716 + _globals["_LISTITEM"]._serialized_start = 3719 + _globals["_LISTITEM"]._serialized_end = 3870 + _globals["_LISTENTRYDETAILS"]._serialized_start = 3873 + _globals["_LISTENTRYDETAILS"]._serialized_end = 4024 + _globals["_LISTENTRYDETAILSKEY"]._serialized_start = 4026 + _globals["_LISTENTRYDETAILSKEY"]._serialized_end = 4118 +# @@protoc_insertion_point(module_scope) diff --git a/pyicloud/services/notes/protobuf/notes_pb2.pyi b/pyicloud/services/notes/protobuf/notes_pb2.pyi new file mode 100644 index 00000000..9229754e --- /dev/null +++ b/pyicloud/services/notes/protobuf/notes_pb2.pyi @@ -0,0 +1,539 @@ +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as 
_enum_type_wrapper + +DESCRIPTOR: _descriptor.FileDescriptor + +class Highlight(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + HIGHLIGHT_UNKNOWN: _ClassVar[Highlight] + HIGHLIGHT_PURPLE: _ClassVar[Highlight] + HIGHLIGHT_PINK: _ClassVar[Highlight] + HIGHLIGHT_ORANGE: _ClassVar[Highlight] + HIGHLIGHT_MINT: _ClassVar[Highlight] + HIGHLIGHT_BLUE: _ClassVar[Highlight] + +class StyleType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + STYLE_TYPE_TITLE: _ClassVar[StyleType] + STYLE_TYPE_HEADING: _ClassVar[StyleType] + STYLE_TYPE_SUBHEADING: _ClassVar[StyleType] + STYLE_TYPE_MONOSPACED: _ClassVar[StyleType] + STYLE_TYPE_BULLET_LIST_ITEM: _ClassVar[StyleType] + STYLE_TYPE_DASHED_LIST_ITEM: _ClassVar[StyleType] + STYLE_TYPE_NUMBERED_LIST_ITEM: _ClassVar[StyleType] + STYLE_TYPE_CHECKLIST_ITEM: _ClassVar[StyleType] + +class WritingDirection(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + WRITING_DIRECTION_DEFAULT: _ClassVar[WritingDirection] + WRITING_DIRECTION_LTR: _ClassVar[WritingDirection] + WRITING_DIRECTION_RTL: _ClassVar[WritingDirection] + +class Alignment(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ALIGNMENT_DEFAULT: _ClassVar[Alignment] + ALIGNMENT_CENTER: _ClassVar[Alignment] + ALIGNMENT_RIGHT: _ClassVar[Alignment] + ALIGNMENT_JUSTIFY: _ClassVar[Alignment] + +class FontWeight(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + FONT_WEIGHT_UNKNOWN: _ClassVar[FontWeight] + FONT_WEIGHT_BOLD: _ClassVar[FontWeight] + FONT_WEIGHT_ITALIC: _ClassVar[FontWeight] + FONT_WEIGHT_BOLD_ITALIC: _ClassVar[FontWeight] + +HIGHLIGHT_UNKNOWN: Highlight +HIGHLIGHT_PURPLE: Highlight +HIGHLIGHT_PINK: Highlight +HIGHLIGHT_ORANGE: Highlight +HIGHLIGHT_MINT: Highlight +HIGHLIGHT_BLUE: Highlight +STYLE_TYPE_TITLE: StyleType +STYLE_TYPE_HEADING: StyleType +STYLE_TYPE_SUBHEADING: StyleType +STYLE_TYPE_MONOSPACED: StyleType +STYLE_TYPE_BULLET_LIST_ITEM: StyleType 
+STYLE_TYPE_DASHED_LIST_ITEM: StyleType +STYLE_TYPE_NUMBERED_LIST_ITEM: StyleType +STYLE_TYPE_CHECKLIST_ITEM: StyleType +WRITING_DIRECTION_DEFAULT: WritingDirection +WRITING_DIRECTION_LTR: WritingDirection +WRITING_DIRECTION_RTL: WritingDirection +ALIGNMENT_DEFAULT: Alignment +ALIGNMENT_CENTER: Alignment +ALIGNMENT_RIGHT: Alignment +ALIGNMENT_JUSTIFY: Alignment +FONT_WEIGHT_UNKNOWN: FontWeight +FONT_WEIGHT_BOLD: FontWeight +FONT_WEIGHT_ITALIC: FontWeight +FONT_WEIGHT_BOLD_ITALIC: FontWeight + +class Color(_message.Message): + __slots__ = ("red", "green", "blue", "alpha") + RED_FIELD_NUMBER: _ClassVar[int] + GREEN_FIELD_NUMBER: _ClassVar[int] + BLUE_FIELD_NUMBER: _ClassVar[int] + ALPHA_FIELD_NUMBER: _ClassVar[int] + red: float + green: float + blue: float + alpha: float + def __init__( + self, + red: _Optional[float] = ..., + green: _Optional[float] = ..., + blue: _Optional[float] = ..., + alpha: _Optional[float] = ..., + ) -> None: ... + +class AttachmentInfo(_message.Message): + __slots__ = ("attachment_identifier", "type_uti") + ATTACHMENT_IDENTIFIER_FIELD_NUMBER: _ClassVar[int] + TYPE_UTI_FIELD_NUMBER: _ClassVar[int] + attachment_identifier: str + type_uti: str + def __init__( + self, + attachment_identifier: _Optional[str] = ..., + type_uti: _Optional[str] = ..., + ) -> None: ... + +class Font(_message.Message): + __slots__ = ("font_name", "point_size", "font_hints") + FONT_NAME_FIELD_NUMBER: _ClassVar[int] + POINT_SIZE_FIELD_NUMBER: _ClassVar[int] + FONT_HINTS_FIELD_NUMBER: _ClassVar[int] + font_name: str + point_size: float + font_hints: int + def __init__( + self, + font_name: _Optional[str] = ..., + point_size: _Optional[float] = ..., + font_hints: _Optional[int] = ..., + ) -> None: ... 
+ +class ParagraphStyle(_message.Message): + __slots__ = ( + "style_type", + "alignment", + "writing_direction_paragraph", + "indent_amount", + "checklist", + "starting_list_item_number", + "block_quote", + "paragraph_uuid", + ) + STYLE_TYPE_FIELD_NUMBER: _ClassVar[int] + ALIGNMENT_FIELD_NUMBER: _ClassVar[int] + WRITING_DIRECTION_PARAGRAPH_FIELD_NUMBER: _ClassVar[int] + INDENT_AMOUNT_FIELD_NUMBER: _ClassVar[int] + CHECKLIST_FIELD_NUMBER: _ClassVar[int] + STARTING_LIST_ITEM_NUMBER_FIELD_NUMBER: _ClassVar[int] + BLOCK_QUOTE_FIELD_NUMBER: _ClassVar[int] + PARAGRAPH_UUID_FIELD_NUMBER: _ClassVar[int] + style_type: StyleType + alignment: Alignment + writing_direction_paragraph: WritingDirection + indent_amount: int + checklist: Checklist + starting_list_item_number: int + block_quote: int + paragraph_uuid: bytes + def __init__( + self, + style_type: _Optional[_Union[StyleType, str]] = ..., + alignment: _Optional[_Union[Alignment, str]] = ..., + writing_direction_paragraph: _Optional[_Union[WritingDirection, str]] = ..., + indent_amount: _Optional[int] = ..., + checklist: _Optional[_Union[Checklist, _Mapping]] = ..., + starting_list_item_number: _Optional[int] = ..., + block_quote: _Optional[int] = ..., + paragraph_uuid: _Optional[bytes] = ..., + ) -> None: ... + +class Checklist(_message.Message): + __slots__ = ("uuid", "done") + UUID_FIELD_NUMBER: _ClassVar[int] + DONE_FIELD_NUMBER: _ClassVar[int] + uuid: bytes + done: int + def __init__( + self, uuid: _Optional[bytes] = ..., done: _Optional[int] = ... + ) -> None: ... + +class DictionaryElement(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: ObjectID + value: ObjectID + def __init__( + self, + key: _Optional[_Union[ObjectID, _Mapping]] = ..., + value: _Optional[_Union[ObjectID, _Mapping]] = ..., + ) -> None: ... 
+ +class Dictionary(_message.Message): + __slots__ = ("element",) + ELEMENT_FIELD_NUMBER: _ClassVar[int] + element: _containers.RepeatedCompositeFieldContainer[DictionaryElement] + def __init__( + self, element: _Optional[_Iterable[_Union[DictionaryElement, _Mapping]]] = ... + ) -> None: ... + +class ObjectID(_message.Message): + __slots__ = ("unsigned_integer_value", "string_value", "object_index") + UNSIGNED_INTEGER_VALUE_FIELD_NUMBER: _ClassVar[int] + STRING_VALUE_FIELD_NUMBER: _ClassVar[int] + OBJECT_INDEX_FIELD_NUMBER: _ClassVar[int] + unsigned_integer_value: int + string_value: str + object_index: int + def __init__( + self, + unsigned_integer_value: _Optional[int] = ..., + string_value: _Optional[str] = ..., + object_index: _Optional[int] = ..., + ) -> None: ... + +class RegisterLatest(_message.Message): + __slots__ = ("contents",) + CONTENTS_FIELD_NUMBER: _ClassVar[int] + contents: ObjectID + def __init__( + self, contents: _Optional[_Union[ObjectID, _Mapping]] = ... + ) -> None: ... + +class MapItem(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: int + value: ObjectID + def __init__( + self, + key: _Optional[int] = ..., + value: _Optional[_Union[ObjectID, _Mapping]] = ..., + ) -> None: ... 
+ +class AttributeRun(_message.Message): + __slots__ = ( + "length", + "paragraph_style", + "font", + "font_weight", + "underlined", + "strikethrough", + "superscript", + "link", + "color", + "writing_direction_selection", + "attachment_info", + "timestamp", + "emphasis_style", + "highlight_color", + ) + LENGTH_FIELD_NUMBER: _ClassVar[int] + PARAGRAPH_STYLE_FIELD_NUMBER: _ClassVar[int] + FONT_FIELD_NUMBER: _ClassVar[int] + FONT_WEIGHT_FIELD_NUMBER: _ClassVar[int] + UNDERLINED_FIELD_NUMBER: _ClassVar[int] + STRIKETHROUGH_FIELD_NUMBER: _ClassVar[int] + SUPERSCRIPT_FIELD_NUMBER: _ClassVar[int] + LINK_FIELD_NUMBER: _ClassVar[int] + COLOR_FIELD_NUMBER: _ClassVar[int] + WRITING_DIRECTION_SELECTION_FIELD_NUMBER: _ClassVar[int] + ATTACHMENT_INFO_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + EMPHASIS_STYLE_FIELD_NUMBER: _ClassVar[int] + HIGHLIGHT_COLOR_FIELD_NUMBER: _ClassVar[int] + length: int + paragraph_style: ParagraphStyle + font: Font + font_weight: FontWeight + underlined: int + strikethrough: int + superscript: int + link: str + color: Color + writing_direction_selection: WritingDirection + attachment_info: AttachmentInfo + timestamp: int + emphasis_style: int + highlight_color: Highlight + def __init__( + self, + length: _Optional[int] = ..., + paragraph_style: _Optional[_Union[ParagraphStyle, _Mapping]] = ..., + font: _Optional[_Union[Font, _Mapping]] = ..., + font_weight: _Optional[_Union[FontWeight, str]] = ..., + underlined: _Optional[int] = ..., + strikethrough: _Optional[int] = ..., + superscript: _Optional[int] = ..., + link: _Optional[str] = ..., + color: _Optional[_Union[Color, _Mapping]] = ..., + writing_direction_selection: _Optional[_Union[WritingDirection, str]] = ..., + attachment_info: _Optional[_Union[AttachmentInfo, _Mapping]] = ..., + timestamp: _Optional[int] = ..., + emphasis_style: _Optional[int] = ..., + highlight_color: _Optional[_Union[Highlight, str]] = ..., + ) -> None: ... 
+ +class NoteStoreProto(_message.Message): + __slots__ = ("document",) + DOCUMENT_FIELD_NUMBER: _ClassVar[int] + document: Document + def __init__( + self, document: _Optional[_Union[Document, _Mapping]] = ... + ) -> None: ... + +class Document(_message.Message): + __slots__ = ("version", "note") + VERSION_FIELD_NUMBER: _ClassVar[int] + NOTE_FIELD_NUMBER: _ClassVar[int] + version: int + note: Note + def __init__( + self, + version: _Optional[int] = ..., + note: _Optional[_Union[Note, _Mapping]] = ..., + ) -> None: ... + +class Note(_message.Message): + __slots__ = ("note_text", "attribute_run") + NOTE_TEXT_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_RUN_FIELD_NUMBER: _ClassVar[int] + note_text: str + attribute_run: _containers.RepeatedCompositeFieldContainer[AttributeRun] + def __init__( + self, + note_text: _Optional[str] = ..., + attribute_run: _Optional[_Iterable[_Union[AttributeRun, _Mapping]]] = ..., + ) -> None: ... + +class MergableDataProto(_message.Message): + __slots__ = ("mergable_data_object",) + MERGABLE_DATA_OBJECT_FIELD_NUMBER: _ClassVar[int] + mergable_data_object: MergableDataObject + def __init__( + self, + mergable_data_object: _Optional[_Union[MergableDataObject, _Mapping]] = ..., + ) -> None: ... + +class MergableDataObject(_message.Message): + __slots__ = ("version", "mergeable_data_object_data") + VERSION_FIELD_NUMBER: _ClassVar[int] + MERGEABLE_DATA_OBJECT_DATA_FIELD_NUMBER: _ClassVar[int] + version: int + mergeable_data_object_data: MergeableDataObjectData + def __init__( + self, + version: _Optional[int] = ..., + mergeable_data_object_data: _Optional[ + _Union[MergeableDataObjectData, _Mapping] + ] = ..., + ) -> None: ... 
+ +class MergeableDataObjectData(_message.Message): + __slots__ = ( + "mergeable_data_object_entry", + "mergeable_data_object_key_item", + "mergeable_data_object_type_item", + "mergeable_data_object_uuid_item", + ) + MERGEABLE_DATA_OBJECT_ENTRY_FIELD_NUMBER: _ClassVar[int] + MERGEABLE_DATA_OBJECT_KEY_ITEM_FIELD_NUMBER: _ClassVar[int] + MERGEABLE_DATA_OBJECT_TYPE_ITEM_FIELD_NUMBER: _ClassVar[int] + MERGEABLE_DATA_OBJECT_UUID_ITEM_FIELD_NUMBER: _ClassVar[int] + mergeable_data_object_entry: _containers.RepeatedCompositeFieldContainer[ + MergeableDataObjectRow + ] + mergeable_data_object_key_item: _containers.RepeatedScalarFieldContainer[str] + mergeable_data_object_type_item: _containers.RepeatedScalarFieldContainer[str] + mergeable_data_object_uuid_item: _containers.RepeatedScalarFieldContainer[bytes] + def __init__( + self, + mergeable_data_object_entry: _Optional[ + _Iterable[_Union[MergeableDataObjectRow, _Mapping]] + ] = ..., + mergeable_data_object_key_item: _Optional[_Iterable[str]] = ..., + mergeable_data_object_type_item: _Optional[_Iterable[str]] = ..., + mergeable_data_object_uuid_item: _Optional[_Iterable[bytes]] = ..., + ) -> None: ... 
+ +class MergeableDataObjectRow(_message.Message): + __slots__ = ( + "register_latest", + "list", + "dictionary", + "unknown_message", + "note", + "custom_map", + "ordered_set", + ) + REGISTER_LATEST_FIELD_NUMBER: _ClassVar[int] + LIST_FIELD_NUMBER: _ClassVar[int] + DICTIONARY_FIELD_NUMBER: _ClassVar[int] + UNKNOWN_MESSAGE_FIELD_NUMBER: _ClassVar[int] + NOTE_FIELD_NUMBER: _ClassVar[int] + CUSTOM_MAP_FIELD_NUMBER: _ClassVar[int] + ORDERED_SET_FIELD_NUMBER: _ClassVar[int] + register_latest: RegisterLatest + list: List + dictionary: Dictionary + unknown_message: UnknownMergeableDataObjectEntryMessage + note: Note + custom_map: MergeableDataObjectMap + ordered_set: OrderedSet + def __init__( + self, + register_latest: _Optional[_Union[RegisterLatest, _Mapping]] = ..., + list: _Optional[_Union[List, _Mapping]] = ..., + dictionary: _Optional[_Union[Dictionary, _Mapping]] = ..., + unknown_message: _Optional[ + _Union[UnknownMergeableDataObjectEntryMessage, _Mapping] + ] = ..., + note: _Optional[_Union[Note, _Mapping]] = ..., + custom_map: _Optional[_Union[MergeableDataObjectMap, _Mapping]] = ..., + ordered_set: _Optional[_Union[OrderedSet, _Mapping]] = ..., + ) -> None: ... + +class UnknownMergeableDataObjectEntryMessage(_message.Message): + __slots__ = ("unknown_entry",) + UNKNOWN_ENTRY_FIELD_NUMBER: _ClassVar[int] + unknown_entry: UnknownMergeableDataObjectEntryMessageEntry + def __init__( + self, + unknown_entry: _Optional[ + _Union[UnknownMergeableDataObjectEntryMessageEntry, _Mapping] + ] = ..., + ) -> None: ... + +class UnknownMergeableDataObjectEntryMessageEntry(_message.Message): + __slots__ = ("unknown_int1", "unknown_int2") + UNKNOWN_INT1_FIELD_NUMBER: _ClassVar[int] + UNKNOWN_INT2_FIELD_NUMBER: _ClassVar[int] + unknown_int1: int + unknown_int2: int + def __init__( + self, unknown_int1: _Optional[int] = ..., unknown_int2: _Optional[int] = ... + ) -> None: ... 
+ +class MergeableDataObjectMap(_message.Message): + __slots__ = ("type", "map_entry") + TYPE_FIELD_NUMBER: _ClassVar[int] + MAP_ENTRY_FIELD_NUMBER: _ClassVar[int] + type: int + map_entry: _containers.RepeatedCompositeFieldContainer[MapItem] + def __init__( + self, + type: _Optional[int] = ..., + map_entry: _Optional[_Iterable[_Union[MapItem, _Mapping]]] = ..., + ) -> None: ... + +class OrderedSet(_message.Message): + __slots__ = ("ordering", "elements") + ORDERING_FIELD_NUMBER: _ClassVar[int] + ELEMENTS_FIELD_NUMBER: _ClassVar[int] + ordering: OrderedSetOrdering + elements: Dictionary + def __init__( + self, + ordering: _Optional[_Union[OrderedSetOrdering, _Mapping]] = ..., + elements: _Optional[_Union[Dictionary, _Mapping]] = ..., + ) -> None: ... + +class OrderedSetOrdering(_message.Message): + __slots__ = ("array", "contents") + ARRAY_FIELD_NUMBER: _ClassVar[int] + CONTENTS_FIELD_NUMBER: _ClassVar[int] + array: OrderedSetOrderingArray + contents: Dictionary + def __init__( + self, + array: _Optional[_Union[OrderedSetOrderingArray, _Mapping]] = ..., + contents: _Optional[_Union[Dictionary, _Mapping]] = ..., + ) -> None: ... + +class OrderedSetOrderingArray(_message.Message): + __slots__ = ("contents", "attachment") + CONTENTS_FIELD_NUMBER: _ClassVar[int] + ATTACHMENT_FIELD_NUMBER: _ClassVar[int] + contents: Note + attachment: _containers.RepeatedCompositeFieldContainer[ + OrderedSetOrderingArrayAttachment + ] + def __init__( + self, + contents: _Optional[_Union[Note, _Mapping]] = ..., + attachment: _Optional[ + _Iterable[_Union[OrderedSetOrderingArrayAttachment, _Mapping]] + ] = ..., + ) -> None: ... + +class OrderedSetOrderingArrayAttachment(_message.Message): + __slots__ = ("index", "uuid") + INDEX_FIELD_NUMBER: _ClassVar[int] + UUID_FIELD_NUMBER: _ClassVar[int] + index: int + uuid: bytes + def __init__( + self, index: _Optional[int] = ..., uuid: _Optional[bytes] = ... + ) -> None: ... 
+ +class List(_message.Message): + __slots__ = ("list_entry",) + LIST_ENTRY_FIELD_NUMBER: _ClassVar[int] + list_entry: _containers.RepeatedCompositeFieldContainer[ListItem] + def __init__( + self, list_entry: _Optional[_Iterable[_Union[ListItem, _Mapping]]] = ... + ) -> None: ... + +class ListItem(_message.Message): + __slots__ = ("id", "details", "additional_details") + ID_FIELD_NUMBER: _ClassVar[int] + DETAILS_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_DETAILS_FIELD_NUMBER: _ClassVar[int] + id: ObjectID + details: ListEntryDetails + additional_details: ListEntryDetails + def __init__( + self, + id: _Optional[_Union[ObjectID, _Mapping]] = ..., + details: _Optional[_Union[ListEntryDetails, _Mapping]] = ..., + additional_details: _Optional[_Union[ListEntryDetails, _Mapping]] = ..., + ) -> None: ... + +class ListEntryDetails(_message.Message): + __slots__ = ("list_entry_details_key", "id") + LIST_ENTRY_DETAILS_KEY_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + list_entry_details_key: ListEntryDetailsKey + id: ObjectID + def __init__( + self, + list_entry_details_key: _Optional[_Union[ListEntryDetailsKey, _Mapping]] = ..., + id: _Optional[_Union[ObjectID, _Mapping]] = ..., + ) -> None: ... + +class ListEntryDetailsKey(_message.Message): + __slots__ = ("list_entry_details_type_index", "list_entry_details_key") + LIST_ENTRY_DETAILS_TYPE_INDEX_FIELD_NUMBER: _ClassVar[int] + LIST_ENTRY_DETAILS_KEY_FIELD_NUMBER: _ClassVar[int] + list_entry_details_type_index: int + list_entry_details_key: int + def __init__( + self, + list_entry_details_type_index: _Optional[int] = ..., + list_entry_details_key: _Optional[int] = ..., + ) -> None: ... diff --git a/pyicloud/services/notes/rendering/__init__.py b/pyicloud/services/notes/rendering/__init__.py new file mode 100644 index 00000000..c25bbfa9 --- /dev/null +++ b/pyicloud/services/notes/rendering/__init__.py @@ -0,0 +1,7 @@ +"""Rendering support for Apple Notes (proto3), transport-agnostic. 
+ +Contains: +- renderer_iface: the minimal datasource Protocol and AttachmentRef value type +- renderer: pure HTML renderer for note content (fragment + page) +- ck_datasource: CloudKit-backed in-memory datasource for attachments +""" diff --git a/pyicloud/services/notes/rendering/attachments.py b/pyicloud/services/notes/rendering/attachments.py new file mode 100644 index 00000000..89fba5e0 --- /dev/null +++ b/pyicloud/services/notes/rendering/attachments.py @@ -0,0 +1,362 @@ +""" +UTI-based attachment rendering strategies for Apple Notes. + +This module contains a small, pure dispatcher that maps a note attachment's +type_uti (and available datasource metadata) to an HTML fragment. It +intentionally performs no I/O; all data must be provided by the caller via the +AttachmentContext. + +Design: + - AttachmentContext: immutable bundle of fields the strategies may use + - Renderers: small classes implementing `render(ctx, render_note_cb)` + - Dispatcher: exact UTI map, then prefix rules, then default fallback + +`render_note_cb` is a callback used by the table renderer to render nested +pb.Note cells (delegates back to the main note renderer without creating a +cyclic import). 
+""" + +from __future__ import annotations + +import html +from dataclasses import dataclass +from typing import Any, Callable, Optional +from urllib.parse import urlsplit + +from tinyhtml import h + +from .table_builder import render_table_from_mergeable + + +@dataclass(frozen=True) +class AttachmentContext: + id: str + uti: str + title: Optional[str] + primary_url: Optional[str] + thumb_url: Optional[str] + mergeable_gz: Optional[bytes] + # Optional: preceding text in the same paragraph/line before the attachment + prior_text: Optional[str] = None + # Optional: behavior flags supplied by caller + link_target: Optional[str] = None + link_rel: Optional[str] = None + link_referrerpolicy: Optional[str] = None + pdf_object_height: Optional[int] = None + + def base_attrs(self, extra: dict[str, str] | None = None) -> dict[str, str]: + base = { + "class": "attachment", + "data-uti": self.uti, + "data-id": self.id, + } + if extra: + base.update(extra) + return base + + +def _safe_url( + url: Optional[str], + *, + allowed_schemes: set[str], +) -> Optional[str]: + if not url: + return None + + candidate = "".join(ch for ch in str(url).strip() if ch >= " " and ch != "\x7f") + if not candidate or candidate.startswith("//"): + return None + + parts = urlsplit(candidate) + if not parts.scheme: + return candidate + + scheme = parts.scheme.casefold() + if scheme not in allowed_schemes: + return None + if scheme in {"http", "https"} and not parts.netloc: + return None + if scheme in {"mailto", "tel"} and not (parts.path or parts.netloc): + return None + return candidate + + +def _is_remote_url(url: str) -> bool: + parts = urlsplit(url) + return parts.scheme.casefold() in {"http", "https"} + + +def _link_attrs( + ctx: AttachmentContext, + *, + class_name: str, + href: Optional[str] = None, +) -> dict[str, str]: + attrs = {"class": class_name} + if href: + attrs["href"] = href + if ctx.link_rel: + attrs["rel"] = ctx.link_rel + if ctx.link_referrerpolicy: + attrs["referrerpolicy"] = 
ctx.link_referrerpolicy + if ctx.link_target: + attrs["target"] = ctx.link_target + return attrs + + +class _Renderer: + def render( + self, ctx: AttachmentContext, render_note_cb: Callable + ) -> str: # pragma: no cover - interface + raise NotImplementedError + + +class _DefaultRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + label = ctx.title or ctx.uti or "attachment" + href = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + return h( + "a", + **ctx.base_attrs(_link_attrs(ctx, class_name="attachment link", href=href)), + )(label).render() + + +class _TableRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + if ctx.mergeable_gz: + html_tbl = render_table_from_mergeable(ctx.mergeable_gz, render_note_cb) + if html_tbl: + return html_tbl + # Fallback to a link + label = ctx.title or ctx.uti or "table" + href = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + return h( + "a", + **ctx.base_attrs(_link_attrs(ctx, class_name="attachment link", href=href)), + )(label).render() + + +class _UrlRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + title = ctx.title or ctx.uti or "link" + href = _safe_url( + ctx.primary_url, + allowed_schemes={"http", "https", "mailto", "tel"}, + ) + if href: + return h( + "a", + **ctx.base_attrs( + _link_attrs(ctx, class_name="attachment link", href=href) + ), + )(title).render() + return h( + "a", + **ctx.base_attrs(_link_attrs(ctx, class_name="attachment link")), + )(title).render() + + +class _ImageRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + url = _safe_url( + ctx.primary_url, + allowed_schemes={"http", "https"}, + ) or _safe_url( + ctx.thumb_url, + allowed_schemes={"http", "https"}, + ) + alt = ctx.title or ctx.uti or "image" + if url: + # Add responsive sizing so large images don't overflow the viewport + 
attrs = ctx.base_attrs( + { + "src": url, + "alt": alt, + "class": "attachment image", + "style": "max-width:100%;height:auto", + } + ) + attr_html = " ".join(f'{k}="{html.escape(v)}"' for k, v in attrs.items()) + return f"" + return h("a", **ctx.base_attrs({"class": "attachment link"}))(alt).render() + + +class _AudioRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + url = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + if url: + attrs = ctx.base_attrs({"src": url, "class": "attachment audio"}) + attr_html = " ".join(f'{k}="{html.escape(v)}"' for k, v in attrs.items()) + return f"" + title = ctx.title or ctx.uti or "audio" + return h("a", **ctx.base_attrs({"class": "attachment link"}))(title).render() + + +class _VideoRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + url = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + if url: + attrs = ctx.base_attrs( + { + "src": url, + "class": "attachment video", + "controls": "controls", + "style": "max-width:100%;height:auto", + } + ) + attr_html = " ".join(f'{k}="{html.escape(v)}"' for k, v in attrs.items()) + return f"" + title = ctx.title or ctx.uti or "video" + return h("a", **ctx.base_attrs({"class": "attachment link"}))(title).render() + + +class _PdfRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + title = ctx.title or "PDF" + url = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + if url: + # Only embed local PDFs. Remote CloudKit URLs often force downloads and break UX. 
+ is_remote = _is_remote_url(url) + if not is_remote: + height_px = ( + ctx.pdf_object_height + if isinstance(ctx.pdf_object_height, int) + and ctx.pdf_object_height > 0 + else 600 + ) + obj_attrs = ctx.base_attrs( + { + "data": url, + "type": "application/pdf", + "class": "attachment pdf", + # allow config to control height + "style": f"width:100%;height:{height_px}px", + } + ) + fallback = h( + "a", + **ctx.base_attrs( + _link_attrs(ctx, class_name="attachment link", href=url) + ), + )(title) + return h("object", **obj_attrs)(fallback).render() + # Remote embed not allowed → use a link + return h( + "a", + **ctx.base_attrs( + _link_attrs(ctx, class_name="attachment file", href=url) + ), + )(title).render() + # No URL → plain label link without href + return h("a", **ctx.base_attrs({"class": "attachment file"}))(title).render() + + +class _VCardRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + title = ctx.title or "contact" + href = _safe_url(ctx.primary_url, allowed_schemes={"http", "https"}) + if href: + return h( + "a", + **ctx.base_attrs( + _link_attrs(ctx, class_name="attachment contact", href=href) + ), + )(title).render() + return h("a", **ctx.base_attrs({"class": "attachment contact"}))(title).render() + + +class _HashtagRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + # Avoid double prefix when AltText already includes '#' + if ctx.title: + raw = ctx.title.strip() + text = raw if raw.startswith("#") else f"#{raw}" + else: + text = ctx.uti or "hashtag" + # Expose the normalized tag (without '#') for consumers + tag_norm = text[1:] if text.startswith("#") else text + attrs = ctx.base_attrs({"class": "attachment hashtag", "data-tag": tag_norm}) + return h("span", **attrs)(text).render() + + +class _CalculatorRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + # Render exactly what the server provides 
(AltTextEncrypted/TitleEncrypted/SummaryEncrypted), + # without any additional normalization. + label = ctx.title or ctx.uti or "result" + return h("span", **ctx.base_attrs({"class": "attachment calc"}))(label).render() + + +# Graph expression (Calculate) – inline token that typically renders the left-hand +# side of an equation (e.g., "y = "). We mirror calculator's behavior and render +# a semantic, non-clickable span with a distinct class. +class _GraphExpressionRenderer(_Renderer): + def render(self, ctx: AttachmentContext, render_note_cb: Callable) -> str: + label = ctx.title or ctx.uti or "expression" + return h("span", **ctx.base_attrs({"class": "attachment calc-graph"}))( + label + ).render() + + +# Singletons +_DEFAULT = _DefaultRenderer() +_TABLE = _TableRenderer() +_URL = _UrlRenderer() +_IMAGE = _ImageRenderer() +_AUDIO = _AudioRenderer() +_VIDEO = _VideoRenderer() +_PDF = _PdfRenderer() +_VCARD = _VCardRenderer() +_HASHTAG = _HashtagRenderer() +_CALC = _CalculatorRenderer() +_GRAPH = _GraphExpressionRenderer() + + +# Exact UTI mappings +_EXACT: dict[str, _Renderer] = { + "com.apple.notes.table": _TABLE, + "public.url": _URL, + "com.apple.m4a-audio": _AUDIO, + "com.adobe.pdf": _PDF, + "public.pdf": _PDF, + "com.apple.paper.doc.pdf": _PDF, + "public.vcard": _VCARD, + "com.apple.notes.inlinetextattachment.hashtag": _HASHTAG, + "com.apple.notes.inlinetextattachment.calculateresult": _CALC, + "com.apple.notes.inlinetextattachment.calculategraphexpression": _GRAPH, + # com.apple.paper (sketch) – prefer image-like rendering when URLs are present + "com.apple.paper": _IMAGE, + "com.apple.quicktime-movie": _VIDEO, + "public.movie": _VIDEO, + "public.video": _VIDEO, + "public.mpeg-4": _VIDEO, +} + + +# Prefix matchers in order +_PREFIX: list[tuple[str, _Renderer]] = [ + ("public.image", _IMAGE), + ("public.jpeg", _IMAGE), + ("public.jpg", _IMAGE), + ("public.png", _IMAGE), + ("public.heic", _IMAGE), + ("public.heif", _IMAGE), + ("public.tiff", _IMAGE), + 
("public.gif", _IMAGE), + ("public.bmp", _IMAGE), + ("public.webp", _IMAGE), +] + + +def render_attachment( + ctx: AttachmentContext, render_note_cb: Callable[[Any], str] +) -> str: + uti = (ctx.uti or "").lower() + r = _EXACT.get(uti) + if r is not None: + return r.render(ctx, render_note_cb) + for prefix, rr in _PREFIX: + if uti.startswith(prefix): + return rr.render(ctx, render_note_cb) + # fallback + return _DEFAULT.render(ctx, render_note_cb) diff --git a/pyicloud/services/notes/rendering/ck_datasource.py b/pyicloud/services/notes/rendering/ck_datasource.py new file mode 100644 index 00000000..25c961c0 --- /dev/null +++ b/pyicloud/services/notes/rendering/ck_datasource.py @@ -0,0 +1,319 @@ +""" +CloudKit-backed NoteDataSource implementation. + +Provides an in-memory datasource for a single Note that can answer: + - get_attachment_uti(identifier) -> Optional[str] + - get_mergeable_gz(identifier) -> Optional[bytes] + - (optional) get_primary_asset_url/get_thumbnail_url/get_title + +Population is performed by feeding CloudKit records into `add_attachment_record`. 
+""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +from typing import Dict, Optional + +from pyicloud.common.cloudkit import CKRecord + +from .options import ExportConfig +from .renderer_iface import NoteDataSource + +LOGGER = logging.getLogger(__name__) + + +@dataclass(slots=True) +class CloudKitNoteDataSource(NoteDataSource): + _uti: Dict[str, str] = field(default_factory=dict) + _mergeable_gz: Dict[str, bytes] = field(default_factory=dict) + _primary_asset_url: Dict[str, str] = field(default_factory=dict) + _thumbnail_url: Dict[str, str] = field(default_factory=dict) + _title: Dict[str, str] = field(default_factory=dict) + _config: Optional[ExportConfig] = None + + # Minimal protocol + def get_attachment_uti(self, identifier: str) -> Optional[str]: + return self._uti.get(identifier) + + def get_mergeable_gz(self, identifier: str) -> Optional[bytes]: + return self._mergeable_gz.get(identifier) + + # Optional richer protocol + def get_primary_asset_url(self, identifier: str) -> Optional[str]: + return self._primary_asset_url.get(identifier) + + def get_thumbnail_url(self, identifier: str) -> Optional[str]: + return self._thumbnail_url.get(identifier) + + def get_title(self, identifier: str) -> Optional[str]: + return self._title.get(identifier) + + # Overrides for callers that download assets locally and want to point the + # renderer at a local path instead of the remote CloudKit URL. + def set_primary_asset_url(self, identifier: str, url: str) -> None: + if not identifier or not url: + return + self._primary_asset_url[identifier] = url + + def add_attachment_record(self, rec: CKRecord) -> None: + fields = rec.fields + + # With strict model validation, *Encrypted fields are always bytes. 
+ def _text_from_bytes(val: Optional[bytes | bytearray]) -> Optional[str]: + if val is None: + return None + try: + return val.decode("utf-8", "replace") + except Exception: + return None + + def _asset_url(obj) -> Optional[str]: + """Best-effort extractor for CloudKit asset token downloadURL. + + Accepts a mapping (dict-like) or an object with attribute `downloadURL`. + Returns the URL string if present and non-empty. + """ + if obj is None: + return None + try: + if isinstance(obj, dict): + url = obj.get("downloadURL") + return url if isinstance(url, str) and url else None + url = getattr(obj, "downloadURL", None) + return url if isinstance(url, str) and url else None + except Exception: + return None + + # Attachment logical identifier (if present); otherwise, use recordName + ident: Optional[str] = None + for key in ("AttachmentIdentifier", "attachmentIdentifier", "Identifier"): + raw = getattr(fields.get_field(key) or (), "value", None) + if isinstance(raw, str) and raw: + ident = raw + break + rec_name = rec.recordName or None + if not ident and not rec_name: + return + + # Store under both the logical identifier (if present) and the recordName + keys: list[str] = [] + if ident: + keys.append(ident) + if rec_name and rec_name not in keys: + keys.append(rec_name) + + if rec_name and rec_name not in keys: + keys.append(rec_name) + + # UTI (plain or encrypted) + + uti_val: Optional[str] = None + uti_plain = fields.get_value("UTI") or fields.get_value("AttachmentUTI") + if isinstance(uti_plain, str) and uti_plain: + uti_val = uti_plain + else: + uti_enc = fields.get_value("UTIEncrypted") # bytes by invariant + uti_val = ( + _text_from_bytes(uti_enc) + if isinstance(uti_enc, (bytes, bytearray)) + else None + ) + if uti_val: + for k in keys: + self._uti[k] = uti_val + uti_l = (uti_val or "").lower() + is_url_uti = uti_l == "public.url" + is_pdf_uti = uti_l in ("com.adobe.pdf", "public.pdf", "com.apple.paper.doc.pdf") + is_image_uti = 
uti_l.startswith("public.image") or uti_l in { + "public.jpeg", + "public.jpg", + "public.png", + "public.heic", + "public.heif", + "public.tiff", + "public.gif", + "public.bmp", + "public.webp", + # Treat Apple Notes sketches as image-like to prefer previews/Media + "com.apple.paper", + } + + # Mergeable table (gzipped bytes) + md = fields.get_value("MergeableDataEncrypted") # bytes by invariant + if isinstance(md, (bytes, bytearray)) and md: + for k in keys: + self._mergeable_gz[k] = bytes(md) + + # Primary/thumbnail asset URLs from common fields + pa_val = fields.get_value("PrimaryAsset") + url = _asset_url(pa_val) + if url: + for k in keys: + self._primary_asset_url[k] = url + + # Some attachments (e.g., com.apple.paper) expose preview images instead. + # Prefer the first PreviewImages URL; fall back to FallbackImage. + # Prefer a true PDF for paper/pdf UTIs: FallbackPDF, then PaperAssets + if is_pdf_uti and not any(k in self._primary_asset_url for k in keys): + fp_fld = fields.get_field("FallbackPDF") + url = _asset_url(getattr(fp_fld, "value", None) if fp_fld else None) + if url: + for k in keys: + self._primary_asset_url[k] = url + + if is_pdf_uti and not any(k in self._primary_asset_url for k in keys): + pa_list_fld = fields.get_field("PaperAssets") + try: + tokens = getattr(pa_list_fld, "value", None) if pa_list_fld else None + if isinstance(tokens, (list, tuple)) and tokens: + url = _asset_url(tokens[0]) + if url: + for k in keys: + self._primary_asset_url[k] = url + except Exception: + pass + + # Thumbnails/previews: expose as thumbnail_url; for image UTIs, we may also + # use previews as primary when nothing else is available. 
+ if not any(k in self._primary_asset_url for k in keys) and is_image_uti: + pi_fld = fields.get_field("PreviewImages") + try: + tokens = getattr(pi_fld, "value", None) if pi_fld else None + if isinstance(tokens, (list, tuple)) and tokens: + # Try to align with PreviewAppearances (0=light, 1=dark) + app_fld = fields.get_field("PreviewAppearances") + apps = getattr(app_fld, "value", None) if app_fld else None + # Prefer config preview appearance when supplied, else env + pref = "light" + try: + if self._config and getattr( + self._config, "preview_appearance", None + ): + pref = str(self._config.preview_appearance).strip().lower() + except Exception: + pref = "light" + pref_code = 1 if pref in ("dark", "1", "true", "yes") else 0 + selected: Optional[str] = None + if isinstance(apps, (list, tuple)) and len(apps) == len(tokens): + for idx, app in enumerate(apps): + try: + code = int(app) + except Exception: + code = None + if code == pref_code: + selected = _asset_url(tokens[idx]) + if selected: + break + # Fallback: first valid token + if not selected: + for token in tokens: + selected = _asset_url(token) + if selected: + break + if selected: + for k in keys: + self._primary_asset_url[k] = selected + except Exception: + pass + if not any(k in self._primary_asset_url for k in keys) and is_image_uti: + fb_fld = fields.get_field("FallbackImage") + url = _asset_url(getattr(fb_fld, "value", None) if fb_fld else None) + if url: + for k in keys: + self._primary_asset_url[k] = url + + # Regardless of PDF or not, also capture previews as thumbnail candidates + # so callers may show a small preview. 
+ try: + pi2_fld = fields.get_field("PreviewImages") + tokens2 = getattr(pi2_fld, "value", None) if pi2_fld else None + if isinstance(tokens2, (list, tuple)) and tokens2: + thumb = _asset_url(tokens2[0]) + if thumb: + for k in keys: + self._thumbnail_url[k] = thumb + except Exception: + pass + + # Older fields: a plain URL string + url_enc = fields.get_value("URLStringEncrypted") # bytes by invariant + if ( + isinstance(url_enc, (bytes, bytearray)) + and url_enc + and not any(k in self._primary_asset_url for k in keys) + ): + dec = _text_from_bytes(bytes(url_enc)) + if dec: + for k in keys: + self._primary_asset_url[k] = dec + + # Title (optional) — attempt several common encrypted fields + titles_try = [ + fields.get_value("TitleEncrypted"), + fields.get_value("SummaryEncrypted"), + fields.get_value("LocalizedTitleEncrypted"), + fields.get_value("AltTextEncrypted"), + # Inline tokens sometimes carry a canonical identifier separate from AltText. + fields.get_value("TokenContentIdentifierEncrypted"), + # Also try unencrypted fields, just in case + fields.get_value("Title"), + fields.get_value("Summary"), + fields.get_value("AltText"), + ] + found_title = False + for tv in titles_try: + dec_title = None + if isinstance(tv, (bytes, bytearray)): + dec_title = _text_from_bytes(tv) + elif isinstance(tv, str): + dec_title = tv + + if dec_title: + found_title = True + for k in keys: + self._title[k] = dec_title + break + + # For web links, a URL is still a useful visible label when no richer + # title is available. Avoid applying this fallback to images/media so + # signed CloudKit URLs do not leak into rendered labels or alt text. 
+ if not found_title and is_url_uti: + # We look for the URLStringEncrypted which we might have already decoded + url_val = self._primary_asset_url.get(keys[0] if keys else "") + # Or try URLString raw + if not url_val: + url_raw = fields.get_value("URLString") + if isinstance(url_raw, str) and url_raw: + url_val = url_raw + + if url_val: + # If we have a URL but no title, use the URL as the title + for k in keys: + self._title[k] = url_val + + # Optional thumbnail (best-effort via Media) + # For types like VCard or Audio, 'Media' might be the only asset source. + # So if we lack a primary asset, use Media as the primary too. + media_val = fields.get_value("Media") + thumb_url = _asset_url(media_val) + if thumb_url: + for k in keys: + self._thumbnail_url[k] = thumb_url + if k not in self._primary_asset_url: + self._primary_asset_url[k] = thumb_url + + try: + LOGGER.debug( + "ckds.add_attachment_record", + extra={ + "component": "notes", + "op": "ckds.add_attachment_record", + "record_name": rec.recordName, + "identifier": ident, + "has_uti": bool(uti_val), + "has_mergeable": any(k in self._mergeable_gz for k in keys), + }, + ) + except Exception: + pass diff --git a/pyicloud/services/notes/rendering/debug_tools.py b/pyicloud/services/notes/rendering/debug_tools.py new file mode 100644 index 00000000..44da19f8 --- /dev/null +++ b/pyicloud/services/notes/rendering/debug_tools.py @@ -0,0 +1,162 @@ +""" +Debug helpers for mapping AttributeRuns to the exact text slices they cover. + +These utilities are intended for troubleshooting renderer issues. They do not +perform any network I/O and can be safely used in tests. +""" + +from __future__ import annotations + +import html +from typing import Dict, List, Optional + +from ..protobuf import notes_pb2 as pb + +# We intentionally import the private helper; it's stable within this repo. 
"""Debug helpers mapping AttributeRuns to the exact text slices they cover.

Troubleshooting utilities only: they perform no network I/O and can be
safely used in tests.
"""

import html
from typing import Dict, List, Optional

from ..protobuf import notes_pb2 as pb

# We intentionally import the private helper; it's stable within this repo.
from .renderer import StyleSig, _merge_runs, _slice_for_run  # type: ignore


def _enum_name(enum_cls, value: Optional[int]) -> str:
    """Return the symbolic name for a protobuf enum value, or str() fallback."""
    if value is None:
        return "(none)"
    try:
        return enum_cls.Name(int(value))  # type: ignore[attr-defined]
    except Exception:
        return str(value)


def map_attribute_runs(note: pb.Note) -> List[Dict[str, object]]:
    """Return a list of dictionaries mapping each AttributeRun to its text.

    Each dict contains:
      - index: run index
      - utf16_start: start offset in UTF-16 code units
      - utf16_len: run.length
      - text: Python string slice for the run
      - style_type, alignment, writing_direction, indent_amount
      - has_attachment: whether run carries attachment_info
    """
    text = note.note_text or ""
    pos = 0
    out: List[Dict[str, object]] = []
    for idx, r in enumerate(note.attribute_run):
        seg, pos2 = _slice_for_run(text, pos, r.length)
        ps = r.paragraph_style if r.HasField("paragraph_style") else None
        out.append(
            {
                "index": idx,
                "utf16_start": pos,
                "utf16_len": int(getattr(r, "length", 0) or 0),
                "text": seg,
                "style_type": getattr(ps, "style_type", None)
                if ps is not None
                else None,
                "alignment": getattr(ps, "alignment", None) if ps is not None else None,
                "writing_direction": getattr(ps, "writing_direction_paragraph", None)
                if ps is not None
                else None,
                "indent_amount": getattr(ps, "indent_amount", None)
                if ps is not None
                else None,
                "has_attachment": bool(r.HasField("attachment_info")),
            }
        )
        pos = pos2
    return out


def dump_runs_text(note: pb.Note) -> str:
    """Return a human-readable dump of runs with escaped whitespace markers."""
    rows = []
    for row in map_attribute_runs(note):
        raw = str(row["text"]) if row.get("text") is not None else ""
        # Make control characters explicit to see line boundaries clearly
        pretty = (
            raw.replace("\n", "⏎\n")
            .replace("\u2028", "⤶\n")
            .replace("\x00", "␀")
            .replace("\ufffc", "{OBJ}")
        )
        st_name = _enum_name(pb.StyleType, row.get("style_type"))
        align = _enum_name(pb.Alignment, row.get("alignment"))
        wd = _enum_name(pb.WritingDirection, row.get("writing_direction"))
        indent = row.get("indent_amount")
        rows.append(
            f"[{row['index']:03d}] off={row['utf16_start']:<5} len={row['utf16_len']:<4} "
            f"style={st_name:<26} indent={indent!s:<2} align={align:<16} wd={wd:<8} "
            f"text=“{pretty}”"
        )
    return "\n".join(rows)


def annotate_note_runs_html(note: pb.Note) -> str:
    """Return a small HTML page highlighting each run in a different color.

    Hover tooltips include run index, offsets, and style information.

    NOTE(review): the span/page markup below was reconstructed — the original
    tag text was lost in transit. Verify against the repository before
    relying on the exact HTML shape.
    """
    palette = [
        "#FFF3CD",  # yellow
        "#D1ECF1",  # cyan
        "#F8D7DA",  # pink
        "#D4EDDA",  # green
        "#E2E3E5",  # gray
    ]
    spans: List[str] = []
    for row in map_attribute_runs(note):
        idx = int(row["index"])  # type: ignore[arg-type]
        bg = palette[idx % len(palette)]
        raw = str(row.get("text", ""))
        tip = (
            f"run {idx} | off={row['utf16_start']} len={row['utf16_len']} | "
            f"{_enum_name(pb.StyleType, row.get('style_type'))} ind={row.get('indent_amount')} | "
            f"{_enum_name(pb.Alignment, row.get('alignment'))} | "
            f"{_enum_name(pb.WritingDirection, row.get('writing_direction'))}"
        )
        safe = (
            html.escape(raw)
            .replace("\u2028", "")
            .replace("\n", "")
            .replace("\ufffc", "{OBJ}")
            .replace("\x00", "")
        )
        spans.append(
            f'<span title="{html.escape(tip)}" style="background:{bg}">{safe}</span>'
        )

    content = "".join(spans)
    return (
        "<!DOCTYPE html>"
        '<html><head><meta charset="utf-8"></head>'
        f"<body>{content}</body>"
        "</html>"
    )


def map_merged_runs(note: pb.Note) -> List[Dict[str, object]]:
    """Same as map_attribute_runs, but after the renderer's run merge step.

    Useful to understand how the renderer will chunk paragraphs.
    """
    text = note.note_text or ""
    merged = _merge_runs(note.attribute_run)
    out: List[Dict[str, object]] = []
    pos = 0
    for idx, mr in enumerate(merged):
        seg, pos = _slice_for_run(text, pos, mr.length)
        sig: StyleSig = mr.sig
        out.append(
            {
                "index": idx,
                # assumes _slice_for_run advances pos by exactly mr.length
                # UTF-16 units — TODO confirm against _slice_for_run
                "utf16_start": pos - mr.length,
                "utf16_len": mr.length,
                "text": seg,
                "style_type": getattr(sig, "style_type", None),
                "alignment": getattr(sig, "alignment", None),
                "writing_direction": getattr(sig, "writing_direction", None),
                "indent_amount": getattr(sig, "indent_amount", None),
                "has_attachment": getattr(mr, "attachment", None) is not None,
            }
        )
    return out
+""" + +from __future__ import annotations + +import logging +import os +import re +from typing import Dict, Iterable, List, Optional, Tuple + +from rich.console import Console + +from pyicloud.common.cloudkit import CKRecord + +from ..decoding import BodyDecoder +from ..protobuf import notes_pb2 as pb +from .ck_datasource import CloudKitNoteDataSource +from .options import ExportConfig +from .renderer import NoteRenderer, render_note_fragment, render_note_page + +console = Console() + +LOGGER = logging.getLogger(__name__) + + +def decode_and_parse_note(record: CKRecord) -> Optional[pb.Note]: + """Decode a Note CKRecord's TextDataEncrypted and return a parsed pb.Note. + + Returns None if body is missing or cannot be parsed. + """ + if not isinstance(record, CKRecord): + return None + raw = record.fields.get_value("TextDataEncrypted") + if not raw: + return None + try: + nb = BodyDecoder().decode(raw) + if not nb or not getattr(nb, "bytes", None): + return None + msg = pb.NoteStoreProto() + msg.ParseFromString(nb.bytes) + return getattr(getattr(msg, "document", None), "note", None) + except Exception: + return None + + +def _attachment_ids_from_record_and_runs(record: CKRecord, note: pb.Note) -> List[str]: + # Collect from Attachments field + ids: List[str] = [] + fld = record.fields.get_field("Attachments") + if fld and hasattr(fld, "value"): + for ref in getattr(fld, "value", []) or []: + rn = getattr(ref, "recordName", None) + if rn: + ids.append(rn) + # Merge inline run identifiers + ids_from_runs: List[str] = [] + for rattr in getattr(note, "attribute_run", []) or []: + if rattr.HasField("attachment_info") and rattr.attachment_info.HasField( + "attachment_identifier" + ): + aid = rattr.attachment_info.attachment_identifier + if aid: + ids_from_runs.append(aid) + seen: set[str] = set() + merged: List[str] = [] + for a in ids + ids_from_runs: + if a not in seen: + seen.add(a) + merged.append(a) + return merged + + +def build_datasource( + ck_client, + 
note_record: CKRecord, + note: pb.Note, + config: Optional[ExportConfig] = None, +) -> Tuple[CloudKitNoteDataSource, List[str]]: + """Build a CloudKit-backed Note datasource for a single note. + + Returns (datasource, attachment_ids) where attachment_ids is the merged list + of attachment record names to which the datasource has been hydrated. + """ + ds = CloudKitNoteDataSource(_config=config) + att_ids = _attachment_ids_from_record_and_runs(note_record, note) + if att_ids: + resp = ck_client.lookup(att_ids) # desired_keys=None → all fields + media_map: Dict[str, str] = {} # media_record_name -> parent attachment id + debug = bool(getattr(config, "debug", False)) + for rec_idx, rec in enumerate(resp.records): + if debug: + console.rule(f"rec_idx {rec_idx}") + console.print(rec) + if isinstance(rec, CKRecord): + ds.add_attachment_record(rec) + # Capture Media reference to follow for full-fidelity images + try: + fld = rec.fields.get_field("Media") + ref = getattr(fld, "value", None) if fld else None + rn = getattr(ref, "recordName", None) + if rn: + media_map[rn] = rec.recordName + except Exception: + pass + # Follow Media references to fetch original asset URLs and wire them to the parent + if media_map: + try: + mresp = ck_client.lookup(list(media_map.keys())) + if bool(getattr(config, "debug", False)): + try: + console.rule("media lookup response") + console.print(mresp) + LOGGER.info("attachment media resp:\n%s", mresp) + except Exception: + pass + for mrec in mresp.records: + if not isinstance(mrec, CKRecord): + continue + url: Optional[str] = None + # Best-effort: find any field whose value looks like an asset token with downloadURL + try: + for k in list(getattr(mrec, "fields", ()).keys()): + fld = mrec.fields.get_field(k) + val = getattr(fld, "value", None) + u = getattr(val, "downloadURL", None) + if isinstance(u, str) and u: + url = u + break + except Exception: + url = None + if url: + parent = media_map.get(mrec.recordName) + if parent: + # Only promote 
Media-derived URLs to primary for image-like attachments. + # For 'public.url' (web links) and others, keep the primary_url as the + # actual destination, and use previews/Media only as thumbnails. + try: + parent_uti = ( + ds.get_attachment_uti(parent) or "" + ).lower() + except Exception: + parent_uti = "" + # Use config-aware predicate to recognize image UTIs (jpeg/png/heic/webp...) + conf = config or ExportConfig() + is_image = conf.is_image_uti(parent_uti) + # Simple heuristic for audio/video promotion + is_av = ( + "audio" in parent_uti + or "video" in parent_uti + or "movie" in parent_uti + or "mpeg" in parent_uti + ) + + # Logic update: + # 1. If we have no URL yet (e.g. VCard), ALWAYS take the Media URL. + # 2. If we have a URL but it's an Image/AV, check if we should "upgrade" + # to the Media URL (e.g. valid preview -> full res). + + is_media_upgrade = getattr( + conf, "prefer_media_for_images", True + ) and (is_image or is_av) + + # Only fetch current if we might need to check for upgrade + cur_primary = None + try: + cur_primary = ds.get_primary_asset_url(parent) + except Exception: + pass + + if (not cur_primary) or is_media_upgrade: + try: + cur_thumb = ds.get_thumbnail_url(parent) + except Exception: + cur_thumb = None + + # If missing, OR (we want upgrade AND current is likely just a thumbnail) + if (not cur_primary) or ( + is_media_upgrade and cur_primary == cur_thumb + ): + ds.set_primary_asset_url(parent, url) + except Exception: + pass + return ds, att_ids + + +def download_pdf_assets( + ck_client, + ds: CloudKitNoteDataSource, + att_ids: Iterable[str], + *, + assets_dir: str, + out_dir: str, + config: Optional[ExportConfig] = None, +) -> Dict[str, str]: + """Download PDFs for attachments and rewrite datasource URLs to local paths. + + Returns a mapping of attachment id → relative path used in HTML. + Only applies to PDF UTIs. Files are renamed with `.pdf` extension when the + magic header is present. 
+ """ + os.makedirs(assets_dir, exist_ok=True) + updated: Dict[str, str] = {} + + def _is_pdf_uti(s: Optional[str]) -> bool: + return bool( + s + and s.lower() in ("com.adobe.pdf", "public.pdf", "com.apple.paper.doc.pdf") + ) + + note_subdir = os.path.abspath(assets_dir) + for aid in att_ids: + uti = (ds.get_attachment_uti(aid) or "").lower() + if not _is_pdf_uti(uti): + continue + url = ds.get_primary_asset_url(aid) + if not (url and (url.startswith("http://") or url.startswith("https://"))): + # Skip thumbnails for PDFs — they are images and will not embed as PDF + continue + try: + saved_path = ck_client.download_asset_to(url, note_subdir) + resolved = saved_path + try: + with open(saved_path, "rb") as fh: + head = fh.read(5) + if head.startswith(b"%PDF-") and not saved_path.lower().endswith( + ".pdf" + ): + new_path = saved_path + ".pdf" + try: + os.replace(saved_path, new_path) + resolved = new_path + except Exception: + resolved = saved_path + except Exception: + resolved = saved_path + rel = os.path.relpath(resolved, start=os.path.abspath(out_dir)) + ds.set_primary_asset_url(aid, rel) + updated[aid] = rel + except Exception: + # Ignore individual download failures; caller can log + pass + return updated + + +def download_image_assets( + ck_client, + ds: CloudKitNoteDataSource, + att_ids: Iterable[str], + *, + assets_dir: str, + out_dir: str, + config: Optional[ExportConfig] = None, +) -> Dict[str, str]: + """Download image attachments and rewrite datasource URLs to local paths. + + Returns a mapping of attachment id → relative path used in HTML. + Applies to common image UTIs (jpeg/png/heic/webp/gif/bmp/tiff...). 
+ """ + os.makedirs(assets_dir, exist_ok=True) + updated: Dict[str, str] = {} + + conf = config or ExportConfig() + + def _infer_image_ext(head: bytes) -> Optional[str]: + try: + if head.startswith(b"\xff\xd8\xff"): + return ".jpg" + if head.startswith(b"\x89PNG\r\n\x1a\n"): + return ".png" + if head.startswith(b"GIF87a") or head.startswith(b"GIF89a"): + return ".gif" + if head.startswith(b"RIFF") and head[8:12] == b"WEBP": + return ".webp" + # ISO Base Media File (HEIC/HEIF) — look for ftyp box + # Common brands: 'heic', 'heif', 'mif1', 'msf1', 'hevc' + if len(head) >= 12 and head[4:8] == b"ftyp": + brand = head[8:12] + if brand in (b"heic", b"heif", b"mif1", b"msf1", b"hevc"): + return ".heic" + if head.startswith(b"BM"): + return ".bmp" + # TIFF + if head.startswith(b"II*\x00") or head.startswith(b"MM\x00*"): + return ".tiff" + except Exception: + pass + return None + + note_subdir = os.path.abspath(assets_dir) + for aid in att_ids: + uti = (ds.get_attachment_uti(aid) or "").lower() + if not conf.is_image_uti(uti): + continue + url = ds.get_primary_asset_url(aid) + if not url: + # Fallback to thumbnail for image-like attachments that only expose previews (e.g., com.apple.paper) + try: + url = ds.get_thumbnail_url(aid) + except Exception: + url = None + if not (url and (url.startswith("http://") or url.startswith("https://"))): + # Already local or missing + continue + try: + saved_path = ck_client.download_asset_to(url, note_subdir) + resolved = saved_path + try: + with open(saved_path, "rb") as fh: + head = fh.read(16) + ext = _infer_image_ext(head) + if ext and not saved_path.lower().endswith(ext): + new_path = saved_path + ext + try: + os.replace(saved_path, new_path) + resolved = new_path + except Exception: + resolved = saved_path + except Exception: + resolved = saved_path + rel = os.path.relpath(resolved, start=os.path.abspath(out_dir)) + ds.set_primary_asset_url(aid, rel) + updated[aid] = rel + except Exception: + # Ignore individual failures; caller can 
log or continue + pass + return updated + + +def download_av_assets( + ck_client, + ds: CloudKitNoteDataSource, + att_ids: Iterable[str], + *, + assets_dir: str, + out_dir: str, + config: Optional[ExportConfig] = None, +) -> Dict[str, str]: + """Download audio/video attachments and rewrite datasource URLs to local paths. + + Returns a mapping of attachment id → relative path used in HTML. + """ + os.makedirs(assets_dir, exist_ok=True) + updated: Dict[str, str] = {} + + def _infer_av_ext(head: bytes) -> Optional[str]: + try: + # M4A / MP4 / MOV (ISO Base Media) + if len(head) >= 12 and head[4:8] == b"ftyp": + brand = head[8:12] + if brand in (b"M4A ", b"mp42", b"isom"): + return ".m4a" + if brand in (b"qt ", b"moov"): + return ".mov" + # Fallback for generic MP4/QuickTime + return ".mp4" + + # QuickTime (moov atom at start) + if len(head) >= 8 and head[4:8] == b"moov": + return ".mov" + + # MP3 - ID3v2 container + if head.startswith(b"ID3"): + return ".mp3" + # MP3 - frame sync (approximate) + if ( + head.startswith(b"\xff\xfb") + or head.startswith(b"\xff\xf3") + or head.startswith(b"\xff\xf2") + ): + return ".mp3" + # WAVE + if head.startswith(b"RIFF") and head[8:12] == b"WAVE": + return ".wav" + # AVI + if head.startswith(b"RIFF") and head[8:12] == b"AVI ": + return ".avi" + except Exception: + pass + return None + + note_subdir = os.path.abspath(assets_dir) + for aid in att_ids: + uti = (ds.get_attachment_uti(aid) or "").lower() + # Basic check for audio or video + is_av = ( + "audio" in uti + or "video" in uti + or "mpeg" in uti + or "movie" in uti + or "quicktime" in uti + ) + if not is_av: + continue + + url = ds.get_primary_asset_url(aid) + if not (url and (url.startswith("http://") or url.startswith("https://"))): + continue + + try: + saved_path = ck_client.download_asset_to(url, note_subdir) + resolved = saved_path + try: + with open(saved_path, "rb") as fh: + head = fh.read(16) + ext = _infer_av_ext(head) + # Fallbacks for common Apple types + if not 
ext: + if "com.apple.m4a-audio" in uti: + ext = ".m4a" + elif "quicktime" in uti: + ext = ".mov" + + if ext and not saved_path.lower().endswith(ext): + new_path = saved_path + ext + try: + os.replace(saved_path, new_path) + resolved = new_path + except Exception: + resolved = saved_path + except Exception: + resolved = saved_path + + rel = os.path.relpath(resolved, start=os.path.abspath(out_dir)) + ds.set_primary_asset_url(aid, rel) + updated[aid] = rel + except Exception: + pass + return updated + + +def download_vcard_assets( + ck_client, + ds: CloudKitNoteDataSource, + att_ids: Iterable[str], + *, + assets_dir: str, + out_dir: str, + config: Optional[ExportConfig] = None, +) -> Dict[str, str]: + """Download VCard (contact) attachments and rewrite datasource URLs to local paths. + + Returns a mapping of attachment id → relative path used in HTML. + """ + os.makedirs(assets_dir, exist_ok=True) + updated: Dict[str, str] = {} + note_subdir = os.path.abspath(assets_dir) + + for aid in att_ids: + uti = (ds.get_attachment_uti(aid) or "").lower() + if "public.vcard" not in uti: + continue + + url = ds.get_primary_asset_url(aid) + if not (url and (url.startswith("http://") or url.startswith("https://"))): + continue + + try: + saved_path = ck_client.download_asset_to(url, note_subdir) + resolved = saved_path + + # Ensure .vcf extension + if not saved_path.lower().endswith(".vcf"): + new_path = saved_path + ".vcf" + try: + os.replace(saved_path, new_path) + resolved = new_path + except Exception: + resolved = saved_path + + rel = os.path.relpath(resolved, start=os.path.abspath(out_dir)) + ds.set_primary_asset_url(aid, rel) + updated[aid] = rel + except Exception: + pass + + return updated + + +def render_fragment( + note: pb.Note, + ds: Optional[CloudKitNoteDataSource], + config: Optional[ExportConfig] = None, +) -> str: + return render_note_fragment(note, ds, config=config) + + +def _safe_name(s: Optional[str]) -> str: + if not s: + return "untitled" + s = re.sub(r"\s+", 
" ", s).strip() + s = re.sub(r"[^\w\- ]+", "-", s) + return s[:60] or "untitled" + + +def write_html( + title: str, + html_fragment: str, + out_dir: str, + *, + full_page: bool = False, + filename: Optional[str] = None, +) -> str: + os.makedirs(out_dir, exist_ok=True) + page = render_note_page(title, html_fragment) if full_page else html_fragment + fname = filename or f"{_safe_name(title)}.html" + root = os.path.abspath(out_dir) + path = os.path.abspath(os.path.join(root, fname)) + if os.path.commonpath([root, path]) != root: + raise ValueError("filename must stay within out_dir") + with open(path, "w", encoding="utf-8") as f: + f.write(page) + return path + + +class NoteExporter: + """Orchestrator for exporting notes to HTML with assets.""" + + def __init__(self, ck_client, config: Optional[ExportConfig] = None): + self.client = ck_client + self.config = config or ExportConfig() + self.renderer = NoteRenderer(self.config) + + def export( + self, + note_record: CKRecord, + output_dir: str, + filename: Optional[str] = None, + ) -> Optional[str]: + """ + Export a single note record to HTML in the output directory. + Returns the path to the written HTML file, or None if export failed (e.g. no body). + """ + # 1. Decode + note = decode_and_parse_note(note_record) + if not note: + return None + + # 2. Build Datasource + ds, att_ids = build_datasource(self.client, note_record, note, self.config) + + # 3. 
Download Assets when doing archival export + export_mode = str(getattr(self.config, "export_mode", "archival") or "archival") + export_mode = export_mode.strip().lower() + if export_mode == "archival": + assets_root = getattr(self.config, "assets_dir", None) or os.path.join( + output_dir, "assets" + ) + assets_dir = os.path.join(assets_root, note_record.recordName) + + download_pdf_assets( + self.client, + ds, + att_ids, + assets_dir=assets_dir, + out_dir=output_dir, + config=self.config, + ) + download_image_assets( + self.client, + ds, + att_ids, + assets_dir=assets_dir, + out_dir=output_dir, + config=self.config, + ) + download_av_assets( + self.client, + ds, + att_ids, + assets_dir=assets_dir, + out_dir=output_dir, + config=self.config, + ) + download_vcard_assets( + self.client, + ds, + att_ids, + assets_dir=assets_dir, + out_dir=output_dir, + config=self.config, + ) + + # 4. Render + html_fragment = self.renderer.render(note, ds) + + # 5. Write + title = "Untitled" + title_enc = note_record.fields.get_value("TitleEncrypted") + if title_enc: + try: + if isinstance(title_enc, bytes): + title = title_enc.decode("utf-8") + elif isinstance(title_enc, str): + title = title_enc + except Exception: + pass + + full_page = getattr(self.config, "full_page", None) + if full_page is None: + full_page = True + + return write_html( + title, + html_fragment, + output_dir, + full_page=bool(full_page), + filename=filename, + ) diff --git a/pyicloud/services/notes/rendering/options.py b/pyicloud/services/notes/rendering/options.py new file mode 100644 index 00000000..8cd730f4 --- /dev/null +++ b/pyicloud/services/notes/rendering/options.py @@ -0,0 +1,76 @@ +""" +Export/render configuration for Apple Notes HTML output. + +Centralizes behavior flags so callers can tune defaults without touching +core logic. All fields are optional at call sites; None means "use current +module defaults and/or environment fallbacks". 
+""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Literal, Optional, Tuple + + +@dataclass(frozen=True, slots=True) +class ExportConfig: + # Logging/debug + debug: bool = False + + # Export policy + export_mode: Literal["archival", "lightweight"] = "archival" + assets_dir: Optional[str] = None + full_page: Optional[bool] = None + + # Image fidelity: when a Media record is present for image attachments, + # prefer it over preview images. Keep this True for best quality. + prefer_media_for_images: bool = True + + # Predicate for recognizing image UTIs beyond just the "public.image" prefix. + # If empty, a reasonable built-in set is used. + image_uti_prefixes: Tuple[str, ...] = ("public.image",) + image_uti_exacts: Tuple[str, ...] = ( + "public.jpeg", + "public.jpg", + "public.png", + "public.heic", + "public.heif", + "public.tiff", + "public.gif", + "public.bmp", + "public.webp", + # Apple Notes sketches use this UTI; treat as image-like for downloads + "com.apple.paper", + ) + + # Appearance hint for preview selection: "light" or "dark". 
+ preview_appearance: str = "light" + + # Default height for embedded PDFs + pdf_object_height: int = 600 + + # Link behavior + link_target_blank: bool = True + link_rel: str = "noopener noreferrer" + referrer_policy: str = "no-referrer" + + def is_image_uti(self, uti: Optional[str]) -> bool: + if not uti: + return False + u = uti.casefold() + + prefixes = self.image_uti_prefixes or ("public.image",) + for prefix in prefixes: + if not isinstance(prefix, str): + raise TypeError("image_uti_prefixes must contain only strings") + if u.startswith(prefix.casefold()): + return True + + exacts = self.image_uti_exacts or () + normalized_exacts = [] + for exact in exacts: + if not isinstance(exact, str): + raise TypeError("image_uti_exacts must contain only strings") + normalized_exacts.append(exact.casefold()) + + return u in tuple(normalized_exacts) diff --git a/pyicloud/services/notes/rendering/renderer.py b/pyicloud/services/notes/rendering/renderer.py new file mode 100644 index 00000000..64a6f1c2 --- /dev/null +++ b/pyicloud/services/notes/rendering/renderer.py @@ -0,0 +1,923 @@ +""" +Pure renderer for Apple Notes (proto3). + +Converts a parsed pb.Note into minimal, readable HTML. No I/O. 
+""" + +from __future__ import annotations + +import html +from dataclasses import dataclass +from enum import IntEnum +from typing import List, Optional, Tuple, cast +from urllib.parse import urlsplit + +from ..protobuf import notes_pb2 as pb +from .attachments import AttachmentContext, render_attachment +from .options import ExportConfig +from .renderer_iface import AttachmentRef, NoteDataSource + + +class StyleType(IntEnum): + DEFAULT = -1 + TITLE = 0 + HEADING = 1 + SUBHEADING = 2 + MONOSPACED = 4 + # Observed list styles (from legacy tooling) + DOTTED_LIST = 100 + DASHED_LIST = 101 + NUMBERED_LIST = 102 + CHECKBOX = 103 + + +def _is_list_style(st: Optional[int]) -> bool: + return st in ( + StyleType.DOTTED_LIST, + StyleType.DASHED_LIST, + StyleType.NUMBERED_LIST, + StyleType.CHECKBOX, + ) + + +def _safe_anchor_href(url: Optional[str]) -> Optional[str]: + if not url: + return None + + candidate = "".join(ch for ch in str(url).strip() if ch >= " " and ch != "\x7f") + if not candidate: + return None + + parts = urlsplit(candidate) + scheme = parts.scheme.casefold() + if scheme not in {"http", "https", "mailto", "tel"}: + return None + if scheme in {"http", "https"} and not parts.netloc: + return None + if scheme in {"mailto", "tel"} and not (parts.path or parts.netloc): + return None + return candidate + + +_FONT_STACKS = { + "ComicSansMS": [ + '"Comic Sans MS"', + '"Comic Sans"', + '"Chalkboard SE"', + '"Comic Neue"', + "cursive", + ], + "Comic Sans MS": [ + '"Comic Sans MS"', + '"Comic Sans"', + '"Chalkboard SE"', + '"Comic Neue"', + "cursive", + ], + "HelveticaNeue": ['"Helvetica Neue"', "Helvetica", "Arial", "sans-serif"], + "Helvetica Neue": ['"Helvetica Neue"', "Helvetica", "Arial", "sans-serif"], + "ArialMT": ["Arial", "Helvetica", "sans-serif"], + "Arial": ["Arial", "Helvetica", "sans-serif"], + "TimesNewRomanPSMT": ['"Times New Roman"', "Times", "serif"], + "Times New Roman": ['"Times New Roman"', "Times", "serif"], + "CourierNewPSMT": ['"Courier New"', 
"Courier", "monospace"], + "Courier New": ['"Courier New"', "Courier", "monospace"], +} + + +def _css_font_stack(name: str) -> str: + stack = _FONT_STACKS.get(name) + if stack: + return ", ".join(stack) + try: + safe = name.replace('"', "'") + except Exception: + safe = name + lower = name.lower() + generic = "sans-serif" + if "mono" in lower or "courier" in lower or "code" in lower: + generic = "monospace" + elif "serif" in lower or "times" in lower or "georgia" in lower: + generic = "serif" + elif "comic" in lower or "chalk" in lower or "hand" in lower: + generic = "cursive" + return f'"{safe}", {generic}' + + +@dataclass(frozen=True) +class StyleSig: + # Inline styling + font_weight: Optional[int] + underlined: Optional[int] + strikethrough: Optional[int] + superscript: Optional[int] + link: Optional[str] + color_hex: Optional[str] + emphasis_style: Optional[int] # FIX: added + font_size_pt: Optional[float] + font_name: Optional[str] + + # Block/paragraph styling + style_type: Optional[int] + alignment: Optional[int] + indent_amount: Optional[int] + block_quote: Optional[int] + writing_direction: Optional[int] + checklist_done: Optional[int] + start_number: Optional[int] + highlight: Optional[int] + paragraph_uuid: Optional[bytes] + + @staticmethod + def from_run(run: pb.AttributeRun) -> "StyleSig": + ps = run.paragraph_style if run.HasField("paragraph_style") else None + st = align = indent = bq = wd = None + start_num = None # default when no paragraph_style/start provided + fw = getattr(run, "font_weight", None) + ul = getattr(run, "underlined", None) + stt = getattr(run, "strikethrough", None) + sup = getattr(run, "superscript", None) + link = getattr(run, "link", None) + emph = getattr(run, "emphasis_style", None) # FIX + # Optional font info + font = run.font if run.HasField("font") else None + fsz = getattr(font, "point_size", None) if font is not None else None + fname = getattr(font, "font_name", None) if font is not None else None + # Optional 
highlight palette + hl = getattr(run, "highlight_color", None) + color_hex = None + if hasattr(run, "color") and run.HasField("color"): + try: + r = getattr(run.color, "red", 0.0) + g = getattr(run.color, "green", 0.0) + b = getattr(run.color, "blue", 0.0) + r8 = max(0, min(255, round(r * 255))) + g8 = max(0, min(255, round(g * 255))) + b8 = max(0, min(255, round(b * 255))) + color_hex = f"#{r8:02X}{g8:02X}{b8:02X}" + except Exception: + color_hex = None + para_uuid = None + if ps is not None: + # Use presence checks (proto3 optional) to avoid defaulting to TITLE (0) + try: + if ps.HasField("style_type"): + st = ps.style_type + except Exception: + pass + try: + if ps.HasField("alignment"): + align = ps.alignment + except Exception: + pass + try: + if ps.HasField("indent_amount"): + indent = ps.indent_amount + if isinstance(indent, int) and indent < 0: + indent = 0 + except Exception: + pass + try: + if ps.HasField("block_quote"): + bq = ps.block_quote + except Exception: + pass + try: + if ps.HasField("writing_direction_paragraph"): + wd = ps.writing_direction_paragraph + except Exception: + pass + try: + if ps.HasField("paragraph_uuid"): + para_uuid = getattr(ps, "paragraph_uuid", None) + except Exception: + para_uuid = None + # Ordered list start number (proto3 optional supports HasField) + try: + start_num = ( + ps.starting_list_item_number + if hasattr(ps, "starting_list_item_number") + and ps.HasField("starting_list_item_number") + else None + ) + except Exception: + start_num = None + try: + if ps.HasField("checklist"): + try: + checklist_done = getattr(ps.checklist, "done", None) + except Exception: + checklist_done = None + else: + checklist_done = None + except Exception: + checklist_done = None + else: + checklist_done = None + start_num = None + return StyleSig( + font_weight=fw, + underlined=ul, + strikethrough=stt, + superscript=sup, + link=link, + color_hex=color_hex, + emphasis_style=emph, # FIX + font_size_pt=fsz, + font_name=fname, + style_type=st, 
+ alignment=align, + indent_amount=indent, + block_quote=bq, + writing_direction=wd, + checklist_done=checklist_done, + start_number=start_num, + highlight=hl, + paragraph_uuid=para_uuid, + ) + + def same_paragraph_as(self, other: "StyleSig") -> bool: + # If both runs carry a paragraph UUID and it differs, this is a new paragraph + if ( + self.paragraph_uuid + and other.paragraph_uuid + and self.paragraph_uuid != other.paragraph_uuid + ): + return False + # Treat "neutral" runs (no style_type) as part of an active list paragraph + # to avoid prematurely closing list items between runs. + st_self = self.style_type + st_other = other.style_type + if _is_list_style(st_self) and st_other is None: + return True + + return ( + st_self == st_other + and self.alignment == other.alignment + and self.indent_amount == other.indent_amount + and self.block_quote == other.block_quote + and self.writing_direction == other.writing_direction + # Checklist item state is part of the paragraph semantics; if it differs, + # do not merge runs so each item carries its own done/unchecked state. + and self.checklist_done == other.checklist_done + # Start number for ordered lists can differ between paragraphs; avoid merging. 
+ and self.start_number == other.start_number + ) + + def same_inline_as(self, other: "StyleSig") -> bool: + return ( + self.font_weight == other.font_weight + and self.underlined == other.underlined + and self.strikethrough == other.strikethrough + and self.superscript == other.superscript + and self.link == other.link + and self.color_hex == other.color_hex + and self.emphasis_style == other.emphasis_style + and self.font_size_pt == other.font_size_pt + and self.font_name == other.font_name + and self.highlight == other.highlight + ) + + def same_effective_style(self, other: "StyleSig") -> bool: + return self.same_paragraph_as(other) and self.same_inline_as(other) + + +@dataclass +class MergedRun: + length: int + sig: StyleSig + attachment: Optional[AttachmentRef] + + +def _merge_runs(runs) -> List[MergedRun]: + out: List[MergedRun] = [] + for r in runs: + sig = StyleSig.from_run(r) + attachment = None + if r.HasField("attachment_info"): + ai = r.attachment_info + identifier = getattr(ai, "attachment_identifier", None) or None + uti_hint = getattr(ai, "type_uti", None) or None + attachment = AttachmentRef(identifier=identifier, uti_hint=uti_hint) + if ( + out + and attachment is None + and out[-1].attachment is None + and out[-1].sig.same_effective_style(sig) + ): + out[-1].length += r.length + else: + out.append(MergedRun(length=r.length, sig=sig, attachment=attachment)) + return out + + +def _slice_for_run(s: str, start: int, length_units: int) -> Tuple[str, int]: + end_guess = start + length_units + while True: + chunk = s[start:end_guess] + astrals = sum(1 for ch in chunk if ord(ch) > 0xFFFF) + new_end = start + length_units + astrals + if new_end == end_guess: + return chunk, end_guess + end_guess = new_end + + +def render_note_fragment( + note: pb.Note, + datasource: Optional[NoteDataSource], + config: Optional[ExportConfig] = None, +) -> str: + text = note.note_text or "" + merged = _merge_runs(note.attribute_run) + + fragments: List[str] = [] + 
para_tag_open = "" + para_tag_close = "" + deferred_breaks = 0 + # strip_leading_break_next = False + + def _emphasis_css(emph_val: Optional[int]) -> List[str]: + # Use the highlight value from the signature, which may come from emphasis_style or highlight_color + if emph_val is None: + return [] + # Map emphasis palette index to CSS variables that adapt to light/dark. + # Variables are defined in render_note_page(). + idx = int(emph_val) + if idx not in (1, 2, 3, 4, 5): + return [] + return [f"background-color:var(--hl{idx}-bg)"] + + i = 0 + list_stack: List[ + dict + ] = [] # {"indent": int, "tag": str, "li_open": bool, "li_index": Optional[int], "li_has_content": bool} + + def _close_top_list() -> None: + if not list_stack: + return + top = list_stack.pop() + if top.get("li_open"): + fragments.append("") + fragments.append(f"") + + def _close_lists_to(target_indent: int) -> None: + while list_stack and list_stack[-1]["indent"] > target_indent: + _close_top_list() + + def _ensure_list( + indent: int, + desired_tag: str, + *, + start: Optional[int] = None, + cls: Optional[str] = None, + ) -> None: + while list_stack and ( + list_stack[-1]["indent"] > indent + or ( + list_stack[-1]["indent"] == indent + and list_stack[-1]["tag"] != desired_tag + ) + ): + _close_top_list() + while (not list_stack) or list_stack[-1]["indent"] < indent: + if list_stack and not list_stack[-1]["li_open"]: + fragments.append("
  • ") + list_stack[-1]["li_open"] = True + list_stack[-1]["li_index"] = len(fragments) - 1 + list_stack[-1]["li_has_content"] = False + level = (list_stack[-1]["indent"] + 1) if list_stack else 0 + # Use the same list type at all nesting levels for consistency + tag = desired_tag + attrs: List[str] = [] + if cls and tag == "ul": + attrs.append(f'class="{html.escape(cls)}"') + if start and tag == "ol" and level == indent and int(start) > 1: + attrs.append(f'start="{int(start)}"') + attr_text = (" " + " ".join(attrs)) if attrs else "" + fragments.append(f"<{tag}{attr_text}>") + list_stack.append( + { + "indent": level, + "tag": tag, + "li_open": False, + "li_index": None, + "li_has_content": False, + } + ) + + def paragraph_open(sig: StyleSig) -> None: + nonlocal para_tag_open, para_tag_close + if sig.style_type in ( + StyleType.DOTTED_LIST, + StyleType.DASHED_LIST, + StyleType.NUMBERED_LIST, + StyleType.CHECKBOX, + ): + indent = int(sig.indent_amount or 0) + desired = "ol" if sig.style_type == StyleType.NUMBERED_LIST else "ul" + cls = "dashed" if sig.style_type == StyleType.DASHED_LIST else None + _ensure_list(indent, desired, start=sig.start_number, cls=cls) + # If a list item is already open at this level and had content, that + # item is complete; close it before starting a new one. + if list_stack[-1]["li_open"] and list_stack[-1].get("li_has_content"): + fragments.append("
  • ") + list_stack[-1]["li_open"] = False + if not list_stack[-1]["li_open"]: + fragments.append("
  • ") + list_stack[-1]["li_open"] = True + list_stack[-1]["li_index"] = len(fragments) - 1 + list_stack[-1]["li_has_content"] = False + if sig.style_type == StyleType.CHECKBOX: + checked = " checked" if sig.checklist_done == 1 else "" + fragments.append(f' ') + para_tag_open = "
  • " + para_tag_close = "
  • " + return + + _close_lists_to(-1) + if sig.style_type == StyleType.TITLE: + tag = "h1" + elif sig.style_type == StyleType.HEADING: + tag = "h2" + elif sig.style_type == StyleType.SUBHEADING: + tag = "h3" + elif sig.style_type == StyleType.MONOSPACED: + tag = "pre" + else: + tag = "p" + if sig.block_quote == 1 and tag == "p": + tag = "blockquote" + styles: List[str] = [] + if sig.alignment == 1: + styles.append("text-align:center") + elif sig.alignment == 2: + styles.append("text-align:right") + elif sig.alignment == 3: + styles.append("text-align:justify") + dir_attr = "" + if sig.writing_direction in (2, 4): + dir_attr = ' dir="rtl"' + elif sig.writing_direction in (1, 3): + dir_attr = ' dir="ltr"' + style_attr = f' style="{"; ".join(styles)}"' if styles else "" + para_tag_open = f"<{tag}{style_attr}{dir_attr}>" + para_tag_close = f"" + fragments.append(para_tag_open) + + def paragraph_close() -> None: + nonlocal para_tag_close, deferred_breaks + deferred_breaks = 0 + if para_tag_close: + if para_tag_close == "" and list_stack: + if list_stack[-1]["li_open"]: + # If li had no content, drop the opening and skip the closing + if not list_stack[-1].get("li_has_content"): + idx = list_stack[-1].get("li_index") + if isinstance(idx, int) and 0 <= idx < len(fragments): + fragments[idx] = "" + # Also drop a stray checklist checkbox emitted for an + # empty list item. This happens when a trailing + # paragraph opens a new CHECKBOX item but carries no + # text before the paragraph closes. + if ( + fragments + and isinstance(fragments[-1], str) + and fragments[-1] + .lstrip() + .startswith(' before closing the list item + while ( + fragments + and isinstance(fragments[-1], str) + and fragments[-1] == "
    " + ): + fragments.pop() + fragments.append(para_tag_close) + list_stack[-1]["li_open"] = False + elif para_tag_close != "": + fragments.append(para_tag_close) + para_tag_close = "" + + def _preserve_leading_ws(text: str) -> str: + # Convert leading spaces/tabs on each line into   so indentation is visible + # while keeping normal whitespace collapsing for the rest of the line. + out: List[str] = [] + i = 0 + n = len(text) + while i < n: + # find end of current line + j = i + while j < n and text[j] not in ("\n", "\u2028"): + j += 1 + line = text[i:j] + # escape full line + esc = html.escape(line) + # measure leading spaces/tabs in original (not escaped) + k = 0 + prefix: List[str] = [] + for ch in line: + if ch == " ": + prefix.append(" ") + k += 1 + elif ch == "\t": + prefix.append(" " * 4) + k += 1 + else: + break + if prefix: + esc = "".join(prefix) + esc[k:] + out.append(esc) + # line break token + if j < n: + out.append("
    ") + j += 1 + i = j + return "".join(out) + + def wrap_inline(sig: StyleSig, html_text: str) -> str: + styles: List[str] = [] + if sig.font_weight in (1, 3): + styles.append("font-weight:bold") + if sig.font_weight in (2, 3): + styles.append("font-style:italic") + if sig.color_hex: + styles.append(f"color:{sig.color_hex}") + styles.extend(_emphasis_css(sig.highlight or sig.emphasis_style)) + if sig.highlight and not sig.emphasis_style: + try: + idx = int(sig.highlight) + if idx in (1, 2, 3, 4, 5): + styles.append(f"background-color:var(--hl{idx}-bg)") + except Exception: + pass + if sig.font_size_pt: + try: + styles.append(f"font-size:{float(sig.font_size_pt):.0f}pt") + except Exception: + pass + if sig.font_name: + styles.append(f"font-family:{_css_font_stack(str(sig.font_name))}") + deco: List[str] = [] + if sig.underlined == 1: + deco.append("underline") + if sig.strikethrough == 1: + deco.append("line-through") + if deco: + styles.append(f"text-decoration:{' '.join(deco)}") + # Use single quotes for the style attribute to safely include quoted + # font-family names (e.g., "Comic Sans MS") without breaking HTML. 
+ if styles: + style_attr = "; ".join(styles) + styled = f"{html_text}" + else: + styled = html_text + safe_href = _safe_anchor_href(sig.link) + if safe_href: + rel = "noopener noreferrer" + rp = "no-referrer" + try: + if config and getattr(config, "link_rel", None): + rel = str(config.link_rel) + if config and getattr(config, "referrer_policy", None): + rp = str(config.referrer_policy) + except Exception: + pass + styled = f'{styled}' + if sig.superscript == 1: + styled = f"{styled}" + elif sig.superscript == -1: + styled = f"{styled}" + return styled + + total = len(merged) + prev_sig: Optional[StyleSig] = None + for idx, mr in enumerate(merged): + next_mr = merged[idx + 1] if idx + 1 < total else None + is_para_boundary = next_mr is None or not mr.sig.same_paragraph_as(next_mr.sig) + + if prev_sig is None or not prev_sig.same_paragraph_as(mr.sig): + if prev_sig is not None: + # Avoid closing the parent list item when transitioning to a + # deeper-indented list paragraph; the nested list should remain + # inside the current
  • . + close_prev = True + if _is_list_style( + getattr(prev_sig, "style_type", None) + ) and _is_list_style(getattr(mr.sig, "style_type", None)): + prev_indent = int(getattr(prev_sig, "indent_amount", 0) or 0) + cur_indent = int(getattr(mr.sig, "indent_amount", 0) or 0) + if cur_indent > prev_indent: + close_prev = False + if close_prev: + paragraph_close() + paragraph_open(mr.sig) + + if mr.attachment is not None: + ident = mr.attachment.identifier or "" + uti = (mr.attachment.resolved_uti(datasource) or "").lower() + title = None + primary = None + thumb = None + gz = None + # Capture preceding text on the same paragraph/line for inline renderers (e.g., calculator) + prior_text = None + try: + # 'i' is the current Python-string index for this run start + # Collect text since the last explicit line break + lb_n = text.rfind("\n", 0, i) + lb_u = text.rfind("\u2028", 0, i) + lb = max(lb_n, lb_u) + prior_text = text[(lb + 1) if lb >= 0 else 0 : i] + except Exception: + prior_text = None + if datasource and ident: + get_title = getattr(datasource, "get_title", None) + get_p = getattr(datasource, "get_primary_asset_url", None) + get_t = getattr(datasource, "get_thumbnail_url", None) + get_m = getattr(datasource, "get_mergeable_gz", None) + title = ( + cast(Optional[str], get_title(ident)) + if callable(get_title) + else None + ) + primary = cast(Optional[str], get_p(ident)) if callable(get_p) else None + thumb = cast(Optional[str], get_t(ident)) if callable(get_t) else None + gz = cast(Optional[bytes], get_m(ident)) if callable(get_m) else None + + # Derive link behavior from config + link_target = ( + "_blank" + if (config and getattr(config, "link_target_blank", True)) + else None + ) + link_rel = getattr(config, "link_rel", None) if config else None + pdf_h = getattr(config, "pdf_object_height", None) if config else None + + ctx = AttachmentContext( + id=ident, + uti=uti, + title=title, + primary_url=primary, + thumb_url=thumb, + mergeable_gz=gz, + 
prior_text=prior_text, + link_target=link_target, + link_rel=link_rel, + link_referrerpolicy=getattr(config, "referrer_policy", None) + if config + else None, + pdf_object_height=pdf_h, + ) + html_att = render_attachment( + ctx, + lambda cell_note: render_note_fragment( + cell_note, datasource, config=config + ), + ) + fragments.append(html_att) + if list_stack and list_stack[-1]["li_open"]: + list_stack[-1]["li_has_content"] = True + + i += mr.length + # We used to set strip_leading_break_next = True here, but that swallows + # explicit newlines that follow an inline attachment. + # strip_leading_break_next = True + else: + s, i = _slice_for_run(text, i, mr.length) + s = s.replace("\x00", "\u2400").replace("\ufffc", "") + # Removed the strip_leading_break_next check + if list_stack and list_stack[-1]["li_open"]: + # Inside a list item + if _is_list_style(mr.sig.style_type): + # For any list style, a newline generally means a new sibling item + segs = s.split("\n") + for k, seg in enumerate(segs): + if seg: + # If we are strictly inside a list item that is a "spacer" (bulletless), + # and we are about to add text, we must close the spacer and start a + # real list item so the text gets a bullet. + if list_stack and list_stack[-1]["li_open"]: + idx = list_stack[-1]["li_index"] + if idx is not None and idx < len(fragments): + tag = fragments[idx] + if 'style="list-style-type: none"' in tag: + fragments.append("
  • ") + list_stack[-1]["li_open"] = False + + # Open new standard item + fragments.append("
  • ") + list_stack[-1]["li_open"] = True + list_stack[-1]["li_index"] = len(fragments) - 1 + list_stack[-1]["li_has_content"] = False + + if mr.sig.style_type == StyleType.CHECKBOX: + checked = ( + " checked" + if mr.sig.checklist_done == 1 + else "" + ) + fragments.append( + f' ' + ) + + fragments.append(wrap_inline(mr.sig, html.escape(seg))) + if seg.strip(): + list_stack[-1]["li_has_content"] = True + else: + # Empty segment implies a newline in the source (e.g. \n\n). + # Apple Notes renders this as a vertical space (blank line) + # but WITHOUT a bullet. + # We check if this is a "trailing" newline used for nesting (handled below) + # or an actual blank line. + pass + + if k < len(segs) - 1: + next_seg = segs[k + 1] if (k + 1) < len(segs) else None + # If the newline is the trailing one (next seg empty and last), + # keep the current
  • open so a nested list can attach to it. + if next_seg == "" and (k + 1) == len(segs) - 1: + continue + + # Otherwise, end current item and start a new sibling item. + # If the current item (seg) was empty, we want it to be "bulletless". + # But we've already opened the
  • tag at the top of the loop or previous iter. + # So we need to retroactively apply style or just insure content forces height? + # Actually, we can just close the current
  • . + # If it was empty (seg==""), the browser renders an empty bullet point
  • . + + # Correction: We want to hide the bullet for *this* item if it's empty. + # But the
  • tag was emitted *before* we processed this segment + # (at the end of the previous iteration or start of block). + # We can't easily change the opening tag now. + + # Alternative: Handle the *next* opening tag. + + fragments.append("
  • ") + list_stack[-1]["li_open"] = False + + # open next + style_attr = "" + # If next segment is empty (and not the trailing nesting case), + # it's a blank line. Hide the marker. + # We know next_seg is the content of the next item. + is_next_empty = next_seg == "" + # Caution: if next_seg is "" AND it's the last one, we skipped above. + # So if we are here, next_seg might be empty (spacer) or "Text". + + if is_next_empty: + style_attr = ' style="list-style-type: none"' + + fragments.append(f"") + # Track the index of the opening tag + list_stack[-1]["li_index"] = len(fragments) - 1 + list_stack[-1]["li_open"] = True + + if is_next_empty: + # Ensure it has height + fragments.append(" ") + + list_stack[-1]["li_has_content"] = False + + # For checklist style, inject a checkbox for each new item + # ONLY if it's not a spacer (empty). + if ( + mr.sig.style_type == StyleType.CHECKBOX + and not is_next_empty + ): + checked = ( + " checked" if mr.sig.checklist_done == 1 else "" + ) + fragments.append( + f' ' + ) + else: + # Non-list paragraphs keep newlines as
    + segs = s.split("\n") + for k, seg in enumerate(segs): + if seg: + fragments.append(wrap_inline(mr.sig, html.escape(seg))) + if seg.strip(): + list_stack[-1]["li_has_content"] = True + if k < len(segs) - 1: + fragments.append("
    ") + else: + if is_para_boundary: + s = s.rstrip("\n\u2028") + + # If we are in a list item, check if it was a "spacer" (empty bulletless item) + # created by a previous run's trailing newlines. If so, and we have text, + # we should close the spacer and start a new real item. + if list_stack and list_stack[-1]["li_open"]: + # Check if current item is a spacer + idx = list_stack[-1]["li_index"] + if idx is not None and idx < len(fragments): + # A spacer looks like
  • + tag = fragments[idx] + if 'style="list-style-type: none"' in tag: + # Close spacer + fragments.append("
  • ") + list_stack[-1]["li_open"] = False + # Open new standard item + fragments.append("
  • ") + list_stack[-1]["li_open"] = True + list_stack[-1]["li_index"] = len(fragments) - 1 + list_stack[-1]["li_has_content"] = False + if mr.sig.style_type == StyleType.CHECKBOX: + checked = ( + " checked" if mr.sig.checklist_done == 1 else "" + ) + fragments.append( + f' ' + ) + + if s.replace("\n", "").replace("\u2028", "") == "": + deferred_breaks += s.count("\n") + s.count("\u2028") + else: + if deferred_breaks > 0: + fragments.append("
    " * deferred_breaks) + deferred_breaks = 0 + if mr.sig.style_type == StyleType.MONOSPACED: + safe = html.escape(s) + else: + # Preserve leading spaces/tabs per line for visible indentation + safe = _preserve_leading_ws(s) + fragments.append(wrap_inline(mr.sig, safe)) + if list_stack and list_stack[-1]["li_open"]: + list_stack[-1]["li_has_content"] = list_stack[-1].get( + "li_has_content" + ) or (s.strip() != "") + + if is_para_boundary: + # Do not close the current
  • when the next paragraph is a + # deeper-indented list item. Keeping the parent
  • open ensures + # the nested
      /
        is emitted inside the correct item rather + # than inside an empty sibling
      1. . + should_close = True + if next_mr is not None: + cur_st = mr.sig.style_type + nxt_st = next_mr.sig.style_type + if _is_list_style(cur_st) and _is_list_style(nxt_st): + cur_indent = int(mr.sig.indent_amount or 0) + nxt_indent = int(next_mr.sig.indent_amount or 0) + if nxt_indent > cur_indent: + should_close = False + if should_close: + paragraph_close() + prev_sig = mr.sig + + if prev_sig is not None and para_tag_close: + paragraph_close() + _close_lists_to(-1) + return "".join(fragments) + + +def render_note_page(title: str, html_fragment: str, extra_css: str = "") -> str: + return ( + '' + '' + f"{html.escape(title)}" + "
        {html_fragment}
        ' + ) + + +class NoteRenderer: + """Class-based interface for note rendering.""" + + def __init__(self, config: Optional[ExportConfig] = None): + self.config = config or ExportConfig() + + def render(self, note: pb.Note, datasource: Optional[NoteDataSource] = None) -> str: + """Render the note body to an HTML fragment string.""" + return render_note_fragment(note, datasource, config=self.config) + + def render_full_page(self, title: str, html_fragment: str) -> str: + """Wrap an HTML fragment in a full page with CSS.""" + return render_note_page(title, html_fragment) diff --git a/pyicloud/services/notes/rendering/renderer_iface.py b/pyicloud/services/notes/rendering/renderer_iface.py new file mode 100644 index 00000000..2358349e --- /dev/null +++ b/pyicloud/services/notes/rendering/renderer_iface.py @@ -0,0 +1,43 @@ +""" +Transport-agnostic renderer interface for Apple Notes. + +Defines the minimal datasource seam (`NoteDataSource`) that the renderer +requires to resolve: + - the UTI of an embedded attachment (by identifier), and + - the mergeable table bytes (gzipped) for table attachments. + +Optional richer datasource capabilities (if present) may include: + - get_primary_asset_url(identifier) + - get_thumbnail_url(identifier) + - get_title(identifier) + +The renderer never performs I/O; it only calls this interface. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Optional, Protocol + + +class NoteDataSource(Protocol): + """Minimal attachment datasource required by the renderer.""" + + def get_attachment_uti(self, identifier: str) -> Optional[str]: ... + + def get_mergeable_gz(self, identifier: str) -> Optional[bytes]: ... 
+ + +@dataclass(frozen=True, slots=True) +class AttachmentRef: + """Lightweight reference created while walking AttributeRuns.""" + + identifier: Optional[str] = None + uti_hint: Optional[str] = None + + def resolved_uti(self, datasource: Optional[NoteDataSource]) -> Optional[str]: + if self.uti_hint: + return self.uti_hint + if datasource and self.identifier: + return datasource.get_attachment_uti(self.identifier) + return None diff --git a/pyicloud/services/notes/rendering/table_builder.py b/pyicloud/services/notes/rendering/table_builder.py new file mode 100644 index 00000000..8e1288cc --- /dev/null +++ b/pyicloud/services/notes/rendering/table_builder.py @@ -0,0 +1,284 @@ +""" +Table reconstruction for Apple Notes MergeableData payloads (CloudKit/iCloud path). + +Given a gzipped MergeableData payload for a table attachment, reconstructs the +row/column ordering and renders a plain HTML with cell contents. Cell +contents are themselves Notes; callers provide a callback to render a pb.Note +into HTML using the existing renderer. 
+""" + +from __future__ import annotations + +import gzip +from dataclasses import dataclass, field +from enum import Enum +from typing import Callable, List, Optional + +from tinyhtml import h, raw # type: ignore[import-not-found] + +from ..protobuf import notes_pb2 as pb + + +class TypeName(str, Enum): + ICTABLE = "com.apple.notes.ICTable" + + +class MapKey(str, Enum): + CR_ROWS = "crRows" + CR_COLUMNS = "crColumns" + CELL_COLUMNS = "cellColumns" + + +@dataclass(frozen=True, slots=True) +class TableSpec: + type_name: TypeName + rows_key: MapKey + cols_key: MapKey + cellcols_key: MapKey + + +TABLE_SPEC = TableSpec( + type_name=TypeName.ICTABLE, + rows_key=MapKey.CR_ROWS, + cols_key=MapKey.CR_COLUMNS, + cellcols_key=MapKey.CELL_COLUMNS, +) + +MAX_TABLE_AXIS_ITEMS = 512 +MAX_TABLE_CELLS = 50_000 + + +@dataclass(slots=True) +class Cell: + html: str = "" + + +@dataclass(slots=True) +class AxisState: + indices: dict[int, int] = field(default_factory=dict) + total: int = 0 + + +@dataclass(slots=True) +class TableBuilder: + key_items: List[str] + type_items: List[str] + uuid_items: List[bytes] + entries: List[pb.MergeableDataObjectRow] + render_note_cb: Callable[[pb.Note], str] + + uuid_index: dict[bytes, int] = field(init=False) + rows: AxisState = field(default_factory=AxisState) + cols: AxisState = field(default_factory=AxisState) + cells: List[List[Cell]] = field(default_factory=list) + + def __post_init__(self) -> None: + self.uuid_index = {u: i for i, u in enumerate(self.uuid_items)} + + def _uuid_index_from_entry(self, entry: pb.MergeableDataObjectRow) -> Optional[int]: + try: + # custom_map.map_entry[0].value.unsigned_integer_value -> UUID VALUE + val = entry.custom_map.map_entry[0].value.unsigned_integer_value + return ( + self.uuid_index.get(self.uuid_items[val], None) + if 0 <= val < len(self.uuid_items) + else None + ) + except Exception: + return None + + def _parse_axis(self, entry: pb.MergeableDataObjectRow, axis: AxisState) -> None: + axis.total = 0 + 
axis.indices.clear() + # 1) Array attachments reference UUID VALUES directly + try: + for att in entry.ordered_set.ordering.array.attachment: + idx = self.uuid_index.get(att.uuid) + if idx is None: + continue + axis.indices[idx] = axis.total + axis.total += 1 + except Exception: + pass + # 2) Contents remap (key -> value) using entries addressed by object_index + try: + for elem in entry.ordered_set.ordering.contents.element: + k_ent = self.entries[elem.key.object_index] + v_ent = self.entries[elem.value.object_index] + k_idx = self._uuid_index_from_entry(k_ent) + v_idx = self._uuid_index_from_entry(v_ent) + if v_idx is None: + continue + pos = axis.indices.get(k_idx, axis.indices.get(v_idx, 0)) # type: ignore[arg-type] + axis.total = max(axis.total, pos + 1) + axis.indices[v_idx] = pos + except Exception: + pass + + def parse_rows(self, entry: pb.MergeableDataObjectRow) -> None: + self._parse_axis(entry, self.rows) + + def parse_cols(self, entry: pb.MergeableDataObjectRow) -> None: + self._parse_axis(entry, self.cols) + + def init_table_buffers(self) -> None: + if ( + self.rows.total <= 0 + or self.cols.total <= 0 + or self.rows.total > MAX_TABLE_AXIS_ITEMS + or self.cols.total > MAX_TABLE_AXIS_ITEMS + or self.rows.total * self.cols.total > MAX_TABLE_CELLS + ): + self.cells = [] + return + self.cells = [ + [Cell() for _ in range(self.cols.total)] for _ in range(self.rows.total) + ] + + def parse_cell_columns(self, entry: pb.MergeableDataObjectRow) -> None: + # entry.dictionary.element: key -> column dict + for col in entry.dictionary.element: + try: + col_key_ent = self.entries[col.key.object_index] + col_pos = self.cols.indices.get( + self._uuid_index_from_entry(col_key_ent) # type: ignore[arg-type] + ) + if col_pos is None: + continue + col_dict_ent = self.entries[col.value.object_index] + except Exception: + continue + for row in col_dict_ent.dictionary.element: + try: + row_key_ent = self.entries[row.key.object_index] + row_pos = self.rows.indices.get( + 
self._uuid_index_from_entry(row_key_ent) # type: ignore[arg-type] + ) + if row_pos is None: + continue + cell_ent = self.entries[row.value.object_index] + except Exception: + continue + if not cell_ent.HasField("note"): + continue + try: + cell_note = cell_ent.note + inner_html = self.render_note_cb(cell_note) + if row_pos >= len(self.cells) or col_pos >= len( + self.cells[row_pos] + ): + continue + self.cells[row_pos][col_pos].html = inner_html + except Exception: + continue + + def render_html_table(self) -> Optional[str]: + if not self.cells or self.rows.total == 0 or self.cols.total == 0: + return None + trs: List[object] = [] + for r in range(self.rows.total): + tds: List[object] = [] + for c in range(self.cols.total): + cell_html = self.cells[r][c].html or "" + tds.append(h("td")(raw(cell_html))) # type: ignore[arg-type] + trs.append(h("tr")(*tds)) # type: ignore[arg-type] + return h("table")(*trs).render() # type: ignore[arg-type] + + +ALLOWED_TABLE_TYPES = { + TypeName.ICTABLE.value, + "com.apple.notes.ICTable2", + "com.apple.notes.CRTable", +} + + +def render_table_from_mergeable( + gz_bytes: bytes, render_note_cb: Callable[[pb.Note], str] +) -> Optional[str]: + if not gz_bytes: + return None + try: + payload = gzip.decompress(gz_bytes) + except Exception: + payload = gz_bytes + try: + m = pb.MergableDataProto() + m.ParseFromString(payload) + data = m.mergable_data_object.mergeable_data_object_data + key_items = list(data.mergeable_data_object_key_item) + type_items = list(data.mergeable_data_object_type_item) + uuid_items = list(data.mergeable_data_object_uuid_item) + entries = list(data.mergeable_data_object_entry) + except Exception: + return None + + # Find root entry by type name and walk + for e in entries: + if not e.HasField("custom_map"): + continue + try: + type_idx = e.custom_map.type + tname_ok = ( + 0 <= type_idx < len(type_items) + and type_items[type_idx] in ALLOWED_TABLE_TYPES + ) + except Exception: + tname_ok = False + + # Fallback: treat 
as table if it contains the expected keys + has_rows = has_cols = has_cells = False + try: + keynames = [ + key_items[me.key] + for me in e.custom_map.map_entry + if 0 <= me.key < len(key_items) + ] + has_rows = TABLE_SPEC.rows_key.value in keynames + has_cols = TABLE_SPEC.cols_key.value in keynames + has_cells = TABLE_SPEC.cellcols_key.value in keynames + except Exception: + pass + + if not (tname_ok or (has_rows and has_cols and has_cells)): + continue + + tb = TableBuilder( + key_items=key_items, + type_items=type_items, + uuid_items=uuid_items, + entries=entries, + render_note_cb=render_note_cb, + ) + pending_cell_columns: Optional[pb.MergeableDataObjectRow] = None + for me in e.custom_map.map_entry: + kname = key_items[me.key] if 0 <= me.key < len(key_items) else None + try: + target = entries[me.value.object_index] + except Exception: + continue + if kname == TABLE_SPEC.rows_key.value: + try: + tb.parse_rows(target) + except Exception: + continue + elif kname == TABLE_SPEC.cols_key.value: + try: + tb.parse_cols(target) + except Exception: + continue + elif kname == TABLE_SPEC.cellcols_key.value: + pending_cell_columns = target + if tb.rows.total <= 0 or tb.cols.total <= 0: + continue + tb.init_table_buffers() + if not tb.cells: + continue + if pending_cell_columns: + try: + tb.parse_cell_columns(pending_cell_columns) + except Exception: + continue + html_table = tb.render_html_table() + if html_table: + return html_table + return None diff --git a/pyicloud/services/notes/service.py b/pyicloud/services/notes/service.py new file mode 100644 index 00000000..213e3275 --- /dev/null +++ b/pyicloud/services/notes/service.py @@ -0,0 +1,962 @@ +""" +High-level Apple Notes service built on top of a preconfigured CloudKit client. 
+ +Public API: + - NotesService.recents(limit=50) -> Iterable[NoteSummary] + - NotesService.recents_in_folder(folder_id, limit=20) -> Iterable[NoteSummary] + - NotesService.iter_all(since=None) -> Iterable[NoteSummary] + - NotesService.folders() -> Iterable[NoteFolder] + - NotesService.in_folder(folder_id, limit=None) -> Iterable[NoteSummary] + - NotesService.get(note_id, with_attachments=False) -> Note + - NotesService.sync_cursor() -> str + - NotesService.export_note(note_id, output_dir, **config_kwargs) -> str + - NotesService.render_note(note_id, **config_kwargs) -> str + - NotesService.iter_changes(since=None) -> Iterable[ChangeEvent] + - NotesService.raw -> CloudKitNotesClient + +Prefer the typed service methods for normal use. ``raw`` is an escape hatch for +advanced or unsupported CloudKit workflows. +""" + +from __future__ import annotations + +import logging +from typing import Dict, Iterable, Iterator, List, Optional + +from pyicloud.common.cloudkit import ( + CKErrorItem, + CKFVString, + CKLookupResponse, + CKQueryFilterBy, + CKQueryObject, + CKQueryResponse, + CKQuerySortBy, + CKRecord, + CKReference, + CKTombstoneRecord, + CKZoneChangesZoneReq, + CKZoneID, + CKZoneIDReq, + CloudKitExtraMode, +) +from pyicloud.common.cloudkit.models import CKReferenceField, CKReferenceListField +from pyicloud.services.base import BaseService +from pyicloud.services.notes.decoding import BodyDecoder + +from .client import ( + CloudKitNotesClient, + NotesApiError, + NotesAuthError, + NotesError, + NotesRateLimited, +) +from .domain import AttachmentId, NoteBody +from .models import Attachment, Note, NoteSummary +from .models.constants import NotesDesiredKey, NotesRecordType +from .models.dto import ChangeEvent, NoteFolder + +LOGGER = logging.getLogger(__name__) + + +class NoteNotFound(NotesError): + pass + + +class NoteLockedError(NotesError): + pass + + +# ----------------------------- NotesService ---------------------------------- + + +class NotesService(BaseService): 
+ """ + Typed, developer-friendly Notes API. + + The service exposes lightweight listing helpers, full-note retrieval, + attachment download helpers, and HTML render/export utilities while hiding + most CloudKit details behind ``NoteSummary``, ``Note``, and ``Attachment`` + models. + """ + + _CONTAINER = "com.apple.notes" + _ENV = "production" + _SCOPE = "private" + + def __init__( + self, + service_root: str, + session, + params: Dict[str, str], + *, + cloudkit_validation_extra: CloudKitExtraMode | None = None, + ): + super().__init__(service_root=service_root, session=session, params=params) + endpoint = f"{self.service_root}/database/1/{self._CONTAINER}/{self._ENV}/{self._SCOPE}" + # Sensible defaults; lower-case booleans are applied in the raw client + base_params = { + "remapEnums": True, + "getCurrentSyncToken": True, + **(params or {}), + } + self._raw = CloudKitNotesClient( + base_url=endpoint, + session=session, + base_params=base_params, + validation_extra=cloudkit_validation_extra, + ) + # In-memory caches + self._folder_name_cache: Dict[str, Optional[str]] = {} + self._attachment_meta_cache: Dict[str, Attachment] = {} + + # -------------------------- Public API methods --------------------------- + + def recents(self, *, limit: int = 50) -> Iterable[NoteSummary]: + """ + Yield the newest note summaries, ordered most-recent first. + + Args: + limit: Maximum number of notes to yield. + + Yields: + ``NoteSummary`` instances with lightweight metadata suitable for + feeds, selectors, or navigation UIs. 
+ """ + if limit <= 0: + return + + desired_keys = [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.SNIPPET_ENCRYPTED, + NotesDesiredKey.MODIFICATION_DATE, + NotesDesiredKey.DELETED, + NotesDesiredKey.FOLDER, + NotesDesiredKey.FIRST_ATTACHMENT_UTI_ENCRYPTED, + NotesDesiredKey.FIRST_ATTACHMENT_THUMBNAIL, + NotesDesiredKey.FIRST_ATTACHMENT_THUMBNAIL_ORIENTATION, + NotesDesiredKey.ATTACHMENTS, + ] + query = CKQueryObject( + recordType="SearchIndexes", + filterBy=[ + CKQueryFilterBy( + comparator="EQUALS", + fieldName="indexName", + fieldValue=CKFVString(type="STRING", value="recents"), + ) + ], + sortBy=[CKQuerySortBy(fieldName="modTime", ascending=False)], + ) + yielded = 0 + cont: Optional[str] = None + LOGGER.debug("Fetching recents: limit=%d", limit) + while True: + remaining = limit - yielded + if remaining <= 0: + return + resp: CKQueryResponse = self._raw.query( + query=query, + zone_id=CKZoneIDReq(zoneName="Notes"), + desired_keys=self._coerce_keys(desired_keys), + results_limit=min(200, remaining), + continuation=cont, + ) + for rec in resp.records: + if isinstance(rec, CKRecord): + summary = self._summary_from_record(rec) + yielded += 1 + yield summary + if yielded >= limit: + LOGGER.debug( + "Recents: yielded %d notes (limit reached)", yielded + ) + return + cont = getattr(resp, "continuationMarker", None) + if not cont: + LOGGER.debug("Recents: no more continuation marker, done.") + return + + def recents_in_folder( + self, folder_id: str, *, limit: int = 20 + ) -> Iterable[NoteSummary]: + """ + Yield recent note summaries that belong to ``folder_id``. + + This is a convenience helper that filters the global recents feed rather + than scanning the full Notes history. + """ + if limit <= 0: + return + emitted = 0 + # Pull a larger window than requested to increase the chance of finding matches fast. 
+ window = max(200, limit * 5) + LOGGER.debug( + "Fetching recents in folder: folder_id=%s limit=%d", folder_id, limit + ) + for n in self.recents(limit=window): + if n.folder_id == folder_id and not n.is_deleted: + yield n + emitted += 1 + if emitted >= limit: + LOGGER.debug( + "Recents in folder: yielded %d notes (limit reached)", emitted + ) + return + + def iter_all(self, *, since: Optional[str] = None) -> Iterable[NoteSummary]: + """ + Yield note summaries from the Notes changes feed. + + Args: + since: Optional sync token from ``sync_cursor()``. When provided, the + iterator yields only note records changed since that cursor. + + Yields: + ``NoteSummary`` instances for full exports, indexing jobs, or local + cache refreshes. + """ + LOGGER.debug("Iterating all notes%s", f" since={since}" if since else "") + for zone in self._raw.changes( + zone_req=CKZoneChangesZoneReq( + zoneID=CKZoneID(zoneName="Notes", zoneType="REGULAR_CUSTOM_ZONE"), + desiredRecordTypes=[NotesRecordType.Note], + desiredKeys=self._coerce_keys( + [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.SNIPPET_ENCRYPTED, + NotesDesiredKey.MODIFICATION_DATE, + NotesDesiredKey.DELETED, + NotesDesiredKey.FOLDER, + NotesDesiredKey.ATTACHMENTS, + ] + ), + syncToken=since, + reverse=False, + ), + # CloudKit page sizing is implicit; we cap our own yield volume by iterating + ): + for rec in zone.records: + if isinstance(rec, CKRecord): + yield self._summary_from_record(rec) + + def folders(self) -> Iterable[NoteFolder]: + """ + Yield top-level Notes folders as ``NoteFolder`` models. + + Use this to build folder navigation or resolve folder IDs before calling + ``in_folder()``. 
+ """ + desired_keys = [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.TITLE_MODIFICATION_DATE, + NotesDesiredKey.HAS_SUBFOLDER, + ] + query = CKQueryObject( + recordType="SearchIndexes", + filterBy=[ + CKQueryFilterBy( + comparator="EQUALS", + fieldName="indexName", + fieldValue=CKFVString(type="STRING", value="parentless"), + ) + ], + ) + cont: Optional[str] = None + LOGGER.debug("Fetching folders") + while True: + resp: CKQueryResponse = self._raw.query( + query=query, + zone_id=CKZoneIDReq(zoneName="Notes"), + desired_keys=self._coerce_keys(desired_keys), + results_limit=200, + continuation=cont, + ) + for rec in resp.records: + if isinstance(rec, CKRecord): + folder_id = rec.recordName + name = self._decode_encrypted( + rec.fields.get_value("TitleEncrypted") + ) + has_sub = bool( + getattr( + rec.fields.get_field(NotesDesiredKey.HAS_SUBFOLDER) or (), + "value", + False, + ) + ) + yield NoteFolder( + id=folder_id, name=name, has_subfolders=has_sub, count=None + ) + # cache for later + self._folder_name_cache.setdefault(folder_id, name) + cont = getattr(resp, "continuationMarker", None) + if not cont: + LOGGER.debug("Folders: no more continuation marker, done.") + return + + def in_folder( + self, folder_id: str, *, limit: Optional[int] = None + ) -> Iterable[NoteSummary]: + """ + Yield non-deleted notes in ``folder_id``, ordered newest first. + + Args: + folder_id: Folder identifier from ``folders()`` or note metadata. + limit: Optional maximum number of notes to yield. 
+ """ + emitted = 0 + LOGGER.debug( + "Fetching notes in folder: folder_id=%s limit=%s", folder_id, limit + ) + for zone in self._raw.changes( + zone_req=CKZoneChangesZoneReq( + zoneID=CKZoneID(zoneName="Notes", zoneType="REGULAR_CUSTOM_ZONE"), + desiredRecordTypes=[NotesRecordType.Note], + desiredKeys=self._coerce_keys( + [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.SNIPPET_ENCRYPTED, + NotesDesiredKey.MODIFICATION_DATE, + NotesDesiredKey.DELETED, + NotesDesiredKey.FOLDER, + NotesDesiredKey.FIRST_ATTACHMENT_UTI_ENCRYPTED, + NotesDesiredKey.FIRST_ATTACHMENT_THUMBNAIL, + NotesDesiredKey.FIRST_ATTACHMENT_THUMBNAIL_ORIENTATION, + NotesDesiredKey.ATTACHMENTS, + ] + ), + reverse=True, # newest first + ) + ): + for rec in zone.records: + if not isinstance(rec, CKRecord): + continue + rec_folder_id = self._extract_folder_id(rec) + deleted = bool(rec.fields.get_value("Deleted") or False) + if deleted or rec_folder_id != folder_id: + continue + yield self._summary_from_record(rec) + emitted += 1 + if limit and emitted >= limit: + LOGGER.debug( + "Notes in folder: yielded %d notes (limit reached)", emitted + ) + return + + def get(self, note_id: str, *, with_attachments: bool = False) -> Note: + """ + Return a single note with decoded text and optional attachment metadata. + + Args: + note_id: The CloudKit note record identifier. + with_attachments: When ``True``, resolve ``Attachment`` metadata and + include it on the returned ``Note``. + + Raises: + NoteNotFound: If the note does not exist. + NoteLockedError: If the note is passphrase-locked and its content + cannot be read. 
+ """ + LOGGER.debug( + "Fetching note: note_id=%s with_attachments=%s", note_id, with_attachments + ) + resp: CKLookupResponse = self._raw.lookup( + record_names=[note_id], + desired_keys=self._coerce_keys( + [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.SNIPPET_ENCRYPTED, + NotesDesiredKey.MODIFICATION_DATE, + NotesDesiredKey.DELETED, + NotesDesiredKey.FOLDER, + NotesDesiredKey.ATTACHMENTS, + "TextDataEncrypted", # may or may not be present + ] + ), + ) + target: Optional[CKRecord] = None + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == note_id: + target = rec + break + if target is None: + LOGGER.warning("Note not found: %s", note_id) + raise NoteNotFound(f"Note not found: {note_id}") + + summary = self._summary_from_record(target) + if summary.is_locked: + LOGGER.warning("Note is locked and cannot be read: %s", note_id) + raise NoteLockedError( + f"Note '{summary.title or note_id}' is locked and cannot be read." + ) + + note_body = self._decode_note_body(target) + # Minimal breadcrumbs for body decode outcome + try: + if note_body and note_body.text: + LOGGER.info( + "notes.body.decoded ok id=%s len=%d", + summary.id, + len(note_body.text), + ) + else: + LOGGER.info("notes.body.decoded empty id=%s", summary.id) + except Exception: + LOGGER.info( + "notes.body.decoded %s", + "ok" if note_body and note_body.text else "empty", + ) + attachments: Optional[List[Attachment]] = None + html: Optional[str] = None + + text = note_body.text if note_body else None + attachment_ids: List[AttachmentId] = [] + if note_body and note_body.attachment_ids: + attachment_ids = note_body.attachment_ids + + if with_attachments: + attachments = self._resolve_attachments_for_record( + target, attachment_ids=attachment_ids + ) + + return Note( + id=summary.id, + title=summary.title, + snippet=summary.snippet, + modified_at=summary.modified_at, + folder_id=summary.folder_id, + folder_name=summary.folder_name, + is_deleted=summary.is_deleted, + 
is_locked=summary.is_locked, + text=text, + html=html, + attachments=attachments, + ) + + def sync_cursor(self) -> str: + """ + Return the current Notes sync token. + + Persist this token and pass it back to ``iter_all(since=...)`` or + ``iter_changes(since=...)`` on a later run to perform incremental syncs. + """ + LOGGER.debug("Fetching sync cursor for Notes zone") + return self._raw.current_sync_token(zone_name="Notes") + + def export_note(self, note_id: str, output_dir: str, **config_kwargs) -> str: + """ + Export a note to HTML on disk and return the generated file path. + + Args: + note_id: The UUID of the note to export. + output_dir: Directory where the HTML file will be written. + **config_kwargs: Keyword arguments forwarded to ``ExportConfig``, + including ``export_mode``, ``assets_dir``, ``full_page``, + ``preview_appearance``, ``pdf_object_height``, and link behavior + settings. + + Returns: + The path to the generated HTML file. + + Notes: + By default, this produces archival output: a full HTML page with + local asset downloads. + """ + resp = self._raw.lookup([note_id]) + target = None + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == note_id: + target = rec + break + if not target: + raise NoteNotFound(f"Note not found: {note_id}") + + # Lazy import to avoid circular dependency + from .rendering.exporter import NoteExporter + from .rendering.options import ExportConfig + + config = ExportConfig(**config_kwargs) + exporter = NoteExporter(self._raw, config=config) + path = exporter.export(target, output_dir=output_dir) + if not path: + raise NotesError(f"Failed to export note: {note_id}") + return path + + def render_note(self, note_id: str, **config_kwargs) -> str: + """ + Render a note to an HTML fragment string. + + Args: + note_id: The UUID of the note to render. + **config_kwargs: Keyword arguments forwarded to ``ExportConfig`` to + tune preview selection or link behavior for the rendered HTML. 
+ + Returns: + An HTML fragment string. This method does not download assets or + write files to disk. + """ + resp = self._raw.lookup([note_id]) + target = None + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == note_id: + target = rec + break + if not target: + raise NoteNotFound(f"Note not found: {note_id}") + + from .rendering.exporter import build_datasource, decode_and_parse_note + from .rendering.options import ExportConfig + from .rendering.renderer import NoteRenderer + + config = ExportConfig(**config_kwargs) + note = decode_and_parse_note(target) + if not note: + return "" + + ds, _ = build_datasource(self._raw, target, note, config) + renderer = NoteRenderer(config) + return renderer.render(note, ds) + + def iter_changes(self, *, since: Optional[str] = None) -> Iterable[ChangeEvent]: + """ + Yield ``ChangeEvent`` items from the Notes changes feed. + + Pass a sync token from ``sync_cursor()`` to process only new changes + since a previous run. + """ + LOGGER.debug("Iterating changes%s", f" since={since}" if since else "") + for zone in self._raw.changes( + zone_req=CKZoneChangesZoneReq( + zoneID=CKZoneID(zoneName="Notes", zoneType="REGULAR_CUSTOM_ZONE"), + desiredRecordTypes=[NotesRecordType.Note], + desiredKeys=self._coerce_keys( + [ + NotesDesiredKey.TITLE_ENCRYPTED, + NotesDesiredKey.SNIPPET_ENCRYPTED, + NotesDesiredKey.MODIFICATION_DATE, + NotesDesiredKey.DELETED, + NotesDesiredKey.FOLDER, + NotesDesiredKey.ATTACHMENTS, + ] + ), + syncToken=since, + reverse=False, + ) + ): + for rec in zone.records: + if isinstance(rec, CKRecord): + deleted_flag = bool(rec.fields.get_value("Deleted") or False) + evt_type = "deleted" if deleted_flag else "updated" + yield ChangeEvent( + type=evt_type, + note=self._summary_from_record(rec), + ) + continue + + if isinstance(rec, CKTombstoneRecord): + record_name = getattr(rec, "recordName", None) + if record_name: + yield ChangeEvent( + type="deleted", + note=NoteSummary( + id=record_name, + 
title=None, + snippet=None, + modified_at=None, + folder_id=None, + folder_name=None, + is_deleted=True, + is_locked=False, + ), + ) + continue + + if isinstance(rec, CKErrorItem): + details = { + "serverErrorCode": rec.serverErrorCode, + "reason": rec.reason, + "recordName": rec.recordName, + } + LOGGER.error( + "CloudKit error during change enumeration: %s (%s) record=%s", + rec.serverErrorCode or "UNKNOWN", + rec.reason, + rec.recordName, + ) + raise NotesApiError( + ( + "CloudKit error during change enumeration: " + f"{rec.serverErrorCode or 'UNKNOWN'}" + ), + payload=details, + ) + + LOGGER.error("Unexpected record type in changes feed: %r", rec) + raise NotesApiError( + "Unexpected record type in changes feed", + payload={"record_repr": repr(rec)}, + ) + + @property + def raw(self) -> CloudKitNotesClient: + """ + Return the authenticated low-level Notes CloudKit client. + + This is an escape hatch for advanced or unsupported operations; prefer + the typed service methods above for normal use. 
+ """ + return self._raw + + # -------------------------- Internal helpers ----------------------------- + + @staticmethod + def _coerce_keys(keys: Optional[Iterable[object]]) -> Optional[List[str]]: + if keys is None: + return None + out: List[str] = [] + for k in keys: + if isinstance(k, NotesDesiredKey): + out.append(k.value) + else: + out.append(str(k)) + return out + + @staticmethod + def _decode_encrypted(b: Optional[bytes | str]) -> Optional[str]: + if b is None: + return None + if isinstance(b, str): + return b + try: + return b.decode("utf-8", "replace") + except Exception: + return None + + def _extract_folder_id(self, rec: CKRecord) -> Optional[str]: + f = rec.fields.get_field("Folder") + if isinstance(f, CKReferenceField) and f.value: + return f.value.recordName + fl = rec.fields.get_field("Folders") + if isinstance(fl, CKReferenceListField) and fl.value: + return fl.value[0].recordName + return None + + def _folder_name(self, folder_id: Optional[str]) -> Optional[str]: + if not folder_id: + return None + if folder_id in self._folder_name_cache: + return self._folder_name_cache[folder_id] + try: + resp = self._raw.lookup([folder_id], desired_keys=["TitleEncrypted"]) + name: Optional[str] = None + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == folder_id: + name = self._decode_encrypted( + rec.fields.get_value("TitleEncrypted") + ) + break + self._folder_name_cache[folder_id] = name + LOGGER.debug("Folder name resolved: folder_id=%s name=%s", folder_id, name) + return name + except (NotesApiError, NotesAuthError, NotesRateLimited): + self._folder_name_cache[folder_id] = None + LOGGER.warning("Failed to resolve folder name: folder_id=%s", folder_id) + return None + + def _summary_from_record(self, rec: CKRecord) -> NoteSummary: + title = self._decode_encrypted(rec.fields.get_value("TitleEncrypted")) + snippet = self._decode_encrypted(rec.fields.get_value("SnippetEncrypted")) + modified = rec.fields.get_value( + 
"ModificationDate" + ) # already tz-aware datetime or None + deleted = bool(rec.fields.get_value("Deleted") or False) + folder_id = self._extract_folder_id(rec) + folder_name = self._folder_name(folder_id) + is_locked = ( + str(getattr(rec, "recordType", "")).lower() + == NotesRecordType.PasswordProtectedNote.lower() + ) + return NoteSummary( + id=rec.recordName, + title=title, + snippet=snippet, + modified_at=modified, + folder_id=folder_id, + folder_name=folder_name, + is_deleted=deleted, + is_locked=is_locked, + ) + + def _decode_note_body(self, rec: CKRecord) -> Optional[NoteBody]: + """Decode TextDataEncrypted into a NoteBody (text + attachment IDs).""" + + raw = rec.fields.get_value("TextDataEncrypted") + if not raw: + LOGGER.debug("notes.body.missing TextDataEncrypted id=%s", rec.recordName) + return None + try: + nb = BodyDecoder().decode(raw) + if nb and isinstance(nb, NoteBody): + return nb + LOGGER.debug( + "notes.body.no_text id=%s bytes=%s", + rec.recordName, + ( + len(getattr(nb, "bytes", b"")) + if nb and getattr(nb, "bytes", None) + else "0" + ), + ) + return None + except Exception as e: + LOGGER.warning("notes.body.decode_failed id=%s err=%s", rec.recordName, e) + return None + + def _resolve_attachments_for_record( + self, + rec: CKRecord, + *, + attachment_ids: Optional[List[AttachmentId]] = None, + ) -> List[Attachment]: + """Hydrate attachment metadata for a note. + + Combines attachment identifiers from CloudKit references and the decoded + protobuf body. Missing records are skipped gracefully. 
+ """ + + out: List[Attachment] = [] + + alias_ids: List[str] = [] + lookup_candidates: List[str] = [] + + if attachment_ids: + for aid in attachment_ids: + ident = getattr(aid, "identifier", None) + if ident: + alias_ids.append(ident) + + fld = rec.fields.get_field("Attachments") + if fld and hasattr(fld, "value"): + refs: List[CKReference] = getattr(fld, "value", []) or [] + for ref in refs: + rn = getattr(ref, "recordName", None) + if rn: + lookup_candidates.append(rn) + alias_ids.append(rn) + + # Deduplicate alias list while preserving order + seen_alias: set[str] = set() + ids: List[str] = [] + for cid in alias_ids: + if cid not in seen_alias: + seen_alias.add(cid) + ids.append(cid) + + LOGGER.debug( + "notes.attachments.candidates alias=%s lookup=%s", + ids, + lookup_candidates, + ) + + if not ids and not lookup_candidates: + return out + + # Fetch metadata for uncached attachments + seen_lookup: set[str] = set() + lookup_ids: List[str] = [] + lookup_source_ids = lookup_candidates if lookup_candidates else ids + for cid in lookup_source_ids: + if cid not in seen_lookup: + seen_lookup.add(cid) + lookup_ids.append(cid) + + missing = [aid for aid in lookup_ids if aid not in self._attachment_meta_cache] + if missing: + desired_keys = [ + "Filename", + "AttachmentUTI", + "UTI", + "Size", + "Thumbnail", + "FirstAttachmentThumbnail", + "PrimaryAsset", + "PreviewImages", + "PreviewAppearances", + "FallbackImage", + "FallbackPDF", + "PaperAssets", + "AttachmentIdentifier", + "attachmentIdentifier", + "Identifier", + ] + try: + resp = self._raw.lookup(missing, desired_keys=desired_keys) + except NotesApiError as err: + LOGGER.debug( + "notes.attachments.lookup_failed ids=%s err=%s", + missing, + err, + ) + resp = CKLookupResponse(records=[]) + + for rec_a in getattr(resp, "records", []): + if not isinstance(rec_a, CKRecord): + continue + + attachment = self._build_attachment_from_record(rec_a) + if not attachment: + LOGGER.debug( + "notes.attachments.unhandled 
record=%s fields=%s", + getattr(rec_a, "recordName", None), + list(rec_a.fields.keys()) + if getattr(rec_a, "fields", None) + else None, + ) + continue + + base_id = attachment.id + aliases = self._attachment_aliases(rec_a, base_id) + for alias in aliases: + self._attachment_meta_cache[alias] = attachment + LOGGER.debug( + "notes.attachments.cached base=%s alias=%s", + base_id, + alias, + ) + + for aid in ids or lookup_ids: + att = self._attachment_meta_cache.get(aid) + if att and att not in out: + out.append(att) + + LOGGER.debug( + "Resolved %d attachments for note %s", + len(out), + getattr(rec, "recordName", None), + ) + return out + + @staticmethod + def _coerce_string(rec: CKRecord, names: List[str]) -> Optional[str]: + for n in names: + v = rec.fields.get_value(n) + if v is None: + continue + if isinstance(v, bytes): + try: + return v.decode("utf-8", "replace") + except Exception: + continue + if isinstance(v, str): + return v + return None + + @staticmethod + def _attachment_aliases(rec: CKRecord, record_name: str) -> List[str]: + aliases = [record_name] + identifier = NotesService._coerce_string( + rec, ["AttachmentIdentifier", "attachmentIdentifier", "Identifier"] + ) + if identifier and identifier not in aliases: + aliases.append(identifier) + return aliases + + def _build_attachment_from_record(self, rec: CKRecord) -> Optional[Attachment]: + aid = getattr(rec, "recordName", None) + if not aid: + return None + + filename = self._coerce_string(rec, ["Filename", "Name", "FileName"]) + uti = self._coerce_string(rec, ["AttachmentUTI", "UTI"]) + size = self._coerce_int(rec, ["Size", "FileSize"]) + + download_url = None + preview_url = None + thumbnail_url = None + + download_url = download_url or self._coerce_asset_url(rec, ["PrimaryAsset"]) + download_url = download_url or self._coerce_asset_url(rec, ["FallbackPDF"]) + download_url = download_url or self._coerce_asset_url_from_list( + rec, "FallbackPDF" + ) + download_url = download_url or 
self._coerce_asset_url_from_list( + rec, "PaperAssets" + ) + + preview_url = preview_url or self._coerce_asset_url_from_list( + rec, "PreviewImages" + ) + preview_url = preview_url or self._coerce_asset_url(rec, ["FallbackImage"]) + + thumbnail_url = thumbnail_url or self._coerce_asset_url( + rec, ["Thumbnail", "FirstAttachmentThumbnail"] + ) + thumbnail_url = thumbnail_url or preview_url + + return Attachment( + id=aid, + filename=filename, + uti=uti, + size=size, + download_url=download_url, + preview_url=preview_url, + thumbnail_url=thumbnail_url, + ) + + @staticmethod + def _coerce_int(rec: CKRecord, names: List[str]) -> Optional[int]: + for n in names: + v = rec.fields.get_value(n) + if isinstance(v, int): + return v + if isinstance(v, float): + return int(v) + if isinstance(v, str) and v.isdigit(): + return int(v) + return None + + @staticmethod + def _coerce_asset_url(rec: CKRecord, names: List[str]) -> Optional[str]: + for n in names: + fld = rec.fields.get_field(n) + if not fld: + continue + # ASSET/ASSETID wrapper -> value.downloadURL if present + val = getattr(fld, "value", None) + url = getattr(val, "downloadURL", None) + if isinstance(url, str): + return url + return None + + @staticmethod + def _coerce_asset_url_from_list(rec: CKRecord, name: str) -> Optional[str]: + fld = rec.fields.get_field(name) + if not fld: + return None + val = getattr(fld, "value", None) + if isinstance(val, (list, tuple)): + for token in val: + url = None + if isinstance(token, dict): + url = token.get("downloadURL") + else: + url = getattr(token, "downloadURL", None) + if isinstance(url, str) and url: + return url + return None + + def _download_attachment_to(self, att: Attachment, directory: str) -> str: + url = att.download_url or att.preview_url or att.thumbnail_url + if not url: + raise NotesApiError("Attachment does not expose a download URL.") + LOGGER.debug("Downloading attachment %s to %s", att.id, directory) + return self._raw.download_asset_to(url, directory) + + 
def _stream_attachment( + self, att: Attachment, *, chunk_size: int = 65536 + ) -> Iterator[bytes]: + url = att.download_url or att.preview_url or att.thumbnail_url + if not url: + raise NotesApiError("Attachment does not expose a download URL.") + LOGGER.debug("Streaming attachment %s chunk_size=%d", att.id, chunk_size) + yield from self._raw.download_asset_stream(url, chunk_size=chunk_size) diff --git a/pyicloud/services/reminders.py b/pyicloud/services/reminders.py deleted file mode 100644 index 6d8c481b..00000000 --- a/pyicloud/services/reminders.py +++ /dev/null @@ -1,128 +0,0 @@ -"""Reminders service.""" - -import time -import uuid -from datetime import datetime -from typing import Any - -from tzlocal import get_localzone_name - -from pyicloud.services.base import BaseService -from pyicloud.session import PyiCloudSession - - -class RemindersService(BaseService): - """The 'Reminders' iCloud service.""" - - def __init__( - self, service_root: str, session: PyiCloudSession, params: dict[str, Any] - ) -> None: - super().__init__(service_root, session, params) - - self.lists = {} - self.collections = {} - - self.refresh() - - def refresh(self) -> None: - """Refresh data.""" - params_reminders = dict(self.params) - params_reminders.update( - { - "clientVersion": "4.0", - "lang": "en-us", - "usertz": get_localzone_name(), - } - ) - - # Open reminders - req = self.session.get( - f"{self.service_root}/rd/startup", params=params_reminders - ) - - data = req.json() - - self.lists = {} - self.collections = {} - for collection in data["Collections"]: - temp = [] - self.collections[collection["title"]] = { - "guid": collection["guid"], - "ctag": collection["ctag"], - } - for reminder in data["Reminders"]: - if reminder["pGuid"] != collection["guid"]: - continue - - if reminder.get("dueDate"): - due = datetime( - reminder["dueDate"][0], - reminder["dueDate"][1], - reminder["dueDate"][2], - reminder["dueDate"][3], - reminder["dueDate"][4], - reminder["dueDate"][5], - ) - 
else: - due = None - - temp.append( - { - "title": reminder["title"], - "desc": reminder.get("description"), - "due": due, - } - ) - self.lists[collection["title"]] = temp - - def post(self, title, description="", collection=None, due_date=None): - """Adds a new reminder.""" - pguid = "tasks" - if collection and collection in self.collections: - pguid = self.collections[collection]["guid"] - - params_reminders = dict(self.params) - params_reminders.update( - {"clientVersion": "4.0", "lang": "en-us", "usertz": get_localzone_name()} - ) - - due_dates = None - if due_date: - due_dates = [ - int(f"{due_date.year}{due_date.month:02}{due_date.day:02}"), - due_date.year, - due_date.month, - due_date.day, - due_date.hour, - due_date.minute, - ] - - req = self.session.post( - f"{self.service_root}/rd/reminders/tasks", - json={ - "Reminders": { - "title": title, - "description": description, - "pGuid": pguid, - "etag": None, - "order": None, - "priority": 0, - "recurrence": None, - "alarms": [], - "startDate": None, - "startDateTz": None, - "startDateIsAllDay": False, - "completedDate": None, - "dueDate": due_dates, - "dueDateIsAllDay": False, - "lastModifiedDate": None, - "createdDate": None, - "isFamily": None, - "createdDateExtended": int(time.time() * 1000), - "guid": str(uuid.uuid4()), - }, - "ClientState": {"Collections": list(self.collections.values())}, - }, - params=params_reminders, - ) - return req.ok diff --git a/pyicloud/services/reminders/__init__.py b/pyicloud/services/reminders/__init__.py new file mode 100644 index 00000000..0b2688e2 --- /dev/null +++ b/pyicloud/services/reminders/__init__.py @@ -0,0 +1,19 @@ +"""Public API for the Reminders service.""" + +from .models import ( + AlarmWithTrigger, + ListRemindersResult, + Reminder, + ReminderChangeEvent, + RemindersList, +) +from .service import RemindersService + +__all__ = [ + "AlarmWithTrigger", + "ListRemindersResult", + "RemindersService", + "Reminder", + "ReminderChangeEvent", + "RemindersList", +] 
diff --git a/pyicloud/services/reminders/_constants.py b/pyicloud/services/reminders/_constants.py new file mode 100644 index 00000000..176233bc --- /dev/null +++ b/pyicloud/services/reminders/_constants.py @@ -0,0 +1,9 @@ +"""Shared CloudKit constants for the Reminders service.""" + +from pyicloud.common.cloudkit import CKZoneID, CKZoneIDReq + +_REMINDERS_ZONE = CKZoneID(zoneName="Reminders", zoneType="REGULAR_CUSTOM_ZONE") +_REMINDERS_ZONE_REQ = CKZoneIDReq( + zoneName="Reminders", + zoneType="REGULAR_CUSTOM_ZONE", +) diff --git a/pyicloud/services/reminders/_mappers.py b/pyicloud/services/reminders/_mappers.py new file mode 100644 index 00000000..0b04e1d8 --- /dev/null +++ b/pyicloud/services/reminders/_mappers.py @@ -0,0 +1,345 @@ +"""CloudKit record mappers for the Reminders service.""" + +from __future__ import annotations + +import json as _json +import logging +from typing import Any, Callable, Optional + +from pyicloud.common.cloudkit import CKRecord + +from ._protocol import ( + _as_raw_id, + _decode_attachment_url, + _decode_crdt_document, + _ref_name, +) +from .client import RemindersApiError +from .models import ( + Alarm, + Hashtag, + ImageAttachment, + LocationTrigger, + Proximity, + RecurrenceFrequency, + RecurrenceRule, + Reminder, + RemindersList, + URLAttachment, +) + +Attachment = URLAttachment | ImageAttachment + + +class RemindersRecordMapper: + """Translate CloudKit records into Reminders domain models.""" + + def __init__( + self, + get_raw: Callable[[], Any], + logger: logging.Logger, + ) -> None: + self._get_raw = get_raw + self._logger = logger + + @staticmethod + def _parse_reminder_ids_payload(payload_text: str, source: str) -> list[str]: + """Decode a JSON array of reminder IDs into normalized raw IDs.""" + try: + payload = _json.loads(payload_text) + except (ValueError, TypeError) as exc: + raise RemindersApiError( + f"Failed to parse {source}", + payload={"source": source, "payload": payload_text}, + ) from exc + + if not 
isinstance(payload, list): + raise RemindersApiError( + f"{source} must decode to a JSON array", + payload={"source": source, "payload": payload}, + ) + + reminder_ids: list[str] = [] + for item in payload: + if not isinstance(item, str): + raise RemindersApiError( + f"{source} must contain only string reminder IDs", + payload={"source": source, "payload": payload}, + ) + reminder_ids.append(_as_raw_id(item, "Reminder")) + + return reminder_ids + + def _reminder_ids_for_list_record(self, rec: CKRecord) -> list[str]: + """Load reminder membership from inline or asset-backed list fields.""" + fields = rec.fields + reminder_ids_raw = fields.get_value("ReminderIDs") + if reminder_ids_raw is not None: + if not isinstance(reminder_ids_raw, str): + raise RemindersApiError( + "ReminderIDs field had unexpected type", + payload={ + "recordName": rec.recordName, + "type": type(reminder_ids_raw).__name__, + }, + ) + return self._parse_reminder_ids_payload( + reminder_ids_raw, + f"List {rec.recordName} ReminderIDs", + ) + + asset = fields.get_value("ReminderIDsAsset") + if asset is None: + return [] + + asset_bytes = getattr(asset, "downloadedData", None) + if asset_bytes is None: + download_url = getattr(asset, "downloadURL", None) + if not download_url: + raise RemindersApiError( + f"List {rec.recordName} ReminderIDsAsset is missing data and downloadURL", + payload={"recordName": rec.recordName}, + ) + asset_bytes = self._get_raw().download_asset_bytes(download_url) + + try: + payload_text = asset_bytes.decode("utf-8") + except UnicodeDecodeError as exc: + raise RemindersApiError( + f"List {rec.recordName} ReminderIDsAsset was not valid UTF-8", + payload={"recordName": rec.recordName}, + ) from exc + + return self._parse_reminder_ids_payload( + payload_text, + f"List {rec.recordName} ReminderIDsAsset", + ) + + def _coerce_text(self, value: Any, *, field_name: str, record_name: str) -> str: + """Normalize CloudKit text-like values into ``str`` for domain models.""" + if value 
is None: + return "" + if isinstance(value, str): + return value + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except UnicodeDecodeError: + self._logger.warning( + "Field %s on %s was undecodable bytes; replacing invalid UTF-8", + field_name, + record_name, + ) + return value.decode("utf-8", errors="replace") + return str(value) + + def record_to_list(self, rec: CKRecord) -> RemindersList: + fields = rec.fields + title = fields.get_value("Name") + color = fields.get_value("Color") + + return RemindersList( + id=rec.recordName, + title=str(title) if title else "Untitled", + color=str(color) if color else None, + count=int(fields.get_value("Count") or 0), + badge_emblem=fields.get_value("BadgeEmblem"), + sorting_style=fields.get_value("SortingStyle"), + is_group=bool(fields.get_value("IsGroup") or 0), + reminder_ids=self._reminder_ids_for_list_record(rec), + record_change_tag=rec.recordChangeTag, + ) + + def record_to_reminder(self, rec: CKRecord) -> Reminder: + fields = rec.fields + created = fields.get_value("CreationDate") + if created is None and rec.created is not None: + created = rec.created.timestamp + + modified = fields.get_value("LastModifiedDate") + if modified is None and rec.modified is not None: + modified = rec.modified.timestamp + + title_doc = fields.get_value("TitleDocument") + title = "Untitled" + if title_doc: + try: + title = self._coerce_text( + _decode_crdt_document(title_doc), + field_name="TitleDocument", + record_name=rec.recordName, + ) + except Exception as exc: # pragma: no cover - defensive fallback + self._logger.warning( + "TitleDocument decode failed for %s: %s", + rec.recordName, + exc, + ) + title = "Error Decoding Title" + + notes_doc = fields.get_value("NotesDocument") + desc = "" + if notes_doc: + try: + desc = self._coerce_text( + _decode_crdt_document(notes_doc), + field_name="NotesDocument", + record_name=rec.recordName, + ) + except Exception as exc: # pragma: no cover - defensive fallback + 
self._logger.warning( + "NotesDocument decode failed for %s: %s", + rec.recordName, + exc, + ) + + return Reminder( + id=rec.recordName, + list_id=_ref_name(fields, "List"), + title=title, + desc=desc, + due_date=fields.get_value("DueDate"), + start_date=fields.get_value("StartDate"), + completed=bool(fields.get_value("Completed") or 0), + completed_date=fields.get_value("CompletionDate"), + priority=int(fields.get_value("Priority") or 0), + flagged=bool(fields.get_value("Flagged") or 0), + all_day=bool(fields.get_value("AllDay") or 0), + deleted=bool(fields.get_value("Deleted") or 0), + time_zone=fields.get_value("TimeZone"), + alarm_ids=[ + _as_raw_id(x, "Alarm") for x in (fields.get_value("AlarmIDs") or []) + ], + hashtag_ids=[ + _as_raw_id(x, "Hashtag") for x in (fields.get_value("HashtagIDs") or []) + ], + attachment_ids=[ + _as_raw_id(x, "Attachment") + for x in (fields.get_value("AttachmentIDs") or []) + ], + recurrence_rule_ids=[ + _as_raw_id(x, "RecurrenceRule") + for x in (fields.get_value("RecurrenceRuleIDs") or []) + ], + parent_reminder_id=_ref_name(fields, "ParentReminder") or None, + created=created, + modified=modified, + record_change_tag=rec.recordChangeTag, + ) + + def record_to_alarm(self, rec: CKRecord) -> Alarm: + fields = rec.fields + return Alarm( + id=rec.recordName, + alarm_uid=fields.get_value("AlarmUID") or "", + reminder_id=_ref_name(fields, "Reminder"), + trigger_id=fields.get_value("TriggerID") or "", + record_change_tag=rec.recordChangeTag, + ) + + def record_to_alarm_trigger(self, rec: CKRecord) -> Optional[LocationTrigger]: + fields = rec.fields + trigger_type = fields.get_value("Type") or "" + alarm_id = _ref_name(fields, "Alarm") + + if trigger_type == "Location": + prox_raw = int(fields.get_value("Proximity") or 1) + try: + proximity = Proximity(prox_raw) + except ValueError: + self._logger.warning( + "Unknown Proximity %d on %s", + prox_raw, + rec.recordName, + ) + proximity = Proximity.ARRIVING + + return LocationTrigger( + 
id=rec.recordName, + alarm_id=alarm_id, + title=fields.get_value("Title") or "", + address=fields.get_value("Address") or "", + latitude=float(fields.get_value("Latitude") or 0.0), + longitude=float(fields.get_value("Longitude") or 0.0), + radius=float(fields.get_value("Radius") or 0.0), + proximity=proximity, + location_uid=fields.get_value("LocationUID") or "", + record_change_tag=rec.recordChangeTag, + ) + + self._logger.warning( + "Unsupported AlarmTrigger type '%s' on %s", + trigger_type, + rec.recordName, + ) + return None + + def record_to_attachment(self, rec: CKRecord) -> Optional[Attachment]: + fields = rec.fields + att_type = fields.get_value("Type") or "" + reminder_id = _ref_name(fields, "Reminder") + + if att_type == "URL": + return URLAttachment( + id=rec.recordName, + reminder_id=reminder_id, + url=_decode_attachment_url(fields.get_value("URL") or ""), + uti=fields.get_value("UTI") or "public.url", + record_change_tag=rec.recordChangeTag, + ) + + if att_type == "Image": + file_asset = fields.get_value("FileAsset") + download_url = "" + if file_asset and hasattr(file_asset, "downloadURL"): + download_url = file_asset.downloadURL or "" + + return ImageAttachment( + id=rec.recordName, + reminder_id=reminder_id, + file_asset_url=download_url, + filename=fields.get_value("FileName") or "", + file_size=int(fields.get_value("FileSize") or 0), + width=int(fields.get_value("Width") or 0), + height=int(fields.get_value("Height") or 0), + uti=fields.get_value("UTI") or "public.jpeg", + record_change_tag=rec.recordChangeTag, + ) + + self._logger.warning( + "Unknown Attachment type '%s' on %s", + att_type, + rec.recordName, + ) + return None + + def record_to_hashtag(self, rec: CKRecord) -> Hashtag: + fields = rec.fields + return Hashtag( + id=rec.recordName, + name=self._coerce_text( + fields.get_value("Name"), + field_name="Name", + record_name=rec.recordName, + ), + reminder_id=_ref_name(fields, "Reminder"), + created=fields.get_value("CreationDate"), + 
record_change_tag=rec.recordChangeTag, + ) + + def record_to_recurrence_rule(self, rec: CKRecord) -> RecurrenceRule: + fields = rec.fields + freq_raw = fields.get_value("Frequency") or 1 + try: + freq = RecurrenceFrequency(freq_raw) + except ValueError: + freq = RecurrenceFrequency.DAILY + return RecurrenceRule( + id=rec.recordName, + reminder_id=_ref_name(fields, "Reminder"), + frequency=freq, + interval=fields.get_value("Interval") or 1, + occurrence_count=fields.get_value("OccurrenceCount") or 0, + first_day_of_week=fields.get_value("FirstDayOfTheWeek") or 0, + record_change_tag=rec.recordChangeTag, + ) diff --git a/pyicloud/services/reminders/_protocol.py b/pyicloud/services/reminders/_protocol.py new file mode 100644 index 00000000..b5b0b31b --- /dev/null +++ b/pyicloud/services/reminders/_protocol.py @@ -0,0 +1,211 @@ +"""Pure protocol helpers for the Reminders service.""" + +from __future__ import annotations + +import base64 +import binascii +import json as _json +import logging +import time +import uuid +import zlib +from urllib.parse import urlparse + +from .protobuf import reminders_pb2, versioned_document_pb2 + +LOGGER = logging.getLogger(__name__) + + +class CRDTDecodeError(ValueError): + """Raised when a Reminders CRDT payload cannot be decoded.""" + + +def _ref_name(fields, key: str) -> str: + """Extract recordName from a REFERENCE field, or return ''.""" + field = fields.get_field(key) + if field and field.value and hasattr(field.value, "recordName"): + return field.value.recordName + return "" + + +def _as_record_name(value: str, prefix: str) -> str: + """Return a record name with the expected prefix (e.g. 
``Alarm/UUID``).""" + if not value: + return value + value = str(value) + token = f"{prefix}/" + if value.startswith(token): + return value + return f"{token}{value}" + + +def _as_raw_id(value: str, prefix: str) -> str: + """Return a raw UUID/id token without the record prefix.""" + if not value: + return value + value = str(value) + token = f"{prefix}/" + if value.startswith(token): + return value[len(token) :] + return value + + +def _looks_like_url(value: str) -> bool: + """Return True for values that already look like a URL.""" + if not value: + return False + + parsed = urlparse(value) + if not parsed.scheme: + return False + if parsed.scheme in {"http", "https"}: + return bool(parsed.netloc) + return bool(parsed.netloc or parsed.path) + + +def _decode_attachment_url(value: str) -> str: + """Decode a URL attachment value, falling back to the raw value.""" + if not value: + return "" + if _looks_like_url(value): + return value + + padding = "=" * ((4 - len(value) % 4) % 4) + try: + decoded = base64.b64decode( + f"{value}{padding}", + validate=True, + ).decode("utf-8") + except (binascii.Error, UnicodeDecodeError, ValueError): + return value + + if _looks_like_url(decoded): + return decoded + return value + + +def _decode_crdt_document(encrypted_value: str | bytes) -> str: + """Decode a CRDT document (TitleDocument or NotesDocument).""" + data = encrypted_value + if isinstance(data, str): + padding = 4 - (len(data) % 4) + if padding != 4: + data += "=" * padding + try: + data = base64.b64decode(data) + except (binascii.Error, ValueError) as exc: + raise CRDTDecodeError("Invalid base64-encoded CRDT document") from exc + + try: + data = zlib.decompress(data) + except zlib.error: + try: + import gzip as _gzip + + data = _gzip.decompress(data) + except OSError as exc: + LOGGER.debug("CRDT decompress skipped: %s (%s)", exc, data[:10]) + + try: + document = versioned_document_pb2.Document() # type: ignore[attr-defined] + document.ParseFromString(data) + if 
document.version: + string_bytes = document.version[0].data + value = reminders_pb2.String() # type: ignore[attr-defined] + value.ParseFromString(string_bytes) + return value.string or "" + except Exception as exc: # pragma: no cover - fallback path + LOGGER.debug("versioned_document.Document parse failed: %s", exc) + + try: + version = versioned_document_pb2.Version() # type: ignore[attr-defined] + version.ParseFromString(data) + if version.data: + value = reminders_pb2.String() # type: ignore[attr-defined] + value.ParseFromString(version.data) + return value.string or "" + except Exception as exc: # pragma: no cover - fallback path + LOGGER.debug("versioned_document.Version parse failed: %s", exc) + + try: + value = reminders_pb2.String() # type: ignore[attr-defined] + value.ParseFromString(data) + if value.string: + return value.string + except Exception as exc: # pragma: no cover - legacy fallback path + LOGGER.debug("bare String parse failed: %s", exc) + + raise CRDTDecodeError("Unable to decode CRDT document") + + +def _encode_crdt_document(text: str) -> str: + """Encode a string into an Apple versioned topotext CRDT document.""" + text_length = len(text) if text else 0 + replica_uuid = bytes.fromhex("d46bcae41b8766c18d75efe35c9145c3") + clock_max = 0xFFFF_FFFF + + value = reminders_pb2.String() # type: ignore[attr-defined] + value.string = text + + sentinel = value.substring.add() + sentinel.charID.replicaID = 0 + sentinel.charID.clock = 0 + sentinel.length = 0 + sentinel.timestamp.replicaID = 0 + sentinel.timestamp.clock = 0 + sentinel.child.append(1) + + if text_length > 0: + content = value.substring.add() + content.charID.replicaID = 1 + content.charID.clock = 0 + content.length = text_length + content.timestamp.replicaID = 1 + content.timestamp.clock = 0 + content.child.append(2) + + terminal = value.substring.add() + terminal.charID.replicaID = 0 + terminal.charID.clock = clock_max + terminal.length = 0 + terminal.timestamp.replicaID = 0 + 
terminal.timestamp.clock = clock_max + + timestamp_clock = value.timestamp.clock.add() + timestamp_clock.replicaUUID = replica_uuid + content_clock = timestamp_clock.replicaClock.add() + content_clock.clock = text_length + sentinel_clock = timestamp_clock.replicaClock.add() + sentinel_clock.clock = 1 + + if text_length > 0: + attribute_run = value.attributeRun.add() + attribute_run.length = text_length + + string_bytes = value.SerializeToString() + + version = versioned_document_pb2.Version() # type: ignore[attr-defined] + version.serializationVersion = 0 + version.minimumSupportedVersion = 0 + version.data = string_bytes + + document = versioned_document_pb2.Document() # type: ignore[attr-defined] + document.serializationVersion = 0 + document.version.append(version) + doc_bytes = document.SerializeToString() + + compressed = zlib.compress(doc_bytes) + return base64.b64encode(compressed).decode("utf-8") + + +def _generate_resolution_token_map(fields_modified: list[str]) -> str: + """Generate a ResolutionTokenMap for a set of modified fields.""" + apple_epoch = time.time() - 978307200.0 + tokens = {} + for field_name in fields_modified: + tokens[field_name] = { + "counter": 1, + "modificationTime": apple_epoch, + "replicaID": str(uuid.uuid4()).upper(), + } + return _json.dumps({"map": tokens}, separators=(",", ":")) diff --git a/pyicloud/services/reminders/_reads.py b/pyicloud/services/reminders/_reads.py new file mode 100644 index 00000000..57027827 --- /dev/null +++ b/pyicloud/services/reminders/_reads.py @@ -0,0 +1,469 @@ +"""Read-side orchestration for the Reminders service.""" + +from __future__ import annotations + +import logging +from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar + +from pyicloud.common.cloudkit import ( + CKErrorItem, + CKFVInt64, + CKFVReference, + CKQueryFilterBy, + CKQueryObject, + CKRecord, + CKReference, + CKTombstoneRecord, + CKZoneChangesZone, + CKZoneChangesZoneReq, +) + +from ._constants import 
    def _iter_zone_change_pages(
        self,
        *,
        desired_record_types: Optional[List[str]],
        desired_keys: Optional[List[str]] = None,
        sync_token: Optional[str] = None,
        reverse: Optional[bool] = None,
    ) -> Iterable[CKZoneChangesZone]:
        """Yield paged /changes/zone results, advancing the zone sync token.

        Args:
            desired_record_types: Record types to request from the server.
            desired_keys: Field names to request, or None for all.
            sync_token: Token to resume from; None starts from scratch.
            reverse: Server-side ordering flag, passed through as-is.

        Yields:
            Each ``CKZoneChangesZone`` page returned by the server, in order.
        """
        next_sync_token = sync_token
        more_coming = True

        while more_coming:
            response = self._get_raw().changes(
                zone_req=CKZoneChangesZoneReq(
                    zoneID=_REMINDERS_ZONE,
                    desiredRecordTypes=desired_record_types,
                    desiredKeys=desired_keys,
                    reverse=reverse,
                    syncToken=next_sync_token,
                )
            )
            # No zones at all means there is nothing (more) to page through.
            if not response.zones:
                return

            # Assume this was the last page unless any zone says otherwise.
            more_coming = False
            for zone in response.zones:
                yield zone
                # Each page carries the token to resume from next request.
                next_sync_token = zone.syncToken
                more_coming = more_coming or bool(zone.moreComing)
    def iter_changes(
        self, *, since: Optional[str] = None
    ) -> Iterable[ReminderChangeEvent]:
        """Iterate reminder changes since an optional sync token.

        Args:
            since: Zone sync token from a previous run; None replays the
                change feed from the beginning.

        Yields:
            ``ReminderChangeEvent`` items: "updated" for live records,
            "deleted" for soft-deleted records and tombstones.

        Raises:
            RemindersApiError: when the server reports a per-record error
                in the change feed.
        """
        for zone in self._iter_zone_change_pages(
            desired_record_types=["Reminder"],
            sync_token=since,
            reverse=False,
        ):
            for rec in zone.records:
                if isinstance(rec, CKRecord):
                    if rec.recordType != "Reminder":
                        continue

                    reminder = self._mapper.record_to_reminder(rec)
                    # Soft-deleted reminders still arrive as full records;
                    # surface them to the caller as deletions.
                    evt_type = "deleted" if reminder.deleted else "updated"
                    yield ReminderChangeEvent(
                        type=evt_type,
                        reminder_id=reminder.id,
                        reminder=reminder,
                    )
                    continue

                if isinstance(rec, CKTombstoneRecord):
                    # Tombstones carry only the record name, no payload.
                    yield ReminderChangeEvent(
                        type="deleted",
                        reminder_id=rec.recordName,
                        reminder=None,
                    )
                    continue

                if isinstance(rec, CKErrorItem):
                    record_name = rec.recordName or ""
                    reason = rec.reason or "no reason provided"
                    raise RemindersApiError(
                        "Iterating reminder changes failed for "
                        f"{record_name}: {rec.serverErrorCode} ({reason})",
                        payload={
                            "recordName": rec.recordName,
                            "serverErrorCode": rec.serverErrorCode,
                            "reason": rec.reason,
                        },
                    )
def get(self, reminder_id: str) -> Reminder: + """Fetch a single reminder by ID.""" + record_name = _as_record_name(reminder_id, "Reminder") + resp = self._get_raw().lookup( + record_names=[record_name], + zone_id=_REMINDERS_ZONE_REQ, + ) + _assert_read_success(resp.records, "Lookup reminder") + + target = None + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == record_name: + target = rec + break + + if not target: + raise LookupError(f"Reminder not found: {record_name}") + + return self._mapper.record_to_reminder(target) + + def _lookup_related_records( + self, + *, + raw_ids: List[str], + prefix: str, + record_type: str, + mapper: Callable[[CKRecord], Optional[TRelated]], + operation_name: str, + ) -> List[TRelated]: + """Fetch and map linked child records while preserving lookup order.""" + if not raw_ids: + return [] + + resp = self._get_raw().lookup( + record_names=[_as_record_name(uid, prefix) for uid in raw_ids], + zone_id=_REMINDERS_ZONE_REQ, + ) + _assert_read_success(resp.records, operation_name) + + mapped_records: List[TRelated] = [] + for rec in resp.records: + if not isinstance(rec, CKRecord) or rec.recordType != record_type: + continue + mapped = mapper(rec) + if mapped is not None: + mapped_records.append(mapped) + return mapped_records + + @staticmethod + def _scope_related_records( + records: Dict[str, TRelated], + *, + relation_getter: Callable[[TRelated], Optional[str]], + allowed_ids: set[str], + ) -> Dict[str, TRelated]: + """Filter a related-record map down to rows linked to allowed parent IDs.""" + return { + record_id: record + for record_id, record in records.items() + if relation_getter(record) in allowed_ids + } + + def _ingest_compound_record( + self, + rec: CKRecord, + *, + reminders_map: Dict[str, Reminder], + alarms: Dict[str, Alarm], + triggers: Dict[str, LocationTrigger], + attachments: Dict[str, Attachment], + hashtags: Dict[str, Hashtag], + recurrence_rules: Dict[str, RecurrenceRule], + ) -> None: + 
"""Route one compound reminderList record into its typed collection.""" + record_type = rec.recordType + if record_type == "Reminder": + reminder = self._mapper.record_to_reminder(rec) + reminders_map[reminder.id] = reminder + return + + if record_type == "Alarm": + alarm = self._mapper.record_to_alarm(rec) + alarms[alarm.id] = alarm + return + + if record_type == "AlarmTrigger": + trigger = self._mapper.record_to_alarm_trigger(rec) + if trigger: + triggers[trigger.id] = trigger + return + + if record_type == "Attachment": + attachment = self._mapper.record_to_attachment(rec) + if attachment: + attachments[attachment.id] = attachment + return + + if record_type == "Hashtag": + hashtag = self._mapper.record_to_hashtag(rec) + hashtags[hashtag.id] = hashtag + return + + if record_type == "RecurrenceRule": + recurrence_rule = self._mapper.record_to_recurrence_rule(rec) + recurrence_rules[recurrence_rule.id] = recurrence_rule + + def list_reminders( + self, + list_id: str, + include_completed: bool = False, + results_limit: int = 200, + ) -> ListRemindersResult: + """Fetch all records for a list using the compound ``reminderList`` query.""" + query = CKQueryObject( + recordType="reminderList", + filterBy=[ + CKQueryFilterBy( + comparator="EQUALS", + fieldName="List", + fieldValue=CKFVReference( + type="REFERENCE", + value=CKReference(recordName=list_id, action="VALIDATE"), + ), + ), + CKQueryFilterBy( + comparator="EQUALS", + fieldName="includeCompleted", + fieldValue=CKFVInt64( + type="INT64", + value=1 if include_completed else 0, + ), + ), + CKQueryFilterBy( + comparator="EQUALS", + fieldName="LookupValidatingReference", + fieldValue=CKFVInt64(type="INT64", value=1), + ), + ], + ) + + reminders_map: Dict[str, Reminder] = {} + alarms: Dict[str, Alarm] = {} + triggers: Dict[str, LocationTrigger] = {} + attachments: Dict[str, Attachment] = {} + hashtags: Dict[str, Hashtag] = {} + recurrence_rules: Dict[str, RecurrenceRule] = {} + + continuation: Optional[str] = None + 
    def alarms_for(self, reminder: Reminder) -> List[AlarmWithTrigger]:
        """Fetch alarms + triggers for a reminder via lookup.

        Performs up to two lookups: one for the reminder's alarm records,
        and a second for any alarm triggers referenced by those alarms.
        Returns one ``AlarmWithTrigger`` per alarm; ``trigger`` is None
        when the alarm has no trigger or its trigger could not be mapped.
        """
        if not reminder.alarm_ids:
            return []

        resp = self._get_raw().lookup(
            record_names=[_as_record_name(uid, "Alarm") for uid in reminder.alarm_ids],
            zone_id=_REMINDERS_ZONE_REQ,
        )
        _assert_read_success(resp.records, "Lookup alarms")

        alarms = []
        for rec in resp.records:
            if isinstance(rec, CKRecord) and rec.recordType == "Alarm":
                alarm = self._mapper.record_to_alarm(rec)
                alarms.append(alarm)

        # Second hop: collect the trigger record names the alarms point at.
        trigger_ids = [
            _as_record_name(alarm.trigger_id, "AlarmTrigger")
            for alarm in alarms
            if alarm.trigger_id
        ]
        trigger_map = {}
        if trigger_ids:
            trigger_response = self._get_raw().lookup(
                record_names=trigger_ids,
                zone_id=_REMINDERS_ZONE_REQ,
            )
            _assert_read_success(trigger_response.records, "Lookup alarm triggers")
            for rec in trigger_response.records:
                if isinstance(rec, CKRecord) and rec.recordType == "AlarmTrigger":
                    trigger = self._mapper.record_to_alarm_trigger(rec)
                    # Unsupported trigger types map to None and are skipped.
                    if trigger:
                        trigger_map[_as_record_name(trigger.id, "AlarmTrigger")] = (
                            trigger
                        )
        # Pair each alarm with its trigger (if any), preserving alarm order.
        return [
            AlarmWithTrigger(
                alarm=alarm,
                trigger=(
                    trigger_map.get(_as_record_name(alarm.trigger_id, "AlarmTrigger"))
                    if alarm.trigger_id
                    else None
                ),
            )
            for alarm in alarms
        ]
raw_ids=reminder.recurrence_rule_ids, + prefix="RecurrenceRule", + record_type="RecurrenceRule", + mapper=self._mapper.record_to_recurrence_rule, + operation_name="Lookup recurrence rules", + ) diff --git a/pyicloud/services/reminders/_support.py b/pyicloud/services/reminders/_support.py new file mode 100644 index 00000000..b81a0987 --- /dev/null +++ b/pyicloud/services/reminders/_support.py @@ -0,0 +1,71 @@ +"""Shared CloudKit support helpers for the Reminders service.""" + +from __future__ import annotations + +from typing import Any, Iterable, Optional + +from pyicloud.common.cloudkit import CKErrorItem, CKModifyResponse, CKRecord + +from .client import RemindersApiError + + +def _raise_record_errors(records: Iterable[Any], operation_name: str) -> None: + """Raise when CloudKit returned one or more per-record errors.""" + errors = [item for item in records if isinstance(item, CKErrorItem)] + if not errors: + return + + details = [] + for err in errors: + record_name = err.recordName or "" + reason = err.reason or "no reason provided" + details.append(f"{record_name}: {err.serverErrorCode} ({reason})") + + raise RemindersApiError( + f"{operation_name} failed for {len(errors)} record(s): " + "; ".join(details), + payload=[ + { + "recordName": e.recordName, + "serverErrorCode": e.serverErrorCode, + "reason": e.reason, + } + for e in errors + ], + ) + + +def _assert_modify_success(response: CKModifyResponse, operation_name: str) -> None: + """Raise when CloudKit accepted the request but rejected one or more records.""" + _raise_record_errors(response.records, operation_name) + + +def _assert_read_success(records: Iterable[Any], operation_name: str) -> None: + """Raise when a read endpoint returned one or more per-record errors.""" + _raise_record_errors(records, operation_name) + + +def _response_record_change_tag( + response: CKModifyResponse, + record_name: str, +) -> Optional[str]: + """Return the latest recordChangeTag for a record from a modify response.""" + 
change_tag: Optional[str] = None + for item in response.records: + if not isinstance(item, CKRecord): + continue + if item.recordName != record_name: + continue + if item.recordChangeTag: + change_tag = item.recordChangeTag + return change_tag + + +def _refresh_record_change_tag( + response: CKModifyResponse, + model_obj: Any, + record_name: str, +) -> None: + """Hydrate an in-memory model object's record_change_tag from modify ack.""" + change_tag = _response_record_change_tag(response, record_name) + if change_tag: + setattr(model_obj, "record_change_tag", change_tag) diff --git a/pyicloud/services/reminders/_writes.py b/pyicloud/services/reminders/_writes.py new file mode 100644 index 00000000..6a354cda --- /dev/null +++ b/pyicloud/services/reminders/_writes.py @@ -0,0 +1,1093 @@ +"""Write-side orchestration for the Reminders service.""" + +from __future__ import annotations + +import logging +import time +import uuid +from datetime import datetime, timezone +from typing import Any, Callable, Dict, Optional + +from pyicloud.common.cloudkit import ( + CKModifyOperation, + CKModifyResponse, + CKRecord, + CKWriteParent, + CKWriteRecord, +) + +from ._constants import _REMINDERS_ZONE_REQ +from ._mappers import Attachment, RemindersRecordMapper +from ._protocol import ( + _as_raw_id, + _as_record_name, + _encode_crdt_document, + _generate_resolution_token_map, +) +from ._support import ( + _assert_modify_success, + _assert_read_success, + _refresh_record_change_tag, + _response_record_change_tag, +) +from .models import ( + Alarm, + Hashtag, + ImageAttachment, + LocationTrigger, + Proximity, + RecurrenceFrequency, + RecurrenceRule, + Reminder, + URLAttachment, +) + + +class RemindersWriteAPI: + """Encapsulates mutation behavior for the Reminders service.""" + + def __init__( + self, + get_raw: Callable[[], Any], + mapper: RemindersRecordMapper, + logger: logging.Logger, + ) -> None: + self._get_raw = get_raw + self._mapper = mapper + self._logger = logger + + 
@staticmethod + def _reminder_record_name(reminder_id: str) -> str: + """Normalize reminder IDs so writes accept shorthand and canonical forms.""" + return _as_record_name(reminder_id, "Reminder") + + @staticmethod + def _validated_location_trigger( + *, + trigger_id: str, + alarm_id: str, + title: str, + address: str, + latitude: float, + longitude: float, + radius: float, + proximity: Proximity, + location_uid: str, + ) -> LocationTrigger: + """Validate geofence data before sending a remote write.""" + return LocationTrigger( + id=trigger_id, + alarm_id=alarm_id, + title=title, + address=address, + latitude=latitude, + longitude=longitude, + radius=radius, + proximity=proximity, + location_uid=location_uid, + ) + + @staticmethod + def _validated_image_attachment( + attachment: ImageAttachment, + *, + uti: Optional[str] = None, + filename: Optional[str] = None, + file_size: Optional[int] = None, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> ImageAttachment: + """Validate image metadata updates before sending a remote write.""" + return ImageAttachment( + id=attachment.id, + reminder_id=attachment.reminder_id, + file_asset_url=attachment.file_asset_url, + filename=attachment.filename if filename is None else filename, + file_size=attachment.file_size if file_size is None else int(file_size), + width=attachment.width if width is None else int(width), + height=attachment.height if height is None else int(height), + uti=attachment.uti if uti is None else uti, + record_change_tag=attachment.record_change_tag, + ) + + @classmethod + def _validated_recurrence_rule( + cls, + *, + recurrence_id: str, + reminder_id: str, + frequency: RecurrenceFrequency, + interval: int, + occurrence_count: int, + first_day_of_week: int, + record_change_tag: Optional[str] = None, + ) -> RecurrenceRule: + """Validate recurrence values before mutating remote state.""" + return RecurrenceRule( + id=_as_record_name(recurrence_id, "RecurrenceRule"), + 
reminder_id=cls._reminder_record_name(reminder_id), + frequency=frequency, + interval=interval, + occurrence_count=occurrence_count, + first_day_of_week=first_day_of_week, + record_change_tag=record_change_tag, + ) + + @staticmethod + def _completion_datetime( + *, + completed: bool, + completed_date: Optional[datetime], + now_ms: int, + ) -> Optional[datetime]: + """Resolve the completion timestamp to persist for a reminder write.""" + if not completed: + return None + if completed_date is None: + return datetime.fromtimestamp(now_ms / 1000.0, tz=timezone.utc) + if completed_date.tzinfo is None: + return completed_date.replace(tzinfo=timezone.utc) + return completed_date + + @staticmethod + def _write_record( + *, + record_name: str, + record_type: str, + fields: Dict[str, Any], + record_change_tag: Optional[str] = None, + parent_record_name: Optional[str] = None, + ) -> CKWriteRecord: + """Build a typed CloudKit modify-record payload.""" + parent = None + if parent_record_name: + parent = CKWriteParent(recordName=parent_record_name) + + return CKWriteRecord( + recordName=record_name, + recordType=record_type, + recordChangeTag=record_change_tag, + fields=fields, + parent=parent, + ) + + def _build_linked_ids_update_op( + self, + *, + reminder: Reminder, + field_name: str, + token_field_name: str, + raw_ids: list[str], + ) -> CKModifyOperation: + """Build a Reminder update operation for an ID-list field.""" + now_ms = int(time.time() * 1000) + token_map = _generate_resolution_token_map( + [token_field_name, "lastModifiedDate"] + ) + reminder_record_name = self._reminder_record_name(reminder.id) + return CKModifyOperation( + operationType="update", + record=self._write_record( + record_name=reminder_record_name, + record_type="Reminder", + record_change_tag=reminder.record_change_tag, + fields={ + field_name: {"type": "STRING_LIST", "value": raw_ids}, + "ResolutionTokenMap": {"type": "STRING", "value": token_map}, + "LastModifiedDate": {"type": "TIMESTAMP", 
"value": now_ms}, + }, + ), + ) + + def _submit_single_record_update( + self, + *, + operation_name: str, + record_name: str, + record_type: str, + record_change_tag: Optional[str], + fields: Dict[str, Any], + model_obj: Any, + ) -> CKModifyResponse: + """Run a one-record update and refresh the local object's change tag.""" + op = CKModifyOperation( + operationType="update", + record=self._write_record( + record_name=record_name, + record_type=record_type, + record_change_tag=record_change_tag, + fields=fields, + ), + ) + modify_response = self._get_raw().modify( + operations=[op], + zone_id=_REMINDERS_ZONE_REQ, + ) + _assert_modify_success(modify_response, operation_name) + _refresh_record_change_tag(modify_response, model_obj, record_name) + return modify_response + + def _lookup_created_reminder(self, record_name: str) -> Reminder: + """Fetch a freshly-created reminder by record name.""" + resp = self._get_raw().lookup( + record_names=[record_name], + zone_id=_REMINDERS_ZONE_REQ, + ) + _assert_read_success(resp.records, "Lookup reminder") + + for rec in resp.records: + if isinstance(rec, CKRecord) and rec.recordName == record_name: + return self._mapper.record_to_reminder(rec) + + raise LookupError(f"Reminder not found: {record_name}") + + def _create_linked_child( + self, + *, + reminder: Reminder, + reminder_ids_attr: str, + prefix: str, + record_type: str, + field_name: str, + token_field_name: str, + child_fields: Dict[str, Any], + operation_name: str, + ) -> tuple[str, CKModifyResponse]: + """Create a linked child record and update the reminder ID list.""" + child_uuid = str(uuid.uuid4()).upper() + child_record_name = f"{prefix}/{child_uuid}" + reminder_record_name = self._reminder_record_name(reminder.id) + linked_ids = [ + _as_raw_id(x, prefix) for x in (getattr(reminder, reminder_ids_attr) or []) + ] + linked_ids.append(child_uuid) + + reminder_op = self._build_linked_ids_update_op( + reminder=reminder, + field_name=field_name, + 
token_field_name=token_field_name, + raw_ids=linked_ids, + ) + child_op = CKModifyOperation( + operationType="create", + record=self._write_record( + record_name=child_record_name, + record_type=record_type, + fields=child_fields, + parent_record_name=reminder_record_name, + ), + ) + + modify_response = self._get_raw().modify( + operations=[reminder_op, child_op], + zone_id=_REMINDERS_ZONE_REQ, + atomic=True, + ) + _assert_modify_success(modify_response, operation_name) + + setattr(reminder, reminder_ids_attr, linked_ids) + _refresh_record_change_tag(modify_response, reminder, reminder_record_name) + return child_record_name, modify_response + + def _delete_linked_child( + self, + *, + reminder: Reminder, + reminder_ids_attr: str, + child: Any, + prefix: str, + record_type: str, + field_name: str, + token_field_name: str, + operation_name: str, + ) -> None: + """Soft-delete a linked child record and update the reminder ID list.""" + child_record_name = _as_record_name(getattr(child, "id"), prefix) + child_uuid = _as_raw_id(child_record_name, prefix) + reminder_record_name = self._reminder_record_name(reminder.id) + child_reminder_id = getattr(child, "reminder_id", None) + if child_reminder_id and ( + self._reminder_record_name(child_reminder_id) != reminder_record_name + ): + raise ValueError( + f"{prefix} child {child_record_name} is linked to " + f"{self._reminder_record_name(child_reminder_id)}, not " + f"{reminder_record_name}" + ) + + linked_ids = [ + _as_raw_id(x, prefix) + for x in (getattr(reminder, reminder_ids_attr) or []) + if _as_raw_id(x, prefix) != child_uuid + ] + reminder_op = self._build_linked_ids_update_op( + reminder=reminder, + field_name=field_name, + token_field_name=token_field_name, + raw_ids=linked_ids, + ) + + child_fields: Dict[str, Any] = { + "Deleted": {"type": "INT64", "value": 1}, + } + if child_reminder_id: + child_fields["Reminder"] = { + "type": "REFERENCE", + "value": { + "recordName": 
self._reminder_record_name(child_reminder_id), + "action": "VALIDATE", + }, + } + + child_op = CKModifyOperation( + operationType="update", + record=self._write_record( + record_name=child_record_name, + record_type=record_type, + record_change_tag=getattr(child, "record_change_tag", None), + fields=child_fields, + ), + ) + + modify_response = self._get_raw().modify( + operations=[reminder_op, child_op], + zone_id=_REMINDERS_ZONE_REQ, + atomic=True, + ) + _assert_modify_success(modify_response, operation_name) + + setattr(reminder, reminder_ids_attr, linked_ids) + _refresh_record_change_tag(modify_response, reminder, reminder_record_name) + _refresh_record_change_tag(modify_response, child, child_record_name) + + def create( + self, + list_id: str, + title: str, + desc: str = "", + completed: bool = False, + due_date: Optional[datetime] = None, + priority: int = 0, + flagged: bool = False, + all_day: bool = False, + time_zone: Optional[str] = None, + parent_reminder_id: Optional[str] = None, + ) -> Reminder: + """Create a new Reminder inside a List, optionally as a child reminder.""" + reminder_uuid = str(uuid.uuid4()).upper() + record_name = f"Reminder/{reminder_uuid}" + + title_doc = _encode_crdt_document(title) + notes_doc = _encode_crdt_document(desc) + + fields_mod = [ + "allDay", + "titleDocument", + "notesDocument", + "parentReminder", + "priority", + "icsDisplayOrder", + "creationDate", + "list", + "flagged", + "completed", + "completionDate", + "lastModifiedDate", + "recurrenceRuleIDs", + "dueDate", + "timeZone", + ] + token_map = _generate_resolution_token_map(fields_mod) + now_ms = int(time.time() * 1000) + + record_fields: dict[str, Any] = { + "AllDay": {"type": "INT64", "value": 1 if all_day else 0}, + "Completed": {"type": "INT64", "value": 1 if completed else 0}, + "CompletionDate": { + "type": "TIMESTAMP", + "value": now_ms if completed else None, + }, + "CreationDate": {"type": "TIMESTAMP", "value": now_ms}, + "Deleted": {"type": "INT64", "value": 
0}, + "Flagged": {"type": "INT64", "value": 1 if flagged else 0}, + "Imported": {"type": "INT64", "value": 0}, + "LastModifiedDate": {"type": "TIMESTAMP", "value": now_ms}, + "List": { + "type": "REFERENCE", + "value": {"recordName": list_id, "action": "VALIDATE"}, + }, + "NotesDocument": {"type": "STRING", "value": notes_doc}, + "Priority": {"type": "INT64", "value": priority}, + "ResolutionTokenMap": {"type": "STRING", "value": token_map}, + "TitleDocument": {"type": "STRING", "value": title_doc}, + } + + if due_date is not None: + if due_date.tzinfo is None: + due_date = due_date.replace(tzinfo=timezone.utc) + record_fields["DueDate"] = { + "type": "TIMESTAMP", + "value": int(due_date.timestamp() * 1000), + } + + if time_zone: + record_fields["TimeZone"] = {"type": "STRING", "value": time_zone} + + if parent_reminder_id: + record_fields["ParentReminder"] = { + "type": "REFERENCE", + "value": { + "recordName": _as_record_name(parent_reminder_id, "Reminder"), + "action": "VALIDATE", + }, + } + + op = CKModifyOperation( + operationType="create", + record=self._write_record( + record_name=record_name, + record_type="Reminder", + fields=record_fields, + parent_record_name=list_id, + ), + ) + + modify_response = self._get_raw().modify( + operations=[op], + zone_id=_REMINDERS_ZONE_REQ, + ) + _assert_modify_success(modify_response, "Create reminder") + + return self._lookup_created_reminder(record_name) + + def update(self, reminder: Reminder) -> None: + """Update an existing reminder.""" + reminder_record_name = self._reminder_record_name(reminder.id) + title_doc = _encode_crdt_document(reminder.title) + notes_doc = _encode_crdt_document(reminder.desc or "") + now_ms = int(time.time() * 1000) + + fields_mod = [ + "titleDocument", + "notesDocument", + "completed", + "completionDate", + "priority", + "flagged", + "allDay", + "lastModifiedDate", + ] + completion_date = self._completion_datetime( + completed=reminder.completed, + completed_date=reminder.completed_date, + 
now_ms=now_ms, + ) + completion_date_ms = ( + int(completion_date.timestamp() * 1000) + if completion_date is not None + else None + ) + + fields: dict[str, Any] = { + "TitleDocument": {"type": "STRING", "value": title_doc}, + "NotesDocument": {"type": "STRING", "value": notes_doc}, + "Completed": {"type": "INT64", "value": 1 if reminder.completed else 0}, + "CompletionDate": { + "type": "TIMESTAMP", + "value": completion_date_ms, + }, + "Priority": {"type": "INT64", "value": reminder.priority}, + "Flagged": {"type": "INT64", "value": 1 if reminder.flagged else 0}, + "AllDay": {"type": "INT64", "value": 1 if reminder.all_day else 0}, + "LastModifiedDate": {"type": "TIMESTAMP", "value": now_ms}, + } + if reminder.due_date is not None: + due_date = reminder.due_date + if due_date.tzinfo is None: + due_date = due_date.replace(tzinfo=timezone.utc) + reminder.due_date = due_date + fields["DueDate"] = { + "type": "TIMESTAMP", + "value": int(due_date.timestamp() * 1000), + } + else: + fields["DueDate"] = {"type": "TIMESTAMP", "value": None} + fields_mod.append("dueDate") + if reminder.time_zone: + fields["TimeZone"] = {"type": "STRING", "value": reminder.time_zone} + else: + fields["TimeZone"] = {"type": "STRING", "value": None} + fields_mod.append("timeZone") + if reminder.parent_reminder_id: + fields["ParentReminder"] = { + "type": "REFERENCE", + "value": { + "recordName": _as_record_name( + reminder.parent_reminder_id, + "Reminder", + ), + "action": "VALIDATE", + }, + } + else: + fields["ParentReminder"] = {"type": "REFERENCE", "value": None} + fields_mod.append("parentReminder") + fields["ResolutionTokenMap"] = { + "type": "STRING", + "value": _generate_resolution_token_map(fields_mod), + } + + self._submit_single_record_update( + operation_name="Update reminder", + record_name=reminder_record_name, + record_type="Reminder", + record_change_tag=reminder.record_change_tag, + fields=fields, + model_obj=reminder, + ) + reminder.completed_date = completion_date + 
reminder.modified = datetime.fromtimestamp(now_ms / 1000.0, tz=timezone.utc) + + def delete(self, reminder: Reminder) -> None: + """Delete a reminder using soft-update (Deleted: 1).""" + reminder_record_name = self._reminder_record_name(reminder.id) + fields_mod = ["deleted", "lastModifiedDate"] + token_map = _generate_resolution_token_map(fields_mod) + now_ms = int(time.time() * 1000) + + fields: dict[str, Any] = { + "Deleted": {"type": "INT64", "value": 1}, + "ResolutionTokenMap": {"type": "STRING", "value": token_map}, + "LastModifiedDate": {"type": "TIMESTAMP", "value": now_ms}, + } + + self._submit_single_record_update( + operation_name="Delete reminder", + record_name=reminder_record_name, + record_type="Reminder", + record_change_tag=reminder.record_change_tag, + fields=fields, + model_obj=reminder, + ) + reminder.deleted = True + reminder.modified = datetime.fromtimestamp(now_ms / 1000.0, tz=timezone.utc) + + def add_location_trigger( + self, + reminder: Reminder, + title: str = "", + address: str = "", + latitude: float = 0.0, + longitude: float = 0.0, + radius: float = 100.0, + proximity: Proximity = Proximity.ARRIVING, + ) -> tuple[Alarm, LocationTrigger]: + """Attach a location-based alarm trigger to an existing Reminder.""" + alarm_uuid = str(uuid.uuid4()).upper() + trigger_uuid = str(uuid.uuid4()).upper() + location_uid = str(uuid.uuid4()).upper() + alarm_record_name = f"Alarm/{alarm_uuid}" + trigger_record_name = f"AlarmTrigger/{trigger_uuid}" + reminder_record_name = self._reminder_record_name(reminder.id) + now_ms = int(time.time() * 1000) + + apple_epoch_secs = time.time() - 978307200.0 + due_date_nonce = 100_000_000_000 + apple_epoch_secs + trigger = self._validated_location_trigger( + trigger_id=trigger_record_name, + alarm_id=alarm_record_name, + title=title, + address=address, + latitude=latitude, + longitude=longitude, + radius=radius, + proximity=proximity, + location_uid=location_uid, + ) + + existing_alarm_ids = [ + _as_raw_id(x, "Alarm") 
for x in list(reminder.alarm_ids or []) + ] + existing_alarm_ids.append(alarm_uuid) + token_map = _generate_resolution_token_map(["alarmIDs", "lastModifiedDate"]) + reminder_op = CKModifyOperation( + operationType="update", + record=self._write_record( + record_name=reminder_record_name, + record_type="Reminder", + record_change_tag=reminder.record_change_tag, + fields={ + "AlarmIDs": {"type": "STRING_LIST", "value": existing_alarm_ids}, + "ResolutionTokenMap": {"type": "STRING", "value": token_map}, + "LastModifiedDate": {"type": "TIMESTAMP", "value": now_ms}, + }, + ), + ) + + alarm_op = CKModifyOperation( + operationType="create", + record=self._write_record( + record_name=alarm_record_name, + record_type="Alarm", + fields={ + "AlarmUID": {"value": alarm_uuid, "type": "STRING"}, + "Deleted": {"value": 0, "type": "INT64"}, + "Imported": {"value": 0, "type": "INT64"}, + "Reminder": { + "type": "REFERENCE", + "value": { + "recordName": reminder_record_name, + "action": "VALIDATE", + }, + }, + "TriggerID": {"value": trigger_uuid, "type": "STRING"}, + "DueDateResolutionTokenAsNonce": { + "value": due_date_nonce, + "type": "DOUBLE", + }, + }, + parent_record_name=reminder_record_name, + ), + ) + + trigger_op = CKModifyOperation( + operationType="create", + record=self._write_record( + record_name=trigger_record_name, + record_type="AlarmTrigger", + fields={ + "Address": { + "value": address, + "isEncrypted": True, + "type": "STRING", + }, + "Alarm": { + "type": "REFERENCE", + "value": { + "recordName": alarm_record_name, + "action": "VALIDATE", + }, + }, + "Deleted": {"value": 0, "type": "INT64"}, + "Latitude": { + "value": trigger.latitude, + "isEncrypted": True, + "type": "DOUBLE", + }, + "LocationUID": {"value": location_uid, "type": "STRING"}, + "Longitude": { + "value": trigger.longitude, + "isEncrypted": True, + "type": "DOUBLE", + }, + "Proximity": {"value": int(trigger.proximity), "type": "INT64"}, + "Radius": {"value": trigger.radius, "type": "DOUBLE"}, + 
"ReferenceFrameString": { + "value": "1", + "isEncrypted": True, + "type": "STRING", + }, + "Title": { + "value": trigger.title, + "isEncrypted": True, + "type": "STRING", + }, + "Type": {"value": "Location", "type": "STRING"}, + }, + parent_record_name=alarm_record_name, + ), + ) + + modify_response = self._get_raw().modify( + operations=[reminder_op, alarm_op, trigger_op], + zone_id=_REMINDERS_ZONE_REQ, + atomic=True, + ) + _assert_modify_success(modify_response, "Add location trigger") + + reminder.alarm_ids = existing_alarm_ids + _refresh_record_change_tag(modify_response, reminder, reminder_record_name) + + alarm = Alarm( + id=alarm_record_name, + alarm_uid=alarm_uuid, + reminder_id=reminder_record_name, + trigger_id=trigger_uuid, + record_change_tag=_response_record_change_tag( + modify_response, + alarm_record_name, + ), + ) + trigger.record_change_tag = _response_record_change_tag( + modify_response, + trigger_record_name, + ) + return alarm, trigger + + def create_hashtag(self, reminder: Reminder, name: str) -> Hashtag: + """Create a hashtag linked to a reminder and update Reminder.HashtagIDs.""" + now_ms = int(time.time() * 1000) + reminder_record_name = self._reminder_record_name(reminder.id) + hashtag_record_name, modify_response = self._create_linked_child( + reminder=reminder, + reminder_ids_attr="hashtag_ids", + prefix="Hashtag", + record_type="Hashtag", + field_name="HashtagIDs", + token_field_name="hashtagIDs", + child_fields={ + "Name": {"type": "STRING", "value": name}, + "Deleted": {"type": "INT64", "value": 0}, + "Reminder": { + "type": "REFERENCE", + "value": { + "recordName": reminder_record_name, + "action": "VALIDATE", + }, + }, + "CreationDate": {"type": "TIMESTAMP", "value": now_ms}, + }, + operation_name="Create hashtag", + ) + return Hashtag( + id=hashtag_record_name, + name=name, + reminder_id=reminder_record_name, + record_change_tag=_response_record_change_tag( + modify_response, + hashtag_record_name, + ), + ) + + def 
update_hashtag(self, hashtag: Hashtag, name: str) -> None: + """Update an existing hashtag name.""" + fields: dict[str, Any] = { + "Name": {"type": "STRING", "value": name}, + } + if hashtag.reminder_id: + fields["Reminder"] = { + "type": "REFERENCE", + "value": { + "recordName": self._reminder_record_name(hashtag.reminder_id), + "action": "VALIDATE", + }, + } + + hashtag_record_name = _as_record_name(hashtag.id, "Hashtag") + self._submit_single_record_update( + operation_name="Update hashtag", + record_name=hashtag_record_name, + record_type="Hashtag", + record_change_tag=hashtag.record_change_tag, + fields=fields, + model_obj=hashtag, + ) + hashtag.name = name + + def delete_hashtag(self, reminder: Reminder, hashtag: Hashtag) -> None: + """Soft-delete a hashtag and remove it from Reminder.HashtagIDs.""" + self._delete_linked_child( + reminder=reminder, + reminder_ids_attr="hashtag_ids", + child=hashtag, + prefix="Hashtag", + record_type="Hashtag", + field_name="HashtagIDs", + token_field_name="hashtagIDs", + operation_name="Delete hashtag", + ) + + def create_url_attachment( + self, + reminder: Reminder, + url: str, + uti: str = "public.url", + ) -> URLAttachment: + """Create a URL attachment and link it from Reminder.AttachmentIDs.""" + reminder_record_name = self._reminder_record_name(reminder.id) + attachment_record_name, modify_response = self._create_linked_child( + reminder=reminder, + reminder_ids_attr="attachment_ids", + prefix="Attachment", + record_type="Attachment", + field_name="AttachmentIDs", + token_field_name="attachmentIDs", + child_fields={ + "Type": {"type": "STRING", "value": "URL"}, + "Reminder": { + "type": "REFERENCE", + "value": { + "recordName": reminder_record_name, + "action": "VALIDATE", + }, + }, + "URL": { + "type": "STRING", + "value": url, + "isEncrypted": True, + }, + "UTI": {"type": "STRING", "value": uti}, + "Imported": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + operation_name="Create 
attachment", + ) + return URLAttachment( + id=attachment_record_name, + reminder_id=reminder_record_name, + url=url, + uti=uti, + record_change_tag=_response_record_change_tag( + modify_response, + attachment_record_name, + ), + ) + + def update_attachment( + self, + attachment: Attachment, + *, + url: Optional[str] = None, + uti: Optional[str] = None, + filename: Optional[str] = None, + file_size: Optional[int] = None, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: + """Update an attachment record (URL fields or image metadata fields).""" + fields: dict[str, Any] = {} + + if isinstance(attachment, URLAttachment): + has_mutation = url is not None or uti is not None + if not has_mutation: + raise ValueError("No attachment fields provided for update") + if url is not None: + fields["URL"] = { + "type": "STRING", + "value": url, + "isEncrypted": True, + } + if uti is not None: + fields["UTI"] = {"type": "STRING", "value": uti} + fields["Type"] = {"type": "STRING", "value": "URL"} + else: + has_mutation = any( + value is not None for value in (uti, filename, file_size, width, height) + ) + if not has_mutation: + raise ValueError("No attachment fields provided for update") + validated_attachment = self._validated_image_attachment( + attachment, + uti=uti, + filename=filename, + file_size=file_size, + width=width, + height=height, + ) + if uti is not None: + fields["UTI"] = {"type": "STRING", "value": uti} + if filename is not None: + fields["FileName"] = {"type": "STRING", "value": filename} + if file_size is not None: + fields["FileSize"] = {"type": "INT64", "value": int(file_size)} + if width is not None: + fields["Width"] = {"type": "INT64", "value": int(width)} + if height is not None: + fields["Height"] = {"type": "INT64", "value": int(height)} + fields["Type"] = {"type": "STRING", "value": "Image"} + + if attachment.reminder_id: + fields["Reminder"] = { + "type": "REFERENCE", + "value": { + "recordName": 
self._reminder_record_name(attachment.reminder_id), + "action": "VALIDATE", + }, + } + + attachment_record_name = _as_record_name(attachment.id, "Attachment") + self._submit_single_record_update( + operation_name="Update attachment", + record_name=attachment_record_name, + record_type="Attachment", + record_change_tag=attachment.record_change_tag, + fields=fields, + model_obj=attachment, + ) + + if isinstance(attachment, URLAttachment): + if url is not None: + attachment.url = url + if uti is not None: + attachment.uti = uti + else: + attachment.uti = validated_attachment.uti + attachment.filename = validated_attachment.filename + attachment.file_size = validated_attachment.file_size + attachment.width = validated_attachment.width + attachment.height = validated_attachment.height + + def delete_attachment(self, reminder: Reminder, attachment: Attachment) -> None: + """Soft-delete an attachment and unlink it from Reminder.AttachmentIDs.""" + self._delete_linked_child( + reminder=reminder, + reminder_ids_attr="attachment_ids", + child=attachment, + prefix="Attachment", + record_type="Attachment", + field_name="AttachmentIDs", + token_field_name="attachmentIDs", + operation_name="Delete attachment", + ) + + def create_recurrence_rule( + self, + reminder: Reminder, + *, + frequency: RecurrenceFrequency = RecurrenceFrequency.DAILY, + interval: int = 1, + occurrence_count: int = 0, + first_day_of_week: int = 0, + ) -> RecurrenceRule: + """Create a recurrence rule and link it from Reminder.RecurrenceRuleIDs.""" + validated_rule = self._validated_recurrence_rule( + recurrence_id="RecurrenceRule/NEW", + reminder_id=reminder.id, + frequency=frequency, + interval=interval, + occurrence_count=occurrence_count, + first_day_of_week=first_day_of_week, + ) + reminder_record_name = self._reminder_record_name(reminder.id) + recurrence_record_name, modify_response = self._create_linked_child( + reminder=reminder, + reminder_ids_attr="recurrence_rule_ids", + prefix="RecurrenceRule", + 
record_type="RecurrenceRule", + field_name="RecurrenceRuleIDs", + token_field_name="recurrenceRuleIDs", + child_fields={ + "Reminder": { + "type": "REFERENCE", + "value": { + "recordName": reminder_record_name, + "action": "VALIDATE", + }, + }, + "Frequency": { + "type": "INT64", + "value": int(validated_rule.frequency), + }, + "Interval": {"type": "INT64", "value": int(validated_rule.interval)}, + "OccurrenceCount": { + "type": "INT64", + "value": int(validated_rule.occurrence_count), + }, + "FirstDayOfTheWeek": { + "type": "INT64", + "value": int(validated_rule.first_day_of_week), + }, + "Imported": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + operation_name="Create recurrence rule", + ) + validated_rule.id = recurrence_record_name + validated_rule.reminder_id = reminder_record_name + validated_rule.record_change_tag = _response_record_change_tag( + modify_response, + recurrence_record_name, + ) + return validated_rule + + def update_recurrence_rule( + self, + recurrence_rule: RecurrenceRule, + *, + frequency: Optional[RecurrenceFrequency] = None, + interval: Optional[int] = None, + occurrence_count: Optional[int] = None, + first_day_of_week: Optional[int] = None, + ) -> None: + """Update an existing recurrence rule.""" + fields: dict[str, Any] = {} + has_mutation = any( + value is not None + for value in (frequency, interval, occurrence_count, first_day_of_week) + ) + if not has_mutation: + raise ValueError("No recurrence rule fields provided for update") + validated_rule = self._validated_recurrence_rule( + recurrence_id=recurrence_rule.id, + reminder_id=recurrence_rule.reminder_id, + frequency=frequency or recurrence_rule.frequency, + interval=recurrence_rule.interval if interval is None else interval, + occurrence_count=( + recurrence_rule.occurrence_count + if occurrence_count is None + else occurrence_count + ), + first_day_of_week=( + recurrence_rule.first_day_of_week + if first_day_of_week is None + else 
first_day_of_week + ), + record_change_tag=recurrence_rule.record_change_tag, + ) + + if frequency is not None: + fields["Frequency"] = {"type": "INT64", "value": int(frequency)} + if interval is not None: + fields["Interval"] = {"type": "INT64", "value": int(interval)} + if occurrence_count is not None: + fields["OccurrenceCount"] = { + "type": "INT64", + "value": int(occurrence_count), + } + if first_day_of_week is not None: + fields["FirstDayOfTheWeek"] = { + "type": "INT64", + "value": int(first_day_of_week), + } + if recurrence_rule.reminder_id: + fields["Reminder"] = { + "type": "REFERENCE", + "value": { + "recordName": self._reminder_record_name( + recurrence_rule.reminder_id + ), + "action": "VALIDATE", + }, + } + + recurrence_record_name = _as_record_name(recurrence_rule.id, "RecurrenceRule") + self._submit_single_record_update( + operation_name="Update recurrence rule", + record_name=recurrence_record_name, + record_type="RecurrenceRule", + record_change_tag=recurrence_rule.record_change_tag, + fields=fields, + model_obj=recurrence_rule, + ) + + recurrence_rule.frequency = validated_rule.frequency + recurrence_rule.interval = validated_rule.interval + recurrence_rule.occurrence_count = validated_rule.occurrence_count + recurrence_rule.first_day_of_week = validated_rule.first_day_of_week + + def delete_recurrence_rule( + self, + reminder: Reminder, + recurrence_rule: RecurrenceRule, + ) -> None: + """Soft-delete a recurrence rule and unlink it from the reminder.""" + self._delete_linked_child( + reminder=reminder, + reminder_ids_attr="recurrence_rule_ids", + child=recurrence_rule, + prefix="RecurrenceRule", + record_type="RecurrenceRule", + field_name="RecurrenceRuleIDs", + token_field_name="recurrenceRuleIDs", + operation_name="Delete recurrence rule", + ) diff --git a/pyicloud/services/reminders/client.py b/pyicloud/services/reminders/client.py new file mode 100644 index 00000000..8dd5a2c2 --- /dev/null +++ b/pyicloud/services/reminders/client.py @@ -0,0 
+1,235 @@ +""" +Low-level CloudKit client for the Reminders container. +""" + +from __future__ import annotations + +import logging +from typing import Dict, List, Optional, TypeVar + +from pydantic import ValidationError + +from pyicloud.common.cloudkit import ( + CKLookupDescriptor, + CKLookupRequest, + CKLookupResponse, + CKModifyOperation, + CKModifyRequest, + CKModifyResponse, + CKQueryObject, + CKQueryRequest, + CKQueryResponse, + CKZoneChangesRequest, + CKZoneChangesResponse, + CKZoneChangesZoneReq, + CKZoneIDReq, + CloudKitExtraMode, + resolve_cloudkit_validation_extra, +) + +LOGGER = logging.getLogger(__name__) +_ResponseModelT = TypeVar("_ResponseModelT") + + +# ... (Error classes remain the same) ... + + +class RemindersAuthError(Exception): + """Auth/PCS/cookie issues (401/403).""" + + +class RemindersApiError(Exception): + """Catch-all API error.""" + + def __init__(self, message: str, payload: Optional[object] = None): + super().__init__(message) + self.payload = payload + + +class _CloudKitClient: + """ + Minimal HTTP transport for CloudKit. 
+ """ + + _REQUEST_TIMEOUT = (10.0, 60.0) + + def __init__(self, base_url: str, session, base_params: Dict[str, object]): + self._base_url = base_url.rstrip("/") + self._session = session + self._params = self._normalize_params(base_params or {}) + + @staticmethod + def _normalize_params(params: Dict[str, object]) -> Dict[str, str]: + out: Dict[str, str] = {} + for k, v in params.items(): + if isinstance(v, bool): + out[k] = "true" if v else "false" + else: + out[k] = str(v) + return out + + def _build_url(self, path: str) -> str: + from urllib.parse import urlencode + + q = urlencode(self._params) + return f"{self._base_url}{path}" + (f"?{q}" if q else "") + + def post(self, path: str, payload: Dict) -> Dict: + url = self._build_url(path) + LOGGER.debug("POST to %s", url) + resp = self._session.post(url, json=payload, timeout=self._REQUEST_TIMEOUT) + code = getattr(resp, "status_code", 0) + + if code in (401, 403): + raise RemindersAuthError(f"HTTP {code}: unauthorized") + if code >= 400: + try: + body = resp.json() + except Exception: + body = getattr(resp, "text", None) + raise RemindersApiError(f"HTTP {code}", payload=body) + + try: + return resp.json() + except Exception: + raise RemindersApiError( + "Invalid JSON response", payload=getattr(resp, "text", None) + ) + + def get_bytes(self, url: str) -> bytes: + LOGGER.debug("GET asset from %s", url) + resp = self._session.get(url, timeout=self._REQUEST_TIMEOUT) + code = getattr(resp, "status_code", 0) + + if code in (401, 403): + raise RemindersAuthError(f"HTTP {code}: unauthorized") + if code >= 400: + raise RemindersApiError( + f"HTTP {code} on asset GET", payload=getattr(resp, "text", None) + ) + + content = getattr(resp, "content", None) + if isinstance(content, bytes): + return content + + text = getattr(resp, "text", None) + if isinstance(text, str): + return text.encode("utf-8") + + raise RemindersApiError("Invalid asset response", payload=text) + + +class CloudKitRemindersClient: + """ + Raw CloudKit 
service for the Reminders container. + """ + + def __init__( + self, + base_url: str, + session, + base_params: Dict[str, object], + *, + validation_extra: CloudKitExtraMode | None = None, + ): + self._http = _CloudKitClient(base_url, session, base_params) + self._validation_extra = validation_extra + + def _validate_response( + self, model_cls: type[_ResponseModelT], data: Dict + ) -> _ResponseModelT: + return model_cls.model_validate( + data, + extra=resolve_cloudkit_validation_extra(self._validation_extra), + ) + + def lookup( + self, + record_names: List[str], + zone_id: CKZoneIDReq, + ) -> CKLookupResponse: + """Fetch records by ID.""" + payload = CKLookupRequest( + records=[CKLookupDescriptor(recordName=n) for n in record_names], + zoneID=zone_id, + ).model_dump(mode="json", exclude_none=True) + + data = self._http.post("/records/lookup", payload) + try: + return self._validate_response(CKLookupResponse, data) + except ValidationError as e: + raise RemindersApiError( + "Lookup response validation failed", payload=data + ) from e + + def query( + self, + *, + query: CKQueryObject, + zone_id: CKZoneIDReq, + desired_keys: Optional[List[str]] = None, + results_limit: Optional[int] = None, + continuation: Optional[str] = None, + ) -> CKQueryResponse: + payload = CKQueryRequest( + query=query, + zoneID=zone_id, + desiredKeys=desired_keys, + resultsLimit=results_limit, + continuationMarker=continuation, + ).model_dump(mode="json", exclude_none=True) + + data = self._http.post("/records/query", payload) + try: + return self._validate_response(CKQueryResponse, data) + except ValidationError as e: + raise RemindersApiError( + "Query response validation failed", payload=data + ) from e + + def changes( + self, + *, + zone_req: CKZoneChangesZoneReq, + results_limit: Optional[int] = None, + ) -> CKZoneChangesResponse: + """Fetch changes (sync) for a zone.""" + + payload = CKZoneChangesRequest( + zones=[zone_req], + resultsLimit=results_limit, + ).model_dump(mode="json", 
exclude_none=True) + + data = self._http.post("/changes/zone", payload) + try: + return self._validate_response(CKZoneChangesResponse, data) + except ValidationError as e: + raise RemindersApiError( + "Changes response validation failed", payload=data + ) from e + + def modify( + self, + *, + operations: List[CKModifyOperation], + zone_id: CKZoneIDReq, + atomic: Optional[bool] = None, + ) -> CKModifyResponse: + """Modify (create/update/delete) records.""" + payload = CKModifyRequest( + operations=operations, + zoneID=zone_id, + atomic=atomic, + ).model_dump(mode="json", exclude_none=True) + + data = self._http.post("/records/modify", payload) + try: + return self._validate_response(CKModifyResponse, data) + except ValidationError as e: + raise RemindersApiError( + "Modify response validation failed", payload=data + ) from e + + def download_asset_bytes(self, url: str) -> bytes: + """Download raw bytes from a CloudKit asset URL.""" + return self._http.get_bytes(url) diff --git a/pyicloud/services/reminders/models/__init__.py b/pyicloud/services/reminders/models/__init__.py new file mode 100644 index 00000000..27ee07fb --- /dev/null +++ b/pyicloud/services/reminders/models/__init__.py @@ -0,0 +1,32 @@ +"""Reminders models.""" + +from .domain import ( + Alarm, + Hashtag, + ImageAttachment, + LocationTrigger, + Proximity, + RecurrenceFrequency, + RecurrenceRule, + Reminder, + ReminderChangeEvent, + RemindersList, + URLAttachment, +) +from .results import AlarmWithTrigger, ListRemindersResult + +__all__ = [ + "Alarm", + "AlarmWithTrigger", + "Hashtag", + "ImageAttachment", + "ListRemindersResult", + "LocationTrigger", + "Proximity", + "RecurrenceFrequency", + "RecurrenceRule", + "Reminder", + "ReminderChangeEvent", + "RemindersList", + "URLAttachment", +] diff --git a/pyicloud/services/reminders/models/domain.py b/pyicloud/services/reminders/models/domain.py new file mode 100644 index 00000000..d9a94c97 --- /dev/null +++ b/pyicloud/services/reminders/models/domain.py @@ 
# --- pyicloud/services/reminders/models/domain.py ---
"""Typed domain models for the Reminders service."""

from datetime import datetime
from enum import IntEnum
from typing import Literal, Optional

from pydantic import Field

from pyicloud.common.models import FrozenServiceModel, MutableServiceModel


class Reminder(MutableServiceModel):
    """A single reminder record plus the IDs of its linked child records."""

    id: str
    list_id: str
    title: str
    desc: str = ""
    completed: bool = False
    completed_date: Optional[datetime] = None
    due_date: Optional[datetime] = None
    start_date: Optional[datetime] = None
    priority: int = 0
    flagged: bool = False
    all_day: bool = False
    deleted: bool = False
    time_zone: Optional[str] = None
    # Child record IDs: alarms, hashtags, attachments, recurrence rules.
    alarm_ids: list[str] = Field(default_factory=list)
    hashtag_ids: list[str] = Field(default_factory=list)
    attachment_ids: list[str] = Field(default_factory=list)
    recurrence_rule_ids: list[str] = Field(default_factory=list)
    parent_reminder_id: Optional[str] = None
    created: Optional[datetime] = None
    modified: Optional[datetime] = None
    # CloudKit optimistic-concurrency tag carried through updates.
    record_change_tag: Optional[str] = None


class ReminderChangeEvent(FrozenServiceModel):
    """Incremental reminder change event emitted by ``iter_changes()``."""

    type: Literal["updated", "deleted"]
    reminder_id: str
    # Populated for "updated" events; None for deletions.
    reminder: Optional[Reminder] = None


class RemindersList(MutableServiceModel):
    """A reminders list (or group) record."""

    id: str
    title: str
    color: Optional[str] = None
    count: int = 0
    badge_emblem: Optional[str] = None
    sorting_style: Optional[str] = None
    is_group: bool = False
    reminder_ids: list[str] = Field(default_factory=list)
    guid: Optional[str] = None
    record_change_tag: Optional[str] = None


# --- Alarm records ---


class Alarm(MutableServiceModel):
    """Container for alarm triggers, referenced by Reminder.alarm_ids."""

    id: str
    alarm_uid: str
    reminder_id: str
    trigger_id: str
    record_change_tag: Optional[str] = None


class Proximity(IntEnum):
    """Geofence proximity direction."""

    ARRIVING = 1
    LEAVING = 2


class LocationTrigger(MutableServiceModel):
    """Location-based alarm trigger (geofence)."""

    id: str
    alarm_id: str
    title: str = ""
    address: str = ""
    latitude: float = 0.0
    longitude: float = 0.0
    # Geofence radius; must be non-negative.
    radius: float = Field(default=0.0, ge=0.0)
    proximity: Proximity = Proximity.ARRIVING
    location_uid: str = ""
    record_change_tag: Optional[str] = None


# --- Attachment records ---


class URLAttachment(MutableServiceModel):
    """URL attachment on a reminder."""

    id: str
    reminder_id: str
    url: str = ""
    uti: str = "public.url"
    record_change_tag: Optional[str] = None


class ImageAttachment(MutableServiceModel):
    """Image attachment on a reminder."""

    id: str
    reminder_id: str
    file_asset_url: str = ""
    filename: str = ""
    file_size: int = Field(default=0, ge=0)
    width: int = Field(default=0, ge=0)
    height: int = Field(default=0, ge=0)
    uti: str = "public.jpeg"
    record_change_tag: Optional[str] = None


# --- Hashtag records ---


class Hashtag(MutableServiceModel):
    """Tag associated with a reminder."""

    id: str
    name: str
    reminder_id: str
    created: Optional[datetime] = None
    record_change_tag: Optional[str] = None


# --- Recurrence rules ---


class RecurrenceFrequency(IntEnum):
    """Recurrence frequency type."""

    DAILY = 1
    WEEKLY = 2
    MONTHLY = 3
    YEARLY = 4


class RecurrenceRule(MutableServiceModel):
    """Recurrence rule for a repeating reminder."""

    id: str
    reminder_id: str
    frequency: RecurrenceFrequency = RecurrenceFrequency.DAILY
    interval: int = Field(default=1, ge=1)
    occurrence_count: int = Field(default=0, ge=0)  # 0 = infinite
    first_day_of_week: int = Field(default=0, ge=0, le=6)
    record_change_tag: Optional[str] = None
FrozenServiceModel + +from .domain import ( + Alarm, + Hashtag, + ImageAttachment, + LocationTrigger, + RecurrenceRule, + Reminder, + URLAttachment, +) + + +class AlarmWithTrigger(FrozenServiceModel): + alarm: Alarm + trigger: Optional[LocationTrigger] = None + + +class ListRemindersResult(FrozenServiceModel): + reminders: list[Reminder] + alarms: dict[str, Alarm] + triggers: dict[str, LocationTrigger] + attachments: dict[str, URLAttachment | ImageAttachment] + hashtags: dict[str, Hashtag] + recurrence_rules: dict[str, RecurrenceRule] diff --git a/pyicloud/services/reminders/protobuf/__init__.py b/pyicloud/services/reminders/protobuf/__init__.py new file mode 100644 index 00000000..a7ccbc28 --- /dev/null +++ b/pyicloud/services/reminders/protobuf/__init__.py @@ -0,0 +1 @@ +"""Generated protobuf modules for the Reminders service.""" diff --git a/pyicloud/services/reminders/protobuf/reminders.proto b/pyicloud/services/reminders/protobuf/reminders.proto new file mode 100644 index 00000000..bd5d9935 --- /dev/null +++ b/pyicloud/services/reminders/protobuf/reminders.proto @@ -0,0 +1,159 @@ +syntax = "proto2"; + +package topotext; + +// Apple's topotext CRDT protocol, as extracted from iCloud Reminders main.js. +// This is the wire format used for TitleDocument in CloudKit. + +message String { + optional string string = 2; + + // Only needed for mergeable strings. + repeated Substring substring = 3; + optional VectorTimestamp timestamp = 4; + + repeated AttributeRun attributeRun = 5; + + // Optional attachment data. 
  repeated Attachment attachment = 6;
}

message VectorTimestamp {
  message Clock {
    optional bytes replicaUUID = 1;
    message ReplicaClock {
      optional uint32 clock = 1;
      optional uint32 subclock = 2;
    }
    repeated ReplicaClock replicaClock = 2;
  }
  repeated Clock clock = 1;
}

// Identifies a character by the replica that inserted it and a logical clock.
message CharID {
  optional uint32 replicaID = 1;
  optional uint32 clock = 2;
}

message Substring {
  optional CharID charID = 1;
  // Length of substring in UTF-16 characters.
  optional uint32 length = 2;

  // Style timestamp.
  optional CharID timestamp = 3;

  optional bool tombstone = 4;

  // Index into String.substring.
  repeated uint32 child = 5;
}

message Selection {
  message Range {
    optional CharID fromChar = 1;
    optional CharID toChar = 2;
  };
  repeated bytes replicaUUID = 1;
  repeated Range range = 2;

  enum Affinity {
    Backward = 0;
    Forward = 1;
  }
  optional Affinity affinity = 3;
}

message AttributeRun {
  // Length of run in UTF-16 characters.
  optional uint32 length = 1;

  optional ParagraphStyle paragraphStyle = 2;

  // Overrides bold, italic and paragraph styles.
  optional Font font = 3;

  // Modifiers applied on-top of paragraph style (bold, italic...).
  // NOTE: field number 4 is unused in this definition.
  optional uint32 fontHints = 5;

  optional uint32 underline = 6;
  optional uint32 strikethrough = 7;

  optional int32 superscript = 8;
  optional string link = 9;
  optional Color color = 10;

  enum WritingDirection {
    NaturalDirection = 0;
    LeftToRight = 1;
    RightToLeft = 2;
    LeftToRightOverride = 3;
    RightToLeftOverride = 4;
  }

  optional WritingDirection writingDirection = 11;

  optional AttachmentInfo attachmentInfo = 12;

  optional uint64 timestamp = 13;

  // Reminders hashtags
  optional HashtagInfo hashtagInfo = 14;
}

message Font {
  optional string name = 1;
  optional float pointSize = 2;
  optional uint32 fontHints = 3;
}

message ParagraphStyle {
  enum Alignment {
    Left = 0;
    Center = 1;
    Right = 2;
    Justified = 3;
    Natural = 4;
  }

  optional uint32 style = 1;
  optional Alignment alignment = 2;
  optional AttributeRun.WritingDirection writingDirection = 3;
  optional int32 indent = 4;
  optional Todo todo = 5;
  optional uint32 paragraphHints = 6;
  optional uint32 startingListItemNumber = 7;
  optional uint32 blockQuoteLevel = 8;
}

message HashtagInfo {
  optional string objectIdentifier = 1;
}

message AttachmentInfo {
  optional string attachmentIdentifier = 1;
  optional string typeUTI = 2;
}

message Attachment {
  // NOTE: field number 1 is unused in this definition.
  optional string identifier = 2;
  optional bytes mergeableData = 3;
  optional float sizeHeight = 4;
  optional float sizeWidth = 5;
  optional string summary = 6;
  optional string title = 7;
  optional string typeUTI = 8;
  optional string urlString = 9;
}

message Todo {
  optional bytes todoUUID = 1;
  optional bool done = 2;
}

// RGBA components, presumably in the 0.0-1.0 range — confirm against producer.
message Color {
  optional float red = 1;
  optional float green = 2;
  optional float blue = 3;
  optional float alpha = 4;
}
b/pyicloud/services/reminders/protobuf/reminders_pb2.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: reminders.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, 6, 31, 1, "", "reminders.proto" +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0freminders.proto\x12\x08topotext"\xc6\x01\n\x06String\x12\x0e\n\x06string\x18\x02 \x01(\t\x12&\n\tsubstring\x18\x03 \x03(\x0b\x32\x13.topotext.Substring\x12,\n\ttimestamp\x18\x04 \x01(\x0b\x32\x19.topotext.VectorTimestamp\x12,\n\x0c\x61ttributeRun\x18\x05 \x03(\x0b\x32\x16.topotext.AttributeRun\x12(\n\nattachment\x18\x06 \x03(\x0b\x32\x14.topotext.Attachment"\xd5\x01\n\x0fVectorTimestamp\x12.\n\x05\x63lock\x18\x01 \x03(\x0b\x32\x1f.topotext.VectorTimestamp.Clock\x1a\x91\x01\n\x05\x43lock\x12\x13\n\x0breplicaUUID\x18\x01 \x01(\x0c\x12\x42\n\x0creplicaClock\x18\x02 \x03(\x0b\x32,.topotext.VectorTimestamp.Clock.ReplicaClock\x1a/\n\x0cReplicaClock\x12\r\n\x05\x63lock\x18\x01 \x01(\r\x12\x10\n\x08subclock\x18\x02 \x01(\r"*\n\x06\x43harID\x12\x11\n\treplicaID\x18\x01 \x01(\r\x12\r\n\x05\x63lock\x18\x02 \x01(\r"\x84\x01\n\tSubstring\x12 \n\x06\x63harID\x18\x01 \x01(\x0b\x32\x10.topotext.CharID\x12\x0e\n\x06length\x18\x02 \x01(\r\x12#\n\ttimestamp\x18\x03 \x01(\x0b\x32\x10.topotext.CharID\x12\x11\n\ttombstone\x18\x04 \x01(\x08\x12\r\n\x05\x63hild\x18\x05 \x03(\r"\xf0\x01\n\tSelection\x12\x13\n\x0breplicaUUID\x18\x01 
\x03(\x0c\x12(\n\x05range\x18\x02 \x03(\x0b\x32\x19.topotext.Selection.Range\x12.\n\x08\x61\x66\x66inity\x18\x03 \x01(\x0e\x32\x1c.topotext.Selection.Affinity\x1aM\n\x05Range\x12"\n\x08\x66romChar\x18\x01 \x01(\x0b\x32\x10.topotext.CharID\x12 \n\x06toChar\x18\x02 \x01(\x0b\x32\x10.topotext.CharID"%\n\x08\x41\x66\x66inity\x12\x0c\n\x08\x42\x61\x63kward\x10\x00\x12\x0b\n\x07\x46orward\x10\x01"\xa0\x04\n\x0c\x41ttributeRun\x12\x0e\n\x06length\x18\x01 \x01(\r\x12\x30\n\x0eparagraphStyle\x18\x02 \x01(\x0b\x32\x18.topotext.ParagraphStyle\x12\x1c\n\x04\x66ont\x18\x03 \x01(\x0b\x32\x0e.topotext.Font\x12\x11\n\tfontHints\x18\x05 \x01(\r\x12\x11\n\tunderline\x18\x06 \x01(\r\x12\x15\n\rstrikethrough\x18\x07 \x01(\r\x12\x13\n\x0bsuperscript\x18\x08 \x01(\x05\x12\x0c\n\x04link\x18\t \x01(\t\x12\x1e\n\x05\x63olor\x18\n \x01(\x0b\x32\x0f.topotext.Color\x12\x41\n\x10writingDirection\x18\x0b \x01(\x0e\x32\'.topotext.AttributeRun.WritingDirection\x12\x30\n\x0e\x61ttachmentInfo\x18\x0c \x01(\x0b\x32\x18.topotext.AttachmentInfo\x12\x11\n\ttimestamp\x18\r \x01(\x04\x12*\n\x0bhashtagInfo\x18\x0e \x01(\x0b\x32\x15.topotext.HashtagInfo"|\n\x10WritingDirection\x12\x14\n\x10NaturalDirection\x10\x00\x12\x0f\n\x0bLeftToRight\x10\x01\x12\x0f\n\x0bRightToLeft\x10\x02\x12\x17\n\x13LeftToRightOverride\x10\x03\x12\x17\n\x13RightToLeftOverride\x10\x04":\n\x04\x46ont\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpointSize\x18\x02 \x01(\x02\x12\x11\n\tfontHints\x18\x03 \x01(\r"\xe2\x02\n\x0eParagraphStyle\x12\r\n\x05style\x18\x01 \x01(\r\x12\x35\n\talignment\x18\x02 \x01(\x0e\x32".topotext.ParagraphStyle.Alignment\x12\x41\n\x10writingDirection\x18\x03 \x01(\x0e\x32\'.topotext.AttributeRun.WritingDirection\x12\x0e\n\x06indent\x18\x04 \x01(\x05\x12\x1c\n\x04todo\x18\x05 \x01(\x0b\x32\x0e.topotext.Todo\x12\x16\n\x0eparagraphHints\x18\x06 \x01(\r\x12\x1e\n\x16startingListItemNumber\x18\x07 \x01(\r\x12\x17\n\x0f\x62lockQuoteLevel\x18\x08 
\x01(\r"H\n\tAlignment\x12\x08\n\x04Left\x10\x00\x12\n\n\x06\x43\x65nter\x10\x01\x12\t\n\x05Right\x10\x02\x12\r\n\tJustified\x10\x03\x12\x0b\n\x07Natural\x10\x04"\'\n\x0bHashtagInfo\x12\x18\n\x10objectIdentifier\x18\x01 \x01(\t"?\n\x0e\x41ttachmentInfo\x12\x1c\n\x14\x61ttachmentIdentifier\x18\x01 \x01(\t\x12\x0f\n\x07typeUTI\x18\x02 \x01(\t"\xa2\x01\n\nAttachment\x12\x12\n\nidentifier\x18\x02 \x01(\t\x12\x15\n\rmergeableData\x18\x03 \x01(\x0c\x12\x12\n\nsizeHeight\x18\x04 \x01(\x02\x12\x11\n\tsizeWidth\x18\x05 \x01(\x02\x12\x0f\n\x07summary\x18\x06 \x01(\t\x12\r\n\x05title\x18\x07 \x01(\t\x12\x0f\n\x07typeUTI\x18\x08 \x01(\t\x12\x11\n\turlString\x18\t \x01(\t"&\n\x04Todo\x12\x10\n\x08todoUUID\x18\x01 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08"@\n\x05\x43olor\x12\x0b\n\x03red\x18\x01 \x01(\x02\x12\r\n\x05green\x18\x02 \x01(\x02\x12\x0c\n\x04\x62lue\x18\x03 \x01(\x02\x12\r\n\x05\x61lpha\x18\x04 \x01(\x02' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "reminders_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_STRING"]._serialized_start = 30 + _globals["_STRING"]._serialized_end = 228 + _globals["_VECTORTIMESTAMP"]._serialized_start = 231 + _globals["_VECTORTIMESTAMP"]._serialized_end = 444 + _globals["_VECTORTIMESTAMP_CLOCK"]._serialized_start = 299 + _globals["_VECTORTIMESTAMP_CLOCK"]._serialized_end = 444 + _globals["_VECTORTIMESTAMP_CLOCK_REPLICACLOCK"]._serialized_start = 397 + _globals["_VECTORTIMESTAMP_CLOCK_REPLICACLOCK"]._serialized_end = 444 + _globals["_CHARID"]._serialized_start = 446 + _globals["_CHARID"]._serialized_end = 488 + _globals["_SUBSTRING"]._serialized_start = 491 + _globals["_SUBSTRING"]._serialized_end = 623 + _globals["_SELECTION"]._serialized_start = 626 + _globals["_SELECTION"]._serialized_end = 866 + _globals["_SELECTION_RANGE"]._serialized_start = 750 + 
_globals["_SELECTION_RANGE"]._serialized_end = 827 + _globals["_SELECTION_AFFINITY"]._serialized_start = 829 + _globals["_SELECTION_AFFINITY"]._serialized_end = 866 + _globals["_ATTRIBUTERUN"]._serialized_start = 869 + _globals["_ATTRIBUTERUN"]._serialized_end = 1413 + _globals["_ATTRIBUTERUN_WRITINGDIRECTION"]._serialized_start = 1289 + _globals["_ATTRIBUTERUN_WRITINGDIRECTION"]._serialized_end = 1413 + _globals["_FONT"]._serialized_start = 1415 + _globals["_FONT"]._serialized_end = 1473 + _globals["_PARAGRAPHSTYLE"]._serialized_start = 1476 + _globals["_PARAGRAPHSTYLE"]._serialized_end = 1830 + _globals["_PARAGRAPHSTYLE_ALIGNMENT"]._serialized_start = 1758 + _globals["_PARAGRAPHSTYLE_ALIGNMENT"]._serialized_end = 1830 + _globals["_HASHTAGINFO"]._serialized_start = 1832 + _globals["_HASHTAGINFO"]._serialized_end = 1871 + _globals["_ATTACHMENTINFO"]._serialized_start = 1873 + _globals["_ATTACHMENTINFO"]._serialized_end = 1936 + _globals["_ATTACHMENT"]._serialized_start = 1939 + _globals["_ATTACHMENT"]._serialized_end = 2101 + _globals["_TODO"]._serialized_start = 2103 + _globals["_TODO"]._serialized_end = 2141 + _globals["_COLOR"]._serialized_start = 2143 + _globals["_COLOR"]._serialized_end = 2207 +# @@protoc_insertion_point(module_scope) diff --git a/pyicloud/services/reminders/protobuf/typedef.json b/pyicloud/services/reminders/protobuf/typedef.json new file mode 100644 index 00000000..30be18a2 --- /dev/null +++ b/pyicloud/services/reminders/protobuf/typedef.json @@ -0,0 +1,76 @@ +{ + "2": { + "name": "Title", + "type": "message", + "message_typedef": { + "3": { + "name": "Content", + "type": "message", + "message_typedef": { + "2": { + "name": "Text", + "type": "string" + }, + "3": { + "name": "AttributeRun", + "type": "message", + "seen_repeated": true, + "message_typedef": { + "1": { + "name": "Range", + "type": "message", + "message_typedef": { + "1": { + "name": "Location", + "type": "int" + }, + "2": { + "name": "Length", + "type": "int" + } + } + }, + 
"2": { + "name": "AttributeType", + "type": "int" + }, + "3": { + "name": "FontInfo", + "type": "message", + "message_typedef": { + "1": { + "name": "FontHandle", + "type": "int" + }, + "2": { + "name": "Size", + "type": "int" + } + } + }, + "5": { + "name": "Flag", + "type": "int" + } + } + }, + "4": { + "name": "AttachmentInfo", + "type": "message", + "message_typedef": { + "1": { + "name": "UUIDBytes", + "type": "bytes" + }, + "2": { + "name": "Info", + "type": "int", + "seen_repeated": true + } + } + } + } + } + } + } +} diff --git a/pyicloud/services/reminders/protobuf/typedef.py b/pyicloud/services/reminders/protobuf/typedef.py new file mode 100644 index 00000000..16fe9f68 --- /dev/null +++ b/pyicloud/services/reminders/protobuf/typedef.py @@ -0,0 +1,13 @@ +TITLE_DOCUMENT_TYPEDEF = { + "2": { + "type": "message", + "name": "title", + "message_typedef": { + "3": { + "type": "message", + "name": "content", + "message_typedef": {"2": {"type": "string", "name": "text"}}, + } + }, + } +} diff --git a/pyicloud/services/reminders/protobuf/versioned_document.proto b/pyicloud/services/reminders/protobuf/versioned_document.proto new file mode 100644 index 00000000..bdf48d6f --- /dev/null +++ b/pyicloud/services/reminders/protobuf/versioned_document.proto @@ -0,0 +1,24 @@ +syntax = "proto2"; + +// ========================================================================== +// versioned_document.proto +// Outer versioned wrapper for topotext CRDT documents. +// Extracted from iCloud.com Reminders main.js at line 99472. +// ========================================================================== +package versioned_document; + +message Document { + // Just in case. + optional uint32 serializationVersion = 1; + + repeated Version version = 2; +} + +message Version { + optional uint32 serializationVersion = 1; + optional uint32 minimumSupportedVersion = 2; + + // Interpreted as a topotext.String. + // Archived as bytes to ensure bit-perfect backward compatibility. 
+ optional bytes data = 3; +} diff --git a/pyicloud/services/reminders/protobuf/versioned_document_pb2.py b/pyicloud/services/reminders/protobuf/versioned_document_pb2.py new file mode 100644 index 00000000..d4e8a6df --- /dev/null +++ b/pyicloud/services/reminders/protobuf/versioned_document_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: versioned_document.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, 6, 31, 1, "", "versioned_document.proto" +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x18versioned_document.proto\x12\x12versioned_document"V\n\x08\x44ocument\x12\x1c\n\x14serializationVersion\x18\x01 \x01(\r\x12,\n\x07version\x18\x02 \x03(\x0b\x32\x1b.versioned_document.Version"V\n\x07Version\x12\x1c\n\x14serializationVersion\x18\x01 \x01(\r\x12\x1f\n\x17minimumSupportedVersion\x18\x02 \x01(\r\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "versioned_document_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_DOCUMENT"]._serialized_start = 48 + _globals["_DOCUMENT"]._serialized_end = 134 + _globals["_VERSION"]._serialized_start = 136 + _globals["_VERSION"]._serialized_end = 222 +# @@protoc_insertion_point(module_scope) diff --git 
a/pyicloud/services/reminders/service.py b/pyicloud/services/reminders/service.py new file mode 100644 index 00000000..d6218846 --- /dev/null +++ b/pyicloud/services/reminders/service.py @@ -0,0 +1,437 @@ +""" +High-level Reminders service built on top of the iCloud Reminders CloudKit API. + +Public API: + - RemindersService.lists() -> Iterable[RemindersList] + - RemindersService.reminders(list_id=None) -> Iterable[Reminder] + - RemindersService.list_reminders(list_id, include_completed=False, results_limit=200) + - RemindersService.get(reminder_id) -> Reminder + - RemindersService.sync_cursor() -> str + - RemindersService.iter_changes(since=None) -> Iterable[ReminderChangeEvent] + - RemindersService.create(...) + - RemindersService.update(reminder) -> None + - RemindersService.delete(reminder) -> None + - RemindersService.add_location_trigger(reminder, ...) -> tuple[Alarm, LocationTrigger] + - RemindersService.create_hashtag(...) / update_hashtag(...) / delete_hashtag(...) + - RemindersService.create_url_attachment(...) / update_attachment(...) / delete_attachment(...) + - RemindersService.create_recurrence_rule(...) / update_recurrence_rule(...) / delete_recurrence_rule(...) + - RemindersService.alarms_for(reminder) -> list[AlarmWithTrigger] + - RemindersService.tags_for(reminder) -> list[Hashtag] + - RemindersService.attachments_for(reminder) -> list[Attachment] + - RemindersService.recurrence_rules_for(reminder) -> list[RecurrenceRule] + +The service returns typed reminder, list, alarm, attachment, hashtag, and +recurrence models so normal callers do not need to work with CloudKit records +directly. 
+""" + +from __future__ import annotations + +import logging +from datetime import datetime +from typing import Any, Dict, Iterable, List, Optional, Union + +from pyicloud.common.cloudkit import CKRecord +from pyicloud.common.cloudkit.base import CloudKitExtraMode +from pyicloud.services.base import BaseService + +from ._mappers import RemindersRecordMapper +from ._protocol import ( + _decode_crdt_document, + _encode_crdt_document, + _generate_resolution_token_map, +) +from ._reads import RemindersReadAPI +from ._writes import RemindersWriteAPI +from .client import CloudKitRemindersClient +from .models import ( + Alarm, + AlarmWithTrigger, + Hashtag, + ImageAttachment, + ListRemindersResult, + LocationTrigger, + Proximity, + RecurrenceFrequency, + RecurrenceRule, + Reminder, + ReminderChangeEvent, + RemindersList, + URLAttachment, +) + +LOGGER = logging.getLogger(__name__) + +Attachment = Union[URLAttachment, ImageAttachment] + + +class RemindersService(BaseService): + """ + Typed Reminders API for snapshot reads, incremental sync, and mutations. + + Use this service for list discovery, reminder CRUD, and supported reminder + metadata such as alarms, hashtags, attachments, and recurrence rules. 
+ """ + + _CONTAINER = "com.apple.reminders" + _ENV = "production" + _SCOPE = "private" + + def __init__( + self, + service_root: str, + session: Any, + params: Dict[str, Any], + *, + cloudkit_validation_extra: CloudKitExtraMode | None = None, + ): + super().__init__(service_root, session, params) + endpoint = ( + f"{self.service_root}/database/1/" + f"{self._CONTAINER}/{self._ENV}/{self._SCOPE}" + ) + base_params = { + "remapEnums": True, + **(params or {}), + } + self._raw = CloudKitRemindersClient( + endpoint, + session, + base_params, + validation_extra=cloudkit_validation_extra, + ) + + def get_raw() -> CloudKitRemindersClient: + return self._raw + + self._mapper = RemindersRecordMapper(get_raw, LOGGER) + self._reads = RemindersReadAPI(get_raw, self._mapper, LOGGER) + self._writes = RemindersWriteAPI(get_raw, self._mapper, LOGGER) + + def lists(self) -> Iterable[RemindersList]: + """ + Yield reminder lists as ``RemindersList`` models. + + Use this to discover available lists and obtain the ``list_id`` values + needed by creation and query helpers. + """ + return self._reads.lists() + + def reminders( + self, + list_id: Optional[str] = None, + ) -> Iterable[Reminder]: + """ + Yield reminders across all lists or for a specific list. + + Args: + list_id: Optional list identifier. When provided, only reminders in + that list are returned. + """ + reminder_map: Dict[str, Reminder] = {} + + list_ids: List[str] + if list_id: + list_ids = [list_id] + else: + list_ids = [lst.id for lst in self.lists()] + + for lid in list_ids: + batch = self.list_reminders( + list_id=lid, + include_completed=True, + results_limit=200, + ) + for reminder in batch.reminders: + reminder_map[reminder.id] = reminder + + for reminder in reminder_map.values(): + yield reminder + + def sync_cursor(self) -> str: + """ + Return the current sync token for the Reminders zone. + + Persist this token and pass it to ``iter_changes(since=...)`` later to + enumerate only newer changes. 
+ """ + return self._reads.sync_cursor() + + def iter_changes( + self, + *, + since: Optional[str] = None, + ) -> Iterable[ReminderChangeEvent]: + """ + Yield reminder change events since an optional sync token. + + Updated reminders are returned with ``type="updated"`` and a hydrated + ``reminder`` payload. Deletions are returned with ``type="deleted"`` + and only the ``reminder_id`` populated. + """ + return self._reads.iter_changes(since=since) + + def get(self, reminder_id: str) -> Reminder: + """ + Return a single reminder by ID. + + Args: + reminder_id: The full reminder record identifier. + """ + return self._reads.get(reminder_id) + + def create( + self, + list_id: str, + title: str, + desc: str = "", + completed: bool = False, + due_date: Optional[datetime] = None, + priority: int = 0, + flagged: bool = False, + all_day: bool = False, + time_zone: Optional[str] = None, + parent_reminder_id: Optional[str] = None, + ) -> Reminder: + """ + Create a reminder and return the hydrated ``Reminder`` model. + + Args: + list_id: Target reminder list ID. + title: Reminder title. + desc: Reminder notes/body text. + completed: Whether the reminder should be created as completed. + due_date: Optional due date. Naive datetimes are treated as UTC. + priority: Apple Reminders priority value. Common values are + ``0`` (none), ``1`` (high), ``5`` (medium), and ``9`` (low). + flagged: Whether the reminder is flagged. + all_day: Whether the reminder should be treated as all-day. + time_zone: Optional time zone name for the due date. + parent_reminder_id: Optional parent reminder ID for subtasks. + """ + return self._writes.create( + list_id=list_id, + title=title, + desc=desc, + completed=completed, + due_date=due_date, + priority=priority, + flagged=flagged, + all_day=all_day, + time_zone=time_zone, + parent_reminder_id=parent_reminder_id, + ) + + def update(self, reminder: Reminder) -> None: + """ + Persist changes made to a ``Reminder`` model back to iCloud. 
+ + Fetch the reminder, mutate its fields locally, then pass it to + ``update()``. + """ + self._writes.update(reminder) + + def delete(self, reminder: Reminder) -> None: + """ + Soft-delete a reminder in iCloud. + + The provided ``Reminder`` model is marked deleted and the remote record + is updated accordingly. + """ + self._writes.delete(reminder) + + def add_location_trigger( + self, + reminder: Reminder, + title: str = "", + address: str = "", + latitude: float = 0.0, + longitude: float = 0.0, + radius: float = 100.0, + proximity: Proximity = Proximity.ARRIVING, + ) -> tuple[Alarm, LocationTrigger]: + """ + Add a location-based alarm trigger to a reminder. + + Returns the created ``Alarm`` and ``LocationTrigger`` records. + """ + return self._writes.add_location_trigger( + reminder=reminder, + title=title, + address=address, + latitude=latitude, + longitude=longitude, + radius=radius, + proximity=proximity, + ) + + def create_hashtag(self, reminder: Reminder, name: str) -> Hashtag: + """Create and attach a hashtag to ``reminder``.""" + return self._writes.create_hashtag(reminder, name) + + def update_hashtag(self, hashtag: Hashtag, name: str) -> None: + """ + Update a hashtag name. + + Note: the iCloud Reminders web app currently treats hashtag names as + effectively read-only in some live flows, so rename behavior may not be + reflected consistently outside the API. 
+ """ + self._writes.update_hashtag(hashtag, name) + + def delete_hashtag(self, reminder: Reminder, hashtag: Hashtag) -> None: + """Detach and delete a hashtag from ``reminder``.""" + self._writes.delete_hashtag(reminder, hashtag) + + def create_url_attachment( + self, + reminder: Reminder, + url: str, + uti: str = "public.url", + ) -> URLAttachment: + """Create a URL attachment on ``reminder``.""" + return self._writes.create_url_attachment(reminder, url, uti) + + def update_attachment( + self, + attachment: Attachment, + *, + url: Optional[str] = None, + uti: Optional[str] = None, + filename: Optional[str] = None, + file_size: Optional[int] = None, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: + """ + Update a reminder attachment in place. + + URL attachments can update ``url`` and ``uti``. Image attachments can + update metadata such as filename, size, and dimensions. + """ + self._writes.update_attachment( + attachment, + url=url, + uti=uti, + filename=filename, + file_size=file_size, + width=width, + height=height, + ) + + def delete_attachment(self, reminder: Reminder, attachment: Attachment) -> None: + """Detach and delete an attachment from ``reminder``.""" + self._writes.delete_attachment(reminder, attachment) + + def create_recurrence_rule( + self, + reminder: Reminder, + *, + frequency: RecurrenceFrequency = RecurrenceFrequency.DAILY, + interval: int = 1, + occurrence_count: int = 0, + first_day_of_week: int = 0, + ) -> RecurrenceRule: + """ + Create and attach a recurrence rule to ``reminder``. + + ``occurrence_count=0`` means the recurrence is open-ended. 
+ """ + return self._writes.create_recurrence_rule( + reminder, + frequency=frequency, + interval=interval, + occurrence_count=occurrence_count, + first_day_of_week=first_day_of_week, + ) + + def update_recurrence_rule( + self, + recurrence_rule: RecurrenceRule, + *, + frequency: Optional[RecurrenceFrequency] = None, + interval: Optional[int] = None, + occurrence_count: Optional[int] = None, + first_day_of_week: Optional[int] = None, + ) -> None: + """Update fields on an existing recurrence rule.""" + self._writes.update_recurrence_rule( + recurrence_rule, + frequency=frequency, + interval=interval, + occurrence_count=occurrence_count, + first_day_of_week=first_day_of_week, + ) + + def delete_recurrence_rule( + self, + reminder: Reminder, + recurrence_rule: RecurrenceRule, + ) -> None: + """Detach and delete a recurrence rule from ``reminder``.""" + self._writes.delete_recurrence_rule(reminder, recurrence_rule) + + def list_reminders( + self, + list_id: str, + include_completed: bool = False, + results_limit: int = 200, + ) -> ListRemindersResult: + """ + Return a compound reminder snapshot for one list. + + The result includes the list's reminders plus related alarms, + triggers, attachments, hashtags, and recurrence rules keyed by ID. 
+ """ + return self._reads.list_reminders( + list_id=list_id, + include_completed=include_completed, + results_limit=results_limit, + ) + + def alarms_for(self, reminder: Reminder) -> List[AlarmWithTrigger]: + """Return alarm rows, including attached location triggers, for ``reminder``.""" + return self._reads.alarms_for(reminder) + + def tags_for(self, reminder: Reminder) -> List[Hashtag]: + """Return hashtags currently attached to ``reminder``.""" + return self._reads.tags_for(reminder) + + def attachments_for(self, reminder: Reminder) -> List[Attachment]: + """Return attachments currently attached to ``reminder``.""" + return self._reads.attachments_for(reminder) + + def recurrence_rules_for(self, reminder: Reminder) -> List[RecurrenceRule]: + """Return recurrence rules currently attached to ``reminder``.""" + return self._reads.recurrence_rules_for(reminder) + + # Compatibility wrappers for the service's tested helper surface. + def _decode_crdt_document(self, encrypted_value: str | bytes) -> str: + return _decode_crdt_document(encrypted_value) + + def _encode_crdt_document(self, text: str) -> str: + return _encode_crdt_document(text) + + def _generate_resolution_token_map(self, fields_modified: list[str]) -> str: + return _generate_resolution_token_map(fields_modified) + + def _record_to_list(self, rec: CKRecord) -> RemindersList: + return self._mapper.record_to_list(rec) + + def _record_to_reminder(self, rec: CKRecord) -> Reminder: + return self._mapper.record_to_reminder(rec) + + def _record_to_alarm(self, rec: CKRecord) -> Alarm: + return self._mapper.record_to_alarm(rec) + + def _record_to_alarm_trigger(self, rec: CKRecord) -> Optional[LocationTrigger]: + return self._mapper.record_to_alarm_trigger(rec) + + def _record_to_attachment(self, rec: CKRecord) -> Optional[Attachment]: + return self._mapper.record_to_attachment(rec) + + def _record_to_hashtag(self, rec: CKRecord) -> Hashtag: + return self._mapper.record_to_hashtag(rec) + + def 
_record_to_recurrence_rule(self, rec: CKRecord) -> RecurrenceRule: + return self._mapper.record_to_recurrence_rule(rec) diff --git a/pyproject.toml b/pyproject.toml index 42383d27..b6c8387b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,10 +103,21 @@ repository = "https://github.com/timlaing/pyicloud" [project.scripts] icloud = "pyicloud.cmdline:main" +[dependency-groups] +dev = [ + "grpcio-tools>=1.76.0", +] + +[tool.setuptools] +include-package-data = true + [tool.setuptools.packages.find] where = ["."] include = ["pyicloud*"] +[tool.setuptools.package-data] +"pyicloud.services.notes.protobuf" = ["*.pyi"] + [tool.setuptools.dynamic] readme = {file = "README.md", content-type = "text/markdown"} dependencies = {file = ["requirements.txt"]} diff --git a/requirements.txt b/requirements.txt index bca6c954..06f2a422 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,8 +3,11 @@ click>=8.1.8 fido2>=2.0.0 keyring>=25.6.0 keyrings.alt>=5.0.2 +protobuf>=6.31.1,<7 +pydantic>=2.12,<3 requests>=2.31.0 rich>=13.0.0 srp>=1.0.21 +tinyhtml>=1.1.0 typer>=0.16.1 tzlocal==5.3.1 diff --git a/requirements_test.txt b/requirements_test.txt index 908016cd..74497e2f 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,3 +1,4 @@ +bbpb==1.4.2 isort>=5.11.5 prek>=0.3.1 pylint>=3.3.4 diff --git a/tests/fixtures/note_fixture.json b/tests/fixtures/note_fixture.json new file mode 100644 index 00000000..e45cc907 --- /dev/null +++ b/tests/fixtures/note_fixture.json @@ -0,0 +1,590 @@ +{ + "note": { + "recordName": "NOTE_RECORD_0001", + "recordType": "Note", + "fields": { + "ModificationDate": "2025-12-12T01:32:14.282000+00:00", + "TitleEncrypted": { + "__bytes__": "cHlpY2xvdWQgbm90ZXMgc2VydmljZSB0ZXN0" + }, + "TextDataEncrypted": { + "__bytes__": 
"H4sIAAAAAAAAE8VaCVwUdd8HBFxWhHUBBUQcERWPWfY+UIM9ZjlESEHUVGSPgR3dqz0QO0wU7yMNT1Q08T7AE0VRzMywzCPU9PFIzcw0HyvLLCvf/8zODrsDqM/79H5ePyYz39/3d/5//2uI4cNer2X4sHyiV2nZ66OtkzGd0eLUQ2aLA7VDdtRWjOlQCDw7mEy51WpEoSxCkurE9ChUzPNGLYWFqM0OaaBijQ1DHZMBANkwnQEYKHFAhajG4bQBmsMCGVCjFXLacXahxWbSOCCNWQ9ZbEUaM/YWcGhAMRsZBAqM6hxYMWqczIHSUBsKYbgLncVktaEG1GwHIsiI2R24N6BoRylPSUxmLu45xzHZiNrBiwF1haLRWopJO3G5mMOIxnGYzDRUo8fMRa2zSCHOy3FqDS+i2ik5zlZY9JNb52mBBGcMtZgtdqtGh+oJTivkuGYKrqC2mB2QmiibA48CaAAS+Ku1GF02OM0Y5tAYMR1mR70lhAPw02nWozYjZkb1dEmOw4ZNRB0Gm8VZZKALDViRwQj+cwCzmBmyOm2gC1ojOTxYWqOzBUcDUItuIvSmEww2XYjnM8AjgwEe4Q6A7J4B4gA9KMw8cQDRV+CF7icbdIqN7D2ihjlOK+hGnQ2zOjwbpdABaIAL+s1oMRMFNUP2Zm6S5wvRG69sQ9tsgVLK1NiKABnvWurFYtbhEZNvjkmY16vB5nrPMWmMRkrV/Ubqul/dytS7W1vptDssJgjvLDttEECsSosJ00E5GrMdb1eNwwANR+1OI07l9+c/aqhnMvPAjNdoiVlmxW1Cg6F4noj5plNjdmBgJRgMCZlMhwUMJXh0MfpBlNQtIkwhAHZgFjOTib+X5PNhfgkMVhpCmAkmup0Y1GzXYkFSFU6QED7w+EpgZ6oxG1gQMAdqcj0OgApbR14meEX5f0b7X7H/G6V/QNfLBFOlsRvIYnvWOgfVWfD51vzspeaG7K3QHAbM1hrk7TfLadKCLYD0zMxxaGz49IXUNtC9vH9k2D2DU7eEPFPMbY6ZFgr//zMUpQHVTaTPAyjBbrA4wf6gRcEqqsMpqL5vy9hegfcf0z2C9uRT7JzW5R72mtNr1QAzHewLZge5FgTR1i9cBFqGXAuD2hC710Y52ELMJoDRdyocxzsPLWyxi7plOtyUrS2pDd+b6MIJYOXFCjGw/0I56JtODDJpSjATCumdOszkBGcwjRXDrUJ6sHnYUNObThRsGOAA4Hq3u35iWqfHEwdKt9qdJshqsdksEKqBCsHRwYZyoGFOXKzXmNxejBqnHl+DARn4Bgc3i9FpBXUk3jVAIw8FFjQgM4vZjuILs9MGoSWoTYe5qg0GxKLHHBA4nrk5IDinDdc0AtyGWlGwMpv1eHygDJjdjpksIEKkBNKjRnCyAwKNCXVAZhRPjXjUOa2YHneAQiaLEZgBkXlE6mVHRZYBLUbNGKFrtGogM0jRDLYEDYjDlZIdhGgjgraD/cdSjOHD7hWyV6xacITUgMOXmQN2Pw+OGbXrMLz8GjvY9s1EPcADOPcCPWCacArGFYRO1tKOW7CBocClFi3414aC44yLrnHgSYNDCXh04t2NWe0gdMysw/TAC0ivOVeo0FmEV7qlWTdEjnNzfuDADDZuvDJ4PVCThQPJHUTdzaCokAkzYyYNhIIuIZqW2MCJQzOY3Tzo9tRyiF/Chwjctf0SEj4hEZQIWkgE3hJoEgbOCoUeJ1WCypQ7HBqdAZ9jrq18KKrHNMAvAhZ3vR7MlNcNFofFRQb3CygTnOTIN0qTeM+z4AcJE0jMJc5Tamx60gkT13LZz9UUgXNJ2mQrcXacaGcaHA6rPSkxcdKkSRzXjYcD7hOJrgsHXgjAB1Zc55EccMjUGcjYo1lMf/y+BG5MxM8E32g2M4Dhy1oSzfqYATBfgPlFdyKwr+JYHQgoOaEdCd2OY4WSLH8S+imOFUmyAkhjc3qy5nQiaYEAI55YQQQigXwT2pNYEMu
XwIIAxiAxJokxABYUHUZgHVjVDEqZCUDcyZ4oVgEFdiDBD6JJ9dcAGEyC5W5QAMCOJLg7igT5AAwhwV1ucBAAQ6PDCbCOwfo2hPLEIqmVXUhqKAA7keBaN4gz2ST4cSRrOAWGkeDxSA9mOAmu68JSUGAECX7kyexMgrfdjoIB2IUEv+lCDg7OjCTB5VEsPwqMIsHVUax2FBjtDt6dfBwAu5LglihWAAH2AGAMCe70tNmNBFe61TsDMJYEK9xgBAC7k+AqNxgOQIgEl7nBMAD2IMGlUR65x5FgjSfY0106TzCeBH/uwsqmwF4E6Icz2xNgdwD2JsHNnup9ojszGWB2PAd/QsFMoZ7jkphdmSzVg6KbK2YiybNzuYeW//J4GLsd4wMh24+RggtPRs5Je6dEcWYfEhX9y6Ep+UC4JQoII/vFMmLY4QyfDNYZ/z2VTX+oL1b4v/HFrsIakaH67sNTgf1iGL5sdgbrR/GkMZfis5dXLuc/Ufp89C4pjWVEuLT7rPmGbdowJOqgruLDdZsW5pPybox1hPoe5oKxI/sqM5Z3Hff2so0hl0kxxAh2qbcf3WVZ04eq/Hm5UKLS+uDPBF/KAaFf+a+TU6atSwk4+n5X0fHVzGke8lh2eKRvSwYpj2Mw2J0ZvhBg8L8tLbixO2tow6quy3gDIyIoGxCju8tGxO2eoxduVq0+VOB/OG+t6tcEH4rh2wbDl/LTAfjxw/0UT6saF1yTMny1sH7kz4cm5nn46emyckqd//XzOemBpb6Dj+94nlhM+YllxLXOoLyw2JEM/0ginxn3fxrX+bfM8e+XfSO6905UEj2b+oppFXNYSscCpnNKL8Nh35bZtGDQqt6mHAJxEON26u1hxpXfKg/svjHqPKbYk0e3sCsj77F5Tnpdw5XwPhXdL8/2sBDmiqFNRqw7SjqDshDVhgW//9YC99VjiGldLn11C/Evklt9X8FC3AstBLzUwqtk2TnSNwUwLs5+umHMoQx0fx//BwWLfj/jvUh8W7Mru+LCkNQ149vzI//KHkZKExiFbv3XA4b9Ne9pVq89C9cmzD/wZHdCO66vu1ZWP6Iz22B69G5nlg8e7Q9Q/Z2zPVLTp/2akxd16zWVxxzpzCAYm4RR5/b3VW1d+PBwWQT8/Rse3Udm/PuaI3V9QuSDV3yWd13f6Y9yj/kcjK8bOGfwtaO2vRdUE+b9deTewKu9uS2t1DP2if6+Jn996oOHHTtkrJ3owRjQBqN5Lga3zkh5qZeU5liZ7lijLmzrxpyrOFnbKef1GyWB41vGmn0mbG1okXLt3PZx0/sk7ZjqwejXBsMda19GUOuMlOfuP6/ujlwg2pR7DMDY2/Wpt+aqO9ZnDT4x4ol1Ycuk0tY0XmySIePLP72WFX12Fpti9HGXl86IDojw8SmQe6y8bZh6qSsfylXHF7iy/rOuQl7gaqq85UrfZoHImdtmKB3dg/D6ev3AS7uyI2beuDoqCy7bQ0/m6b8LPntSnZ1cfR7utPD4+Yf0qtDlRKgprVSlDUMvdxTyAkfcf9JR6AscQf+co1i8dAwg7xr+JCNvSlq7ReMW19z6aU0nuv5fnb9bts+elnqow66RyDHNDFKeyOjaujy6I7MD8fEZ//Y8NKdlwG0YJKcsXeoxZalu+egTbWlBwtBnGwvWbRu3clR/+rQvGXDx/afd5NG1UT2G7b24YaTHlG7niqEFwx3lFHeULRnascx4KU+o4st4clisVClgIVcth6UIIoSFCrGMJ1ILJFylmi0E91KOBv8dG4e4nXIwM/7rD/zrkYa6C3N0GqPOadQ4wOUf/y5Pn1NtRhjTeo5UBZiuYb3RsOl+4Q3FuSODe75zoChsrbc2d/rSAVsLs9Vz+l/a2Bgju0ZJO7xQGvpCaeQLpUEvkL7VelR4xUVKqUwhEPJgpVAlhYVSOR9W8HhCWC7kIWoxX8xVclX/VcVfVpEXSWPxPQuv9rpeGes/WZB2fgbrApN197U4Sts
P1377y25FR83q3vWdJGFV+i+6ktLpvi7jdLEWZcYjAq4EUUq4sErEU8FCBEFghVqshuUKqUQtk0nUKr6SPfg/S7vIprEa0BIrSN+OWczeI9NGjDGth0hKR7q7tUUKEmY8n6+QiQQKBJZI1GJYKFfKYYVYLIKlIj4ikImlIrlKyg5tTsGqsaI2+jSgG6ZP9TblEKOL6xLRqW7SlhsJ2SfKOv5ro94/6FO6hSnX8iduPZZ2b+f5R4JB7WsWe1gIdS846Veehhc0pQ9dvhKLO5b8d7zHtSySoXcdGGP/nP/BVB/kwmz5iGXFk2dbSU48g82OdnHwa9WQXzrvXPSRXLflmf/2fzvqUcpSFG4JcPzA0TM8b//CInVZ6WezGp+PCR5CcSCS0y6Dlfh5cMDgGeptVWcDe88vXLyQ4vQhOf4ZrLoOyanlkUMHLpm/a8owv+o5FIdDcgIyWI0NlbHqxrQ/Nuut1cytf+RRHBHJCQSnqPWP1mv7ZH2zePjkolEQdwnFGURy2oOrWk3O2Y8HDDm2u5q9f+ajubOo7JVU9owMlqpSLHaOkz/aqtWzj939nTYF12lPfb+Mq8xcNpl5cdWEgnYeh1hyHHYw4mLvhacsm1H9u+Pj0ujlFAO/HqM4o+rrW736F8h71T0OSh13ScnxskEwPqmdYAoYiGxfekJy9uf9yk1UNmEgG9Q1Sl8mcrP/7p6OrAmedPLzFfEPKU54M6d6++jHv7ZPqd8tuzotLKL0Smt2tvPLp3TWZfNnzR1eHvjGCB3FiSY5YLSdat22bR0Q8YFz6tenb9Vfol16kkZf3y0fOmXR7ptse9ONRiqfEHdNuNciokcuTMldPfaH/UMV51fQt9vkro4jFySpgzc2zDp4c0Wm1OOS3ZnhhzO6xPlvubpEcXDRVeUz+8aH+V7dXejq7hF+62YiI1N6LGra6FMg3GXy6u5Cd3dPy5lT/XRPZkZdRQjnr68nL6FYURQLZOy3emV/qEz57ZwjV0ctiRu+j2JFe7JMRy8Y004rv6pdvPSIdfqlBIoV5unxMFww8sjN7Ioj52PCLvWaPtTjfE/Ffv/esNAN99JSNvSa2v+H9g5na/nNmpRbevbT7Lc2lW+7ciGiHPMehS01OxPLvsjm7d1xsb9BHqBsWcM7Iv784tOZkVvNNy4G/94H9vYR6SsFGe2ofbJItTIj8PDxP2+uqheXURy2i4PnMyc8YClnYVZEaYHw9Gvwndte60OhqzIVK4cu/q5O8cnScuHxhDM53l1FckSNBciJONXcBYt67ek088NFXt1J+qrVTf2g6re04FVNT6NTQxZv8ZorhXhWWZ83je0QntW05fH4E1LH3Ode841gzC/+48q8Eynb91a+N35y+fLb3nXbpb22bmSTInfbEQ329V3tNC99onu/rnHca9ySeWxj7i3NnnVpEMmQMnqwOYwifGz6hTNZO3bd/a33pKxBsxeOTg083iBlgXU9e/Wq+5yi1JWl9dq+2NHet0jNJEYCm+vShAhd0dVBn0/YoNYf3fSr4GbPld/humUJN34aKEImT712u/2qz7VvULqJlK4frvvk2oUkZHHa4zVjgr6aNWDWVVy3JnBk/uYqeXlZY4+Di89tiaV0Od66lnUhJfkh2W/u6pTcwIl97yoLP/quLv7+jFj19vbaESfKu2nPUNnGeWa74v338hX3kbtr9E7pzrci7uNezy/YP/xRXSZyKMUc73MgfjCl2d1Tc2S/35vSS7L3NWRMqg6plAfgPldc7zwu/GFW3f7Plg9ZVFOKeI/QvvmREz/dr5659toPH9cWcK9Sqwf56bTvn9M+21ySwW/Qfh/hP/LjA5Q82bW6iEaITnIvq35d2e67xmdQTxM1wkz3Z5IrjKthqVfSpm5ekNzraOyl7S0/tdzsB4ccu6FecUDS+H5v5oztlA/y85dsXOHYqmcZyOHS68px7ORMykI3YIHooopR7SreDkhdVP+u/r07UHkhfQ1cOmDVd2t
+S6+s6lW8OC9742nPT08sYv5mz4diS+/LL9fZNt/hP/xxLN3CA/OePkkXUg/Mvz4rsMm4TkbKezCWBwAT7XDK8l8S1+5VIj9OK/bdJKwsGN8y0RW+OyPndBzSWH9KlDRr2WgJxWC4im082ViGnVUcaqgKrMwJadedfo8+e+GLuYdOKM9NY2RtqWdVjaD0w9zT6T3JJ8XBWNbU+pyORfK/pru/VY5zp7FvapW24mza03m7kZ2ioH6N2sHMeJVarOApxXwYESECWMhTSWCFRMUDJ2yuQC5QKUVKLo8dQT9qOvBfFlJV8mvdfMsA+yyadzo/J/X63FTNR9N6p8TTA/ytZsYsszNzf6Xq0c6vn5d2wwOUiBRquVIsgaVSMbhqyfhcWCpUIbBSzlfL5OCkLASXrlcLkG6eCrCnO8DjGVXfpHdP0234e/XZrzZdr/AegXM7dtRcflcl31acL8yObjhGD58ux8PnImKFgM8VwDxwoIeFEj64vMgUSlgtE4C/MqFYLVe9LPw2zHtfM9qQUidhZl2vQfW7M0+VLqjM23v1wROvL11ki5pPX09+kJd9uWLrg4QfizhzKAZ1Frb2zS4/O2bIkbreZZt+OvZoFcnIccfYLSjQZ8hM1bczG551TU87WqwVMOPB1ViiEsskMI+n5sFCoUgIrgLgEidQixGBkitH1Fwlu4PVqTViOs4EK1pET5xutGXgm98bWXfo0RDe5i1raw5c0+ZTjCB34NWmEblNw9JDDsyYnpH029LHJGO428eepLWBa9/KRlZ8j859ErBms5YP7vsqMF5yCbi0cNV8WKiSSWGpQqSEVWoBIhBxJSAvOZtJBu60Gelx0222jHvQyYVHJx9TpU+7kSQ9NnfNKopBbZE9/NcnnhSkyjfIa03mrfcPe/QcGPSDEW9c3bUno6gs9GJK9198TmqTmfFKhUwhlyFyWKJQc0HUPC4McpDCEqFILlBLZOBuKWR3od28OHqLjmPVF3o3PN18y/BuXPn+55q4tLp1uRMv94/80r0DjHGXoPDPy6f3liqDt85qZxiXsj1GOxDcbhGEy5WJEHCfV4MZLefLYIVQCaIU81RqmVSo5qoQdlhzgCahBtY49ZiFXl+68VeUQ4xA6gCiXXT8eK3qg82po87kH3nm7ptc3AK+8R669PmYptEZ1aUDDmb9mMa5oxUy44UKiUwl5CthvoKnBuGr+LBMwgfLplSqEkmVMhlPKWQHk11RrNPY9J5xtWbV4/sU2RdtMkJdk3nzmH3b0nXKU2vfurNm3LxZe+jX2tnMd2Kt9SmnVpUzxgfEbNxJ+10ikfsMRu3jBbVZvFXJnyYfXFbJoTiD3Ftv0tzhAfPeSQ34IKbJPHhKOTcj5kX/GwZ9taVr00enDTnECHBHeKxwwtPQu0Nj1h898uiAfYn7WO1wW5ham3fFHowklg1/9v6+n1mTtLlgu0B4QrWEK4W5iBLsZ2KuBJbJBSJYLZGohGpEyBMrpewBr/TpxKCxGxyaFqsR3eurRCWWCxRcBcKH+WKpCiyBaiUs40q4sEAgUCsUUq5MIBb8n0T1MjnEaE995hB/VnP2y8zgior42rDc3I50C3Of3zpX9accWeL/5pOY49cm0r8C0eX4VyCRWKmSyhEw1ZUKMNWFQgUsR5QILFepVCKBUsyTyhVtfQV6mWNyjWpd+j9IRmbgJzEAAA==" + }, + "Folder": "recordName='DefaultFolder-CloudKit' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_ANONYMIZED_USER_ID', zoneType='REGULAR_CUSTOM_ZONE')", + "SnippetEncrypted": { + "__bytes__": "QXBwbGUgTm90ZXMgR3VpZGUgdjE=" + } + } + }, + "attachments": [ + { + 
"recordName": "INLINEATTACHMENT_RECORD_0001", + "recordType": "InlineAttachment", + "fields": { + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLmlubGluZXRleHRhdHRhY2htZW50LmNhbGN1bGF0ZXJlc3VsdA==" + }, + "CreationDate": "2025-08-17 16:08:44.416000+00:00", + "AltTextEncrypted": { + "__bytes__": "4oCJPeKAiTQ=" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "MinimumSupportedNotesVersion": 15, + "Deleted": 0, + "TokenContentIdentifierEncrypted": { + "__bytes__": "SUNJbmxpbmVBdHRhY2htZW50Q2FsY3VsYXRlU3RhdGVWYWxpZExUUg==" + } + } + }, + { + "recordName": "INLINEATTACHMENT_RECORD_0002", + "recordType": "InlineAttachment", + "fields": { + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLmlubGluZXRleHRhdHRhY2htZW50LmNhbGN1bGF0ZXJlc3VsdA==" + }, + "CreationDate": "2025-09-22 17:17:15.360000+00:00", + "AltTextEncrypted": { + "__bytes__": "4oCJPeKAiSQ2MC4wMA==" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "MinimumSupportedNotesVersion": 15, + "Deleted": 0, + "TokenContentIdentifierEncrypted": { + "__bytes__": "SUNJbmxpbmVBdHRhY2htZW50Q2FsY3VsYXRlU3RhdGVWYWxpZExUUg==" + } + } + }, + { + "recordName": "INLINEATTACHMENT_RECORD_0003", + "recordType": "InlineAttachment", + "fields": { + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLmlubGluZXRleHRhdHRhY2htZW50LmNhbGN1bGF0ZWdyYXBoZXhwcmVzc2lvbg==" + }, + "CreationDate": "2025-09-22 18:04:17.705000+00:00", + "AltTextEncrypted": { + "__bytes__": "eeKAiT3igIk=" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "MinimumSupportedNotesVersion": 15, + "Deleted": 0, + "TokenContentIdentifierEncrypted": { + "__bytes__": 
"SUNJbmxpbmVBdHRhY2htZW50Q2FsY3VsYXRlU3RhdGVWYWxpZExUUg==" + } + } + }, + { + "recordName": "ATTACHMENT_RECORD_0001", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-09-22 21:26:36.131000+00:00", + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLnBhcGVy" + }, + "CreationDate": "2025-09-22 18:04:17.672000+00:00", + "CroppingQuadTopRightX": 1.0, + "PaperAssets": [], + "PaperDatabase": "fileChecksum='SANITIZED_FILE_0001' referenceChecksum='SANITIZED_REF_0001' wrappingKey='SANITIZED_WRAP_0001==' downloadURL='https://example.com/icloud-assets/asset-0001/${f}' size=1830", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": "CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "MinimumSupportedNotesVersion": 14, + "PreviewWidths": [768.0, 768.0], + "UTI": "com.apple.paper", + "FallbackImage": "fileChecksum='SANITIZED_FILE_0002' referenceChecksum='SANITIZED_REF_0002' wrappingKey='SANITIZED_WRAP_0002==' downloadURL='https://example.com/icloud-assets/asset-0002/${f}' size=139289", + 
"PreviewImages": [ + "fileChecksum='SANITIZED_FILE_0003' referenceChecksum='SANITIZED_REF_0003' wrappingKey='SANITIZED_WRAP_0003==' downloadURL='https://example.com/icloud-assets/asset-0003/${f}' size=58633", + "fileChecksum='SANITIZED_FILE_0004' referenceChecksum='SANITIZED_REF_0004' wrappingKey='SANITIZED_WRAP_0004==' downloadURL='https://example.com/icloud-assets/asset-0004/${f}' size=47751" + ], + "SummaryEncrypted": { + "__bytes__": "" + }, + "CroppingQuadTopLeftY": 1.0, + "Height": 364.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "Width": 768.0, + "PreviewHeights": [768.0, 768.0], + "CroppingQuadBottomRightX": 1.0, + "PreviewScales": [1.0, 1.0], + "CroppingQuadBottomRightY": 0.0, + "MetadataData": { + "__bytes__": "eyJwYXBlckNvbnRlbnRCb3VuZHNXaWR0aEtleSI6MzAwLCJoYXNOZXdJbmtzS2V5Ijp0cnVlLCJwYXBlckNvbnRlbnRCb3VuZHNPcmlnaW5ZS2V5IjowLCJwYXBlckNvbnRlbnRCb3VuZHNIZWlnaHRLZXkiOjMwMCwiaGFzRW5oYW5jZWRDYW52YXNLZXkiOnRydWUsInBhcGVyQ29udGVudEJvdW5kc09yaWdpblhLZXkiOjIzNH0=" + }, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "PreviewScaleWhenDrawings": [1, 1], + "PreviewAppearances": [1, 0], + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-09-22 21:26:35.867000+00:00" + } + }, + { + "recordName": "ATTACHMENT_RECORD_0002", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLnRhYmxl" + }, + "CreationDate": "2025-08-06 14:02:24.758000+00:00", + "CroppingQuadTopRightX": 1.0, + "FallbackSubtitleMac": "Upgrade macOS to view this table.", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": 
"CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "FallbackSubtitleIOS": "Upgrade iOS to view this table.", + "MinimumSupportedNotesVersion": 14, + "UTI": "com.apple.notes.table", + "MergeableDataEncrypted": { + "__bytes__": 
"H4sIAAAAAAAAE+NgEPrDz8EgwCD1lZ+LjYsFyGQUYpAKyPLjEJDS4mAQUlNSMYACXSwEDEixcTAKMRkwAmlmIM0DpFmAND+QZgXSQlJqRipcAlxMBuxAHpMU1CawCDdQhAMmAlcH0s2Moo4NKMIKV5celCrE75yfU5qbp2CkoxCUX65gJCUAlgT5BkxrMIJFGIEi/FIgmlGDSUqMiwMo9x8I+IHq4GwtMQ5+ISEvgSJORo3bTM6NixwyppvqP7OX4sni4gA6mINJiAm7tYY0t5YJhceMEkacGGHEhTWMDKkYRrfLhB7+5fFyXPpJ6Hdl5pOlUplB6egWURYqEkCLRCQYsViFHBIsUucZmxhPM3ItYuSq4vIRYnu/fw8Q4bGZCWwzAx6bmTgYQVhIBJj2BQIPynz293c9tW32h6tCK0ISgKKMQgJvjGQumEX6Lt88x3lR4rTl7EKQ2OAFhj0bSmzwAUVYYCJIqngxVPHB4wzZf6woPDZa+1bN8fCnySaOCsteBv37enGDCMS3bxVe7Vd85x468VrFnffH3J9D/SEAdDU3ij8EgSLsaL4FqRLAUCWI1bfsKDwOKR4uLqhZTAbCUrpZ2hyMUuocbELKSorOQSGJSTmpkBTnklmUmlySmZ/nk5pWEpIflJmeUaLEkZmSmleSWVKpxJ2cmpMDUVmsxBka6unimZeSWqHEllwETKdAoeQimKxYchE2c5VYilNz0rTEk/Nz9RILCnJS9ZyDXEL0/IL9SnOTUouwSASXFGXmpWuJYkiArNcSQxMG+qYUyNHSwBAPSk3PLC5JLfItzSnJDEvMKU31SU0sLtFSJkIlhu1Aa4pSU7UksQr75adg6vALdkksScQunKpliBDOyy9JLdbzdHbOyS9NCa7MSwb63j8pCxh8jsAQLANGg2sZMDqQfQjT4gb0D0hHcGoOJLSDS0CGK2GqDAlxLE3JzA9KTc4vSgEFrzZhNXpuRYnpuSCrVbEpDilKzCtOLsosKAlOhSgTR1cGTWqYEkD9IAkjAZfX6Q9mtLnad4QY7Jr+5XOgkcDN6aLRmxrc1/X0fOEwYN1YbCQg6j3x8eOn3i9Wmyx+sW7vpl4jgep6gdTOXBf2fZOO3OK/vvyikcBvsVmrj2a43tx8OeHCidboECOM4scIo+gxwsi0RhgZ1kqGS4oLw5VCTMAilomDCQDCcNMhAQgAAA==" + }, + "SummaryEncrypted": { + "__bytes__": "Q29sdW1uIDEsIFJvdyAxCkNvbHVtbiAyLCBSb3cgMQpDb2x1bW4gMSwgUm93IDIKQ29sdW1uIDIsIFJvdyAyCg==" + }, + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "CroppingQuadBottomRightX": 1.0, + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-08-06 14:03:14.986000+00:00", + "FallbackTitle": "Table" + } + }, + { + 
"recordName": "ATTACHMENT_RECORD_0003", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLnRhYmxl" + }, + "CreationDate": "2025-08-06 14:02:24.758000+00:00", + "CroppingQuadTopRightX": 1.0, + "FallbackSubtitleMac": "Upgrade macOS to view this table.", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": "CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "FallbackSubtitleIOS": "Upgrade iOS to view this table.", + "MinimumSupportedNotesVersion": 14, + "UTI": "com.apple.notes.table", + "MergeableDataEncrypted": { + "__bytes__": 
"H4sIAAAAAAAAE71VfWgbZRjvXdL0zTUu12uTtLepWeZHmtiYXDcZIrQ16fpBl2kuG/qHSJq8zS5c7url0q0wC2uH2x8bZX5Q/3AIsZONWf1DWNmqVMpAUIQp84sy8Gv+M3S6Fj8YMt98mut7rAGrR5Ln3t/zPO/vnuf33hNQx9y2gzq6jv3DTpkoI7olmDr2iVQY0KwH1DEPuO7zl64OnZ/yxZoAwZB+AlkDsi3IGpFtRbYe2S3sTu4RiqZIvxmtSLbEVEA2IYTSIM0IsZaRSmZ+P4MmzoSQeg0CENJQyUxGIGMNymI2LTm5h5wR+YAzwNIFZ77ignUTBYRAiJUtWDfJ2imAfLfRZUVxlXuPHVgZZpBWzIR7mQweznXvn9nx8I9drCVFAVQUIBlSn5b7z2lJPdrOdWmJf0lr0KyMGrEaMbEsmFh3VcQSIsnK4wc2QqxW9PgtrcQgvTzGfPuXZbDn9E3m1rjww2lNpwIbKFAtRBsiCUZULUK9RgQaE6EJE4HRFaGzVhHu9MAVET5pmpZbv+g7Pp27cujS0tRjmt501ipCDb2phWhDRMCIqkUwsSfJSeIESX1GUDMExTPmG4vvFT934Dase9JIQJS/TAsaz3S/SM8arg8tHL+Rmznz8VEJoQRDZ1dzv39kCl+9YHijPbViOoRQkqGXDm7P7TgfJidnM96tI6vbmeIxsaEjYNQcCjtCTBrEgRBQRqrybFieHctzVI5XdYcaNCugWZn/r+5NdP0SmeJ7waT7KeJaszdX7N7Cy6/ZxIuh79+dUE5eYz5vKnYv15D4df75/q/mfz5z65Rt7plSF9pQhWZNzSxCNmmQzQhpXtO9fF4blsdieZt1u0dpVo2alYW1UFSJi/TfzXakvIBgHwQmZptrazASjQ2LsPg+hAQFxlVBlobgiBqVI0Jyv+oCQgJKqqCOuxrjUBSLkRmXee/egdCAlIAHXaa4gt4iBMWVstceV/T2dRkzUBzxOOJy2hcbHRWhLxgJRX1hPpxND0NFx8GriiAlPTbMkaf32NfAqJosWnjcGB6BSSGjQmV3VlSFfTExC4dgLKN6ttUQibEjGgVCT5suHJYTeEaYD8XUmD4MPYF/YElWYcY3EAyKcjbBj0txVP2e4RRqXw/q4BiSoXcMyVFdYTllF6onn8FDsdhtXs1v7sIjo9GebEKQIzAuK4l8e73rx/h2KbFkOk99v15wVIlJmbgijKo8LIY51oaVjhruQPl5B0eHrie/efWF3q5jUf/FmdWVJzn6iPfenue6eucmE+3gz+7Lpzj6u5cuvP3h9OD8WYc5PT/xwQBHt/ctHDn2Yt/RqX33LPKXrpg5emLngavAuZt+JxyYW/k6+xZHkx3Ks28muvecOOe9fPjmT0sc/eXrT0uLs4+ff8X5/m/9y5/SHDY6OWxsctjI5LDhwGFDhMNGxaNbKJbCqmVI9EdCAvJvXzvg9oQLAAA=" + }, + "SummaryEncrypted": { + "__bytes__": "Q29sdW1uIDEsIFJvdyAxCkNvbHVtbiAyLCBSb3cgMQpDb2x1bW4gMywgUm93IDEKQ29sdW1uIDEsIFJvdyAyCkNvbHVtbiAyLCBSb3cgMgpDb2x1bW4gMywgUm93IDIKQ29sdW1uIDEsIFJvdyAzCkNvbHVtbiAyLCBSb3cgMwpDb2x1bW4gMywgUm93IDMK" + }, + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "CroppingQuadBottomRightX": 1.0, + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + 
"CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-08-06 14:03:56.506000+00:00", + "FallbackTitle": "Table" + } + }, + { + "recordName": "ATTACHMENT_RECORD_0004", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLnRhYmxl" + }, + "CreationDate": "2025-08-06 14:02:24.758000+00:00", + "CroppingQuadTopRightX": 1.0, + "FallbackSubtitleMac": "Upgrade macOS to view this table.", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": "CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "FallbackSubtitleIOS": "Upgrade iOS to view this table.", + "MinimumSupportedNotesVersion": 14, + "UTI": "com.apple.notes.table", + "MergeableDataEncrypted": { + "__bytes__": 
"H4sIAAAAAAAAE71XDUwTVxzvuxY8jgLn49MT5CgotSKD82NkKsVRWWGI81pd4pYspT3g4NrD69VhtmWow2DmxtiWqItmymZMhmaRqBh1QIwf0+zDGKNZ1LjppstMpvMj0Rl1rx8UjisrJJuXtu/e7977v///93+//7viGrg2B9eQGuoRTcQSOnQLoIZ6pbEGJykTroHTDHlFoWtmhJ/Bi4rFAcSKAGq1qM1ArQ61WaiNQW0OVcLMJUgCK4pHPYwKrRRAJiJEr0DSEQIHEaqKsQZQvz2tYlwsQmIUCI6QCQqEQEhc2NYAYL8BcEa5KPjcHnpWAc2Kb9Kz6Kutn9ANfH2DgL4y56JFyeGp52iZa5EpMjDVz1CgNYIAAhCSSAVaIxZCEtEC/hYYtSEkiaRCY3RUGoGj+U/RlYRshe9Nhfh0CKvI7yd2iBnnX9rY0XXu7WNH181nUxI0mlZz6vbPs81ZO7bTC6b6u81aSt9I4IhAHIMYWu9jwHYAmKsMqDgQkM/j4iSB96B4ogSSNCKQJBKGAxnNbQrPgSkZQO042gPDXcSo44A9CmCN0sXZARdrRcFVQPOyQ+CdvJdD90NOF9BeWeKbOLlBEn31DQW0w+OiawVf1LRUh3zHRvV9Nl4diXKjtgiUAES8n+7Utvd05iw/50HiYxRRaak2wK4F0KSMiglE5RElt0NAobhFF1/HR+ffGNXjDNwYmW2FVzpFL0ahnASVchJVyklSKYcMK6cPsIcBnB6KlxlNOc28p+m/0E3GGHST70+iFAeMF7HyNV1lDZvmPHfdPKiby5deL83q7+srDaYPozoB+yGAecoAgkoZ2oAB1+lxSSU5qlQycEMkT5GNiKTOVpE6ll3/f5MaWRO7APslgNbxa9svZoW+o8VXEVUjmXhFRJoDqlYoI1ahjGSVMlJUykhVKSNtlDOleFRl+KRm4dmeKRdXwV8e66sW7rwDH63mf92J0jjQbzan7t+3rzTr7h0plEZAbQTsBgCzlCEUh9M4/vNDP4bzY3Kwoo100jiy1hSPJgs375GfqSwi8HnyxInS1BvX68xZ167WhvjUUesA2wrgNGUAwcNhXJs+L+qmp/C8yDSO2PITqLPYWvADRqzBiH2AWAH1t/qPhD//4oIuTOBoLmA4GP6FKeitkUy9c2mbuafauuPbpb1n9vyWjVAAyZ5DD5sHspfYOnsXJ297fExCKAbJyh2N/a9VvXy7e87tK4fn9HchVAvJ5c9vTV45t6zi8IG/8tvZjZ0wqNhJSHs6hRophBAKZDJCcAWSiZDYQWSYpUkqS5TK0mSVpcyw9odTjCt6cYoeoejFU50oGR9gxFlAbAKEDcaNJRPa8WQimIWnR3vfODnLemp3/MV3fjddyQ1m4Van9dUjD8uavvpzXk/mk+6DwSwc7Fh8feX+F62f9l7r+6za2g+D7+tTULwTFQxkIyRdgdAIiQ+zOzRvimpetmoeHZFLvaKXoOglUnqCCK2FFRmomY0zcEDl47Ew15BTztodtQIXVJ6FlzinzIueaq5Otousv24YcN7FeWReXm2Id3KCEBzpNcQtW1ZpqUQnVIsh1ikhvSLIKQ0+TXNKkewadF5OqDOlO0V3oaMZlffCctZiL6yx1fjctZwU4YENyd9Tb0pVPfAvb0obAaNofKhjMqpwlqvnvTInLfYJMr/cgV4QqjmHVzbljmGkanW0jMRxpkkR4RrRpZ5RY7M4ZEdkmDMVD8EeUea8hZXl5YLoc9lWe5wo+iW1jYi+hYjBVSgNi1ahdAyPcHBKBYrHP8PGCUG2bbLfuEE90m5f6HPxIss5Rcnlp3dG9DGFFZKj3u1femqkwXb0D9DrlPhm2cYFh6WPHBbaauoHaL7/AUNabtb/vHn9InO7vejQpnt3lzJky0D8wBmT5cGWzF0JD9p++oIhvSvTu2+WVL3fdw/b83day1aG/HrFabcMq3r2dlfpXecPSAy5/uHddz+aZ9nbuvzCQe200/c
Z0vDdDft8l4Voa9rwo7A3X8OQTzanP9py33Jh6/mErlPHF5xjyMIrJ9qtJ8t2r7v9R8lbiTWXGVWhZVQFmVGVXUZVyhlVAWFUhYZRlZMXMgmKUDECMfRXDMOxfwDPumfRhxAAAA==" + }, + "SummaryEncrypted": { + "__bytes__": "Q29sdW1uIDEsIFJvdyAxIOKAkyBib2xkIHRleHQKQ29sdW1uIDIsIFJvdyAxIOKAkyBpdGFsaWNpc2VkIHRleHQgCkNvbHVtbiAzLCBSb3cgMSDigJMgdW5kZXJsaW5lZCB0ZXh0CkNvbHVtbiAxLCBSb3cgMiDigJMgc3RyaWtldGhyb3VnaCB0ZXh0CkNvbHVtbiAzLCBSb3cgNCDigJMgYm9sZCwgaXRhbGljaXNlZCwgdW5kZXJsaW5lZCwgYW5kIHN0cmlrZXRocm91Z2ggdGV4dApDb2x1bW4gMywgUm93IDIg4oCTIG5vcm1hbCB1bm1vZGlmaWVkIHRleHQKQ29sdW1uIDEsIFJvdyAzIOKAkyBoaWdobGlnaHRlZCBwdXJwbGUgdGV4dApDb2x1bW4gMiwgUm93IDMg4oCTIGhpZ2hsaWdodGVkIHBpbmsgdGV4dApDb2x1bW4gMywgUm93IDMg4oCTIGhpZ2hsaWdodGVkIG9yYW5nZSB0ZXh0CkNvbHVtbiAxLCBSb3cgNCDigJMgaGlnaGxpZ2h0ZWQgbWludCB0ZXh0CkNvbHVtbiAyLCBSb3cgNCDigJMgaGlnaGxpZ2h0ZWQgYmx1ZSB0ZXh0CkNvbHVtbiAzLCBSb3cgNCDigJMgYm9sZCwgaXRhbGljaXNlZCwgdW5kZXJsaW5lZCwgc3RyaWtldGhyb3VnaCwgYW5kIGJsdWUgdGV4dAo=" + }, + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "CroppingQuadBottomRightX": 1.0, + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-08-08 19:49:09.245000+00:00", + "FallbackTitle": "Table" + } + }, + { + "recordName": "ATTACHMENT_RECORD_0005", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-08-02 21:05:43.556000+00:00", + "UTIEncrypted": { + "__bytes__": "cHVibGljLmpwZWc=" + }, + "CreationDate": "2025-08-02 01:23:53.521000+00:00", + "CroppingQuadTopRightX": 1.0, + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": 
"CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "MinimumSupportedNotesVersion": 14, + "PreviewWidths": [384.0, 192.0], + "UTI": "public.jpeg", + "PreviewImages": [ + "fileChecksum='SANITIZED_FILE_0005' referenceChecksum='SANITIZED_REF_0005' wrappingKey='SANITIZED_WRAP_0005==' downloadURL='https://example.com/icloud-assets/asset-0005/${f}' size=186990", + "fileChecksum='SANITIZED_FILE_0006' referenceChecksum='SANITIZED_REF_0006' wrappingKey='SANITIZED_WRAP_0006==' downloadURL='https://example.com/icloud-assets/asset-0006/${f}' size=51354" + ], + "SummaryEncrypted": { + "__bytes__": "R3Jhc3MgR3JlZW5zd2FyZCBMYXduIExhd25zIFBhc3R1cmUgUGFzdHVyZXMgUGxhaW4gUGxhaW5zIExhbmQgRHJ5IExhbmQgRHJ5IExhbmRzIExhbmRzIE91dGRvb3IgT3V0ZG9vcnMgT3V0c2lkZSBPdXRzaWRlcyBBbmltYWwgQW5pbWFscyBGYXVuYSBGYXVuYXMgQ2FuaW5lIENhbmluZXMgTWFtbWFsIE1hbW1hbHMgVGVycmllciBUZXJyaWVycyBEb2cgRG9nZ3kgRG9ncyBQb29jaCBQb29jaGVzIFB1cCBQdXBweSBQdXBzIEJpY2hvbiAg" + }, + "CroppingQuadTopLeftY": 1.0, + "Height": 1550.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + 
"ImageClassificationSummaryVersion": 4, + "Width": 1779.0, + "PreviewHeights": [335.0, 167.0], + "CroppingQuadBottomRightX": 1.0, + "PreviewScales": [1.0, 1.0], + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "Media": "recordName='MEDIA_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 1, + "OcrSummaryEncrypted": { + "__bytes__": "IA==" + }, + "ImageClassificationSummaryEncrypted": { + "__bytes__": "R3Jhc3MgR3JlZW5zd2FyZCBMYXduIExhd25zIFBhc3R1cmUgUGFzdHVyZXMgUGxhaW4gUGxhaW5zIExhbmQgRHJ5IExhbmQgRHJ5IExhbmRzIExhbmRzIE91dGRvb3IgT3V0ZG9vcnMgT3V0c2lkZSBPdXRzaWRlcyBBbmltYWwgQW5pbWFscyBGYXVuYSBGYXVuYXMgQ2FuaW5lIENhbmluZXMgTWFtbWFsIE1hbW1hbHMgVGVycmllciBUZXJyaWVycyBEb2cgRG9nZ3kgRG9ncyBQb29jaCBQb29jaGVzIFB1cCBQdXBweSBQdXBzIEJpY2hvbg==" + }, + "ImageFilterType": 0, + "PreviewScaleWhenDrawings": [1, 1], + "PreviewAppearances": [0, 0], + "TitleEncrypted": { + "__bytes__": "cHJveHktaW1hZ2UuanBn" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-08-02 01:23:53.521000+00:00", + "FileSize": 281925 + } + }, + { + "recordName": "ATTACHMENT_RECORD_0006", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-09-12 17:25:55.715000+00:00", + "UTIEncrypted": { + "__bytes__": "cHVibGljLnVybA==" + }, + "CreationDate": "2025-09-12 17:25:54.863000+00:00", + "CroppingQuadTopRightX": 1.0, + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": 
"CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "MinimumSupportedNotesVersion": 14, + "PreviewWidths": [1024.0], + "UTI": "public.url", + "PreviewImages": [ + "fileChecksum='SANITIZED_FILE_0007' referenceChecksum='SANITIZED_REF_0007' wrappingKey='SANITIZED_WRAP_0007==' downloadURL='https://example.com/icloud-assets/asset-0007/${f}' size=791692" + ], + "SummaryEncrypted": { + "__bytes__": "RGlzY29yZCBpcyBncmVhdCBmb3IgcGxheWluZyBnYW1lcyBhbmQgY2hpbGxpbmcgd2l0aCBmcmllbmRzLCBvciBldmVuIGJ1aWxkaW5nIGEgd29ybGR3aWRlIGNvbW11bml0eS4gQ3VzdG9taXplIHlvdXIgb3duIHNwYWNlIHRvIHRhbGssIHBsYXksIGFuZCBoYW5nIG91dC4=" + }, + "URLStringEncrypted": { + "__bytes__": "aHR0cHM6Ly9kaXNjb3JkLmNvbS9jaGFubmVscy8xMzgwMTAzMjI2NDg5MzA3MTQ3LzEzODAxMDMyMjcwMTc2NjI1ODc=" + }, + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "LinkPresentationMetadataEncrypted": { + "__bytes__": 
"YnBsaXN0MDDUAQIDBAUGBwpYJHZlcnNpb25ZJGFyY2hpdmVyVCR0b3BYJG9iamVjdHMSAAGGoF8QD05TS2V5ZWRBcmNoaXZlctEICVRyb290gAGvECILDDc9PkRISUpLTE1OT1BjZ2tsb3J2d3uAhIWGio+SlpicVSRudWxs3xAVDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8vMS8zNDU2WGl0ZW1UeXBlW29yaWdpbmFsVVJMVXRpdGxlV3N1bW1hcnlUaWNvblYkY2xhc3NWaW1hZ2VzXW9yaWdpbmFsVGl0bGVfEBFjb2xsYWJvcmF0aW9uVHlwZVd2ZXJzaW9uWHNpdGVOYW1lW3R3aXR0ZXJDYXJkXWltYWdlTWV0YWRhdGFcaXNJbmNvbXBsZXRlXxAPdXNlc0FjdGl2aXR5UHViV3Byb2R1Y3RfEBRpc0VuY29kZWRGb3JMb2NhbFVzZV8QFmNyZWF0b3JUd2l0dGVyVXNlcm5hbWVTVVJMXGljb25NZXRhZGF0YVVpY29uc4ALgAKAB4AJgA6AIYAegAgQABABgAqADYAXCAiAHwiADIAFgBOAHNM4Ejk6OzxXTlMuYmFzZVtOUy5yZWxhdGl2ZYAAgASAA18QRGh0dHBzOi8vZGlzY29yZC5jb20vY2hhbm5lbHMvMTM4MDEwMzIyNjQ4OTMwNzE0Ny8xMzgwMTAzMjI3MDE3NjYyNTg30j9AQUJaJGNsYXNzbmFtZVgkY2xhc3Nlc1VOU1VSTKJBQ1hOU09iamVjdNM4Ejk6O0eAAIAEgAZfEERodHRwczovL2Rpc2NvcmQuY29tL2NoYW5uZWxzLzEzODAxMDMyMjY0ODkzMDcxNDcvMTM4MDEwMzIyNzAxNzY2MjU4N28QIQBHAHIAbwB1AHAAIABDAGgAYQB0ACAAVABoAGEAdCAZAHMAIABBAGwAbAAgAEYAdQBuACAAJgAgAEcAYQBtAGUAc28QKwBEAGkAcwBjAG8AcgBkACAALQAgAEcAcgBvAHUAcAAgAEMAaABhAHQAIABUAGgAYQB0IBkAcwAgAEEAbABsACAARgB1AG4AIAAmACAARwBhAG0AZQBzXxCbRGlzY29yZCBpcyBncmVhdCBmb3IgcGxheWluZyBnYW1lcyBhbmQgY2hpbGxpbmcgd2l0aCBmcmllbmRzLCBvciBldmVuIGJ1aWxkaW5nIGEgd29ybGR3aWRlIGNvbW11bml0eS4gQ3VzdG9taXplIHlvdXIgb3duIHNwYWNlIHRvIHRhbGssIHBsYXksIGFuZCBoYW5nIG91dC5XRGlzY29yZFd3ZWJzaXRlWEBkaXNjb3JkXxATc3VtbWFyeV9sYXJnZV9pbWFnZdpRUlNUElVWV1hZL1tcXV5fYGFiKl8QFmhhc1NpbmdsZURvbWluYW50Q29sb3JfEBJkb21pbmFudENvbG9yLmJsdWVdZG9taW5hbnRDb2xvclRkYXRhWE1JTUVUeXBlXxATZG9taW5hbnRDb2xvci5ncmVlbl8QEWRvbWluYW50Q29sb3IucmVkXxATZG9taW5hbnRDb2xvci5hbHBoYVlpbWFnZVR5cGUIIz/u/v7+/v7/CYAPgBKAESM/2dnZ2dnZ2iM/1paWlpaWlyM/8AAAAAAAANJkEmVmV05TLmRhdGFPEV/1AAABAAQAEBAAAAEAIABoBAAARgAAACAgAAABACAAqBAAAK4EAAAwMAAAAQAgAKglAABWFQAAAAAAAAEAIAD3JAAA/joAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRYMvFkWJjxZFfa8WRX+vFkWPrxZFja8WRYmPFkWDIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxZFcK8WRXmvFkWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZFf/8WR
XmvFkVwoAAAAAAAAAAAAAAADxZFcK8WRYwvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVjC8WVXCgAAAAAAAAAA8WRXmvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV5oAAAAA8WRYMvFkV//yZVj/8mVY//R8cf/3qqP/8mVY//JlWP/yZVj/8mVY//eqo//zfHH/8mVY//JlWP/xZVj/8WVYMvFkV5jyZVj/8mVY//iyq//+/f3//ejm//nBu//71tP/+9bT//nBu//96Ob//v39//iyq//yZVj/8mVY//FlV5jxZFfa8mVY//JlWP/839z//v7+//76+v/72NX//v7+//7+/v/72NX//vr6//7+/v/839z/8mVY//JlWP/xZFfa8WRX+vJlWP/yZVj/+9TQ//7+/v/5wLv/8mVY//zd2v/83dr/8mVY//nAu//+/v7/+9TQ//JlWP/yZVj/8WRX+vFkV/ryZVj/8mVY//i0rv/+/v7//ezr//itpv/++Pj//vj4//itpv/97Ov//v7+//i0rf/yZVj/8mVY//FkV/rxZFfa8mVY//JlWP/0fnP//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/0fnP/8mVY//JlWP/xZFfa8WRXmPJlWP/yZVj/8mVY//rIw//+/v7//v7+//7+/v/+/v7//v7+//7+/v/6yMP/8mVY//JlWP/yZVj/8WRXmPFkWDLxZVj/8mVY//JlWP/yaFv/9pWM//epov/ze3D/83tw//epov/2lYz/8mhb//JlWP/yZVj/8WRX//FlVzIAAAAA8WRYmvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV5wAAAAAAAAAAPJlWAryZFjC8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV8LxZVcKAAAAAAAAAAAAAAAA8mVYCvFkWJzxZVf/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WVY//FlV5rxZVcKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WVYMvFkWJjxZVfa8WRX+vFkV/rxZFfa8WVYmPJlWDIAAAAAAAAAAAAAAAAAAAAA8A8AAMADAACAAQAAgAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAEAAIABAADAAwAA8A8AACgAAAAgAAAAQAAAAAEAIAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRXEPFkWFzxZFec8WRYzvFkWOzxZFf88WRY/PFkWOzxZFjM8WRXnPFkWFzxZFgQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WVYKPFkWJ7xZFj28mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FkWPbxZFie8WRYKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRYCvFkV47xZFj68mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZFf68WRYjvFlVwoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPFkVybxZFfU8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WR
X1PFkVyYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxZFgy8WVY7PJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WRX7PFlWDIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRXJvFlV+zyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WRY7PFlVyYAAAAAAAAAAAAAAAAAAAAAAAAAAPFkVwrxZFfU8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WRX1PFlWAoAAAAAAAAAAAAAAAAAAAAA8WRYjvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WVYjgAAAAAAAAAAAAAAAPFkWCjxZFf68mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JnWv/yb2P/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yb2P/8mda//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVj68WVYKAAAAAAAAAAA8WRYnvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//NzZ//4trD//fHw//zh3//yZ1r/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mda//zi3//98e//+LWv//NyZv/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVieAAAAAPFkVxDxZFj28mVY//JlWP/yZVj/8mVY//JnW//4rqf//vj3/////////////OPh//SAdf/1jIL/96eg//i1r//4ta//96eg//WMg//0gHb//OTi/////////////vf3//itpv/yZ1r/8mVY//JlWP/yZVj/8mVY//FlV/bxZVgQ8WRXXPJlWP/yZVj/8mVY//JlWP/yZVj/+LSu//////////////////749//6xsH//vf3//////////////////////////////////739//6xsH//vj3//////////////////i0rf/yZVj/8mVY//JlWP/yZVj/8mVY//FlV1zxZFec8mVY//JlWP/yZVj/8mVY//JlWP/5wLv/////////////////////////////////////////////////////////////////////////////////////////////////+cC7//JlWP/yZVj/8mVY//JlWP/yZVj/8WRXnPFkV8zyZVj/8mVY//JlWP/yZVj/8mVY//m/uf///////////////////////e7t//enoP/5vLb//v7+/////////////v7+//m8t//3p6D//e7t///////////////////////5v7n/8mVY//JlWP/yZVj/8mVY//JlWP/xZFfO8WRX7PJlWP/yZVj/8mVY//JlWP/yZVj/+LSt///////////////////////1jYT/8mVY//JlWP/6x8L////////////6x8L/8mVY//JlWP/1jYP///////////////////////izrf/yZVj/8mVY//JlWP/yZVj/8mVY//FkV+zxZFj88mVY//JlWP/yZVj/8mVY//JlWP/2n5b///////////////////////N2a//yZVj/8mVY//iwqv/////////
///iwqv/yZVj/8mVY//N2a///////////////////////9p6W//JlWP/yZVj/8mVY//JlWP/yZVj/8WRY/PFkV/zyZVj/8mVY//JlWP/yZVj/8mVY//R/dP//////////////////////+Law//JlWP/yb2L//Obk/////////////Obk//JvY//yZVj/+Law///////////////////////0f3T/8mVY//JlWP/yZVj/8mVY//JlWP/xZFf88WRX7PJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//3t7P///////////////////////evp//729f///////////////////////vX1//3r6f///////////////////////e3r//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FkV+zxZFfO8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/+Lex///////////////////////////////////////////////////////////////////////////////////////4t7H/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WVXzPFkV5zyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/zd2z//vv7/////////////////////////////////////////////////////////////////////////////vv7//N3bP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZFec8WRXXPJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/5v7r////////////////////////////////////////////////////////////////////////////5v7r/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FkV1zxZFcQ8WRY9vJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JwZP/98vD//////////////////////////////////////////////////////////////////fHw//JwZP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZFf28WRXEAAAAADxZFie8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//NyZ//4rKX//N7b//79/f/6zsn/9Y2D//aXjv/2l47/9Y2D//rOyv/+/f3//N7b//ispf/zcmf/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV54AAAAAAAAAAPFlWCjxZVj68mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mxg//JuYv/yZVj/8mVY//JlWP/yZVj/8m5i//JtYP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZFf68WVXKAAAAAAAAAAAAAAAAPFkWI7yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV44AAAAAAAAAAAAAAAAAAAAA8WRYCvFkWNTyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVfU8WVXCgAAAAAAAAAAAAAAAAAAAAAAAAAA8mVYJvFlWOzyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8WVX7PFlVyYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8mRYMvFkWOzyZVj/8mV
Y//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//FlV+zyZVgyAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8mVYJvJkWNTyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVfU8WVXJgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WVXCvFkWI7xZVf68mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/xZVj68mVYjvJlWAoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPJlWCjxZVie8WRY9vJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj28mVYnvJlWCgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxZVgQ8WRXXPFlV5zxZFjM8WRX7PFlWPzxZVf88WRX7PFkV87xZFec8WVXXPFlVxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/8AD//8AAP/8AAA/+AAAH/AAAA/gAAAHwAAAA8AAAAOAAAABgAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAGAAAABwAAAA8AAAAPgAAAH8AAAD/gAAB/8AAA//wAA///AA/8oAAAAMAAAAGAAAAABACAAAAAAAAAkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAADxZFce8WRXXvFkWJTxZFi+8WRY3vFkV/LxZFf/8WRY//FkWPLxZFje8WRYvvFkWJTxZFde8WRXHu9vUAIAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxZFgW8WRXdPFkWM7xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWM7xZFh08WRYFgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAAC8WVYHPFkV5TxZFj28mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8WRX9vFkWJbxZFgcAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAvFkWAjxZFd+8WRY9PFkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZFf28WR
YfvFkVwgAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRXLPFkWNDxZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkV9DxZFcsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAALxZFdU8WRX9PJlV//yZVf/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//xZFf08WRXVAAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAgAAAAAAAAACAAAAAvFkWG7xZVf/8mVX//FlWP/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8WVX//FlWG4AAAAAAAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8WRYbvFkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FlWP/xZVhuAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAACAAAAAgAAAALxZFdU8WVX//JlV//yZVf/8mVX//JlV//yZVf/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//xZVf/8WVXVAAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAvFkVyzxZFf08mVX//FlWP/yZVj/8mVX//FlWP/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8WVX9PFkVywAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAA8WRXCPFkV9DxZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FlWNDxZVgIAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAAC8WRYfvJlWP/yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVX//JlV//yZVf/8mV
X//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//xZVh+AAAAAgAAAAIAAAACAAAAAAAAAALxZFgc8WRX9vJkWP/xZVj/8mVX//FlWP/yZVj/8mVX//FlWP/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZVj08WVYHAAAAAAAAAAAAAAAAAAAAADxZFiW8WRY//JkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkV//yZ1r/9ZOK//rJxP/1i4H/8WRY//FkV//xZFj/8WRY//FkV//xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/9YuC//rIw//1kon/8mda//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WVYlAAAAAAAAAAAAAAAAvFkVxbxZFf28WVX//JlWP/yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mtf//eqo//97ez////////////96uj/8mpd//FlV//yZVj/8mVY//FlV//yZVj/8mVX//JlV//yZVf/8mVX//JlV//yal3//ero/////////////ezq//epof/yal7/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8WVX9vFlWBYAAAACAAAAAPFkWHTxZVj/8WVX//JkWP/xZVj/8mVX//FlWP/yZVj/8mVX//JlWP/2npb//e/u//7+/v////////////7+/v/+/Pz/9peO//FkV//yZln/83ds//WJf//1kon/9ZKJ//WJf//zd2z/8mZZ//FkV//2mI///v38//7+/v////////////7+/v/97u3/9pyU//JlWP/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FlV3QAAAAA72BQAvFkV87xZFj/8WRY//JkWP/xZFj/8WRY//FkWP/xZFj/83Fm//vV0f/+/v7//v7+//7+/v/+/v7//v7+//rKxf/0hHr/96Kb//vRzf/+9PP//v7+//7+/v/+/v7//v7+//7+/v/+/v7//vTz//vRzf/3opv/9IV7//rKxv/+/v7//v7+//7+/v/+/v7//v7+//vU0P/zcWX/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FlWM4AAAAA8WRXHvFkWP/yZVf/8WVX//JlWP/yZVf/8mVX//JlV//yZVf/9piP//////////////////////////////////3q6f/++Pj//////////////////////////////////////////////////////////////////vj4//3r6f/////////////////////////////////2mI//8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//FkV//xZFce8WRXXvJkWP/xZVj/8WVX//JkWP/xZVj/8mVX//FlWP/yZVj/9qCY//7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v/2oJj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZFde8WRXlPJkWP/xZFj/8WRY//JkWP/xZFj/8WRY//FkWP/xZFj/96Ka//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7
+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/3opr/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFeU8WRXvvJlV//yZVf/8WVX//JlWP/yZVf/8mVX//JlV//yZVf/9p2V///////////////////////////////////////72tf/9IJ4//NzZ//4rKX//v39///////////////////////+/f3/+Kyl//NzaP/0gnf/+9rX///////////////////////////////////////2nZX/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//xZFe+8WRX3vJkWP/xZVj/8WVX//JkWP/xZVj/8mVX//FlWP/yZVj/9ZKJ//7+/v////////////7+/v////////////7z8v/yb2L/8mVY//FkV//yZVj/+bq0//7+/v////////////7+/v/5urT/8mVY//FkV//yZVj/8m5i//7z8v////////////7+/v////////////7+/v/1kon/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZFfe8WRX8vJkWP/xZFj/8WRY//JkWP/xZFj/8WRY//FkWP/xZFj/9IF2//7+/v/+/v7//v7+//7+/v/+/v7//v7+//rLx//xZFj/8WRY//FkV//xZFj/9Yl///7+/v/+/v7//v7+//7+/v/0iH7/8WRY//FkWP/xZFj/8WRY//rMyP/+/v7//v7+//7+/v/+/v7//v7+//7+/v/0gHb/8WRY//FkWP/xZFj/8WRY//FkWP/xZFj/8WRY//FkWP/xZFjy8mVY//JlV//yZVf/8WVX//JlWP/yZVf/8mVX//JlV//yZVf/8mte//78/P////////////////////////////rPy//yZVj/8mVY//FlV//yZVj/9YyC///////////////////////1jIP/8mVX//JlV//yZVf/8mVX//rPy/////////////////////////////78+//ya17/8mVX//JlV//yZVf/8mVX//JlV//yZVf/8mVX//JlV//xZVj/8mRX//JlWP/xZVj/8WVX//JkWP/xZVf/8mVY//FlWP/yZVj/8mVY//zi4P////////////7+/v////////////749//zd2z/8mVY//FlV//yZVj/+sfC//7+/v////////////7+/v/6yMP/8mVY//FkV//yZVj/83dr//749/////////////7+/v////////////zi4P/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVf/8WRX8vFlV//xZFf/8WRX//JkV//xZFf/8WRX//FkV//xZVf/8WRX//m7tf/+/v7//v7+//7+/v/+/v7//v7+//7+/v/96ej/9puS//WMg//6xL///v7+//7+/v/+/v7//v7+//7+/v/+/v7/+sS///WMg//2m5P//enn//7+/v/+/v7//v7+//7+/v/+/v7//v7+//m7tf/xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZFfy8WRX3vJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//WMg/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////WMgv/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/xZVfe8WRXvvJlWP/xZVj/8WVX//JkWP/xZVf/8mVY//FlWP/yZVj/8mV
Y//JnWv/97uz///////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v///////e3s//JnWv/yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZVe+8WRXlPFlV//xZFf/8WRX//JkV//xZFf/8WRX//FkV//xZVf/8WRX//FkV//4sav//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7/+LGq//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZFeU8WRXXvJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/zcWX//vj3///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////++Pf/83Fl//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/xZFde8WRXHvFkWP/xZVj/8WVX//JkWP/xZVf/8mVY//FlWP/yZVj/8mVY//FlWP/yZVj/+LWv//7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v////////////7+/v/4ta//8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZFceAAAAAPFlV87xZFf/8WRX//JkV//xZFf/8WRX//FkV//xZVf/8WRX//FkV//xZVf/8m1h//3v7f/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//3u7f/ybWH/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV87uYE8CAAAAAvFkV3TyZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//aXjv/+/v7//////////////////////////////////////////////////////////////////////////////////////////////////v7+//aWjf/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//FkV3QAAAACAAAAAPFkWBbxZFj28WVX//JkWP/xZVf/8mVY//FlWP/yZVj/8mVY//FlWP/yZVj/8mVY//FlV//0hHn/+cG8//708/////////////7+/v/6y8f/9p2V//erpP/4sqv/+LGr//erpP/2nZX/+szI//7+/v////////////708//5wrz/9IR5//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8WVX9vFlVxYAAAACAAAAAAAAAADyZFeU8WRX//JkV//xZFf/8WRX//FkV//xZVf/8WRX//FkV//xZVf/8WRX//FkV//xZFf/8WRX//JoW//1i4L/+LKs//vSzv/zeW3/8WRX//FkV//xZFf/8WVX//FkV//xZVf/83lu//vSz//4s63/9YyC//JoW//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVXlgAAAAAAAAAAAAA
AAgAAAALxZVcc8WVY9PJlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JlWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/xZFf28WVXHAAAAAAAAAACAAAAAAAAAAIAAAAA8WRYfvJkWP/xZVj/8mVY//FlWP/yZVj/8mVY//FlWP/yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//xZVd+AAAAAgAAAAAAAAACAAAAAAAAAAIAAAAA8WRYCPFkV9DxZVf/8WVX//FlV//xZVf/8WVX//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV9DxZVcIAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAACAAAAAvFkWCzxZVf08mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8WVX9PFlVywAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAgAAAADyZVhU8WVX//FlWP/yZVj/8mVY//FlWP/yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//yZVj/8mVY//JkV//xZVf/8WVXVAAAAAAAAAACAAAAAgAAAAAAAAACAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAA8mRYbvJkV//xZVf/8WVX//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVY//FlV//xZVf/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//FlV//xZVj/8WVX//JlV//yZVduAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAvJkWG7yZVf/8mRY//JkWP/yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkV//yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8WVY//JlV24AAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAgAAAAAAAAACAAAAAgAAAADyZVhU8WVX9PFlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//xZVj08WV
XVAAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8mRYLPFkWNDyZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mRY//JkWP/yZFj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8mVY//FlWNDxZVcsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAIAAAAAAAAAAgAAAAIAAAACAAAAAgAAAAAAAAACAAAAAvFkWAjxZVh+8WVX9vFlV//xZVf/8WVX//FlV//xZVf/8WVX//FlV//xZVf/8WVX//FlV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf/8WVX//FkV//xZVf08mVYfvJlWAgAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAIAAAAAAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAAC8mVYHPJlWJbxZVj28mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FlV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVY//FkV//yZVj/8mVX9vJlWJTyZVgcAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAAAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAALxZVgW8WRYdPFlWM7xZFj/8mRY//JlWP/yZFj/8mRY//JlWP/yZFj/8mVY//JkWP/yZVj/8mVY//JkWP/yZVj/8WRY//JlWM7xZVd08WVYFgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAgAAAAIAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAO9wUALxZFce8WRXXvFlV5TxZFi+8WVX3vFkV/LxZFj/8WVX//FkV/LxZFfe8WRXvvFkV5TxZFde8WRXHgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqSSAALbbAAD//AAAP/8AAAAAAAAAAAAAqQAAAALbAAD/gAAAAf8AAAAAAAAAAAAAqAAAAABbAAD8AAAAAD8AAAAAAAAAAAAAoAAAAAALAADgAAAAAAcAAAAAAAAAAAAAgAAAAAADAADAAAAAAAMAAAAAAAAAAAAAgAAAAAABAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAADAAAAAAAMAAAAAAAAAAgAAoAAAAAACAACgAAAAAAcAAAAAAAAAAAAAqAAAAAASAAC8AAAAAD8AAAAAAAAAAAAAqQAAAACSAAD/gAAAAf8AACEAAAAAAAAAqSAAAASSAAAAAAAAAAAAAP//AAH//wAAiVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAkvklEQVR42u3dfXRU9bkv8O+zZyYJIWQmEjRFLDFFEURJEU+QRvAlq4XaanwBxtbjpTKxtMfr8t7aLj2nPSf16Dke+97b20NJtByX1glSRWqP6KJVvCk
vwlGoKGApJyloU4FOJkAyyczs5/4xiYLkZV723r/98nzWcglkMvv5zezfs5/fb+/92wThaDd/ORYKpLVq1jCFGFPAVMnEVcSoBBACIcSgUgLKAA4BAJjKQAic+W4cYyBFoBMAEgD3gNHNwFEQuompi4m7AHQR47Cu6YfXrq44qvozEPkj1QGIsYVXxkqQ0i5iwixiXARgFhOqiVEDoglKg2OOMeFPxDjAhA4Ae6DzHl3jfU+3VJxQ/dmJ0UkCsJklTbEyH/nmMfNcAmoZmENM1cMfsW2MkWTid4ixjwnbCLQzjfROSQr2IglAsSVNsSoN2lUA6gFcRUwXOq6zZ4uRBPhtJrQDaNehv/J0S0WX6rC8TBKAxZY0xcp88F0F5gYmLCLQdNUxqcTg/cTYCKJNaaRfkQrBWpIALLA0EpuiQbuFgQYiXAPQONUx2VQfmF8FaD00bIyuLu9QHZDbSQIwSfjOnmpmvoVASwFcrjoeh9oK8AYQRSUZmEMSgIHCkVgIpN0G0O2QTm+0rQz+Bet6dO2jcurRKJIACtR4T8xfctK3COAImD7r2gk8++gD+CWAViXGpzet/2FFSnVATiYJIE9L74xP0XREACwH0VTV8XgS4xCIf0rkW/PU6jI5m5AHSQA5CjfF6xm0khhL5WhvG30Av8TAg20twZ2qg3ESSQBZGCzzG8F8F4gWqo5HjGorgEeiLeXrVQfiBJIARjHY8W8D4+9BuEB1PCInbwD4MXzpaHRVRUJ1MHYlCWAY0vFdZS+ARyQRDE8SwCmk47vaXgAPRFvKo6oDsRNJAIOWRXoaCPghCBerjkWY6g1kEsF61YHYgecTwNIvxy/S0lglk3tewy8CuC/aEtylOhKVPJsAMlft+R4G4w45nedZfQAeJ9KavXodgScTQDjSEwHhEQAVqmMRtvBnZIYFq1QHYjVPJYDBcn8NiOpUxyLsh8G/I+AuLw0LPJEAwitjJZz2fZMY35ByX4yhD5nThg974bSh6xNAuCk+D0yPgTBDdSzCUd4A+A63VwOuTQCN98T8JSd8zYAc9UXeXF8NuDIBLLuzZxrp/ISM9YUhmLdD08LR1RM6VIdiNE11AEa7NdKzHIzXpfMLwxDVgfntpU3x5apDMbxpqgMwyheaYmVpaKsI9EXVsQj3YuYnya9H3DIkcEUCyJzeo2dkok9YgcH7NaLPPbW6/IDqWArl+CFAOBK/hXR6TTq/sAqBpjPj9+GmeKPqWApvi0MN3rnXDOAfVMciPO2haEv5N1UHkS9HJoDMeN8XJeA61bEIAfCL8OmNTpwXcFwCCEeOV2fWisclqmMRYggz7yZNa3TaqUJHJYBlTfG5AD1DwHmqYxFiGId08PVrHXT1oGMmAcNN8UUA/VY6v7Cx8zTQlsy+6gyOSADhSDw8eOSfoDoWIcYwDqBnljbFb1EdSDZsnwDCTT33gugpAPJATeEU4zTQ08ua4veqDmQstk4A4UhPM4DvqI5DiHwQ6Dvhpp5m1XGMHqNNhSM9zSD8k+o4hDDAt6Mt5c2qgxiOLROAdH7hQrZMArZLANL5hYvZLgnYKgFI5xceYKskYJsEEG7quRcy4Sc8gMFfb2sJfld1HIBNEsCtkfgtTPS06jiEsIrOfOva1qDyx5QpTwCDV/g9AznPL7ylD+Cboi3BjSqDUJoAwk3xWoC2QDq/8CAGHwfjmrbW4E5VMShLAOHI8WomflWu7RcedwhEC1TdRagkAYRXxkqQ8r0mt/QKATDwpo70/KdbKk5YvW01lwKntfXS+YXIIOASDVq0cXnMb/W2LU8Ay5p6HgToM1ZvVwg7I9B1xQGt2frtWiiziCI9a3UjhXAKBi9pawmus2p7liWAZXf2TCPG7yEz/kKMiMHHCfibaEtwnxXbs2QIEF4ZKyEdGyCdX4hREWgCA+uXNMXKrNieJQmA01qrrNsvRHYINF1jbZUV2zI9Adwa6Vkuj+sSIjdE9EUrnkVo6hxAOHK8GsRvQ0p
/IXLG4OMa0RwzH0FmWgUQXhkrAfQopPMLkRcCTWCdnzDz+gDzhgAp7T55RLcQBSKqKw5opj16zJQhQDjSXQvS5CYfIQzAQJLAC6ItwW1Gv7fhCSC8MlaCtG8bgNmWfDpCeACD95NPrzX6+YPGDwFS2n2Qzi+EoQg0nVPGDwUMrQCk9BfCPINDgUuNvErQ2AqA6CeQzi+EKQgIgLHGyPc0LAGEIz0rAfqU5Z+KEF5CVLesKR4x7O2MeJNwU08VmHeB6Bx1n4wQnhEDp2uirRXdhb6RMRUAo1k6vxCWqQBpDxvxRgVXADLxJ4T1jJoQLLwCIHoY0vmFsNTghGDBdwwWlADCTT2NsryXEIoQLVwW6W4o5C0KrQCaVX8GQngaad8v5GahvBNAuCkehlzxJ4RSBFxSHNBuK+D3cze4rv/rssqPEOox+I/9Sf2i9WsqUrn+bn6lQ8oXls5vvtJxwHnn+jB1ioa/HNGx+6206pCyMrfWj3MmaTj8Xhp/7NRx4gSrDsnVCPSJwSpgTe6/myM5+psj4Adqqn2Ydr4PNdUaaqb6UHX2hyO07riOr/3jSfT2qY50dKXjgO89MB6h4IexHzmq42BnGgc7dfzhYBoHO9Po71cdqbvkWwXkXgHI0d8QAT8wfZoPM6b7MPNCH2qqfSgKjJyPQ0ENi68twi+fH1Ad+qhu/nzxaZ0fACZVaphUqaHusszf02nGwU4db+9PYe87aew7IAmhUPlWAblXAE09uyCTf3mZOkXDrBl+XDIj0/FH6/DDOdnLuPv+E7atAoLlhB//6/ic2zWQZBw4mMbut9LYvSeFzsO66qY4Uj5VQE4VQOa8v3T+bGla5ig/t9aPy2v9mFRZ2FnX8aVk6yrg+kVFOXd+ACgKEGZO92PmdD9uvakYR47q2LErhTfeTOHt/Wnokg+yQqBPFPu1RgBZP1ko1yHAfaobaXdDnf6Ky/2omxNA+QRjF11a3FCEF34zYLsqIFhOaFgYMOS9JlVq+GxDET7bUITjJxg7dqWwdUcSe/Y6YxJUJQLuQg4JIOu9MxzpngfStqpuoF19rErDNfUB1M0p/Eg/lnUb+m1XBYRvLMYNi4tM3caxmI72bUls3SHDhNEw+Mq2lmB7Nq/NoQIgOfp/ROk4oO6yABoWBFBT7bNsu4sbirBh4wCSHxnpFRcDkyZqmFBGmFBGCJUTSkoIpeOAkhJCSTGhpBjQfMPn/WSSkUwCiX5GIsFI9APHTzB6exndPZn/4nH9jOoj4IdhR//RTKzQcMPiYtywuBh/OJjG5i1JtG9PygTimVYCyCoBZFUBhJt6qgAchNz0AwA4Z5KG6xcFUHdZAONLLX3A8gc2b0niZC9jcpWGiRWESRM1lJRYE8vJXkZ3nPGXIzqOHNNROo5w5TzzE8BwEgnGb9uTeHVLUqqCQQwkAappa5lweKzXZlkB8HKAPN/5L73YhxsWFWHmdNOe05C1hfPVdDggMxk5vpRw7scsebTkqEpK6IP5gl17UvjNq0ns3JXzBXGukrlTUI8gi3t1xjxkNC6P+Uv8WgeIzlXdMJW+ekeJsqOcyM2mzUk8+qShq2c7DoM7+5P6tLFOCY6ZwksCvgavd/4r5vql8ztIw8IAPnmJdXMydkSgqcV+bdFYrxu7hmNeqboxKgXLCbcvK1YdhsjRittKUOrxQSsRxlw8dNQEsHRFTyWIPq26ISrdetOZl7YK+5tYoeGLS7yduBn02XAkFhrtNaPu2UQchodn/mdc6FM62SYKc019EaZP8+5QgICATqOvFTBGAqC8FxpwuoAf+MryEtVhiALd+bclCKg/aaMMMZaP9vMRE0C4KV4NwLOP977xuiLTr+gT5pv8MQ3XLzL3CkU7I6LLlqzorh7p56Pt4WHVwasyZbKG6z7t3Z3Gba5fXIRzJqm5YMsONKJbRvzZyL9GjaoDV+XWm4rzuqtN2FNRgPClWz09nBvxYD5sAvBy+T+31o85l3p40OhSs2f5PXt
tABFdtjQSnzLcz4avAJjGvIDAjQJ+4Pal3j515Ga3LyuB5tFpHQKGHQaM8HFwo+qAVWhYGJCJPxerOlvD57w7tzPsA0TO2NuXNMXKQLRAdbRWC5YTbv68HP3d7vpFRZ68QpCIrlnSFCv76L+fkQB8rF0FD178c/2iImW39grrjC/1bKIfp2X69mmGq3cLetaYExm5nJWwv89cHfDqacEz+vYZCYDhvQlAOe3nLT4f4cbrvFcFEOGMvn1aAlhyR7yKCNNVB2qlyVUarrhcTvt5TX2d34NVAE0PN8WrTv2X0xKARnSV6hCtlu9S1sLZvFoFALjq1L+cPgQgrlcdnZXk6O9tXqwCmHFaHz8tARDgqQSw+Fo5+nuZJ6sAGqECGDz/P1N1fFYJlhMWzJejv9d5rwqgC0+9HuCDBOBjbS4Az5wLk7G/ADJVwNX1ntntQUBAY23e0N8/SAAMmqs6OKuUjgOuudI7X7oYXcNCb10dSMAHfV378B95Xn5v5zwL5wdQUixHf5ExvpRwdb137hFgoHboz6dUALhIdWBWWXytd75skZ2GhQHP3ClIhDlDf9aAzAQgEV2oOjArzDXgMd3CfarO1jD7Ym+sF8Cg6vDKWAkwmAA0XbsIHpkAvHaBJ5op8vCZq71RGRIQ4LR2ETA0BCCapTooK1ROJNTOklN/YnizZvhQOdEbc0MEzAI+SADsiQTQsMAbGV7kx+cjLLzCGxXi0JyfBgDEqFYdkBXq58nRX4xu4XzPTAZ+WAEwME11NGa79GIfJlZ445sV+ZtUqWHWDE9MBlYDQxUA6OOqozHb/Mu9UdqJwnljX6EaANCWrohVglChOhwzBfxA3WVS/ovszK31u/5xYgRMuHlFLKQRtCmFv529za31y5V/ImvjSwmzPXC2yK9p1RoIrk8AV/yN+79MYSyPrBMxRQOoqvD3sa+AH7hkhie+TGGg2Rd7YRhAUzQiuDoBzJrhk/Jf5Gx8Kbn+bAADlRqDXZ0A6i7zwoyuMMPcT7p93+EqPxghuPgA6aQbPI4c1dFxKI2jxxgn+xgTxhNCQcK0832YeJazrmE4FtNxsCPTlkQ/o3Sc89ripH0nH8So9BNQqToQs5w/VUMoaO+d7WQv4+X2JF5uH8B7XTzi66ZO0bBwfgBXX2nftQyG2tK+LYnOw7qj2wIAEys01EzVcLBTL/zN7CnkBxBSHYVZZl9s71mczVuSeLwtgd6+sV/beVjH42v78dzGAdzy+SI0LLTXfQ1uasupZlzox8HOAdVhmCXkmzXn/ntB5MoqYMn1Rba89z+RYPxodR82bEwimcrtd/v7gTfeTKPzTzrmftIPn0/tEXQgyfhxSwLPvTDg+LYMx+8HXt2aY8McgglxDUT2Tb8FKC4GptXYbwyXSDAe/nEfdu5KF/Q+O3en8NAP+pBIcEHvU4iBJOPhH/XhtdcL6yB2aMtIptX4XHs6kEClGjPKCn8r+7mgxme7VX/TacYPf9aH/QcK6/xD3jmQxvd+2od02vqOk04zvv/TPux9x/ltGU1RgGx5IDFISCOCK/PbzAvt96U9+58D2P2WMR1myJ59aax9zvox6oaN7mnLWGZOt9++ZBQNcOeNQNOn2etL6ziUxoYXzNm5n39pAO/+2djOOJqu93U8+2t3tCUbdtuXjMJAmf1myAygaUBNtb2+tF/+KvdJsmzpOhB91roj55Pr+l3TlmxccL7PlYuEEBBwYbMyD/200/nlI0d17Nxl7kzy679Poet9889XHzmq4/Xfm9uWnbusaUu2SkoIk6tc2VXgylZdYLNJm/btSdO3oevA1h3mb2fLjiR0C/rmy+3mtyUXNVNd2VXcmQCmnmevZpl99B9i9pEZyJy3t8LuPfY69263IaVRNAD2SrUGqJlqny8rkWB0HLKmnD3YqWMgad5ptHSacbDDmgRw6D1z25KrqVPsdVAxigbghOogjHbeufb5srqO6JaUzEBmGPAXE8fO73Xppk3+DdeW97r
sMw9w3rn2OagYKGafnmKQyolkqwnAv8asPYp195i3vWNWtyVunwpgfCnhrJB99itjcEoD0K06DCOdN9leOS1t8UEs0W/ee1t19Fe1vbG470wAndCYuVd1GEY652y3fUm5cdN16z6bfZV2GloaJKGRy+YAqmyWAMaPs3Z75WXmlamhcmtL4AkmtiUflRPttW8VjLnHdUOAcybZ60uyuiIxcyc9q8LaDmm379J9QwB0uy4B2K0CmFihWXYkO2cSoXyCeduaWKEhaFEVcFbI3Lbk1357xVMwwlGNCUdVx2Ekq49S2bBqbbnp08yfALCqLbNsuJS72+aXGOjWCNSlOhCjBMvJdmsAAMAVFj1rzoqHWVi1yrIdH8xRFCBMcNXqGdSlAe6pAKyepMrW7It9OGeSubFVTiTUWvA4KyvaclYF2fYe/GC5e6oAAndprPNh1YEYJRi0ZwLw+Qg3Xlds6jZu/Ky57291W+xYyQH2HGLmi5m7NACuSQAhG2fn+jo/pp1vTnxTp2i45krrHmJhdlsWzLdf+T9k/Dj3JAAQHdZS0DtUx2EUu503PpXPR/jK8nEoNvjgGfAD96y09mIDM9vydytKbHv0B4CQTavMfLCuH9Z++WhFN4DjqoMxwvhS1RGMbvLHNPyvL48zbHUZTQO+9tVxSk59DrXFqCsPNS2TyOx+042dDzK5YHBs7aMVRzUAYOaDqgMyghO+nNmz/PjaVwvvOAE/cM+Xxyl9jr2Rbbm7aRzmXGrf0n+InW40K9CfgA8XBOlQHY0RAjYuHU8151I/vnVvKSon5hfv5CoN37q3FJd/Un2HmT3Lj+Zv5N+WyomEb91birrL1LclG6WlztjHxsQ4ACCzJDgR9gC4QXVMhXLSl3NBjQ/fe2A8XtiUxIaN/Vk9Uqt0HLD42iJcv7jIVuPkmupMW156OYkNGwdw/MTYt/HatS1jxu2gfWxUlDno+wGAmfaRC9pVYs2ZMMMUBQg3LC7C4oYAtu5IYc/eFP7YqePo0czCGwF/5rTTBTU+fPJSP+Zc4kdJiT2/qKIA4XOfLsKnrw7gtf9KYfdbKXQc0vGX909vS021D7Mv9mNurR/jHdiZ3HK3JYH3AIMJAMx74IIMYLfbR7NVFCAsnB/AwvnOfx59UYBQPy+A+nnOb8vw7VMdgVFoDzA4B0ABfR9csDagZu8JZCFsgYFkGvo+YDABRFdVJJgzs4JO5qIZWmFT7pgD4Heebqk4AZy2LDjvVB2WEMICjH1DfzwlAdAu1XEJIcxHhG1Df/4gAZBUAEJ4BH3Q1z9IAGnSt8HhE4GJfvssIy3cyen7WGYCMH1mAni6peIEM7+jOsBC6PZ6qrRwIafvY8T89tAEIHDmswFfUR1gIaxeg194j9P3MQbaT/376QmAqT2nd7MZMx+KIQTg/H2MaJQEoDO/ojrAQiQSzh6fCfvrd/4+9sqpfzktATz9WLCLGftVR5iv3j7HfznC5pw9Ccj7oy3B0xYBPuPqeQJvVB1mvhIJ1REItzvZ6+AEwDijbw93+8wm1XHmSyoAYbZsbtu2Kx6mb5+RANKkvwLAkc20Y3YeSDIOvevwc0cKHDmqYyBpv+8zm7UO7In79EzfPs0Zdzc/3VJxYlkk/lsiuk51yLmK99jryzkW0/F/H01g7ztpzK31o2FBQOkSXk7w9v4UNmwcwJt705g+zYe/W1GCiRX2uc/bsQmA8erTrRVnPAh42L2RMqWC4xJAd9w+J2l370lh9eMJ/LU7s8Ps3JXCzl0pTJ2iYeH8ABbMDzhyQQwznOxltG9P4oVNA/jLkQ872N530vjHf+3FyuUluGSmPRKnnfaxXDBh/XD/PuynqqexTvPjB6qDztV7XTo6DqVRfZ76hQEOdqaHPVp0Htbx+Np+PPVMP664PID5l/sxa4YPPp+3kkE6zdizN4327Uns2JVC/wjn1xP9jIOdOmZOZ+Wf0R8OpvFelzMTABGGndwf8RMNR+I7QXSZ6sBzFfA
Dt95UjMUNRapDQXdcx9rnBtC+LYlkauTXnVVBqJvjR92cAKZfoD55mSWdznTmLTuSeO31FP4aG7mc1jRg4fwAlt5QhFBQ7RAgnWZs2DiAdb8agO7M/r812lI+f7gfjJgAlkXi9xLRd1RHnq8ZF/rwleUlmFSpfvzYcSiNX/5qADt3pcZ8bbCcMPtiP+ou82PmhT7brgGYrZO9jL37U3j9zTRe/31qzHkaTQPq6wK48boiWzzq/d0/p7FqTQIH/tuZPT+D74+2BB8e7icjJ4AV3dWkaf+tOvRCFBcDX7q1xDZr7XUcSmPDxgHsfCM1akUwJODPrLg76yIfZk73oWaq/RPCQJKx/0Aa+/+Qxp59afzhYDqro6bdOn46zXjhN0msXd+f1Xdla8TnR1cHO4b90Wi/59RhwEfZqRoAMkOD518awMvtyZzOK2sacN5kDdOn+TD1PB+qz9MwuUpTlhQGkozD72bmXQ526ugc/H8uZfLQ8uANCwPKS/0h7jjqf2DE8h8YIwEsi8TvIqL/o7oFRhiaG7h2YcA269APJBlbd6SwafNA3jubpgGTJhImV2k452wNVWdrOCtEmFSpYUIZIVROeU+epdOM4yeAv8Z0HIvpOHKMceSYjve6Mst9HznGeY+JZ1zow9X1mUlQ1ZN7QwaSjPW/HsDzLw04/6g/iJn/Z1tr8Ccj/XyMCiAWAvneB2CPGtoAU6doWHFbCS6osddk25t7U/iXH5hz/VVxMVBeRiguJowvBTSN4PN9uIhqop+RTgPJJCOZAnp7GSd72bSr3h78+1J8otpen//uPSn8/KnEaachnY/7NI0+/ouflR8d6RVjpt5lkfg6IrpZdVOMVl/nxxdvKbZN2fl4WwIv/MbRCzJl7ebPFeGW6+3xFJeu93U8ua4/qwla5+Hnoi3BxtFeMebeT8Aa1c0wQ/v2FO6+/ySee2FA+SWn3XEdmzZ7o/MDwIaNAzgWUzu+PtnLWLu+H99oPunSzg8AtGqsV4yZABIpfSOYO1U3xQzJFBB9th93338Sm7eo64BPrnPBTHMOkilg7Xo1K2uk04wXXx7A3fefwLP/6Z6x/kcxcCiRTI95Y9+YA7F9u/5NnzXn/hCIrlLdKLP09w9eqvtGCsFyDed+zLphQcehNH7+C4cvM5OHQ+/puHSm37Lr/NNpxv/blsKPVvfhd9uzOw3rZAR+eN1joVfHfl0WlkZiUzTyHYSLJgNHM3WKhps/X2zJ47fve+AkOg+74nRTzqad78M/319q6jbSaUb79hSe/XW/yyb4RsN9RFTz1OryrrFemfX5l2WR+BNE9EXVTbOS2Ylg0+YBPPqk947+p2r62xJcc6XxxxVvdvwhY0/+Dcllz14FwFMJoPOwju//ex8mV2loWBAw9BqC7riOdb8aUN1E5Z5cl8DltX5MmGDM53qyl7Fp8wA2vZrE0WNe6/gZDDyY7Wtz+tTDkfgrIFqouoGqBMszj/FefG3hV639uKUPW3e4fCCapU/V+XHXinEFvUfX+zpeenkAm7fkdnWlC4165d9H5Vrb/gSAZxNAvCdzV9gLmwYwe1b+C3xs2ZGUzn+K321PYf7lKcy5NLfPMp1m7H4rjRc2DeDtd7K758D9+JFcXp1TBdC4POYv8Wv7QPQJ1c20i0kTCdcuKEL9vOxmtLvjOu77517brV6k2lkhwiPN47NaJKXrfR2btyTRvt27Zf7w+I1oS3BOLr+R88ArHIkvB9HPVTfVbjQNuGSGD1dcHkDdHP+IN+j8YFUfXntdjv7DmVvrx9e+OvxQ4GQvY+uOJDZvSbrlJh0T6F+KtoTW5PIbOScAqQLGFvADs2f5UTfHj7m1HyYDmfUf28rlH96+fbKXsXNXCtv/K4ndb0mJPzreD59eG11VkdPi+HlNvUoVkL2AH5g53YdZM/zuuLfcZAE/0HhdEfbuT8u4Pie5H/2BPBOAVAFC2El+R38gi3sBhrN+TUUKOZxrFEKYqjmfzg/kmQA
AIJHSnwD4LdUtF8Lb+I1oSzCa72/nnQDWr6lIMfM9qpsvhMc9UMgvF3z9pdevDhRCHX4x2hJcVMg7FHwvpq7zSgDeWc1CCFvgPgD3FfouBSeAtY+F9oH5MdUfhxAe83i0Jbir0DcxaDUG/T4AMbWfhxCe8WciajbijQxJANHWim4wf0PpRyKEZ/AD2Sz2kQ1DF2QPR+LbQFSn5kMRwv0Y/Lu2lmC9Ue9n6IJsus7LIROCQpiE+wi4y8h3NDQBrH0stI85t/uRhRDZokeMmPg7leFLspJffxDM+y37TITwBH4DvvTDhb/P6QxPANFVFQkQlkOGAkIYhPsA3JHv9f6jMWVR9mhLcBvA/2L65yKEJxhf+g8x7akMiaT+IJi3m/aZCOEFzNvNKP2HmPpc5mUr4tOg4XUCTTBzO0K4E/eBMDO6Othh1hZMfS5T26PBA8R8t5nbEMK1mL9qZucHTK4AhiyLxNcQ0f+wYltCuAGDn2xrCd5m9nYseTIj+fWVcmpQiCwx7yefHrFiU5ZUAACw9I7ui8hHr8l8gBCj4T4GLm1rCR6wYmuWPQd77WOhfQTcYdX2hHAm/oJVnR+wMAEAQLQluI6ZH7Jym0I4BYMfiraE1lu5TUsTAAD0p/RmBv/a6u0KYW/8YltL8JtWb9XyBLB+TUXKBz0sKwoLMYh5N3x6o4pNWzYJ+FHhSLyagXYiOldVDEKoxuB3iVBv9vn+kVheAQyJtgY7ADQy+LiqGIRQi/sI+Jyqzg8oTAAA0NYa3EngpYN3OwnhIdwH8E1m3eSTLaUJAACiLaGNYDk9KLyFmG+PtoQ2qo5DeQIAgGhrMMqsf111HEJYgZm//lRraJ3qOACbJAAAaGsNfRfM31YdhxDm4m+3tQa/qzqKIcrOAowkHIk3g+ifVMchhPH429GWYLPqKE5luwQASBIQbmS/zg/YNAEAkgSEm9iz8wM2TgCAJAHhBvbt/IDNEwAALIt030ukfUd1HELkipm/bqcJv+HYPgEAwK1N8VsYeBygcapjEWJs3AfmO6KtoajqSMbiiAQAAOGm7kUAPSNJQNgZg48TeKkdLvLJhmMSAACEm+K1zHhebiASdsTgd8FobGsN7lQdS7ZscyFQNqItwV0E1IN5t+pYhDgN81tEqHdS5wcclgCAwbsI/fo8gF9UHYsQAMDgX2ukz1N5V1++HDUE+KhlkfiDRPQPquMQ3sXMD/WX6c3rf1iRUh1LPhydAAAg3NTdCNAvZHJQWInBxykz02+Lm3ry5fgEAGQeQUaE50E0XXUswgOY9+saN65dHdqnOpRCOW4OYDhtjwYPwK/XMvOTqmMR7sbg/4Bfr3VD5wdcUgGcKhzpXg6in8qQQBhpsOS/O9oaWqM6FiO5LgEAmQVHAURBVKc6FuECzNuZcJuVD+ywiiuGAB81eKrwqswCI7LeoMgTcxLM306U6fVu7PyASyuAU4Uj3bUgWgPQbNWxCAfJPMx2ebQ1uE11KGZyZQVwqmhraBd8+jypBkRWmJPM/BD8eq3bOz/ggQrgVOFIdy0T/YRAn1Idi7Ah5u26xsvdMsOfDU8lgCHhSHwlgGYQnaM6FmEDjBigfyPaGmpVHYrVPJkAACDcFKsCa80g3C6nDD2KOQngMUC/L9pa0a06HBU8mwCGhCPdtQA9DKLPqI5FWIh5s67xSi+V+8PxfAIYMnhPQbOcLXA55jcZ/L/bWkObVIdiB5IAPiLc1B0GU7PcV+AyzH8E+MFEGT/h1Dv3zCAJYBjhlbESpCgM0H2SCBxOOv6oJAGM4oNEQHSPDA0cRjp+ViQBZCnc1N0Ipvvk/gKbY94Mwk8S4/X10vHHJgkgR+FI91yAvgnCp+X0oU0wJ5mwFoxVba3BdtXhOIkkgDwNXkewHMBdkFWK1WDuBLCGfWht+1nwsOpwnEgSQIEa74n5S05QA0ArpSqwAHOSgQ0ErEmU6RulzC+MJAADLV0RqyRNCxPjNpkrMBjzDgYeJ+hPePW
qPTNIAjBJuClWDdbCABolGeQp0+nXksbroqtDHarDcSNJABYIN8WqibVFDDSCsECGCSPhPmb8loBNOvR1a1srZFxvMkkAFvtCU6xMZ+0qAA0MLCKPX2jEzPsJ2AhgE/z6puiqioTqmLxEEoBiS5piVRprV4FQT4x6ADNBFFAdlykyE3jvgPAKGO066a883VLRpTosL5MEYDOZCoHmMmguAfMYuIiACx2XFDKdvYMIrzNjFxF2atC3/aKl4oTq0MSHJAE4wBeaYmVpnS4C0SwizAKjGsA0AB8HUYXS4JiPAzgIoAPAHgb2EXgP/LxPynn7kwTgcEvvjFVqOk0BaAoDVQRUgVDFjBABlQBCAJUDKGFwGRH8wDBJgzkJQubozNQN4ATAvQC6AXSDcBSMLgBHmXCYmA+zjzvafian5Jzs/wPXulN9hqvMYwAAAABJRU5ErkJggoAQ0j9AaGldTlNNdXRhYmxlRGF0YaNoakNWTlNEYXRhXxAYaW1hZ2Uvdm5kLm1pY3Jvc29mdC5pY29u0j9AbW5XTFBJbWFnZaJtQ9MWHxIrcHGAFIAW0zgSOTo7dYAAgASAFV8QJmh0dHBzOi8vZGlzY29yZC5jb20vYXNzZXRzL2Zhdmljb24uaWNv0j9AeHleTFBJY29uTWV0YWRhdGGiekNeTFBJY29uTWV0YWRhdGHUfB8SFn1+fytUc2l6ZYAagBiAG9M4Ejk6O4OAAIAEgBlfEDlodHRwczovL2Nkbi5kaXNjb3JkYXBwLmNvbS9hc3NldHMvb2dfaW1nX2Rpc2NvcmRfaG9tZS5wbmdWezAsIDB90j9Ah4hfEA9MUEltYWdlTWV0YWRhdGGiiUNfEA9MUEltYWdlTWV0YWRhdGHSixKMjlpOUy5vYmplY3RzoTWAE4Ad0j9AkJFXTlNBcnJheaKQQ9KLEpOOoS6AF4Ad0RKXgCDSP0CZml8QEUxQUHJvZHVjdE1ldGFkYXRhoptDXxARTFBQcm9kdWN0TWV0YWRhdGHSP0Cdnl5MUExpbmtNZXRhZGF0YaKfQ15MUExpbmtNZXRhZGF0YQAIABEAGgAkACkAMgA3AEkATABRAFMAeAB+AKsAtADAAMYAzgDTANoA4QDvAQMBCwEUASABLgE7AU0BVQFsAYUBiQGWAZwBngGgAaIBpAGmAagBqgGsAa4BsAGyAbQBtgG3AbgBugG7Ab0BvwHBAcMBygHSAd4B4AHiAeQCKwIwAjsCRAJKAk0CVgJdAl8CYQJjAqoC7wNIA+YD7gP2A/8EFQQqBEMEWARmBGsEdASKBJ4EtAS+BL8EyATJBMsEzQTPBNgE4QTqBO8E92TwZPJk92UFZQllEGUrZTBlOGU7ZUJlRGVGZU1lT2VRZVNlfGWBZZBlk2WiZatlsGWyZbRltmW9Zb9lwWXDZf9mBmYLZh1mIGYyZjdmQmZEZkZmSGZNZlVmWGZdZl9mYWZjZmZmaGZtZoFmhGaYZp1mrGavAAAAAAAAAgEAAAAAAAAAoAAAAAAAAAAAAAAAAAAAZr4=" + }, + "PreviewHeights": [537.0], + "CroppingQuadBottomRightX": 1.0, + "PreviewScales": [1.0], + "CroppingQuadBottomRightY": 0.0, + "MetadataData": { + "__bytes__": "eyJzbWFsbF9pY29ucyI6ZmFsc2V9" + }, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "PreviewScaleWhenDrawings": [1], + "PreviewAppearances": [0], + "TitleEncrypted": 
{ + "__bytes__": "R3JvdXAgQ2hhdCBUaGF04oCZcyBBbGwgRnVuICYgR2FtZXM=" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-09-12 17:25:54.863000+00:00" + } + }, + { + "recordName": "ATTACHMENT_RECORD_0007", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-12-12 01:20:02.083000+00:00", + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLnBhcGVyLmRvYy5wZGY=" + }, + "CreationDate": "2025-12-12 01:20:01.715000+00:00", + "CroppingQuadTopRightX": 1.0, + "PaperAssets": [ + "fileChecksum='SANITIZED_FILE_0008' referenceChecksum='SANITIZED_REF_0008' wrappingKey='SANITIZED_WRAP_0008==' downloadURL='https://example.com/icloud-assets/asset-0008/${f}' size=753650" + ], + "PaperDatabase": "fileChecksum='SANITIZED_FILE_0009' referenceChecksum='SANITIZED_REF_0009' wrappingKey='SANITIZED_WRAP_0009==' downloadURL='https://example.com/icloud-assets/asset-0009/${f}' size=3202", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": 
"CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "MinimumSupportedNotesVersion": 14, + "PreviewWidths": [ + 68.0, 148.0, 98.66666666666667, 22.666666666666668, 34.0, 296.0 + ], + "UTI": "com.apple.paper.doc.pdf", + "PreviewImages": [ + "fileChecksum='SANITIZED_FILE_0010' referenceChecksum='SANITIZED_REF_0010' wrappingKey='SANITIZED_WRAP_0010==' downloadURL='https://example.com/icloud-assets/asset-0010/${f}' size=6215", + "fileChecksum='SANITIZED_FILE_0011' referenceChecksum='SANITIZED_REF_0011' wrappingKey='SANITIZED_WRAP_0011==' downloadURL='https://example.com/icloud-assets/asset-0011/${f}' size=30937", + "fileChecksum='SANITIZED_FILE_0012' referenceChecksum='SANITIZED_REF_0012' wrappingKey='SANITIZED_WRAP_0012==' downloadURL='https://example.com/icloud-assets/asset-0012/${f}' size=30937", + "fileChecksum='SANITIZED_FILE_0013' referenceChecksum='SANITIZED_REF_0013' wrappingKey='SANITIZED_WRAP_0013==' downloadURL='https://example.com/icloud-assets/asset-0013/${f}' size=6215", + "fileChecksum='SANITIZED_FILE_0014' 
referenceChecksum='SANITIZED_REF_0014' wrappingKey='SANITIZED_WRAP_0014==' downloadURL='https://example.com/icloud-assets/asset-0014/${f}' size=6215", + "fileChecksum='SANITIZED_FILE_0015' referenceChecksum='SANITIZED_REF_0015' wrappingKey='SANITIZED_WRAP_0015==' downloadURL='https://example.com/icloud-assets/asset-0015/${f}' size=30937" + ], + "CroppingQuadTopLeftY": 1.0, + "FallbackPDF": "fileChecksum='SANITIZED_FILE_0016' referenceChecksum='SANITIZED_REF_0016' wrappingKey='SANITIZED_WRAP_0016==' downloadURL='https://example.com/icloud-assets/asset-0016/${f}' size=1085161", + "Height": 792.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "Width": 612.0, + "PreviewHeights": [88.0, 192.0, 128.0, 29.333333333333332, 44.0, 384.0], + "CroppingQuadBottomRightX": 1.0, + "PreviewScales": [1.0, 2.0, 3.0, 3.0, 2.0, 1.0], + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "PreviewScaleWhenDrawings": [1, 1, 1, 1, 1, 1], + "PreviewAppearances": [0, 0, 0, 0, 0, 0], + "TitleEncrypted": { + "__bytes__": "UkVBRCBNRSBGSVJTVA==" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-12-12 01:20:01.715000+00:00" + } + }, + { + "recordName": "ATTACHMENT_RECORD_0008", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-08-17 15:34:40.653000+00:00", + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm00YS1hdWRpbw==" + }, + "CreationDate": "2025-08-17 15:34:17.583000+00:00", + "CroppingQuadTopRightX": 1.0, + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": 
"CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "MinimumSupportedNotesVersion": 14, + "UTI": "com.apple.m4a-audio", + "MergeableDataEncrypted": { + "__bytes__": 
"ChIKBAgAEAkKBAgBEAEKBAgCEAgSABqPAWqMAQgMGioIABImIiQwMDAwMDAwMC0wMDAwLTAwMDAtMDAwMC0wMDAwMDAwMDAwMDAaBggBEgIwARoGCAISAjACGgYIAxICMAQaBggEEgIwBhoGCAUSAjAHGgcIERIDMKMDGgcIEhIDMKQDGgcIExIDMKYDGgcIFBIDMKcDGgcIFRIDMKgDGgcIFhIDMKoDGggKBgoECAEQARoMCgoKBAgAEAgSAjADGmlSZxIkVGVzdCBhdWRpbyBhdHRhY2htZW50IGluIEFwcGxlIG5vdGUuGhAKBAgAEAAQABoECAAQACgBGhEKBQgBEKYCECQaBAgBEAAoAhoWCggIABD/////DxAAGggIABD/////DyoCCCQaDAoKCgQIAhAIEgIwBRqfAVKcARJaVGVzdCBhdWRpbyBhdHRhY2htZW50IGluIEFwcGxlIE5vdGU7IHJldmVyc2UgZW5naW5lZXJpbmcgQXBwbGUgTm90ZXMgZm9yIE1hcmtkb3duIGFuZCBIVE1MGhAKBAgAEAAQABoECAAQACgBGhAKBAgCEC8QWhoECAIQACgCGhYKCAgAEP////8PEAAaCAgAEP////8PKgIIWhoICgYKBAgBEAEaHSobChkSAjAIGgYKBAgAEAkiCwoECAAQCRIDMKIDGkhqRggNGioIABImIiRENjA3OEQwOS0wMERDLTQwRDQtQjRCMC0xN0VBNjQ5RDA0N0QaBggGEgIwCRoGCAgSAjALGgYIChICMA0aDAoKCgQIABAJEgIwChoMagoIABoGCAcSAggCGgwKCgoECAAQCRICMAwaE2oRCAkaDQgJEgkZroJQn30o2kEa4hJ63xIKswsKkAMSkAHvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7zvv7waEAoECAAQABAAGgQIABAAKAEaEQoFCAEQygIQMBoECAEQACgCGhYKCAgAEP////8PEAAaCAgAEP////8PKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBKgIIASoCCAEqAggBEhQIABIQUnlVYEdMRLaFJTZya4UVTRIUCAESEO5GNZXLDkBdrDwWzIOWtj0SFAgCEhB9pZuqwslO5YKvNxwFJrSiEhQIAxIQh9kQ9JBYTc2P3zuDeOi/3BIUCAQSED4EV/VLQUBinNpCHFsyEr0SFAgFEhDruUXLISlKYYjW+LjCxDl7EhQIBhIQn3B43D2JTWGXrWj5dpp3vBIUCAcSEE9fKyYcuUkNnplOgOpYuFUSFAgIEhBKSy3RTs1GOpwIw8jYqT/hEhQICRIQJGEZAKqgR72Hq6cf9x1zfhIUCAoSEJLTxrbMJ0ADvcdMGjgSH74SFAgLEhAg5q0F05VHBYV0IP5Fg3zEEhQIDBIQ6e82jqeeTFqvVzGocStqOhIUCA0SEFsN2YD6Kkt9pK2ynZKhV/ASFAgOEhBdOQ0pp6BLX7WpiC4e87HNEhQIDxIQmmb/zcQ1Tk2Vw+wDEIPN+BIUCBASELLhGngkBU5GoVUWvhWEhx8SFAgREhAlkqhlZbNBw7xJDINbSndLEhQIEhIQfgarWVT1TcOeOE1M+w4RXRIUCBMSED+MFPrDjE5Gj2w
L0oJGY3gSFAgUEhAsWKPnNrBJeqIlsizbm8+iEhQIFRIQXc/AtSlMS6GXFVPBRY6e5hIUCBYSEK9NzR/sykedgKMlATmvXesSFAgXEhDVFXMyT5hGAJyiqxFq23BpEhQIGBIQt/EfoQRcTlO+oiTNNO86chIUCBkSENy52N9YBEzEgSTUoonvz7wSFAgaEhC+qBoiR5NCe7nvMSuPV0YPEhQIGxIQG35rNfXBTdOvH2Eb8MgSVhIUCBwSEBIu22C5JUFyrAw8L5uGuqESFAgdEhCWrKaZEPlMUZw5Ve6SXgoEEhQIHhIQOwmORDjCQpePYutkEAcJxhIUCB8SEPq4+7JexkQejTlU8kB3bBESFAggEhCm2k32Za1HiaTha/rTGLlvEhQIIRIQsyyo+dr+SCKj4CAKYKUohhIUCCISEPkf7RWnI0xKv3vcKJ0x9fISFAgjEhBff4hYnh9FH4YBLGjMMKrkEhQIJBIQiIkLqo1JQCyWlDNDt86FmxIUCCUSEDVxceyejEK5l1ZfEH7OVYISFAgmEhDTL7V8aGlLiIBfR3jcMAG+EhQIJxIQk9LMfPO7R+2X8xCVLjp1thIUCCgSEBpwr9NdJUFFotf0Dvs6m9gSFAgpEhAUejhay5RLvp25WLWtj8E6EhQIKhIQifJ1ldaeRd+vhkquG9pclhIUCCsSEKbi/z8T2EPPlQScW3rqhvASFAgsEhD6eEQc/1hB0JxbwQOrkZrGEhQILRIQW2yOyf+LRnmBayuweNifkxIUCC4SEAauJnRvV08IrwSNICMzMsUSFAgvEhDZMR1Y8AJJRIOeadr2eLPREqYHChAKAjAWEgIwDhoGCgQIABAJChAKAjAfEgIwFxoGCgQIABAJChAKAjAoEgIwIBoGCgQIABAJChAKAjAxEgIwKRoGCgQIABAJChAKAjA6EgIwMhoGCgQIABAJChAKAjBDEgIwOxoGCgQIABAJChAKAjBMEgIwRBoGCgQIABAJChAKAjBVEgIwTRoGCgQIABAJChAKAjBeEgIwVhoGCgQIABAJChAKAjBmEgIwXxoGCgQIABAJChAKAjBvEgIwZxoGCgQIABAJChAKAjB3EgIwcBoGCgQIABAJChAKAjB/EgIweBoGCgQIABAJChIKAzCIARIDMIABGgYKBAgAEAkKEgoDMJEBEgMwiQEaBgoECAAQCQoSCgMwmgESAzCSARoGCgQIABAJChIKAzCjARIDMJsBGgYKBAgAEAkKEgoDMKwBEgMwpAEaBgoECAAQCQoSCgMwtAESAzCtARoGCgQIABAJChIKAzC9ARIDMLUBGgYKBAgAEAkKEgoDMMYBEgMwvgEaBgoECAAQCQoSCgMwzgESAzDHARoGCgQIABAJChIKAzDWARIDMM8BGgYKBAgAEAkKEgoDMN8BEgMw1wEaBgoECAAQCQoSCgMw6AESAzDgARoGCgQIABAJChIKAzDwARIDMOkBGgYKBAgAEAkKEgoDMPgBEgMw8QEaBgoECAAQCQoSCgMwgAISAzD5ARoGCgQIABAJChIKAzCIAhIDMIECGgYKBAgAEAkKEgoDMJECEgMwiQIaBgoECAAQCQoSCgMwmgISAzCSAhoGCgQIABAJChIKAzCiAhIDMJsCGgYKBAgAEAkKEgoDMKoCEgMwowIaBgoECAAQCQoSCgMwsgISAzCrAhoGCgQIABAJChIKAzC6AhIDMLMCGgYKBAgAEAkKEgoDMMICEgMwuwIaBgoECAAQCQoSCgMwygISAzDDAhoGCgQIABAJChIKAzDSAhIDMMsCGgYKBAgAEAkKEgoDMNoCEgMw0wIaBgoECAAQCQoSCgMw4gISAzDbAhoGCgQIABAJChIKAzDqAhIDMOMCGgYKBAgAEAkKEgoDMPECEgMw6wIaBgoECAAQCQoSCgMw+QISAzDyAhoGCgQIABAJChIKAzCBAxIDMPoCGgYKBAgAEAkKEgoDMIkDEgMwggMaBgoECAAQCQo
SCgMwkQMSAzCKAxoGCgQIABAJChIKAzCZAxIDMJIDGgYKBAgAEAkKEgoDMKEDEgMwmgMaBgoECAAQCRpQak4IDhoqCAASJiIkMERBQ0Y3MkItOTk2NS00OEUzLTk5ODgtMzNERTY3QjJEODU1GgYICxICMA8aBggMEgIwEBoGCA0SAjASGgYIDhICMBQaCAoGCgQIABAJGgwKCgoECAAQCRICMBEaE2oRCAAaDQgJEgkZ0MzMzMzM7D8aDAoKCgQIABAJEgIwExoTahEIABoNCAkSCRkK16NwPQotQBoMCgoKBAgAEAkSAjAVGhBqDggBGgoIDxIGIgRIVE1MGgxqCggCGgYIEBICEAMaUGpOCA4aKggAEiYiJDlFRTI0MTA5LUM1NTEtNDIyRS04MTcyLTkyQTZBODI1QjUyQhoGCAsSAjAYGgYIDBICMBkaBggNEgIwGxoGCA4SAjAdGggKBgoECAAQCRoMCgoKBAgAEAkSAjAaGhNqEQgAGg0ICRIJGbgehetRuN4/GgwKCgoECAAQCRICMBwaE2oRCAAaDQgJEgkZSOF6FK5HAUAaDAoKCgQIABAJEgIwHhoNagsIARoHCA8SAyIBYRoMagoIAhoGCBASAhAEGlBqTggOGioIABImIiQxMUU3QTkxOS0wMUQ5LTRCNEMtQUJEMS05OEU1QUY5NDM5M0YaBggLEgIwIRoGCA4SAjAiGgYIDRICMCQaBggMEgIwJhoICgYKBAgAEAkaDAoKCgQIABAJEgIwIxoXahUIARoRCA8SDSILZW5naW5lZXJpbmcaDAoKCgQIABAJEgIwJRoTahEIABoNCAkSCRm4HoXrUbgiQBoMCgoKBAgAEAkSAjAnGhNqEQgAGg0ICRIJGUAzMzMzM+M/GgxqCggCGgYIEBICEAUaUGpOCA4aKggAEiYiJERDRTdFMkVBLTYyQjYtNDZDNC1BNUUxLTlGQjlBNkRDMkE0NxoGCAsSAjAqGgYIDhICMCsaBggNEgIwLRoGCAwSAjAvGggKBgoECAAQCRoMCgoKBAgAEAkSAjAsGhJqEAgBGgwIDxIIIgZIZWxsbywaDAoKCgQIABAJEgIwLhoTahEIABoNCAkSCRkAAAAAAAAAABoMCgoKBAgAEAkSAjAwGhNqEQgAGg0ICRIJGc3MzMzMzPw/GgxqCggCGgYIEBICEAYaUGpOCA4aKggAEiYiJEM2MDdFNUM3LTJBRjgtNEJBQS04NjE1LUQ1ODg4RjY2OEMyNxoGCAsSAjAzGgYIDBICMDQaBggNEgIwNhoGCA4SAjA4GggKBgoECAAQCRoMCgoKBAgAEAkSAjA1GhNqEQgAGg0ICRIJGQDXo3A9Cuc/GgwKCgoECAAQCRICMDcaE2oRCAAaDQgJEgkZw/UoXI/CKkAaDAoKCgQIABAJEgIwORoUahIIARoOCA8SCiIITWFya2Rvd24aDGoKCAIaBggQEgIQBxpQak4IDhoqCAASJiIkOEM1OEQ3NkItMTM0Qi00NkJELTlBMDEtMzA1N0Q1RDQ3NzJBGgYICxICMDwaBggOEgIwPRoGCA0SAjA/GgYIDBICMEEaCAoGCgQIABAJGgwKCgoECAAQCRICMD4aEmoQCAEaDAgPEggiBm91dHB1dBoMCgoKBAgAEAkSAjBAGhNqEQgAGg0ICRIJGY/C9ShcjyZAGgwKCgoECAAQCRICMEIaE2oRCAAaDQgJEgkZENejcD0K9z8aDGoKCAIaBggQEgIQCBpQak4IDhoqCAASJiIkODNFNUI2NTktODg4OS00MzlBLThEQzYtNUI2MjI2QjRBOUQ5GgYICxICMEUaBggMEgIwRhoGCA0SAjBIGgYIDhICMEoaCAoGCgQIABAJGgwKCgoECAAQCRICMEcaE2oRCAAaDQgJEgkZ4NajcD0Kxz8aDAoKCgQIABAJEgIwSRoTahEIABoNCAkSCRlxPQrXo3AfQBoMCgoKBAgAEAkSAjBLGg5qDAg
BGggIDxIEIgJhbRoMagoIAhoGCBASAhAJGlBqTggOGioIABImIiQ5RTBGMUQ1My01OUEzLTRCQzMtQTQ3OC03QjVBMjE5NkNCNzIaBggLEgIwThoGCAwSAjBPGgYIDRICMFEaBggOEgIwUxoICgYKBAgAEAkaDAoKCgQIABAJEgIwUBoTahEIABoNCAkSCRkQ16NwPQrHPxoMCgoKBAgAEAkSAjBSGhNqEQgAGg0ICRIJGa5H4XoUrv8/GgwKCgoECAAQCRICMFQaDmoMCAEaCAgPEgQiAmlzGgxqCggCGgYIEBICEAoaUGpOCA4aKggAEiYiJDZFNTg0QzA2LUQ3QjktNDVDQS1CN0FDLTY3MkU5QzI2M0M0MxoGCAsSAjBXGgYIDhICMFgaBggNEgIwWhoGCAwSAjBcGggKBgoECAAQCRoMCgoKBAgAEAkSAjBZGhJqEAgBGgwIDxIIIgZmaWxlcy4aDAoKCgQIABAJEgIwWxoTahEIABoNCAkSCRnXo3A9CtcuQBoMCgoKBAgAEAkSAjBdGhNqEQgAGg0ICRIJGVDhehSuR+E/GgxqCggCGgYIEBICEAsaUGpOCA4aKggAEiYiJEIzNTZBNDg3LTZERjItNEIwOC05MTEwLTcwOTVBRjJDNURCQxoGCAsSAjBgGgYIDhICMGEaBggNEgIwYhoGCAwSAjBkGggKBgoECAAQCRoMCgoKBAgAEAkSAjAVGgwKCgoECAAQCRICMGMaE2oRCAAaDQgJEgkZCtejcD0KLUAaDAoKCgQIABAJEgIwZRoTahEIABoNCAkSCRnQzMzMzMzsPxoMagoIAhoGCBASAhAMGlBqTggOGioIABImIiRDQUNDODU0Qy05OTBDLTQwOUQtOTg3Qy0xNDgxMUY0QzA1OTYaBggLEgIwaBoGCAwSAjBpGgYIDRICMGsaBggOEgIwbRoICgYKBAgAEAkaDAoKCgQIABAJEgIwahoTahEIABoNCAkSCRm4HoXrUbjePxoMCgoKBAgAEAkSAjBsGhNqEQgAGg0ICRIJGYXrUbgehQdAGgwKCgoECAAQCRICMG4aEGoOCAEaCggPEgYiBHRlc3QaDGoKCAIaBggQEgIQDRpQak4IDhoqCAASJiIkM0YyQUI0NDMtRTMxMi00MzBDLTlCRjQtQkFGRDU1QTY3QzQ5GgYICxICMHEaBggMEgIwchoGCA0SAjB0GgYIDhICMHYaCAoGCgQIABAJGgwKCgoECAAQCRICMHMaE2oRCAAaDQgJEgkZUOF6FK5H4T8aDAoKCgQIABAJEgIwdRoTahEIABoNCAkSCRnXo3A9CtcuQBoMCgoKBAgAEAkSAjBZGgxqCggCGgYIEBICEA4aUGpOCA4aKggAEiYiJDE4NkM2REZDLTc5MTUtNEFBOS04OTU3LTE2NkM4RjcyRkRBQhoGCAsSAjB5GgYIDBICMHoaBggNEgIwfBoGCA4SAjB+GggKBgoECAAQCRoMCgoKBAgAEAkSAjB7GhNqEQgAGg0ICRIJGcAehetRuM4/GgwKCgoECAAQCRICMH0aE2oRCAAaDQgJEgkZrkfhehSu/z8aDAoKCgQIABAJEgIwVBoMagoIAhoGCBASAhAPGlRqUggOGioIABImIiRGNDMwMDQzNC00QTY0LTRCODEtQTFGMS02MUQ0ODEyRUZGOUIaBwgLEgMwgQEaBwgOEgMwggEaBwgNEgMwhAEaBwgMEgMwhgEaCAoGCgQIABAJGg0KCwoECAAQCRIDMIMBGhZqFAgBGhAIDxIMIgphdHRhY2htZW50Gg0KCwoECAAQCRIDMIUBGhNqEQgAGg0ICRIJGa5H4XoUrg9AGg0KCwoECAAQCRIDMIcBGhNqEQgAGg0ICRIJGexRuB6F6/k/GgxqCggCGgYIEBICEBAaVGpSCA4aKggAEiYiJEM5ODdCRDkyLTA0OTAtNDdERC05OURFLUY0NEJCN0I4NkQ1MxoHCAsSAzCKARo
HCA4SAzCLARoHCA0SAzCNARoHCAwSAzCPARoICgYKBAgAEAkaDQoLCgQIABAJEgMwjAEaE2oRCAEaDQgPEgkiB25vdGUuCgoaDQoLCgQIABAJEgMwjgEaE2oRCAAaDQgJEgkZH4XrUbgeG0AaDQoLCgQIABAJEgMwkAEaE2oRCAAaDQgJEgkZuB6F61G47j8aDGoKCAIaBggQEgIQERpUalIIDhoqCAASJiIkMUY2ODkzRkItRDU1RS00NUVELUI1ODItRDE3M0Y5RDFGOURGGgcICxIDMJMBGgcIDBIDMJQBGgcIDRIDMJYBGgcIDhIDMJgBGggKBgoECAAQCRoNCgsKBAgAEAkSAzCVARoTahEIABoNCAkSCRkwMzMzMzPTPxoNCgsKBAgAEAkSAzCXARoTahEIABoNCAkSCRlSuB6F61EWQBoNCgsKBAgAEAkSAzCZARoOagwIARoICA8SBCICaW4aDGoKCAIaBggQEgIQEhpUalIIDhoqCAASJiIkRkRDQTU1REQtRkMzMy00QkFELTlDRkQtNjY2Nzk3MEI5NkJEGgcICxIDMJwBGgcIDhIDMJ0BGgcIDRIDMJ8BGgcIDBIDMKEBGggKBgoECAAQCRoNCgsKBAgAEAkSAzCeARoRag8IARoLCA8SByIFQXBwbGUaDQoLCgQIABAJEgMwoAEaE2oRCAAaDQgJEgkZ7FG4HoXrI0AaDQoLCgQIABAJEgMwogEaE2oRCAAaDQgJEgkZIDMzMzMz0z8aDGoKCAIaBggQEgIQExpUalIIDhoqCAASJiIkODM0MTczMDMtQzBCOC00NzU5LThBMkQtOUQwNjc4RTU4MUU4GgcICxIDMKUBGgcIDhIDMKYBGgcIDRIDMKgBGgcIDBIDMKoBGggKBgoECAAQCRoNCgsKBAgAEAkSAzCnARoQag4IARoKCA8SBiIEdGhpcxoNCgsKBAgAEAkSAzCpARoTahEIABoNCAkSCRnNzMzMzMz8PxoNCgsKBAgAEAkSAzCrARoTahEIABoNCAkSCRkI16NwPQrHPxoMagoIAhoGCBASAhAUGlRqUggOGioIABImIiQxN0NBNDhCQy0yMTEzLTRDQzAtQjk3NC1DNDg4Nzc3OUNGRjcaBwgLEgMwrgEaBwgMEgMwrwEaBwgNEgMwsQEaBwgOEgMwswEaCAoGCgQIABAJGg0KCwoECAAQCRIDMLABGhNqEQgAGg0ICRIJGaAehetRuM4/Gg0KCwoECAAQCRIDMLIBGhNqEQgAGg0ICRIJGcP1KFyPwhdAGgwKCgoECAAQCRICMB4aDGoKCAIaBggQEgIQFRpUalIIDhoqCAASJiIkRUI0MTczRDYtRjJCMi00N0Q3LUFFNkYtNDlFMTA2N0FGRUQ5GgcICxIDMLYBGgcIDhIDMLcBGgcIDRIDMLkBGgcIDBIDMLsBGggKBgoECAAQCRoNCgsKBAgAEAkSAzC4ARoRag8IARoLCA8SByIFYXVkaW8aDQoLCgQIABAJEgMwugEaE2oRCAAaDQgJEgkZXI/C9ShcC0AaDQoLCgQIABAJEgMwvAEaE2oRCAAaDQgJEgkZSOF6FK5H4T8aDGoKCAIaBggQEgIQFhpUalIIDhoqCAASJiIkNzZDREZENzQtMzk1Ri00QjE1LUEwRjItMjgyNkNGRDJCNjI0GgcICxIDML8BGgcIDhIDMMABGgcIDRIDMMIBGgcIDBIDMMQBGggKBgoECAAQCRoNCgsKBAgAEAkSAzDBARoNagsIARoHCA8SAyIBSRoNCgsKBAgAEAkSAzDDARoTahEIABoNCAkSCRk9CtejcD0eQBoNCgsKBAgAEAkSAzDFARoTahEIABoNCAkSCRlAMzMzMzPTPxoMagoIAhoGCBASAhAXGlRqUggOGioIABImIiQxOTcyNEZEMC0yMTAzLTQzRjAtQTQ1NC0xMzVBMjIyMTQ0MTIaBwgLEgMwyAEaBwgMEgMwyQEaBwgNEgM
wywEaBwgOEgMwzQEaCAoGCgQIABAJGg0KCwoECAAQCRIDMMoBGhNqEQgAGg0ICRIJGbgehetRuO4/Gg0KCwoECAAQCRIDMMwBGhNqEQgAGg0ICRIJGWZmZmZmZhpAGg0KCwoECAAQCRIDMIwBGgxqCggCGgYIEBICEBgaVGpSCA4aKggAEiYiJDdBMDYzNTU2LThGNDUtNERFMy04NjUzLUYxREJBNzlFNTU2MRoHCAsSAzDQARoHCA4SAzDRARoHCA0SAzDSARoHCAwSAzDUARoICgYKBAgAEAkaDQoLCgQIABAJEgMwngEaDQoLCgQIABAJEgMw0wEaE2oRCAAaDQgJEgkZexSuR+F6GEAaDQoLCgQIABAJEgMw1QEaE2oRCAAaDQgJEgkZsB6F61G43j8aDGoKCAIaBggQEgIQGRpUalIIDhoqCAASJiIkOTI4QkNFRTAtRkE5Qy00MTVFLThDNDQtRkU2REUyMTVCMDAxGgcICxIDMNgBGgcIDhIDMNkBGgcIDRIDMNsBGgcIDBIDMN0BGggKBgoECAAQCRoNCgsKBAgAEAkSAzDaARoOagwIARoICA8SBCICdG8aDQoLCgQIABAJEgMw3AEaE2oRCAAaDQgJEgkZXI/C9ShcJUAaDQoLCgQIABAJEgMw3gEaE2oRCAAaDQgJEgkZQArXo3A98j8aDGoKCAIaBggQEgIQGhpUalIIDhoqCAASJiIkNUM3Q0Y3RDYtQkUxOC00OTQxLTgwMjMtOTA3QjhBQTBGOTcxGgcICxIDMOEBGgcIDhIDMOIBGgcIDRIDMOQBGgcIDBIDMOYBGggKBgoECAAQCRoNCgsKBAgAEAkSAzDjARoPag0IARoJCA8SBSIDYW5kGg0KCwoECAAQCRIDMOUBGhNqEQgAGg0ICRIJGTMzMzMzMyxAGg0KCwoECAAQCRIDMOcBGhNqEQgAGg0ICRIJGeB6FK5H4do/GgxqCggCGgYIEBICEBsaVGpSCA4aKggAEiYiJDM2MjEzMENFLTM2QjctNDk1RS1BRjg3LTc2OTEyQURGRTI0NhoHCAsSAzDqARoHCAwSAzDrARoHCA0SAzDtARoHCA4SAzDvARoICgYKBAgAEAkaDQoLCgQIABAJEgMw7AEaE2oRCAAaDQgJEgkZsB6F61G47j8aDQoLCgQIABAJEgMw7gEaE2oRCAAaDQgJEgkZpHA9CtejJ0AaDAoKCgQIABAJEgIwPhoMagoIAhoGCBASAhAcGlRqUggOGioIABImIiQxNDc5NTRCRS1CNDY4LTQ4OTEtODlEQi0xQUMzNUFBQjEzQjcaBwgLEgMw8gEaBwgMEgMw8wEaBwgNEgMw9QEaBwgOEgMw9wEaCAoGCgQIABAJGg0KCwoECAAQCRIDMPQBGhNqEQgAGg0ICRIJGQjXo3A9Csc/Gg0KCwoECAAQCRIDMPYBGhNqEQgAGg0ICRIJGc3MzMzMzPw/Gg0KCwoECAAQCRIDMKcBGgxqCggCGgYIEBICEB0aVGpSCA4aKggAEiYiJDRGMTFEQzkyLTY1ODktNEY4NS04NTExLUU2QTlEMEEzNUM0OBoHCAsSAzD6ARoHCAwSAzD7ARoHCA0SAzD9ARoHCA4SAzD/ARoICgYKBAgAEAkaDQoLCgQIABAJEgMw/AEaE2oRCAAaDQgJEgkZIIXrUbge5T8aDQoLCgQIABAJEgMw/gEaE2oRCAAaDQgJEgkZPQrXo3A9JUAaDQoLCgQIABAJEgMw2gEaDGoKCAIaBggQEgIQHhpUalIIDhoqCAASJiIkQjIwN0U4MzktNkJBMi00RUZBLThFOTQtQUY3MEU2RjM3OEI2GgcICxIDMIICGgcIDBIDMIMCGgcIDRIDMIUCGgcIDhIDMIcCGggKBgoECAAQCRoNCgsKBAgAEAkSAzCEAhoTahEIABoNCAkSCRkQ16NwPQrXPxoNCgsKBAgAEAkSAzCGAhoTahEIABoNCAkSCRlSuB6
F61EWQBoNCgsKBAgAEAkSAzCZARoMagoIAhoGCBASAhAfGlRqUggOGioIABImIiQyNUI1N0MzMS00OTMzLTQ4QjQtQTI4Ri03RUQ0NTMzQTg2QjEaBwgLEgMwigIaBwgMEgMwiwIaBwgNEgMwjQIaBwgOEgMwjwIaCAoGCgQIABAJGg0KCwoECAAQCRIDMIwCGhNqEQgAGg0ICRIJGeB6FK5H4do/Gg0KCwoECAAQCRIDMI4CGhNqEQgAGg0ICRIJGYXrUbgehSRAGg0KCwoECAAQCRIDMJACGhFqDwgBGgsIDxIHIgVub3RlcxoMagoIAhoGCBASAhAgGlRqUggOGioIABImIiQ4ODBERkRFRS1EODgyLTQ1RjItODdGQS0xRDNFODRDNUJBREUaBwgLEgMwkwIaBwgOEgMwlAIaBwgNEgMwlgIaBwgMEgMwmAIaCAoGCgQIABAJGg0KCwoECAAQCRIDMJUCGhNqEQgBGg0IDxIJIgdyZXZlcnNlGg0KCwoECAAQCRIDMJcCGhNqEQgAGg0ICRIJGRSuR+F6FCBAGg0KCwoECAAQCRIDMJkCGhNqEQgAGg0ICRIJGSCF61G4HvU/GgxqCggCGgYIEBICECEaVGpSCA4aKggAEiYiJDhDMkNEQTI0LUJDOUItNEVGMC1BRkM1LTQ1NzY4Mjg4MEREQRoHCAsSAzCcAhoHCA4SAzCdAhoHCA0SAzCeAhoHCAwSAzCgAhoICgYKBAgAEAkaDQoLCgQIABAJEgMwngEaDQoLCgQIABAJEgMwnwIaE2oRCAAaDQgJEgkZuB6F61G4GEAaDQoLCgQIABAJEgMwoQIaE2oRCAAaDQgJEgkZODMzMzMz4z8aDGoKCAIaBggQEgIQIhpUalIIDhoqCAASJiIkRjAyNDI3OTUtRjNDMi00MzVFLTgzM0UtQTQ3NjE4OEYwMThGGgcICxIDMKQCGgcIDhIDMKUCGgcIDRIDMKYCGgcIDBIDMKgCGggKBgoECAAQCRoNCgsKBAgAEAkSAzCVAhoNCgsKBAgAEAkSAzCnAhoTahEIABoNCAkSCRkzMzMzMzMgQBoNCgsKBAgAEAkSAzCpAhoTahEIABoNCAkSCRkghetRuB71PxoMagoIAhoGCBASAhAjGlRqUggOGioIABImIiREQ0U4QTYzNy0zRjhFLTRERjYtQTZEMi0yQUYyQzcyQjEwQzMaBwgLEgMwrAIaBwgOEgMwrQIaBwgNEgMwrgIaBwgMEgMwsAIaCAoGCgQIABAJGg0KCwoECAAQCRIDMJkBGg0KCwoECAAQCRIDMK8CGhNqEQgAGg0ICRIJGXE9CtejcClAGg0KCwoECAAQCRIDMLECGhNqEQgAGg0ICRIJGaAehetRuN4/GgxqCggCGgYIEBICECQaVGpSCA4aKggAEiYiJDhDNDdGMjgxLTEzQ0EtNEM3RS04NDg4LTlCNUUyQzBDNDVCMhoHCAsSAzC0AhoHCA4SAzC1AhoHCA0SAzC2AhoHCAwSAzC4AhoICgYKBAgAEAkaDAoKCgQIABAJEgIwHhoNCgsKBAgAEAkSAzC3AhoTahEIABoNCAkSCRmF61G4HoUXQBoNCgsKBAgAEAkSAzC5AhoTahEIABoNCAkSCRnAHoXrUbjOPxoMagoIAhoGCBASAhAlGlRqUggOGioIABImIiRFMDNEOTMzOC0zNDUzLTQ2RTgtOTM4RS04NjcyNUNDQzU5NDAaBwgLEgMwvAIaBwgOEgMwvQIaBwgNEgMwvgIaBwgMEgMwwAIaCAoGCgQIABAJGgwKCgoECAAQCRICMG4aDQoLCgQIABAJEgMwvwIaE2oRCAAaDQgJEgkZH4XrUbgeBUAaDQoLCgQIABAJEgMwwQIaE2oRCAAaDQgJEgkZCNejcD0K5z8aDGoKCAIaBggQEgIQJhpUalIIDhoqCAASJiIkOEY1QTZBRkUtQUQ3QS00MTQzLUE0QjYtOEMxQkE4ODYwMEV
GGgcICxIDMMQCGgcIDhIDMMUCGgcIDRIDMMYCGgcIDBIDMMgCGggKBgoECAAQCRoNCgsKBAgAEAkSAzC4ARoNCgsKBAgAEAkSAzDHAhoTahEIABoNCAkSCRnhehSuR+EKQBoNCgsKBAgAEAkSAzDJAhoTahEIABoNCAkSCRlI4XoUrkfhPxoMagoIAhoGCBASAhAnGlRqUggOGioIABImIiQ4NTJCNjQ5Ni1EMUQxLTRGQ0QtODg3OS0xODE1M0JDQUUyNjcaBwgLEgMwzAIaBwgOEgMwzQIaBwgNEgMwzgIaBwgMEgMw0AIaCAoGCgQIABAJGgwKCgoECAAQCRICMB4aDQoLCgQIABAJEgMwzwIaE2oRCAAaDQgJEgkZw/UoXI/CAUAaDQoLCgQIABAJEgMw0QIaE2oRCAAaDQgJEgkZCNejcD0K5z8aDGoKCAIaBggQEgIQKBpUalIIDhoqCAASJiIkOTFDRDc3NUUtNDQ1Mi00RjA3LTlDQTItODA5NjYxQjU3NDM3GgcICxIDMNQCGgcIDhIDMNUCGgcIDRIDMNYCGgcIDBIDMNgCGggKBgoECAAQCRoNCgsKBAgAEAkSAzCeARoNCgsKBAgAEAkSAzDXAhoTahEIABoNCAkSCRnsUbgehesjQBoNCgsKBAgAEAkSAzDZAhoTahEIABoNCAkSCRkgMzMzMzPTPxoMagoIAhoGCBASAhApGlRqUggOGioIABImIiQwNEM1N0Q3MC0zRjlGLTQwQjMtOEU5My1DNDM0RDdGQzVDMUUaBwgLEgMw3AIaBwgMEgMw3QIaBwgNEgMw3wIaBwgOEgMw4QIaCAoGCgQIABAJGg0KCwoECAAQCRIDMN4CGhNqEQgAGg0ICRIJGQDXo3A9Csc/Gg0KCwoECAAQCRIDMOACGhNqEQgAGg0ICRIJGfYoXI/C9R5AGg0KCwoECAAQCRIDMMEBGgxqCggCGgYIEBICECoaVGpSCA4aKggAEiYiJDJEOUZCNEE4LUI1NUMtNEU4Mi1BM0I3LTM1RUNCNUY0OTFGQRoHCAsSAzDkAhoHCA4SAzDlAhoHCA0SAzDmAhoHCAwSAzDoAhoICgYKBAgAEAkaDQoLCgQIABAJEgMwkAIaDQoLCgQIABAJEgMw5wIaE2oRCAAaDQgJEgkZhetRuB6FJEAaDQoLCgQIABAJEgMw6QIaE2oRCAAaDQgJEgkZANejcD0K1z8aDGoKCAIaBggQEgIQKxpUalIIDhoqCAASJiIkMTM2NTUwNkUtNDVCNS00RDEwLTlDNzAtMDQ3QThGQTc5QkU5GgcICxIDMOwCGgcIDBIDMO0CGgcIDRIDMO8CGgcIDhIDMPACGggKBgoECAAQCRoNCgsKBAgAEAkSAzDuAhoTahEIABoNCAkSCRnNzMzMzMz8PxoMCgoKBAgAEAkSAjAuGgwKCgoECAAQCRICMCwaDGoKCAIaBggQEgIQLBpUalIIDhoqCAASJiIkQjdBODA1RDktRjNBMS00N0I4LTk3OTEtNEZEQjkxREVGNjY1GgcICxIDMPMCGgcIDhIDMPQCGgcIDRIDMPUCGgcIDBIDMPcCGggKBgoECAAQCRoNCgsKBAgAEAkSAzCZARoNCgsKBAgAEAkSAzD2AhoTahEIABoNCAkSCRmPwvUoXI8pQBoNCgsKBAgAEAkSAzD4AhoTahEIABoNCAkSCRlAMzMzMzPjPxoMagoIAhoGCBASAhAtGlRqUggOGioIABImIiQ5MzMzREZBMC1FOTUyLTRDQ0YtODJCRi0wQTI2MTVEMUUxRjAaBwgLEgMw+wIaBwgOEgMw/AIaBwgNEgMw/QIaBwgMEgMw/wIaCAoGCgQIABAJGg0KCwoECAAQCRIDMIMBGg0KCwoECAAQCRIDMP4CGhNqEQgAGg0ICRIJGTMzMzMzMw9AGg0KCwoECAAQCRIDMIADGhNqEQgAGg0ICRIJGeJ6FK5H4fo/GgxqCggCGgYIEBI
CEC4aVGpSCA4aKggAEiYiJDNDRDMwQUJDLTVEMzQtNDZFRi04MDEyLUM3ODNGRTlBQ0NFOBoHCAsSAzCDAxoHCA4SAzCEAxoHCA0SAzCFAxoHCAwSAzCHAxoICgYKBAgAEAkaDAoKCgQIABAJEgIwIxoNCgsKBAgAEAkSAzCGAxoTahEIABoNCAkSCRnXo3A9CtciQBoNCgsKBAgAEAkSAzCIAxoTahEIABoNCAkSCRlQ4XoUrkfhPxoMagoIAhoGCBASAhAvGlRqUggOGioIABImIiQzNTNGNzQ3Ni1CNTVGLTQzNEYtQTk0QS0wQUE5MkZFMTJCQTMaBwgLEgMwiwMaBwgOEgMwjAMaBwgNEgMwjQMaBwgMEgMwjwMaCAoGCgQIABAJGg0KCwoECAAQCRIDMOMBGg0KCwoECAAQCRIDMI4DGhNqEQgAGg0ICRIJGTMzMzMzMyxAGg0KCwoECAAQCRIDMJADGhNqEQgAGg0ICRIJGeB6FK5H4do/GgxqCggCGgYIEBICEDAaVGpSCA4aKggAEiYiJDVGNTBFNzczLTA1MDItNEE5NS1BMkVFLTA1MkE4RkQxODhCQhoHCAsSAzCTAxoHCAwSAzCUAxoHCA0SAzCWAxoHCA4SAzCYAxoICgYKBAgAEAkaDQoLCgQIABAJEgMwlQMaE2oRCAAaDQgJEgkZ0MzMzMzM7D8aDQoLCgQIABAJEgMwlwMaE2oRCAAaDQgJEgkZZmZmZmZmKkAaDAoKCgQIABAJEgIwORoMagoIAhoGCBASAhAxGlRqUggOGioIABImIiRCM0E0QjZFQi00NTkzLTQ0MEMtQTZCNS0yMzFDNDlGNjNBNUIaBwgLEgMwmwMaBwgMEgMwnAMaBwgNEgMwngMaBwgOEgMwoAMaCAoGCgQIABAJGg0KCwoECAAQCRIDMJ0DGhNqEQgAGg0ICRIJGQDXo3A9Csc/Gg0KCwoECAAQCRIDMJ8DGhNqEQgAGg0ICRIJGa5H4XoUrh9AGgwKCgoECAAQCRICMEsaDGoKCAIaBggQEgIQMhoISgYKBAgAEAAaCAoGCgQIARABGg0KCwoECAIQCBIDMKUDGh5qHAgBGhgIDxIUIhI5LjQxLjAuMTMuMTAxMzQ2LDAaCAoGCgQIARABGggKBgoECAEQARoNCgsKBAgAEAgSAzCpAxoMagoIABoGCAcSAggAGg0KCwoECAAQCBIDMKsDGhJqEAgBGgwIDxIIIgYxMi4wLjAiCGlkZW50aXR5IhZjYWxsUmVjb3JkaW5nU3RhcnRUaW1lIgdzdW1tYXJ5Ig50b3BMaW5lU3VtbWFyeSINY2FsbFJlY29yZGluZyIJZnJhZ21lbnRzIhF0cmFuc2NyaXB0VmVyc2lvbiIMaW50ZWdlclZhbHVlIgtjcmVhdGVkRGF0ZSILZG91YmxlVmFsdWUiCnRyYW5zY3JpcHQiB3NwZWFrZXIiCGR1cmF0aW9uIgl0aW1lc3RhbXAiBHRleHQiBHNlbGYiCVVVSURJbmRleCIXY2FsbFJlbW90ZVNwZWFrZXJIYW5kbGUiHnRvcGxpbmVTdW1tYXJ5TW9kZWxWZXJzaW9uSW5mbyIWY2FsbExvY2FsU3BlYWtlckhhbmRsZSIIY2FsbFR5cGUiDnN1bW1hcnlWZXJzaW9uIh9sb25nZm9ybVN1bW1hcnlNb2RlbFZlcnNpb25JbmZvKhdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkN
SRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQlKHmkVeWSaKDg5GWNtMUUzIQweQyKe7PTgaMMZGGBpNwLDIQr03NH+zKR52AoyUBOa9d6zIQG35rNfXBTdOvH2Eb8MgSVjIQ0y+1fGhpS4iAX0d43DABvjIQt/EfoQRcTlO+oiTNNO86cjIQLFij5zawSXqiJbIs25vPojIQifJ1ldaeRd+vhkquG9pcljIQiIkLqo1JQCyWlDNDt86FmzIQvqgaIkeTQnu57zErj1dGDzIQ1RVzMk+YRgCcoqsRattwaTIQBq4mdG9XTwivBI0gIzMyxTIQPgRX9UtBQGKc2kIcWzISvTIQ2TEdWPACSUSDnmna9niz0TIQfaWbqsLJTuWCrzccBSa0ojIQn3B43D2JTWGXrWj5dpp3vDIQktPGtswnQAO9x0waOBIfvjIQ+rj7sl7GRB6NOVTyQHdsETIQk9LMfPO7R+2X8xCVLjp1tjIQ7kY1lcsOQF2sPBbMg5a2PTIQSkst0U7NRjqcCMPI2Kk/4TIQ67lFyyEpSmGI1vi4wsQ5ezIQX3+IWJ4fRR+GASxozDCq5DIQ+R/tFacjTEq/e9wonTH18jIQsyyo+dr+SCKj4CAKYKUohjIQJZKoZWWzQcO8SQyDW0p3SzIQXc/AtSlMS6GXFVPBRY6e5jIQfgarWVT1TcOeOE1M+w4RXTIQ3LnY31gETMSBJNSiie/PvDIQFHo4WsuUS76duVi1rY/BOjIQT18rJhy5SQ2emU6A6li4VTIQsuEaeCQFTkahVRa+FYSHHzIQNXFx7J6MQrmXVl8Qfs5VgjIQJGEZAKqgR72Hq6cf9x1zfjIQWw3ZgPoqS32krbKdkqFX8DIQpuL/PxPYQ8+VBJxbeuqG8DIQptpN9mWtR4mk4Wv60xi5bzIQEi7bYLklQXKsDDwvm4a6oTIQlqymmRD5TFGcOVXukl4KBDIQh9kQ9JBYTc2P3zuDeOi/3DIQmmb/zcQ1Tk2Vw+wDEIPN+DIQIOatBdOVRwWFdCD+RYN8xDIQGnCv010lQUWi1/QO+zqb2DIQUnlVYEdMRLaFJTZya4UVTTIQP4wU+sOMTkaPbAvSgkZjeDIQOwmORDjCQpePYutkEAcJxjIQXTkNKaegS1+1qYguHvOxzTIQW2yOyf+LRnmBayuweNifkzIQ+nhEHP9YQdCcW8EDq5GaxjIQ6e82jqeeTFqvVzGocStqOjo6ChsKEETrZ+CYhkU/iFQwupf081ESAwj6AhICCAEKGwoQweQyKe7PTgaMMZGGBpNwLBIDCIkBEgIIAQ==" + }, + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "CroppingQuadBottomRightX": 1.0, + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 
0.0, + "Media": "recordName='MEDIA_RECORD_0002' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "Duration": 17.9, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "TitleEncrypted": { + "__bytes__": "TmV3IFJlY29yZGluZw==" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-08-17 15:34:39.385000+00:00", + "FileSize": 153809 + } + }, + { + "recordName": "ATTACHMENT_RECORD_0009", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "UTIEncrypted": { + "__bytes__": "cHVibGljLnZjYXJk" + }, + "CreationDate": "2025-12-12 01:30:25.155000+00:00", + "CroppingQuadTopRightX": 1.0, + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": "CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" + }, + "LinkPresentationMetadataAsset": 
"fileChecksum='SANITIZED_FILE_0017' referenceChecksum='SANITIZED_REF_0017' wrappingKey='SANITIZED_WRAP_0017==' downloadURL='https://example.com/icloud-assets/asset-0017/${f}' size=428161", + "MinimumSupportedNotesVersion": 14, + "UTI": "public.vcard", + "CroppingQuadTopLeftY": 1.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "CroppingQuadBottomRightX": 1.0, + "CroppingQuadBottomRightY": 0.0, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "Media": "recordName='MEDIA_RECORD_0003' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "TitleEncrypted": { + "__bytes__": "RHIuIEphbmUgQWxleGFuZHJhIERvZSBQaEQudmNm" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-12-12 01:30:25.155000+00:00", + "FileSize": 72370 + } + }, + { + "recordName": "INLINEATTACHMENT_RECORD_0004", + "recordType": "InlineAttachment", + "fields": { + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLmlubGluZXRleHRhdHRhY2htZW50Lmhhc2h0YWc=" + }, + "CreationDate": "2025-08-02 01:24:16.897000+00:00", + "AltTextEncrypted": { + "__bytes__": "I215dGFn" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "MinimumSupportedNotesVersion": 14, + "Deleted": 0, + "TokenContentIdentifierEncrypted": { + "__bytes__": "TVlUQUc=" + } + } + }, + { + "recordName": "INLINEATTACHMENT_RECORD_0005", + "recordType": "InlineAttachment", + "fields": { + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLm5vdGVzLmlubGluZXRleHRhdHRhY2htZW50Lmhhc2h0YWc=" + }, + "CreationDate": "2025-08-02 
01:24:23.045000+00:00", + "AltTextEncrypted": { + "__bytes__": "I215c2Vjb25kdGFn" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "MinimumSupportedNotesVersion": 14, + "Deleted": 0, + "TokenContentIdentifierEncrypted": { + "__bytes__": "TVlTRUNPTkRUQUc=" + } + } + }, + { + "recordName": "ATTACHMENT_RECORD_0010", + "recordType": "Attachment", + "fields": { + "CroppingQuadTopRightY": 1.0, + "PreviewUpdateDate": "2025-09-11 23:26:37.796000+00:00", + "UTIEncrypted": { + "__bytes__": "Y29tLmFwcGxlLnBhcGVy" + }, + "CreationDate": "2025-08-17 15:36:06.619000+00:00", + "CroppingQuadTopRightX": 1.0, + "PaperAssets": [], + "PaperDatabase": "fileChecksum='SANITIZED_FILE_0018' referenceChecksum='SANITIZED_REF_0018' wrappingKey='SANITIZED_WRAP_0018==' downloadURL='https://example.com/icloud-assets/asset-0018/${f}' size=4797", + "ReplicaIDToNotesVersionDataEncrypted": { + "__bytes__": "CgYKBAgAEAMaODI2ChAKAjACEgIwARoGCgQIABACChAKAjADEgIwARoGCgQIABABChAKAjAEEgIwARoGCgQIABADGgxqCggAGgYIABICCCIaDGoKCAIaBggBEgIQARoMagoIAhoGCAESAhAAGgxqCggCGgYIARICEAIiDGludGVnZXJWYWx1ZSIJVVVJREluZGV4Khdjb20uYXBwbGUuQ1JEVC5OU051bWJlcioXY29tLmFwcGxlLkNSRFQuTlNTdHJpbmcqFWNvbS5hcHBsZS5DUkRULk5TVVVJRCoWY29tLmFwcGxlLkNSRFQuQ1JUdXBsZSooY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWVMZWFzdCojY29tLmFwcGxlLkNSRFQuQ1JSZWdpc3Rlck11bHRpVmFsdWUqFWNvbS5hcHBsZS5DUkRULkNSVHJlZSoZY29tLmFwcGxlLkNSRFQuQ1JUcmVlTm9kZSoVY29tLmFwcGxlLkNSRFQuTlNEYXRhKhVjb20uYXBwbGUuQ1JEVC5OU0RhdGUqMWNvbS5hcHBsZS5ub3Rlcy5JQ0Nsb3VkU3luY2luZ09iamVjdEFjdGl2aXR5RXZlbnQqKGNvbS5hcHBsZS5ub3Rlcy5JQ0Zhc3RTeW5jU2VsZWN0aW9uU3RhdGUqImNvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcqK2NvbS5hcHBsZS5ub3Rlcy5JQ1RUQXVkaW9SZWNvcmRpbmcuRnJhZ21lbnQqJWNvbS5hcHBsZS5ub3Rlcy5JQ1RUVHJhbnNjcmlwdFNlZ21lbnQqF2NvbS5hcHBsZS5ub3Rlcy5DUlRhYmxlKhdjb20uYXBwbGUubm90ZXMuSUNUYWJsZTIQROtn4JiGRT+IVDC6l/TzUTIQUT2FinRBQhGpx7+t+9YwkjIQxhiJSHx4Qs60RRka9Lp+Xg==" 
+ }, + "MinimumSupportedNotesVersion": 14, + "PreviewWidths": [768.0, 768.0], + "UTI": "com.apple.paper", + "FallbackImage": "fileChecksum='SANITIZED_FILE_0019' referenceChecksum='SANITIZED_REF_0019' wrappingKey='SANITIZED_WRAP_0019==' downloadURL='https://example.com/icloud-assets/asset-0019/${f}' size=53961", + "PreviewImages": [ + "fileChecksum='SANITIZED_FILE_0020' referenceChecksum='SANITIZED_REF_0020' wrappingKey='SANITIZED_WRAP_0020==' downloadURL='https://example.com/icloud-assets/asset-0020/${f}' size=25924", + "fileChecksum='SANITIZED_FILE_0021' referenceChecksum='SANITIZED_REF_0021' wrappingKey='SANITIZED_WRAP_0021==' downloadURL='https://example.com/icloud-assets/asset-0021/${f}' size=22140" + ], + "SummaryEncrypted": { + "__bytes__": "aGVsbG8K" + }, + "CroppingQuadTopLeftY": 1.0, + "Height": 349.0, + "CroppingQuadTopLeftX": 0.0, + "Deleted": 0, + "ImageClassificationSummaryVersion": 0, + "Width": 768.0, + "PreviewHeights": [768.0, 768.0], + "CroppingQuadBottomRightX": 1.0, + "PreviewScales": [1.0, 1.0], + "CroppingQuadBottomRightY": 0.0, + "MetadataData": { + "__bytes__": "eyJwYXBlckNvbnRlbnRCb3VuZHNXaWR0aEtleSI6Mjc4LCJoYXNOZXdJbmtzS2V5Ijp0cnVlLCJwYXBlckNvbnRlbnRCb3VuZHNPcmlnaW5ZS2V5IjoxNjUsInBhcGVyQ29udGVudEJvdW5kc0hlaWdodEtleSI6MTg0LCJoYXNFbmhhbmNlZENhbnZhc0tleSI6dHJ1ZSwicGFwZXJDb250ZW50Qm91bmRzT3JpZ2luWEtleSI6NjN9" + }, + "HandwritingSummaryVersion": 0, + "CroppingQuadBottomLeftX": 0.0, + "CroppingQuadBottomLeftY": 0.0, + "Orientation": 0, + "OcrSummaryVersion": 0, + "ImageFilterType": 0, + "PreviewScaleWhenDrawings": [1, 1], + "PreviewAppearances": [0, 1], + "TitleEncrypted": { + "__bytes__": "aGVsbG8=" + }, + "Note": "recordName='NOTE_RECORD_0001' action='VALIDATE' zoneID=CKZoneID(zoneName='Notes', ownerRecordName='_SANITIZED_OWNER_RECORD', zoneType='REGULAR_CUSTOM_ZONE')", + "LastModificationDate": "2025-11-07 16:33:43.424000+00:00" + } + } + ] +} diff --git a/tests/services/test_reminders.py b/tests/services/test_reminders.py index 
32feea59..9a0c03a3 100644 --- a/tests/services/test_reminders.py +++ b/tests/services/test_reminders.py @@ -1,136 +1,86 @@ -"""Unit tests for the RemindersService class.""" -# pylint: disable=protected-access +"""Smoke tests for the CloudKit-backed Reminders service facade.""" -import datetime -from unittest.mock import MagicMock, patch - -from requests import Response +from unittest.mock import MagicMock from pyicloud.services.reminders import RemindersService -from pyicloud.session import PyiCloudSession +from pyicloud.services.reminders.models import ( + ListRemindersResult, + Reminder, + RemindersList, +) -def test_reminders_service_init(mock_session: MagicMock) -> None: - """Test RemindersService initialization.""" - mock_session.get.return_value = MagicMock( - spec=Response, json=lambda: {"Collections": [], "Reminders": []} - ) +def test_reminders_service_init() -> None: + """The reminders facade wires the CloudKit client and typed helpers.""" params: dict[str, str] = {"dsid": "12345"} - - with patch("pyicloud.services.reminders.get_localzone_name", return_value="UTC"): - service = RemindersService("https://example.com", mock_session, params) - - assert service.service_root == "https://example.com" - assert service.params == params - assert not service.lists - assert not service.collections - - -def test_reminders_service_refresh() -> None: - """Test the refresh method.""" - mock_session = MagicMock(spec=PyiCloudSession) - mock_response = MagicMock(spec=Response) - mock_response.json.return_value = { - "Collections": [ - {"title": "Work", "guid": "guid1", "ctag": "ctag1"}, - {"title": "Personal", "guid": "guid2", "ctag": "ctag2"}, - ], - "Reminders": [ - {"title": "Task 1", "pGuid": "guid1", "dueDate": [2023, 10, 1, 12, 0, 0]}, - {"title": "Task 2", "pGuid": "guid2", "dueDate": None}, - ], - } - mock_session.get.return_value = mock_response - with patch("pyicloud.services.reminders.get_localzone_name", return_value="UTC"): - service = RemindersService( - 
"https://example.com", mock_session, {"dsid": "12345"} + service = RemindersService("https://example.com", MagicMock(), params) + + assert service.service_root == "https://example.com" + assert service.params == params + assert callable(service.lists) + assert callable(service.list_reminders) + assert callable(service.get) + + +def test_reminders_service_lists_delegates_to_read_api() -> None: + service = RemindersService("https://example.com", MagicMock(), {"dsid": "12345"}) + expected = [RemindersList(id="List/WORK", title="Work")] + service._reads.lists = MagicMock(return_value=iter(expected)) + + assert list(service.lists()) == expected + + +def test_reminders_service_reminders_aggregates_list_snapshots() -> None: + service = RemindersService("https://example.com", MagicMock(), {"dsid": "12345"}) + list_id = "List/WORK" + reminder = Reminder(id="Reminder/1", list_id=list_id, title="Task 1") + service.lists = MagicMock(return_value=[RemindersList(id=list_id, title="Work")]) + service.list_reminders = MagicMock( + return_value=ListRemindersResult( + reminders=[reminder], + alarms={}, + triggers={}, + attachments={}, + hashtags={}, + recurrence_rules={}, ) - service.refresh() - - assert "Work" in service.lists - assert "Personal" in service.lists - assert len(service.lists["Work"]) == 1 - assert len(service.lists["Personal"]) == 1 + ) - work_task = service.lists["Work"][0] - assert work_task["title"] == "Task 1" - assert work_task["due"] == datetime.datetime(2023, 10, 1, 12, 0, 0) + assert list(service.reminders()) == [reminder] + service.list_reminders.assert_called_once_with( + list_id=list_id, + include_completed=True, + results_limit=200, + ) - personal_task = service.lists["Personal"][0] - assert personal_task["title"] == "Task 2" - assert personal_task["due"] is None +def test_reminders_service_create_delegates_to_write_api() -> None: + service = RemindersService("https://example.com", MagicMock(), {"dsid": "12345"}) + created = Reminder(id="Reminder/1", 
list_id="List/WORK", title="New Task") + service._writes.create = MagicMock(return_value=created) + + result = service.create("List/WORK", "New Task", desc="Description") + + assert result == created + service._writes.create.assert_called_once_with( + list_id="List/WORK", + title="New Task", + desc="Description", + completed=False, + due_date=None, + priority=0, + flagged=False, + all_day=False, + time_zone=None, + parent_reminder_id=None, + ) -def test_reminders_service_post() -> None: - """Test the post method.""" - mock_session = MagicMock(spec=PyiCloudSession) - mock_response = MagicMock(spec=Response) - mock_response.ok = True - mock_session.post.return_value = mock_response - with patch("pyicloud.services.reminders.get_localzone_name", return_value="UTC"): - service = RemindersService( - "https://example.com", mock_session, {"dsid": "12345"} - ) - service.collections = {"Work": {"guid": "guid1"}} - - # Test posting a reminder with a due date - due_date = datetime.datetime(2023, 10, 1, 12, 0, 0) - result: bool = service.post("New Task", "Description", "Work", due_date) - - assert result is True - mock_session.post.assert_called_once() - _, kwargs = mock_session.post.call_args - assert kwargs["json"] - data = kwargs["json"] - assert data["Reminders"]["title"] == "New Task" - assert data["Reminders"]["description"] == "Description" - assert data["Reminders"]["pGuid"] == "guid1" - assert data["Reminders"]["dueDate"] == [20231001, 2023, 10, 1, 12, 0] - - # Test posting a reminder without a due date - mock_session.post.reset_mock() - result = service.post("Task Without Due Date", collection="Work") - - assert result is True - mock_session.post.assert_called_once() - _, kwargs = mock_session.post.call_args - data = kwargs["json"] - assert data["Reminders"]["title"] == "Task Without Due Date" - assert data["Reminders"]["dueDate"] is None - - -def test_reminders_service_post_invalid_collection() -> None: - """Test the post method with an invalid collection.""" - 
mock_session = MagicMock(spec=PyiCloudSession) - mock_response = MagicMock(spec=Response) - mock_response.ok = True - mock_session.post.return_value = mock_response - with patch("pyicloud.services.reminders.get_localzone_name", return_value="UTC"): - service = RemindersService( - "https://example.com", mock_session, {"dsid": "12345"} - ) +def test_reminders_service_delete_delegates_to_write_api() -> None: + service = RemindersService("https://example.com", MagicMock(), {"dsid": "12345"}) + reminder = Reminder(id="Reminder/1", list_id="List/WORK", title="Delete me") + service._writes.delete = MagicMock() - # Post to a non-existent collection - result = service.post("Task", collection="NonExistent") - assert result is True - mock_session.post.assert_called_once() - _, kwargs = mock_session.post.call_args - data = kwargs["json"] - assert data["Reminders"]["pGuid"] == "tasks" # Default collection - - -def test_reminders_service_refresh_empty_response() -> None: - """Test the refresh method with an empty response.""" - mock_session = MagicMock(spec=PyiCloudSession) - mock_response = MagicMock(spec=Response) - mock_response.json.return_value = {"Collections": [], "Reminders": []} - mock_session.get.return_value = mock_response - with patch("pyicloud.services.reminders.get_localzone_name", return_value="UTC"): - service = RemindersService( - "https://example.com", mock_session, {"dsid": "12345"} - ) - service.refresh() + service.delete(reminder) - assert not service.lists - assert not service.collections + service._writes.delete.assert_called_once_with(reminder) diff --git a/tests/services/test_reminders_cloudkit.py b/tests/services/test_reminders_cloudkit.py new file mode 100644 index 00000000..2ef8b0e9 --- /dev/null +++ b/tests/services/test_reminders_cloudkit.py @@ -0,0 +1,2856 @@ +"""Unit tests for the CloudKit-based RemindersService record parsing. + +Tests all _record_to_*() methods and _decode_crdt_document() using +realistic CKRecord JSON fixtures. 
+""" +# pylint: disable=protected-access + +import base64 +import json +import logging +from datetime import datetime, timezone +from unittest.mock import MagicMock, patch + +import pytest +from pydantic import ValidationError + +from pyicloud.common.cloudkit import ( + CKErrorItem, + CKLookupRequest, + CKLookupResponse, + CKModifyOperation, + CKModifyRequest, + CKModifyResponse, + CKQueryResponse, + CKRecord, + CKTombstoneRecord, + CKWriteRecord, + CKZoneChangesRequest, + CKZoneChangesResponse, + CKZoneChangesZone, + CKZoneID, + CKZoneIDReq, +) +from pyicloud.common.cloudkit.base import resolve_cloudkit_validation_extra +from pyicloud.services.reminders._mappers import RemindersRecordMapper +from pyicloud.services.reminders._protocol import ( + CRDTDecodeError, +) +from pyicloud.services.reminders._protocol import ( + _decode_crdt_document as decode_crdt_document, +) +from pyicloud.services.reminders._protocol import ( + _encode_crdt_document as encode_crdt_document, +) +from pyicloud.services.reminders._protocol import ( + _generate_resolution_token_map as generate_resolution_token_map, +) +from pyicloud.services.reminders.client import ( + CloudKitRemindersClient, + RemindersApiError, + _CloudKitClient, +) +from pyicloud.services.reminders.models import ( + Alarm, + AlarmWithTrigger, + Hashtag, + ImageAttachment, + ListRemindersResult, + LocationTrigger, + Proximity, + RecurrenceFrequency, + RecurrenceRule, + Reminder, + ReminderChangeEvent, + RemindersList, + URLAttachment, +) +from pyicloud.services.reminders.service import RemindersService + +# --------------------------------------------------------------------------- +# Fixture: a stubbed RemindersService (no network, just parsing) +# --------------------------------------------------------------------------- + + +@pytest.fixture +def service(): + """Create a RemindersService with parsing methods but no network.""" + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = 
MagicMock() + return svc + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _ck_record(record_type: str, record_name: str, fields: dict, **extra) -> CKRecord: + """Build a CKRecord from a raw dict, same as CloudKit JSON on the wire.""" + raw = { + "recordName": record_name, + "recordType": record_type, + "fields": fields, + **extra, + } + return CKRecord.model_validate(raw) + + +# --------------------------------------------------------------------------- +# Programmatically generated CRDT test blobs +# (versioned_document.Document -> topotext.String -> zlib -> base64) +# --------------------------------------------------------------------------- + + +def _make_crdt_blob(text: str) -> str: + """Build a valid reminders CRDT blob: protobuf -> zlib -> base64 string.""" + import base64 + import zlib + + from pyicloud.services.reminders.protobuf import ( + reminders_pb2, + versioned_document_pb2, + ) + + s = reminders_pb2.String() + s.string = text + + version = versioned_document_pb2.Version() + version.serializationVersion = 0 + version.minimumSupportedVersion = 0 + version.data = s.SerializeToString() + + doc = versioned_document_pb2.Document() + doc.serializationVersion = 0 + doc.version.append(version) + + compressed = zlib.compress(doc.SerializeToString()) + return base64.b64encode(compressed).decode("ascii") + + +def _make_crdt_version_bytes(text: str) -> bytes: + """Build the raw, uncompressed versioned_document.Version payload.""" + from pyicloud.services.reminders.protobuf import ( + reminders_pb2, + versioned_document_pb2, + ) + + s = reminders_pb2.String() + s.string = text + + version = versioned_document_pb2.Version() + version.serializationVersion = 0 + version.minimumSupportedVersion = 0 + version.data = s.SerializeToString() + return version.SerializeToString() + + +# Pre-built samples for test use +TITLE_DOC_SAMPLES = { + 
"Message Benno": _make_crdt_blob("Message Benno"), + "PRISE EN CHARGE": _make_crdt_blob("PRISE EN CHARGE"), + "Cancel Hoess": _make_crdt_blob("Cancel Hoess"), +} + + +def test_reminder_domain_models_are_pydantic_and_mutable(): + reminder = Reminder(id="Reminder/A", list_id="List/A", title="A") + + assert reminder.model_dump()["id"] == "Reminder/A" + reminder.deleted = True + assert reminder.deleted is True + with pytest.raises(ValidationError): + reminder.priority = "high" + reminder.priority = 3 + assert reminder.priority == 3 + with pytest.raises(ValidationError): + Reminder( + id="Reminder/B", + list_id="List/B", + title="B", + unexpected=True, + ) + + +def test_list_result_models_are_frozen(): + result = ListRemindersResult( + reminders=[], + alarms={}, + triggers={}, + attachments={}, + hashtags={}, + recurrence_rules={}, + ) + + with pytest.raises(ValidationError): + result.reminders = [] + + +def test_location_trigger_radius_must_be_non_negative(): + trigger = LocationTrigger(id="AlarmTrigger/A", alarm_id="Alarm/A") + + with pytest.raises(ValidationError): + trigger.radius = -10.0 + + +def test_image_attachment_dimensions_and_size_must_be_non_negative(): + attachment = ImageAttachment(id="Attachment/A", reminder_id="Reminder/A") + + with pytest.raises(ValidationError): + attachment.file_size = -1 + with pytest.raises(ValidationError): + attachment.width = -1 + with pytest.raises(ValidationError): + attachment.height = -1 + + +def test_recurrence_rule_domain_constraints_are_enforced(): + rule = RecurrenceRule(id="RecurrenceRule/A", reminder_id="Reminder/A") + + with pytest.raises(ValidationError): + rule.interval = 0 + with pytest.raises(ValidationError): + rule.occurrence_count = -1 + with pytest.raises(ValidationError): + rule.first_day_of_week = 7 + + +def test_protocol_crdt_round_trip(): + encoded = encode_crdt_document("Round trip") + + assert isinstance(encoded, str) + assert decode_crdt_document(encoded) == "Round trip" + + +def 
test_protocol_resolution_token_map_structure(): + payload = json.loads(generate_resolution_token_map(["titleDocument", "completed"])) + + assert set(payload.keys()) == {"map"} + assert set(payload["map"].keys()) == {"titleDocument", "completed"} + for token in payload["map"].values(): + assert token["counter"] == 1 + assert isinstance(token["modificationTime"], float) + assert isinstance(token["replicaID"], str) + assert token["replicaID"] + + +def test_mapper_asset_backed_list_membership_download(): + raw = MagicMock() + raw.download_asset_bytes.return_value = b'["REM-3","Reminder/REM-4"]' + mapper = RemindersRecordMapper(lambda: raw, logging.getLogger(__name__)) + rec = _ck_record( + "List", + "List/LIST-ASSET", + { + "Name": {"type": "STRING", "value": "Asset-backed"}, + "ReminderIDsAsset": { + "type": "ASSETID", + "value": { + "fileChecksum": "abc123", + "size": 2, + "wrappingKey": "key", + "downloadURL": "https://example.test/reminder_ids", + "referenceChecksum": "ref", + "signature": "sig", + }, + }, + }, + ) + + lst = mapper.record_to_list(rec) + + assert lst.reminder_ids == ["REM-3", "REM-4"] + raw.download_asset_bytes.assert_called_once_with( + "https://example.test/reminder_ids" + ) + + +def test_cloudkit_client_uses_bounded_timeouts(): + session = MagicMock() + session.post.return_value = MagicMock(status_code=200, json=lambda: {}) + session.get.return_value = MagicMock(status_code=200, content=b"asset-bytes") + client = _CloudKitClient("https://ckdatabasews.icloud.com", session, {}) + + client.post("/records/query", {"query": "payload"}) + client.get_bytes("https://example.test/asset") + + assert session.post.call_args.kwargs["timeout"] == (10.0, 60.0) + assert session.get.call_args.kwargs["timeout"] == (10.0, 60.0) + + +def test_resolve_cloudkit_validation_extra_honors_explicit_override(monkeypatch): + monkeypatch.setenv("PYICLOUD_CK_EXTRA", "forbid") + + assert resolve_cloudkit_validation_extra("allow") == "allow" + + +def 
test_reminders_client_allows_unexpected_fields_by_default(monkeypatch): + monkeypatch.delenv("PYICLOUD_CK_EXTRA", raising=False) + session = MagicMock() + session.post.return_value = MagicMock( + status_code=200, + json=lambda: {"records": [], "unexpectedTopLevel": {"present": True}}, + ) + client = CloudKitRemindersClient("https://example.com", session, {}) + + response = client.lookup(["Reminder/1"], CKZoneIDReq(zoneName="Reminders")) + + assert isinstance(response, CKLookupResponse) + assert response.model_extra["unexpectedTopLevel"] == {"present": True} + + +def test_reminders_client_strict_mode_wraps_validation_error(): + session = MagicMock() + payload = {"records": [], "unexpectedTopLevel": {"present": True}} + session.post.return_value = MagicMock(status_code=200, json=lambda: payload) + client = CloudKitRemindersClient( + "https://example.com", + session, + {}, + validation_extra="forbid", + ) + + with pytest.raises( + RemindersApiError, match="Lookup response validation failed" + ) as excinfo: + client.lookup(["Reminder/1"], CKZoneIDReq(zoneName="Reminders")) + + assert excinfo.value.payload == payload + assert isinstance(excinfo.value.__cause__, ValidationError) + + +def test_reminders_service_passes_through_validation_override(): + service = RemindersService( + "https://example.com", + MagicMock(), + {}, + cloudkit_validation_extra="ignore", + ) + + assert service._raw._validation_extra == "ignore" + + +# --------------------------------------------------------------------------- +# Tests: _decode_crdt_document +# --------------------------------------------------------------------------- + + +class TestDecodeCrdtDocument: + """Test the shared CRDT document decoder.""" + + def test_decode_message_benno(self, service): + result = service._decode_crdt_document(TITLE_DOC_SAMPLES["Message Benno"]) + assert result == "Message Benno" + + def test_decode_prise_en_charge(self, service): + result = service._decode_crdt_document(TITLE_DOC_SAMPLES["PRISE EN 
CHARGE"]) + assert result == "PRISE EN CHARGE" + + def test_decode_cancel_hoess(self, service): + result = service._decode_crdt_document(TITLE_DOC_SAMPLES["Cancel Hoess"]) + assert result == "Cancel Hoess" + + def test_decode_empty_raises(self, service): + with pytest.raises(CRDTDecodeError, match="Unable to decode CRDT document"): + service._decode_crdt_document("") + + def test_decode_malformed_base64_raises(self, service): + with pytest.raises( + CRDTDecodeError, match="Invalid base64-encoded CRDT document" + ): + service._decode_crdt_document("!!!not-base64!!!") + + def test_decode_bytes_input(self, service): + """Accept raw bytes as well as base64 string.""" + import base64 + + raw = base64.b64decode(TITLE_DOC_SAMPLES["Message Benno"]) + result = service._decode_crdt_document(raw) + assert result == "Message Benno" + + def test_decode_uncompressed_version_bytes(self, service): + raw = _make_crdt_version_bytes("Buy groceries") + + result = service._decode_crdt_document(raw) + + assert result == "Buy groceries" + + +# --------------------------------------------------------------------------- +# Tests: _record_to_reminder +# --------------------------------------------------------------------------- + + +class TestRecordToReminder: + """Test parsing a Reminder CKRecord.""" + + def test_basic_reminder(self, service): + rec = _ck_record( + "Reminder", + "REM-001", + { + "TitleDocument": { + "type": "ENCRYPTED_BYTES", + "value": TITLE_DOC_SAMPLES["Message Benno"], + }, + "List": { + "type": "REFERENCE", + "value": {"recordName": "LIST-001", "action": "VALIDATE"}, + }, + "Priority": {"type": "INT64", "value": 0}, + "Completed": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + ) + r = service._record_to_reminder(rec) + + assert isinstance(r, Reminder) + assert r.id == "REM-001" + assert r.title == "Message Benno" + assert r.list_id == "LIST-001" + 
assert r.priority == 0 + assert r.completed is False + assert r.flagged is False + assert r.all_day is False + assert r.deleted is False + assert r.alarm_ids == [] + assert r.hashtag_ids == [] + + def test_reminder_with_all_fields(self, service): + completion_date = datetime(2024, 12, 29, tzinfo=timezone.utc) + created = datetime(2024, 12, 28, tzinfo=timezone.utc) + modified = datetime(2024, 12, 30, tzinfo=timezone.utc) + rec = _ck_record( + "Reminder", + "REM-002", + { + "TitleDocument": { + "type": "ENCRYPTED_BYTES", + "value": TITLE_DOC_SAMPLES["PRISE EN CHARGE"], + }, + "NotesDocument": { + "type": "ENCRYPTED_BYTES", + "value": TITLE_DOC_SAMPLES["Cancel Hoess"], + }, + "List": { + "type": "REFERENCE", + "value": {"recordName": "LIST-002", "action": "VALIDATE"}, + }, + "Priority": {"type": "INT64", "value": 1}, + "Completed": {"type": "INT64", "value": 1}, + "Flagged": {"type": "INT64", "value": 1}, + "AllDay": {"type": "INT64", "value": 1}, + "Deleted": {"type": "INT64", "value": 0}, + "TimeZone": {"type": "STRING", "value": "Europe/Paris"}, + "DueDate": {"type": "TIMESTAMP", "value": 1735488000000}, + "StartDate": {"type": "TIMESTAMP", "value": 1735488000000}, + "CompletionDate": { + "type": "TIMESTAMP", + "value": int(completion_date.timestamp() * 1000), + }, + "CreationDate": { + "type": "TIMESTAMP", + "value": int(created.timestamp() * 1000), + }, + "LastModifiedDate": { + "type": "TIMESTAMP", + "value": int(modified.timestamp() * 1000), + }, + "AlarmIDs": {"type": "STRING_LIST", "value": ["alarm-1", "alarm-2"]}, + "HashtagIDs": {"type": "STRING_LIST", "value": ["hashtag-1"]}, + "AttachmentIDs": {"type": "STRING_LIST", "value": ["attach-1"]}, + }, + ) + r = service._record_to_reminder(rec) + + assert r.title == "PRISE EN CHARGE" + assert r.desc == "Cancel Hoess" + assert r.priority == 1 + assert r.completed is True + assert r.flagged is True + assert r.all_day is True + assert r.time_zone == "Europe/Paris" + assert r.due_date is not None + assert 
r.completed_date == completion_date + assert r.created == created + assert r.modified == modified + assert r.alarm_ids == ["alarm-1", "alarm-2"] + assert r.hashtag_ids == ["hashtag-1"] + assert r.attachment_ids == ["attach-1"] + + def test_reminder_malformed_title_document_uses_placeholder(self, service): + rec = _ck_record( + "Reminder", + "REM-BAD-TITLE", + { + "TitleDocument": { + "type": "ENCRYPTED_BYTES", + "value": base64.b64encode(b"not-a-crdt").decode("ascii"), + }, + "List": { + "type": "REFERENCE", + "value": {"recordName": "LIST-001", "action": "VALIDATE"}, + }, + }, + ) + + reminder = service._record_to_reminder(rec) + + assert reminder.title == "Error Decoding Title" + + def test_reminder_falls_back_to_record_audit_timestamps(self, service): + created = datetime(2024, 12, 28, tzinfo=timezone.utc) + modified = datetime(2024, 12, 30, tzinfo=timezone.utc) + rec = _ck_record( + "Reminder", + "REM-003", + { + "List": { + "type": "REFERENCE", + "value": {"recordName": "LIST-003", "action": "VALIDATE"}, + }, + "Completed": {"type": "INT64", "value": 0}, + "Priority": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + created={"timestamp": int(created.timestamp() * 1000)}, + modified={"timestamp": int(modified.timestamp() * 1000)}, + ) + + reminder = service._record_to_reminder(rec) + assert reminder.created == created + assert reminder.modified == modified + + def test_reminder_with_uncompressed_version_bytes_documents(self, service): + rec = _ck_record( + "Reminder", + "REM-003B", + { + "TitleDocument": { + "type": "ENCRYPTED_BYTES", + "value": base64.b64encode( + _make_crdt_version_bytes("Buy groceries") + ).decode("ascii"), + }, + "NotesDocument": { + "type": "ENCRYPTED_BYTES", + "value": base64.b64encode( + _make_crdt_version_bytes("Milk, Eggs") + ).decode("ascii"), + }, + "List": { + "type": "REFERENCE", + "value": {"recordName": 
"LIST-003B", "action": "VALIDATE"}, + }, + }, + ) + + reminder = service._record_to_reminder(rec) + + assert reminder.title == "Buy groceries" + assert reminder.desc == "Milk, Eggs" + + def test_subtask_reminder(self, service): + """Subtask reminders have a ParentReminder REFERENCE.""" + rec = _ck_record( + "Reminder", + "REM-SUBTASK", + { + "TitleDocument": { + "type": "ENCRYPTED_BYTES", + "value": TITLE_DOC_SAMPLES["Cancel Hoess"], + }, + "List": { + "type": "REFERENCE", + "value": {"recordName": "LIST-001"}, + }, + "ParentReminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-PARENT", "action": "VALIDATE"}, + }, + "Priority": {"type": "INT64", "value": 0}, + "Completed": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + ) + r = service._record_to_reminder(rec) + + assert r.id == "REM-SUBTASK" + assert r.parent_reminder_id == "REM-PARENT" + assert r.title == "Cancel Hoess" + + +# --------------------------------------------------------------------------- +# Tests: _record_to_list +# --------------------------------------------------------------------------- + + +class TestRecordToList: + """Test parsing a List CKRecord.""" + + def test_basic_list(self, service): + rec = _ck_record( + "List", + "LIST-001", + { + "Name": {"type": "STRING", "value": "pyicloud"}, + "Color": {"type": "STRING", "value": "#FF6600"}, + "Count": {"type": "INT64", "value": 16}, + "IsGroup": {"type": "INT64", "value": 0}, + }, + ) + lst = service._record_to_list(rec) + + assert isinstance(lst, RemindersList) + assert lst.id == "LIST-001" + assert lst.title == "pyicloud" + assert lst.color == "#FF6600" + assert lst.count == 16 + assert lst.is_group is False + + def test_list_untitled(self, service): + rec = _ck_record("List", "LIST-002", {}) + lst = service._record_to_list(rec) + assert lst.title == "Untitled" + assert lst.count == 0 + + def 
test_list_parses_inline_reminder_ids_json(self, service): + rec = _ck_record( + "List", + "LIST-003", + { + "ReminderIDs": { + "type": "STRING", + "value": '["REM-1","Reminder/REM-2"]', + } + }, + ) + + lst = service._record_to_list(rec) + assert lst.reminder_ids == ["REM-1", "REM-2"] + + def test_list_parses_asset_backed_reminder_ids_from_downloaded_data(self, service): + payload = base64.b64encode(b'["REM-1","Reminder/REM-2"]').decode("ascii") + rec = _ck_record( + "List", + "LIST-004", + { + "ReminderIDsAsset": { + "type": "ASSET", + "value": {"downloadedData": payload}, + } + }, + ) + + lst = service._record_to_list(rec) + assert lst.reminder_ids == ["REM-1", "REM-2"] + service._raw.download_asset_bytes.assert_not_called() + + def test_list_parses_asset_backed_reminder_ids_from_download_url(self, service): + service._raw.download_asset_bytes.return_value = b'["REM-3","Reminder/REM-4"]' + rec = _ck_record( + "List", + "LIST-005", + { + "ReminderIDsAsset": { + "type": "ASSET", + "value": {"downloadURL": "https://example.com/reminder-ids.json"}, + } + }, + ) + + lst = service._record_to_list(rec) + assert lst.reminder_ids == ["REM-3", "REM-4"] + service._raw.download_asset_bytes.assert_called_once_with( + "https://example.com/reminder-ids.json" + ) + + @pytest.mark.parametrize( + ("asset_value", "download_side_effect"), + [ + ( + {"downloadedData": base64.b64encode(b"{bad json").decode("ascii")}, + None, + ), + ( + {"downloadURL": "https://example.com/reminder-ids.json"}, + RemindersApiError("download failed"), + ), + ], + ) + def test_list_asset_failures_raise( + self, service, asset_value, download_side_effect + ): + if download_side_effect is not None: + service._raw.download_asset_bytes.side_effect = download_side_effect + + rec = _ck_record( + "List", + "LIST-006", + { + "ReminderIDsAsset": { + "type": "ASSET", + "value": asset_value, + } + }, + ) + + with pytest.raises(RemindersApiError): + service._record_to_list(rec) + + +# 
--------------------------------------------------------------------------- +# Tests: _record_to_alarm +# --------------------------------------------------------------------------- + + +class TestRecordToAlarm: + """Test parsing an Alarm CKRecord.""" + + def test_alarm(self, service): + rec = _ck_record( + "Alarm", + "Alarm/ALARM-001", + { + "AlarmUID": {"type": "STRING", "value": "ALARM-001"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-001", "action": "VALIDATE"}, + }, + "TriggerID": {"type": "STRING", "value": "TRIGGER-001"}, + "Deleted": {"type": "INT64", "value": 0}, + }, + ) + a = service._record_to_alarm(rec) + + assert isinstance(a, Alarm) + assert a.id == "Alarm/ALARM-001" + assert a.alarm_uid == "ALARM-001" + assert a.reminder_id == "REM-001" + assert a.trigger_id == "TRIGGER-001" + + +# --------------------------------------------------------------------------- +# Tests: _record_to_alarm_trigger +# --------------------------------------------------------------------------- + + +class TestRecordToAlarmTrigger: + """Test parsing supported AlarmTrigger CKRecords.""" + + def test_location_trigger(self, service): + rec = _ck_record( + "AlarmTrigger", + "AlarmTrigger/TRIG-001", + { + "Type": {"type": "STRING", "value": "Location"}, + "Title": {"type": "STRING", "value": "Paris"}, + "Address": {"type": "STRING", "value": "Paris, France"}, + "Latitude": {"type": "DOUBLE", "value": 48.8567879}, + "Longitude": {"type": "DOUBLE", "value": 2.3510768}, + "Radius": {"type": "DOUBLE", "value": 8972.70}, + "Proximity": {"type": "INT64", "value": 1}, + "LocationUID": {"type": "STRING", "value": "LOC-UUID-001"}, + "Alarm": { + "type": "REFERENCE", + "value": {"recordName": "Alarm/ALARM-001", "action": "VALIDATE"}, + }, + }, + ) + t = service._record_to_alarm_trigger(rec) + + assert isinstance(t, LocationTrigger) + assert t.title == "Paris" + assert t.address == "Paris, France" + assert abs(t.latitude - 48.8567879) < 0.0001 + assert 
abs(t.longitude - 2.3510768) < 0.0001 + assert abs(t.radius - 8972.70) < 0.1 + assert t.proximity == Proximity.ARRIVING + assert t.alarm_id == "Alarm/ALARM-001" + + def test_location_trigger_leaving(self, service): + rec = _ck_record( + "AlarmTrigger", + "AlarmTrigger/TRIG-002", + { + "Type": {"type": "STRING", "value": "Location"}, + "Title": {"type": "STRING", "value": "Home"}, + "Proximity": {"type": "INT64", "value": 2}, + "Alarm": { + "type": "REFERENCE", + "value": {"recordName": "Alarm/ALARM-002"}, + }, + }, + ) + t = service._record_to_alarm_trigger(rec) + + assert isinstance(t, LocationTrigger) + assert t.proximity == Proximity.LEAVING + + def test_vehicle_trigger_is_ignored(self, service): + rec = _ck_record( + "AlarmTrigger", + "AlarmTrigger/TRIG-003", + { + "Type": {"type": "STRING", "value": "Vehicle"}, + "Event": {"type": "INT64", "value": 1}, + "Alarm": { + "type": "REFERENCE", + "value": {"recordName": "Alarm/ALARM-003"}, + }, + }, + ) + assert service._record_to_alarm_trigger(rec) is None + + def test_unknown_type_returns_none(self, service): + rec = _ck_record( + "AlarmTrigger", + "AlarmTrigger/TRIG-005", + { + "Type": {"type": "STRING", "value": "FutureTriggerType"}, + "Alarm": { + "type": "REFERENCE", + "value": {"recordName": "Alarm/ALARM-005"}, + }, + }, + ) + assert service._record_to_alarm_trigger(rec) is None + + +# --------------------------------------------------------------------------- +# Tests: _record_to_attachment +# --------------------------------------------------------------------------- + + +class TestRecordToAttachment: + """Test parsing Attachment CKRecords (URL and Image).""" + + def test_url_attachment(self, service): + rec = _ck_record( + "Attachment", + "Attachment/ATT-001", + { + "Type": {"type": "STRING", "value": "URL"}, + "URL": {"type": "STRING", "value": "https://discord.gg/CAGYSbyqYk"}, + "UTI": {"type": "STRING", "value": "public.url"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-URL", 
"action": "VALIDATE"}, + }, + }, + ) + att = service._record_to_attachment(rec) + + assert isinstance(att, URLAttachment) + assert att.url == "https://discord.gg/CAGYSbyqYk" + assert att.uti == "public.url" + assert att.reminder_id == "REM-URL" + + def test_url_attachment_decodes_base64_payload(self, service): + encoded_url = base64.b64encode(b"https://discord.gg/CAGYSbyqYk").decode("ascii") + rec = _ck_record( + "Attachment", + "Attachment/ATT-001B", + { + "Type": {"type": "STRING", "value": "URL"}, + "URL": {"type": "STRING", "value": encoded_url}, + "UTI": {"type": "STRING", "value": "public.url"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-URL", "action": "VALIDATE"}, + }, + }, + ) + + att = service._record_to_attachment(rec) + + assert isinstance(att, URLAttachment) + assert att.url == "https://discord.gg/CAGYSbyqYk" + + def test_url_attachment_falls_back_to_raw_invalid_payload(self, service): + rec = _ck_record( + "Attachment", + "Attachment/ATT-001C", + { + "Type": {"type": "STRING", "value": "URL"}, + "URL": {"type": "STRING", "value": "not-base64-at-all"}, + "UTI": {"type": "STRING", "value": "public.url"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-URL", "action": "VALIDATE"}, + }, + }, + ) + + att = service._record_to_attachment(rec) + + assert isinstance(att, URLAttachment) + assert att.url == "not-base64-at-all" + + def test_image_attachment(self, service): + rec = _ck_record( + "Attachment", + "Attachment/ATT-002", + { + "Type": {"type": "STRING", "value": "Image"}, + "FileAsset": { + "type": "ASSETID", + "value": { + "fileChecksum": "abc123", + "downloadURL": "https://cvws.icloud-content.com/photo.jpeg", + "size": 116261, + }, + }, + "FileName": {"type": "STRING", "value": "IMG_1234.jpeg"}, + "FileSize": {"type": "INT64", "value": 116261}, + "Width": {"type": "INT64", "value": 1164}, + "Height": {"type": "INT64", "value": 1248}, + "UTI": {"type": "STRING", "value": "public.jpeg"}, + "Reminder": { 
+ "type": "REFERENCE", + "value": {"recordName": "REM-IMG", "action": "VALIDATE"}, + }, + }, + ) + att = service._record_to_attachment(rec) + + assert isinstance(att, ImageAttachment) + assert att.filename == "IMG_1234.jpeg" + assert att.file_size == 116261 + assert att.width == 1164 + assert att.height == 1248 + assert att.uti == "public.jpeg" + assert att.reminder_id == "REM-IMG" + assert "photo.jpeg" in att.file_asset_url + + def test_unknown_type_returns_none(self, service): + rec = _ck_record( + "Attachment", + "Attachment/ATT-003", + { + "Type": {"type": "STRING", "value": "UnknownType"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-X"}, + }, + }, + ) + assert service._record_to_attachment(rec) is None + + +# --------------------------------------------------------------------------- +# Tests: _record_to_hashtag +# --------------------------------------------------------------------------- + + +class TestRecordToHashtag: + """Test parsing Hashtag CKRecords.""" + + def test_hashtag(self, service): + rec = _ck_record( + "Hashtag", + "Hashtag/HASH-001", + { + "Name": {"type": "STRING", "value": "mytag1"}, + "Type": {"type": "INT64", "value": 0}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-TAG", "action": "VALIDATE"}, + }, + "CreationDate": {"type": "TIMESTAMP", "value": 1735488000000}, + "Deleted": {"type": "INT64", "value": 0}, + }, + ) + h = service._record_to_hashtag(rec) + + assert isinstance(h, Hashtag) + assert h.name == "mytag1" + assert h.reminder_id == "REM-TAG" + assert h.created is not None + + def test_hashtag_name_from_encrypted_bytes(self, service): + rec = _ck_record( + "Hashtag", + "Hashtag/HASH-002", + { + "Name": { + "type": "ENCRYPTED_BYTES", + "value": base64.b64encode(b"personal").decode("ascii"), + }, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-TAG", "action": "VALIDATE"}, + }, + }, + ) + + h = service._record_to_hashtag(rec) + + assert h.name == "personal" + + def 
test_hashtag_name_with_undecodable_bytes_does_not_crash(self, service): + rec = _ck_record( + "Hashtag", + "Hashtag/HASH-003", + { + "Name": { + "type": "ENCRYPTED_BYTES", + "value": base64.b64encode(b"\xa7(\x9c\x96\x8b\x9d\xfa\xea").decode( + "ascii" + ), + }, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-TAG", "action": "VALIDATE"}, + }, + }, + ) + + h = service._record_to_hashtag(rec) + + assert isinstance(h.name, str) + assert h.name + + +# --------------------------------------------------------------------------- +# Tests: STRING_LIST field type handling +# --------------------------------------------------------------------------- + + +class TestStringListField: + """Verify STRING_LIST fields are properly parsed by CKRecord.""" + + def test_string_list_parsed(self): + """STRING_LIST should be parsed as CKStringListField, not CKPassthroughField.""" + from pyicloud.common.cloudkit.models import CKStringListField + + rec = _ck_record( + "Reminder", + "REM-SL", + { + "AlarmIDs": {"type": "STRING_LIST", "value": ["id-1", "id-2", "id-3"]}, + }, + ) + field = rec.fields.get_field("AlarmIDs") + assert isinstance(field, CKStringListField) + assert field.value == ["id-1", "id-2", "id-3"] + + def test_empty_string_list(self): + rec = _ck_record( + "Reminder", + "REM-SL2", + { + "HashtagIDs": {"type": "STRING_LIST", "value": []}, + }, + ) + field = rec.fields.get_field("HashtagIDs") + assert field.value == [] + + +# --------------------------------------------------------------------------- +# Tests: _record_to_recurrence_rule +# --------------------------------------------------------------------------- + + +class TestRecordToRecurrenceRule: + """Test parsing RecurrenceRule CKRecords.""" + + def test_monthly_recurrence(self, service): + rec = _ck_record( + "RecurrenceRule", + "RecurrenceRule/RR-001", + { + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-001", "action": "VALIDATE"}, + }, + "Frequency": {"type": "INT64", 
"value": 3}, + "Interval": {"type": "INT64", "value": 1}, + "OccurrenceCount": {"type": "INT64", "value": 0}, + "FirstDayOfTheWeek": {"type": "INT64", "value": 0}, + }, + ) + rr = service._record_to_recurrence_rule(rec) + + assert isinstance(rr, RecurrenceRule) + assert rr.id == "RecurrenceRule/RR-001" + assert rr.reminder_id == "REM-001" + assert rr.frequency == RecurrenceFrequency.MONTHLY + assert rr.interval == 1 + assert rr.occurrence_count == 0 + assert rr.first_day_of_week == 0 + + def test_weekly_with_occurrence_limit(self, service): + rec = _ck_record( + "RecurrenceRule", + "RecurrenceRule/RR-002", + { + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-002"}, + }, + "Frequency": {"type": "INT64", "value": 2}, + "Interval": {"type": "INT64", "value": 2}, + "OccurrenceCount": {"type": "INT64", "value": 10}, + "FirstDayOfTheWeek": {"type": "INT64", "value": 2}, + }, + ) + rr = service._record_to_recurrence_rule(rec) + + assert rr.frequency == RecurrenceFrequency.WEEKLY + assert rr.interval == 2 + assert rr.occurrence_count == 10 + assert rr.first_day_of_week == 2 + + def test_unknown_frequency_defaults_to_daily(self, service): + rec = _ck_record( + "RecurrenceRule", + "RecurrenceRule/RR-003", + { + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": "REM-003"}, + }, + "Frequency": {"type": "INT64", "value": 99}, + }, + ) + rr = service._record_to_recurrence_rule(rec) + + assert rr.frequency == RecurrenceFrequency.DAILY + + +# --------------------------------------------------------------------------- +# Tests: modify serialization + mutation failure handling +# --------------------------------------------------------------------------- + + +class TestModifySerialization: + """Ensure request models preserve CloudKit wire shape.""" + + def test_double_field_keeps_is_encrypted_in_modify_payload(self): + trigger_record = CKWriteRecord.model_validate( + { + "recordName": "AlarmTrigger/TRIG-DOUBLE", + "recordType": "AlarmTrigger", + 
"fields": { + "Latitude": { + "type": "DOUBLE", + "value": 48.8584, + "isEncrypted": True, + }, + "Longitude": { + "type": "DOUBLE", + "value": 2.2945, + "isEncrypted": True, + }, + "Type": {"type": "STRING", "value": "Location"}, + }, + } + ) + op = CKModifyOperation(operationType="create", record=trigger_record) + payload = CKModifyRequest( + operations=[op], + zoneID=CKZoneIDReq(zoneName="Reminders", zoneType="REGULAR_CUSTOM_ZONE"), + ).model_dump(mode="json", exclude_none=True) + + fields = payload["operations"][0]["record"]["fields"] + assert fields["Latitude"]["isEncrypted"] is True + assert fields["Longitude"]["isEncrypted"] is True + + def test_lookup_request_serializes_desired_keys(self): + payload = CKLookupRequest( + records=[], + zoneID=CKZoneIDReq(zoneName="Reminders", zoneType="REGULAR_CUSTOM_ZONE"), + desiredKeys=["TitleDocument", "NotesDocument"], + ).model_dump(mode="json", exclude_none=True) + + assert payload["desiredKeys"] == ["TitleDocument", "NotesDocument"] + + def test_zone_changes_request_serializes_results_limit(self): + payload = CKZoneChangesRequest( + zones=[ + { + "zoneID": { + "zoneName": "Reminders", + "zoneType": "REGULAR_CUSTOM_ZONE", + } + } + ], + resultsLimit=50, + ).model_dump(mode="json", exclude_none=True) + + assert payload["resultsLimit"] == 50 + + +class TestMutationErrorHandling: + """Mutation methods should raise on per-record CloudKit failures.""" + + def test_add_location_trigger_raises_on_partial_modify_failure(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse( + records=[ + CKErrorItem( + serverErrorCode="BAD_REQUEST", + reason="Invalid value, expected type ENCRYPTED_BYTES.", + recordName="AlarmTrigger/TRIG-FAIL", + ) + ], + syncToken="mock-sync-token", + ) + + reminder = Reminder( + id="Reminder/REM-001", + list_id="List/LIST-001", + title="Pick up coffee near Eiffel Tower", + 
record_change_tag="mock-change-tag", + alarm_ids=[], + ) + + with pytest.raises(RemindersApiError, match=r"AlarmTrigger/TRIG-FAIL"): + svc.add_location_trigger( + reminder=reminder, + title="Eiffel Tower", + address="Paris", + latitude=48.8584, + longitude=2.2945, + radius=150.0, + proximity=Proximity.ARRIVING, + ) + + assert reminder.alarm_ids == [] + + def test_add_location_trigger_validates_radius_before_modify(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + reminder = Reminder( + id="Reminder/REM-001", + list_id="List/LIST-001", + title="Pick up coffee near Eiffel Tower", + record_change_tag="mock-change-tag", + alarm_ids=[], + ) + + with pytest.raises(ValidationError): + svc.add_location_trigger( + reminder=reminder, + title="Eiffel Tower", + address="Paris", + latitude=48.8584, + longitude=2.2945, + radius=-1.0, + proximity=Proximity.ARRIVING, + ) + + svc._raw.modify.assert_not_called() + + def test_add_location_trigger_normalizes_shorthand_reminder_ids(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + def _ack( + record_name: str, record_type: str, record_change_tag: str + ) -> CKRecord: + return CKRecord.model_validate( + { + "recordName": record_name, + "recordType": record_type, + "recordChangeTag": record_change_tag, + "fields": {}, + } + ) + + svc._raw.modify.return_value = CKModifyResponse( + records=[ + _ack("Reminder/REM-TRIG", "Reminder", "ctag-rem-new"), + _ack("Alarm/ALARM-1", "Alarm", "ctag-alarm-new"), + _ack("AlarmTrigger/TRIG-1", "AlarmTrigger", "ctag-trigger-new"), + ], + syncToken="mock-sync", + ) + + with patch( + "uuid.uuid4", + side_effect=["ALARM-1", "TRIG-1", "LOC-1", "TOKEN-1", "TOKEN-2"], + ): + alarm, trigger = svc.add_location_trigger( + reminder=Reminder( + id="REM-TRIG", + list_id="List/LIST-001", + title="Reminder", + record_change_tag="ctag-rem-old", + alarm_ids=[], + ), + title="Office", + address="1 
Infinite Loop", + latitude=37.3318, + longitude=-122.0312, + radius=150.0, + proximity=Proximity.ARRIVING, + ) + + operations = svc._raw.modify.call_args.kwargs["operations"] + assert operations[0].record.recordName == "Reminder/REM-TRIG" + assert ( + operations[1].record.fields["Reminder"].value.recordName + == "Reminder/REM-TRIG" + ) + assert operations[1].record.parent.recordName == "Reminder/REM-TRIG" + assert alarm.reminder_id == "Reminder/REM-TRIG" + assert trigger.id == "AlarmTrigger/TRIG-1" + + def test_create_child_reminder_sets_parent_reference(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse(records=[], syncToken="mock") + expected = Reminder( + id="Reminder/CHILD-001", + list_id="List/LIST-001", + title="Child reminder", + parent_reminder_id="Reminder/PARENT-001", + ) + svc._writes._lookup_created_reminder = MagicMock(return_value=expected) + + created = svc.create( + list_id="List/LIST-001", + title="Child reminder", + parent_reminder_id="Reminder/PARENT-001", + ) + + op = svc._raw.modify.call_args.kwargs["operations"][0] + parent_field = op.record.fields["ParentReminder"] + assert parent_field.type_tag == "REFERENCE" + assert parent_field.value.recordName == "Reminder/PARENT-001" + assert created.parent_reminder_id == "Reminder/PARENT-001" + + def test_create_completed_reminder_sets_completion_date(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse(records=[], syncToken="mock") + expected = Reminder( + id="Reminder/COMPLETE-001", + list_id="List/LIST-001", + title="Completed reminder", + completed=True, + ) + svc._writes._lookup_created_reminder = MagicMock(return_value=expected) + + svc.create( + list_id="List/LIST-001", + title="Completed reminder", + completed=True, + ) + + op = svc._raw.modify.call_args.kwargs["operations"][0] + 
completion_field = op.record.fields["CompletionDate"] + assert completion_field.type_tag == "TIMESTAMP" + assert completion_field.value is not None + + +class TestAdditionalWriteApis: + """Validate payload shape and local state updates for newly added write APIs.""" + + @staticmethod + def _ok_modify() -> CKModifyResponse: + return CKModifyResponse(records=[], syncToken="mock-sync") + + @staticmethod + def _ack(record_name: str, record_type: str, record_change_tag: str) -> CKRecord: + return CKRecord.model_validate( + { + "recordName": record_name, + "recordType": record_type, + "recordChangeTag": record_change_tag, + "fields": {}, + } + ) + + def test_create_and_delete_hashtag(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = self._ok_modify() + + reminder = Reminder( + id="Reminder/REM-TAG", + list_id="List/LIST-001", + title="Hashtag reminder", + record_change_tag="ctag-rem", + hashtag_ids=[], + ) + + hashtag = svc.create_hashtag(reminder, "travel") + assert hashtag.id.startswith("Hashtag/") + assert hashtag.name == "travel" + assert len(reminder.hashtag_ids) == 1 + + create_ops = svc._raw.modify.call_args.kwargs["operations"] + assert len(create_ops) == 2 + assert create_ops[1].record.recordType == "Hashtag" + assert create_ops[1].record.fields["Name"].value == "travel" + assert svc._raw.modify.call_args.kwargs["atomic"] is True + + svc._raw.modify.reset_mock() + svc._raw.modify.return_value = self._ok_modify() + svc.delete_hashtag(reminder, hashtag) + + assert reminder.hashtag_ids == [] + delete_ops = svc._raw.modify.call_args.kwargs["operations"] + assert len(delete_ops) == 2 + assert delete_ops[1].record.fields["Deleted"].value == 1 + assert svc._raw.modify.call_args.kwargs["atomic"] is True + + def test_delete_hashtag_rejects_mismatched_parent(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + reminder = 
Reminder( + id="Reminder/REM-TAG", + list_id="List/LIST-001", + title="Hashtag reminder", + record_change_tag="ctag-rem", + hashtag_ids=["TAG-1"], + ) + hashtag = Hashtag( + id="Hashtag/TAG-1", + name="travel", + reminder_id="Reminder/OTHER-REMINDER", + record_change_tag="ctag-tag", + ) + + with pytest.raises(ValueError, match="Hashtag child"): + svc.delete_hashtag(reminder, hashtag) + + svc._raw.modify.assert_not_called() + assert reminder.hashtag_ids == ["TAG-1"] + + def test_create_update_delete_url_attachment(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = self._ok_modify() + + reminder = Reminder( + id="Reminder/REM-ATT", + list_id="List/LIST-001", + title="Attachment reminder", + record_change_tag="ctag-rem", + attachment_ids=[], + ) + + attachment = svc.create_url_attachment( + reminder=reminder, + url="https://example.com", + uti="public.url", + ) + assert attachment.id.startswith("Attachment/") + assert len(reminder.attachment_ids) == 1 + create_ops = svc._raw.modify.call_args.kwargs["operations"] + assert create_ops[1].record.recordType == "Attachment" + assert create_ops[1].record.fields["Type"].value == "URL" + assert create_ops[1].record.fields["URL"].value == "https://example.com" + assert create_ops[1].record.fields["URL"].unwrap().isEncrypted is True + + svc._raw.modify.reset_mock() + svc._raw.modify.return_value = self._ok_modify() + svc.update_attachment(attachment, url="https://example.org") + assert attachment.url == "https://example.org" + update_ops = svc._raw.modify.call_args.kwargs["operations"] + assert len(update_ops) == 1 + assert update_ops[0].record.fields["URL"].value == "https://example.org" + assert update_ops[0].record.fields["URL"].unwrap().isEncrypted is True + + svc._raw.modify.reset_mock() + svc._raw.modify.return_value = self._ok_modify() + svc.delete_attachment(reminder, attachment) + assert reminder.attachment_ids == [] + delete_ops 
= svc._raw.modify.call_args.kwargs["operations"] + assert len(delete_ops) == 2 + assert delete_ops[1].record.fields["Deleted"].value == 1 + + def test_delete_attachment_rejects_mismatched_parent(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + reminder = Reminder( + id="Reminder/REM-ATT", + list_id="List/LIST-001", + title="Attachment reminder", + record_change_tag="ctag-rem", + attachment_ids=["ATT-1"], + ) + attachment = URLAttachment( + id="Attachment/ATT-1", + reminder_id="Reminder/OTHER-REMINDER", + url="https://example.com", + record_change_tag="ctag-att", + ) + + with pytest.raises(ValueError, match="Attachment child"): + svc.delete_attachment(reminder, attachment) + + svc._raw.modify.assert_not_called() + assert reminder.attachment_ids == ["ATT-1"] + + def test_create_url_attachment_normalizes_shorthand_reminder_ids(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse( + records=[ + self._ack("Reminder/REM-ATT", "Reminder", "ctag-rem-new"), + self._ack("Attachment/ATT-NEW", "Attachment", "ctag-att-new"), + ], + syncToken="mock-sync", + ) + + with patch("uuid.uuid4", return_value="ATT-NEW"): + attachment = svc.create_url_attachment( + reminder=Reminder( + id="REM-ATT", + list_id="List/LIST-001", + title="Attachment reminder", + record_change_tag="ctag-rem-old", + attachment_ids=[], + ), + url="https://example.com", + ) + + operations = svc._raw.modify.call_args.kwargs["operations"] + assert operations[0].record.recordName == "Reminder/REM-ATT" + assert ( + operations[1].record.fields["Reminder"].value.recordName + == "Reminder/REM-ATT" + ) + assert operations[1].record.parent.recordName == "Reminder/REM-ATT" + assert attachment.reminder_id == "Reminder/REM-ATT" + + def test_update_attachment_rejects_noop(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + 
svc._raw = MagicMock() + + attachment = URLAttachment( + id="Attachment/A-NOOP", + reminder_id="Reminder/REM-ATT", + url="https://example.com", + record_change_tag="ctag-att", + ) + + with pytest.raises(ValueError, match="No attachment fields"): + svc.update_attachment(attachment) + + svc._raw.modify.assert_not_called() + + def test_update_image_attachment_validates_before_modify(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + attachment = ImageAttachment( + id="Attachment/A-IMG", + reminder_id="Reminder/REM-ATT", + file_asset_url="https://example.com/file.jpg", + filename="file.jpg", + file_size=10, + width=100, + height=50, + record_change_tag="ctag-att", + ) + + with pytest.raises(ValidationError): + svc.update_attachment(attachment, width=-1) + + svc._raw.modify.assert_not_called() + + def test_create_update_delete_recurrence_rule(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = self._ok_modify() + + reminder = Reminder( + id="Reminder/REM-RR", + list_id="List/LIST-001", + title="Recurring reminder", + record_change_tag="ctag-rem", + recurrence_rule_ids=[], + ) + + rr = svc.create_recurrence_rule( + reminder=reminder, + frequency=RecurrenceFrequency.WEEKLY, + interval=2, + occurrence_count=0, + first_day_of_week=1, + ) + assert rr.id.startswith("RecurrenceRule/") + assert rr.frequency == RecurrenceFrequency.WEEKLY + assert len(reminder.recurrence_rule_ids) == 1 + create_ops = svc._raw.modify.call_args.kwargs["operations"] + assert create_ops[1].record.recordType == "RecurrenceRule" + assert create_ops[1].record.fields["Frequency"].value == int( + RecurrenceFrequency.WEEKLY + ) + + svc._raw.modify.reset_mock() + svc._raw.modify.return_value = self._ok_modify() + svc.update_recurrence_rule(rr, interval=3, occurrence_count=5) + assert rr.interval == 3 + assert rr.occurrence_count == 5 + update_ops = 
    def test_delete_recurrence_rule_rejects_mismatched_parent(self):
        """delete_recurrence_rule() must refuse a rule parented to another reminder."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()

        reminder = Reminder(
            id="Reminder/REM-RR",
            list_id="List/LIST-001",
            title="Recurring reminder",
            record_change_tag="ctag-rem",
            recurrence_rule_ids=["RR-1"],
        )
        # The rule references a *different* reminder than the one passed in.
        recurrence_rule = RecurrenceRule(
            id="RecurrenceRule/RR-1",
            reminder_id="Reminder/OTHER-REMINDER",
            record_change_tag="ctag-rr",
        )

        with pytest.raises(ValueError, match="RecurrenceRule child"):
            svc.delete_recurrence_rule(reminder, recurrence_rule)

        # Rejection must leave both the backend and the local model untouched.
        svc._raw.modify.assert_not_called()
        assert reminder.recurrence_rule_ids == ["RR-1"]

    def test_create_recurrence_rule_validates_before_modify(self):
        """create_recurrence_rule() validates inputs locally before any CloudKit call."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()

        reminder = Reminder(
            id="Reminder/REM-RR",
            list_id="List/LIST-001",
            title="Recurring reminder",
            record_change_tag="ctag-rem",
            recurrence_rule_ids=[],
        )

        # interval=0 is invalid and must fail before the backend is contacted.
        with pytest.raises(ValidationError):
            svc.create_recurrence_rule(
                reminder=reminder,
                frequency=RecurrenceFrequency.WEEKLY,
                interval=0,
            )

        svc._raw.modify.assert_not_called()
    def test_delete_marks_reminder_deleted_and_modified(self):
        """delete() soft-deletes: flips the flag and stamps a modification date."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.modify.return_value = self._ok_modify()

        reminder = Reminder(
            id="Reminder/REM-DEL",
            list_id="List/LIST-001",
            title="Delete me",
            record_change_tag="ctag-rem",
            deleted=False,
        )

        svc.delete(reminder)

        assert reminder.deleted is True
        assert reminder.modified is not None
        # The local 'modified' value must mirror the LastModifiedDate sent to CloudKit.
        delete_ops = svc._raw.modify.call_args.kwargs["operations"]
        expected_modified = delete_ops[0].record.fields["LastModifiedDate"].value
        assert reminder.modified == expected_modified
fields["AllDay"].value == 1 + assert fields["DueDate"].value == due_date + assert fields["TimeZone"].value == "Europe/Paris" + assert fields["ParentReminder"].value.recordName == "Reminder/PARENT-001" + assert reminder.record_change_tag == "new-reminder-tag" + assert reminder.modified == fields["LastModifiedDate"].value + + def test_update_normalizes_shorthand_reminder_ids(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse( + records=[self._ack("Reminder/REM-UPD-SHORT", "Reminder", "new-rem-tag")], + syncToken="mock-sync", + ) + + reminder = Reminder( + id="REM-UPD-SHORT", + list_id="List/LIST-001", + title="Reminder", + desc="Body", + parent_reminder_id="PARENT-001", + record_change_tag="old-rem-tag", + ) + + svc.update(reminder) + + update_op = svc._raw.modify.call_args.kwargs["operations"][0] + assert update_op.record.recordName == "Reminder/REM-UPD-SHORT" + assert ( + update_op.record.fields["ParentReminder"].value.recordName + == "Reminder/PARENT-001" + ) + + def test_update_can_clear_optional_reminder_fields(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.modify.return_value = CKModifyResponse( + records=[ + self._ack("Reminder/REM-UPD-CLEAR", "Reminder", "new-reminder-tag") + ], + syncToken="mock-sync", + ) + + reminder = Reminder( + id="Reminder/REM-UPD-CLEAR", + list_id="List/LIST-001", + title="Reminder", + desc="Body", + due_date=None, + time_zone=None, + parent_reminder_id=None, + record_change_tag="old-reminder-tag", + ) + + svc.update(reminder) + + update_op = svc._raw.modify.call_args.kwargs["operations"][0] + fields = update_op.record.fields + assert fields["DueDate"].value is None + assert fields["TimeZone"].value is None + assert fields["ParentReminder"].value is None + + token_map = json.loads(fields["ResolutionTokenMap"].value) + assert "dueDate" in token_map["map"] + 
    def test_update_sets_completion_date_when_marked_completed_without_one(self):
        """update() backfills CompletionDate when completed=True but no date is set."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.modify.return_value = CKModifyResponse(
            records=[
                self._ack("Reminder/REM-UPD-COMPLETE", "Reminder", "new-reminder-tag")
            ],
            syncToken="mock-sync",
        )

        reminder = Reminder(
            id="Reminder/REM-UPD-COMPLETE",
            list_id="List/LIST-001",
            title="Reminder",
            completed=True,
            completed_date=None,  # missing -> the service must fill it in
            record_change_tag="old-reminder-tag",
        )

        svc.update(reminder)

        update_op = svc._raw.modify.call_args.kwargs["operations"][0]
        completion_value = update_op.record.fields["CompletionDate"].value
        assert completion_value is not None
        # The local model is hydrated with the same value that was persisted.
        assert reminder.completed_date == completion_value

    def test_update_clears_completion_date_when_marked_incomplete(self):
        """update() clears CompletionDate when a reminder is un-completed."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.modify.return_value = CKModifyResponse(
            records=[
                self._ack(
                    "Reminder/REM-UPD-INCOMPLETE",
                    "Reminder",
                    "new-reminder-tag",
                )
            ],
            syncToken="mock-sync",
        )

        reminder = Reminder(
            id="Reminder/REM-UPD-INCOMPLETE",
            list_id="List/LIST-001",
            title="Reminder",
            completed=False,
            completed_date=datetime(2026, 3, 15, 9, 30, tzinfo=timezone.utc),
            record_change_tag="old-reminder-tag",
        )

        svc.update(reminder)

        # Both the persisted field and the local model must be cleared.
        update_op = svc._raw.modify.call_args.kwargs["operations"][0]
        assert update_op.record.fields["CompletionDate"].value is None
        assert reminder.completed_date is None
CKModifyResponse( + records=[ + self._ack(reminder_name, "Reminder", "ctag-rem-new"), + self._ack(hashtag_name, "Hashtag", "ctag-hash-new"), + ], + syncToken="mock-sync", + ) + + svc._raw.modify.side_effect = _side_effect + + reminder = Reminder( + id="Reminder/REM-TAG-CTAG", + list_id="List/LIST-001", + title="Hashtag reminder", + record_change_tag="ctag-rem-old", + hashtag_ids=[], + ) + hashtag = svc.create_hashtag(reminder, "travel") + + assert reminder.record_change_tag == "ctag-rem-new" + assert hashtag.record_change_tag == "ctag-hash-new" + + def test_create_url_attachment_hydrates_record_change_tags(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + def _side_effect(**kwargs): + reminder_name = kwargs["operations"][0].record.recordName + attachment_name = kwargs["operations"][1].record.recordName + return CKModifyResponse( + records=[ + self._ack(reminder_name, "Reminder", "ctag-rem-new"), + self._ack(attachment_name, "Attachment", "ctag-att-new"), + ], + syncToken="mock-sync", + ) + + svc._raw.modify.side_effect = _side_effect + + reminder = Reminder( + id="Reminder/REM-ATT-CTAG", + list_id="List/LIST-001", + title="Attachment reminder", + record_change_tag="ctag-rem-old", + attachment_ids=[], + ) + attachment = svc.create_url_attachment( + reminder=reminder, url="https://example.com" + ) + + assert reminder.record_change_tag == "ctag-rem-new" + assert attachment.record_change_tag == "ctag-att-new" + + def test_create_recurrence_rule_hydrates_record_change_tags(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + def _side_effect(**kwargs): + reminder_name = kwargs["operations"][0].record.recordName + recurrence_name = kwargs["operations"][1].record.recordName + return CKModifyResponse( + records=[ + self._ack(reminder_name, "Reminder", "ctag-rem-new"), + self._ack(recurrence_name, "RecurrenceRule", "ctag-rr-new"), + ], + 
syncToken="mock-sync", + ) + + svc._raw.modify.side_effect = _side_effect + + reminder = Reminder( + id="Reminder/REM-RR-CTAG", + list_id="List/LIST-001", + title="Recurring reminder", + record_change_tag="ctag-rem-old", + recurrence_rule_ids=[], + ) + rr = svc.create_recurrence_rule(reminder=reminder) + + assert reminder.record_change_tag == "ctag-rem-new" + assert rr.record_change_tag == "ctag-rr-new" + + def test_update_methods_refresh_record_change_tag(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + def _side_effect(**kwargs): + operation = kwargs["operations"][0] + record_name = operation.record.recordName + record_type = operation.record.recordType + return CKModifyResponse( + records=[ + self._ack( + record_name, record_type, f"new-{record_type.lower()}-tag" + ) + ], + syncToken="mock-sync", + ) + + svc._raw.modify.side_effect = _side_effect + + reminder = Reminder( + id="Reminder/REM-UPD", + list_id="List/LIST-001", + title="Reminder", + desc="Body", + record_change_tag="old-reminder-tag", + ) + svc.update(reminder) + assert reminder.record_change_tag == "new-reminder-tag" + + hashtag = Hashtag( + id="Hashtag/H-UPD", + name="old", + reminder_id=reminder.id, + record_change_tag="old-hashtag-tag", + ) + svc.update_hashtag(hashtag, "new") + assert hashtag.record_change_tag == "new-hashtag-tag" + + attachment = URLAttachment( + id="Attachment/A-UPD", + reminder_id=reminder.id, + url="https://example.com", + record_change_tag="old-attachment-tag", + ) + svc.update_attachment(attachment, url="https://example.org") + assert attachment.record_change_tag == "new-attachment-tag" + + recurrence_rule = RecurrenceRule( + id="RecurrenceRule/RR-UPD", + reminder_id=reminder.id, + record_change_tag="old-recurrencerule-tag", + ) + svc.update_recurrence_rule(recurrence_rule, interval=2) + assert recurrence_rule.record_change_tag == "new-recurrencerule-tag" + + +class TestReminderReadPaths: + """Validate 
reminders() and list_reminders() query behavior.""" + + LIST_A = "List/LIST-A" + LIST_B = "List/LIST-B" + + @staticmethod + def _reminder_record(reminder_id: str, list_id: str) -> CKRecord: + return _ck_record( + "Reminder", + reminder_id, + { + "List": { + "type": "REFERENCE", + "value": {"recordName": list_id, "action": "VALIDATE"}, + }, + "Completed": {"type": "INT64", "value": 0}, + "Priority": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + }, + ) + + @staticmethod + def _alarm_record( + alarm_id: str, + reminder_id: str, + trigger_id: str, + ) -> CKRecord: + return _ck_record( + "Alarm", + alarm_id, + { + "AlarmUID": {"type": "STRING", "value": alarm_id.split("/", 1)[1]}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": reminder_id, "action": "VALIDATE"}, + }, + "TriggerID": {"type": "STRING", "value": trigger_id.split("/", 1)[1]}, + }, + ) + + @staticmethod + def _trigger_record(trigger_id: str, alarm_id: str) -> CKRecord: + return _ck_record( + "AlarmTrigger", + trigger_id, + { + "Type": {"type": "STRING", "value": "Location"}, + "Alarm": { + "type": "REFERENCE", + "value": {"recordName": alarm_id, "action": "VALIDATE"}, + }, + "Title": {"type": "STRING", "value": "Test Trigger"}, + "Address": {"type": "STRING", "value": "Test Address"}, + "Latitude": {"type": "DOUBLE", "value": 48.0}, + "Longitude": {"type": "DOUBLE", "value": 2.0}, + "Radius": {"type": "DOUBLE", "value": 100.0}, + "Proximity": {"type": "INT64", "value": 1}, + "LocationUID": {"type": "STRING", "value": "LOC-1"}, + }, + ) + + @staticmethod + def _attachment_record(attachment_id: str, reminder_id: str) -> CKRecord: + return _ck_record( + "Attachment", + attachment_id, + { + "Type": {"type": "STRING", "value": "URL"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": reminder_id, "action": "VALIDATE"}, + }, + "URL": {"type": "STRING", "value": 
"https://example.com"}, + "UTI": {"type": "STRING", "value": "public.url"}, + }, + ) + + @staticmethod + def _hashtag_record(hashtag_id: str, reminder_id: str) -> CKRecord: + return _ck_record( + "Hashtag", + hashtag_id, + { + "Name": {"type": "STRING", "value": "tag"}, + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": reminder_id, "action": "VALIDATE"}, + }, + }, + ) + + @staticmethod + def _recurrence_rule_record(recurrence_id: str, reminder_id: str) -> CKRecord: + return _ck_record( + "RecurrenceRule", + recurrence_id, + { + "Reminder": { + "type": "REFERENCE", + "value": {"recordName": reminder_id, "action": "VALIDATE"}, + }, + "Frequency": {"type": "INT64", "value": 2}, + "Interval": {"type": "INT64", "value": 1}, + "OccurrenceCount": {"type": "INT64", "value": 0}, + "FirstDayOfTheWeek": {"type": "INT64", "value": 1}, + }, + ) + + @staticmethod + def _changes_response( + records: list[CKRecord | CKErrorItem | CKTombstoneRecord], + sync_token: str, + more_coming: bool, + ) -> CKZoneChangesResponse: + return CKZoneChangesResponse( + zones=[ + CKZoneChangesZone( + records=records, + moreComing=more_coming, + syncToken=sync_token, + zoneID=CKZoneID( + zoneName="Reminders", zoneType="REGULAR_CUSTOM_ZONE" + ), + ) + ] + ) + + def test_reminders_aggregates_from_list_reminders(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + rem_a = Reminder(id="Reminder/A", list_id=self.LIST_A, title="A") + rem_b = Reminder(id="Reminder/B", list_id=self.LIST_B, title="B") + + svc.lists = MagicMock( + return_value=[ + RemindersList(id=self.LIST_A, title="List A"), + RemindersList(id=self.LIST_B, title="List B"), + ] + ) + svc.list_reminders = MagicMock( + side_effect=[ + ListRemindersResult( + reminders=[rem_a], + alarms={}, + triggers={}, + attachments={}, + hashtags={}, + recurrence_rules={}, + ), + ListRemindersResult( + reminders=[rem_b, rem_a], + alarms={}, + triggers={}, + attachments={}, + 
    def test_lists_stops_when_changes_returns_no_zones(self):
        """lists() terminates after a single changes call when no zones come back."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.changes.return_value = CKZoneChangesResponse(zones=[])

        out = list(svc.lists())
        assert out == []
        assert svc._raw.changes.call_count == 1

    def test_lists_raises_on_error_item(self):
        """lists() surfaces a CloudKit error item as RemindersApiError."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.changes.return_value = self._changes_response(
            [
                CKErrorItem(
                    serverErrorCode="ACCESS_DENIED",
                    reason="Permission denied",
                    recordName="List/LIST-A",
                )
            ],
            sync_token="tok-1",
            more_coming=False,
        )

        # The failing record name is carried through into the raised error.
        with pytest.raises(RemindersApiError, match="List/LIST-A"):
            list(svc.lists())
svc.list_reminders.assert_called_once_with( + list_id=self.LIST_A, + include_completed=True, + results_limit=200, + ) + assert svc.lists.call_count == 0 + assert svc._raw.query.call_count == 0 + assert svc._raw.changes.call_count == 0 + + def test_list_reminders_enforces_list_scope_for_related_records(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + rem_a = self._reminder_record("Reminder/A", self.LIST_A) + rem_b = self._reminder_record("Reminder/B", self.LIST_B) + + alarm_a = self._alarm_record( + "Alarm/ALARM-A", + "Reminder/A", + "AlarmTrigger/TRIG-A", + ) + alarm_b = self._alarm_record( + "Alarm/ALARM-B", + "Reminder/B", + "AlarmTrigger/TRIG-B", + ) + + trig_a = self._trigger_record("AlarmTrigger/TRIG-A", "Alarm/ALARM-A") + trig_b = self._trigger_record("AlarmTrigger/TRIG-B", "Alarm/ALARM-B") + + att_a = self._attachment_record("Attachment/ATT-A", "Reminder/A") + att_b = self._attachment_record("Attachment/ATT-B", "Reminder/B") + + tag_a = self._hashtag_record("Hashtag/TAG-A", "Reminder/A") + tag_b = self._hashtag_record("Hashtag/TAG-B", "Reminder/B") + rr_a = self._recurrence_rule_record("RecurrenceRule/RR-A", "Reminder/A") + rr_b = self._recurrence_rule_record("RecurrenceRule/RR-B", "Reminder/B") + + svc._raw.query.return_value = CKQueryResponse( + records=[ + rem_a, + rem_b, + alarm_a, + alarm_b, + trig_a, + trig_b, + att_a, + att_b, + tag_a, + tag_b, + rr_a, + rr_b, + ], + continuationMarker=None, + ) + + result = svc.list_reminders(list_id=self.LIST_A, include_completed=True) + + assert isinstance(result, ListRemindersResult) + assert [r.id for r in result.reminders] == ["Reminder/A"] + assert set(result.alarms.keys()) == {"Alarm/ALARM-A"} + assert set(result.triggers.keys()) == {"AlarmTrigger/TRIG-A"} + assert set(result.attachments.keys()) == {"Attachment/ATT-A"} + assert set(result.hashtags.keys()) == {"Hashtag/TAG-A"} + assert set(result.recurrence_rules.keys()) == {"RecurrenceRule/RR-A"} 
    def test_list_reminders_paginates_query_results(self):
        """list_reminders() follows continuationMarker pages and merges results."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()

        rem_a = self._reminder_record("Reminder/A", self.LIST_A)
        rem_b = self._reminder_record("Reminder/B", self.LIST_A)
        svc._raw.query.side_effect = [
            CKQueryResponse(records=[rem_a], continuationMarker="page-2"),
            CKQueryResponse(records=[rem_b], continuationMarker=None),
        ]

        result = svc.list_reminders(
            list_id=self.LIST_A,
            include_completed=True,
            results_limit=1,
        )

        assert isinstance(result, ListRemindersResult)
        assert {r.id for r in result.reminders} == {"Reminder/A", "Reminder/B"}
        assert svc._raw.query.call_count == 2
        # First request starts fresh; the second resumes with the returned marker.
        first_call = svc._raw.query.call_args_list[0].kwargs
        second_call = svc._raw.query.call_args_list[1].kwargs
        assert first_call["continuation"] is None
        assert second_call["continuation"] == "page-2"

    def test_list_reminders_raises_on_error_item(self):
        """list_reminders() surfaces a CloudKit error item as RemindersApiError."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.query.return_value = CKQueryResponse(
            records=[
                CKErrorItem(
                    serverErrorCode="REQUEST_FAILED",
                    reason="Backend timeout",
                    recordName="Reminder/FAIL",
                )
            ],
            continuationMarker=None,
        )

        with pytest.raises(RemindersApiError, match="Reminder/FAIL"):
            svc.list_reminders(list_id=self.LIST_A, include_completed=True)

    def test_get_raises_lookup_error_when_missing(self):
        """get() raises LookupError when the lookup returns no records."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.lookup.return_value = MagicMock(records=[])

        with pytest.raises(LookupError, match="Reminder not found"):
            svc.get("Reminder/MISSING")
self.LIST_A)] + ) + + reminder = svc.get("NORMALIZED") + + assert reminder.id == "Reminder/NORMALIZED" + assert svc._raw.lookup.call_args.kwargs["record_names"] == [ + "Reminder/NORMALIZED" + ] + + def test_get_raises_on_error_item(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.return_value = MagicMock( + records=[ + CKErrorItem( + serverErrorCode="ACCESS_DENIED", + reason="Permission denied", + recordName="Reminder/FAIL", + ) + ] + ) + + with pytest.raises(RemindersApiError, match="Reminder/FAIL"): + svc.get("Reminder/FAIL") + + @pytest.mark.parametrize( + ( + "method_name", + "id_field", + "raw_id", + "record_name", + "record_factory_name", + "expected_attr", + "expected_value", + ), + [ + ( + "tags_for", + "hashtag_ids", + "TAG-1", + "Hashtag/TAG-1", + "_hashtag_record", + "name", + "tag", + ), + ( + "attachments_for", + "attachment_ids", + "ATT-1", + "Attachment/ATT-1", + "_attachment_record", + "url", + "https://example.com", + ), + ( + "recurrence_rules_for", + "recurrence_rule_ids", + "RR-1", + "RecurrenceRule/RR-1", + "_recurrence_rule_record", + "frequency", + RecurrenceFrequency.WEEKLY, + ), + ], + ) + def test_lookup_helpers_use_lookup_ids_and_map_records( + self, + method_name, + id_field, + raw_id, + record_name, + record_factory_name, + expected_attr, + expected_value, + ): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + **{id_field: [raw_id]}, + ) + record_factory = getattr(self, record_factory_name) + svc._raw.lookup.return_value = MagicMock( + records=[record_factory(record_name, "Reminder/A")] + ) + + out = getattr(svc, method_name)(reminder) + assert len(out) == 1 + assert out[0].id == record_name + assert out[0].reminder_id == "Reminder/A" + assert getattr(out[0], expected_attr) == expected_value + assert 
svc._raw.lookup.call_args.kwargs["record_names"] == [record_name] + + @pytest.mark.parametrize( + ("method_name", "id_field", "raw_id", "record_name"), + [ + ("tags_for", "hashtag_ids", "TAG-1", "Hashtag/TAG-1"), + ("attachments_for", "attachment_ids", "ATT-1", "Attachment/ATT-1"), + ( + "recurrence_rules_for", + "recurrence_rule_ids", + "RR-1", + "RecurrenceRule/RR-1", + ), + ], + ) + def test_lookup_helpers_raise_on_error_item( + self, method_name, id_field, raw_id, record_name + ): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.return_value = MagicMock( + records=[ + CKErrorItem( + serverErrorCode="REQUEST_FAILED", + reason="Backend timeout", + recordName=record_name, + ) + ] + ) + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + **{id_field: [raw_id]}, + ) + + method = getattr(svc, method_name) + with pytest.raises(RemindersApiError, match=record_name): + method(reminder) + + def test_alarms_for_returns_typed_rows(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.side_effect = [ + MagicMock( + records=[ + self._alarm_record( + "Alarm/AL-1", + "Reminder/A", + "AlarmTrigger/TRIG-1", + ) + ] + ), + MagicMock( + records=[ + self._trigger_record( + "AlarmTrigger/TRIG-1", + "Alarm/AL-1", + ) + ] + ), + ] + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + alarm_ids=["AL-1"], + ) + + out = svc.alarms_for(reminder) + + assert len(out) == 1 + assert isinstance(out[0], AlarmWithTrigger) + assert out[0].alarm.id == "Alarm/AL-1" + assert out[0].trigger is not None + assert out[0].alarm.id == "Alarm/AL-1" + assert out[0].trigger.id == "AlarmTrigger/TRIG-1" + + def test_alarms_for_normalizes_prefixed_trigger_ids(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.side_effect = [ + MagicMock( + 
records=[ + self._alarm_record( + "Alarm/AL-1", + "Reminder/A", + "AlarmTrigger/TRIG-1", + ) + ] + ), + MagicMock( + records=[ + self._trigger_record( + "AlarmTrigger/TRIG-1", + "Alarm/AL-1", + ) + ] + ), + ] + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + alarm_ids=["AL-1"], + ) + + out = svc.alarms_for(reminder) + + assert len(out) == 1 + assert out[0].trigger is not None + assert out[0].trigger.id == "AlarmTrigger/TRIG-1" + assert svc._raw.lookup.call_args_list[1].kwargs["record_names"] == [ + "AlarmTrigger/TRIG-1" + ] + + def test_alarms_for_raises_on_alarm_lookup_error_item(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.return_value = MagicMock( + records=[ + CKErrorItem( + serverErrorCode="REQUEST_FAILED", + reason="Backend timeout", + recordName="Alarm/AL-1", + ) + ] + ) + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + alarm_ids=["AL-1"], + ) + + with pytest.raises(RemindersApiError, match="Alarm/AL-1"): + svc.alarms_for(reminder) + + def test_alarms_for_raises_on_trigger_lookup_error_item(self): + svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {}) + svc._raw = MagicMock() + svc._raw.lookup.side_effect = [ + MagicMock( + records=[ + self._alarm_record( + "Alarm/AL-1", + "Reminder/A", + "AlarmTrigger/TRIG-1", + ) + ] + ), + MagicMock( + records=[ + CKErrorItem( + serverErrorCode="REQUEST_FAILED", + reason="Backend timeout", + recordName="AlarmTrigger/TRIG-1", + ) + ] + ), + ] + + reminder = Reminder( + id="Reminder/A", + list_id=self.LIST_A, + title="A", + alarm_ids=["AL-1"], + ) + + with pytest.raises(RemindersApiError, match="AlarmTrigger/TRIG-1"): + svc.alarms_for(reminder) + + +class TestReminderDeltaSync: + """Validate explicit reminder delta-sync APIs.""" + + LIST_A = "List/LIST-A" + + @staticmethod + def _reminder_record(reminder_id: str, *, deleted: bool = False) -> CKRecord: + 
    def test_sync_cursor_returns_final_paged_token(self):
        """sync_cursor() walks moreComing pages and returns the last sync token."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.changes.side_effect = [
            self._changes_response([], sync_token="tok-1", more_coming=True),
            self._changes_response([], sync_token="tok-2", more_coming=False),
        ]

        assert svc.sync_cursor() == "tok-2"
        assert svc._raw.changes.call_count == 2
        # First page starts without a token; the second resumes from page 1's token.
        first_zone_req = svc._raw.changes.call_args_list[0].kwargs["zone_req"]
        second_zone_req = svc._raw.changes.call_args_list[1].kwargs["zone_req"]
        assert first_zone_req.syncToken is None
        assert second_zone_req.syncToken == "tok-1"
        # Cursor discovery requests no record payloads at all.
        assert first_zone_req.desiredRecordTypes == []
        assert first_zone_req.desiredKeys == []
    def test_iter_changes_paginates(self):
        """iter_changes() follows moreComing pages, threading the sync token forward."""
        svc = RemindersService("https://ckdatabasews.icloud.com", MagicMock(), {})
        svc._raw = MagicMock()
        svc._raw.changes.side_effect = [
            self._changes_response(
                [self._reminder_record("Reminder/A")],
                sync_token="tok-1",
                more_coming=True,
            ),
            self._changes_response(
                [self._reminder_record("Reminder/B")],
                sync_token="tok-2",
                more_coming=False,
            ),
        ]

        out = list(svc.iter_changes(since="tok-0"))
        assert [event.reminder_id for event in out] == ["Reminder/A", "Reminder/B"]
        assert svc._raw.changes.call_count == 2
        # Page 1 uses the caller's token; page 2 uses the token page 1 returned.
        first_zone_req = svc._raw.changes.call_args_list[0].kwargs["zone_req"]
        second_zone_req = svc._raw.changes.call_args_list[1].kwargs["zone_req"]
        assert first_zone_req.syncToken == "tok-0"
        assert second_zone_req.syncToken == "tok-1"
recordName="Reminder/FAIL", + ) + ], + sync_token="tok-1", + more_coming=False, + ) + + with pytest.raises(RemindersApiError, match="Reminder/FAIL"): + list(svc.iter_changes(since="tok-0")) + + +class TestCloudKitQueryResponseRobustness: + """Validate query parsing against malformed field values seen in real data.""" + + def test_query_response_tolerates_out_of_range_due_date_timestamp(self): + # Captured variant: DueDate TIMESTAMP can be out-of-range (e.g. year 12177). + # Parsing should coerce that field to None, not fail the entire response page. + response = CKQueryResponse.model_validate( + { + "records": [ + { + "recordName": "Reminder/GOOD-1", + "recordType": "Reminder", + "fields": { + "List": { + "type": "REFERENCE", + "value": { + "recordName": "List/LIST-A", + "action": "VALIDATE", + }, + }, + "Completed": {"type": "INT64", "value": 0}, + "Priority": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + "DueDate": {"type": "TIMESTAMP", "value": 1735488000000}, + }, + }, + { + "recordName": "Reminder/BAD-DUE-DATE", + "recordType": "Reminder", + "fields": { + "List": { + "type": "REFERENCE", + "value": { + "recordName": "List/LIST-A", + "action": "VALIDATE", + }, + }, + "Completed": {"type": "INT64", "value": 0}, + "Priority": {"type": "INT64", "value": 0}, + "Flagged": {"type": "INT64", "value": 0}, + "AllDay": {"type": "INT64", "value": 0}, + "Deleted": {"type": "INT64", "value": 0}, + "DueDate": {"type": "TIMESTAMP", "value": 322123125600000}, + }, + }, + ], + } + ) + + assert len(response.records) == 2 + good = response.records[0] + bad = response.records[1] + assert isinstance(good, CKRecord) + assert isinstance(bad, CKRecord) + assert good.fields.get_value("DueDate") is not None + assert bad.fields.get_value("DueDate") is None + + def test_query_response_parses_asset_backed_list_field(self): + payload = 
base64.b64encode(b'["REM-1","REM-2"]').decode("ascii") + response = CKQueryResponse.model_validate( + { + "records": [ + { + "recordName": "List/LIST-A", + "recordType": "List", + "fields": { + "ReminderIDsAsset": { + "type": "ASSET", + "value": {"downloadedData": payload}, + } + }, + } + ] + } + ) + + rec = response.records[0] + assert isinstance(rec, CKRecord) + asset = rec.fields.get_value("ReminderIDsAsset") + assert asset is not None + assert asset.downloadedData == b'["REM-1","REM-2"]' diff --git a/tests/test_base.py b/tests/test_base.py index 92f95d79..01f8bdfe 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -25,6 +25,7 @@ from pyicloud.services.calendar import CalendarService from pyicloud.services.contacts import ContactsService from pyicloud.services.hidemyemail import HideMyEmailService +from pyicloud.services.notes import NotesService from pyicloud.services.photos import PhotosService from pyicloud.services.reminders import RemindersService from pyicloud.services.ubiquity import UbiquityService @@ -105,6 +106,25 @@ def test_constructor_skips_authentication_when_requested() -> None: get_from_keyring.assert_not_called() +def test_constructor_accepts_keyword_only_cloudkit_validation_extra() -> None: + """cloudkit_validation_extra remains a keyword-only escape hatch.""" + with ( + patch("pyicloud.PyiCloudService.authenticate") as mock_authenticate, + patch("pyicloud.PyiCloudService._setup_cookie_directory") as mock_setup_dir, + patch("builtins.open", new_callable=mock_open), + ): + mock_authenticate.return_value = None + mock_setup_dir.return_value = "/tmp/pyicloud/cookies" + + service = PyiCloudService( + "test@example.com", + secrets.token_hex(32), + cloudkit_validation_extra="ignore", + ) + + assert service._cloudkit_validation_extra == "ignore" + + def test_authenticate_with_missing_token(pyicloud_service: PyiCloudService) -> None: """Test the authenticate method with missing session_token.""" with ( @@ -1544,6 +1564,7 @@ def 
test_reminders_returns_service( service_root="https://reminders.example.com", session=pyicloud_service.session, params=pyicloud_service.params, + cloudkit_validation_extra=None, ) assert result == mock_reminders_service @@ -1581,6 +1602,95 @@ def test_reminders_raises_on_api_exception( _ = pyicloud_service.reminders +def test_reminders_raises_on_not_activated_exception( + pyicloud_service: PyiCloudService, +) -> None: + """Reminders wraps missing ckdatabasews activation as service unavailable.""" + with patch.object( + pyicloud_service, + "get_webservice_url", + side_effect=PyiCloudServiceNotActivatedException("error"), + ): + pyicloud_service._reminders = None + with pytest.raises( + PyiCloudServiceUnavailable, + match="Reminders service not available", + ): + _ = pyicloud_service.reminders + + +def test_notes_returns_new_notes_service_instance( + pyicloud_service: PyiCloudService, +) -> None: + """Test notes property returns a new NotesService instance.""" + with ( + patch.object( + pyicloud_service, + "get_webservice_url", + return_value="https://notes.example.com", + ), + patch("pyicloud.base.NotesService") as mock_notes_service, + ): + mock_notes_instance = MagicMock(spec=NotesService) + mock_notes_service.return_value = mock_notes_instance + + result = pyicloud_service.notes + + mock_notes_service.assert_called_once_with( + service_root="https://notes.example.com", + session=pyicloud_service.session, + params=pyicloud_service.params, + cloudkit_validation_extra=pyicloud_service._cloudkit_validation_extra, + ) + assert result == mock_notes_instance + + +def test_notes_returns_cached_instance(pyicloud_service: PyiCloudService) -> None: + """Test notes property returns cached instance if already set.""" + mock_notes_service = MagicMock() + pyicloud_service._notes = mock_notes_service + result: NotesService = pyicloud_service.notes + assert result == mock_notes_service + + +def test_notes_raises_on_api_exception(pyicloud_service: PyiCloudService) -> None: + 
"""Test notes property raises PyiCloudServiceUnavailable on API exception.""" + with ( + patch.object( + pyicloud_service, + "get_webservice_url", + return_value="https://notes.example.com", + ), + patch( + "pyicloud.base.NotesService", + side_effect=PyiCloudAPIResponseException("error"), + ), + ): + pyicloud_service._notes = None + with pytest.raises( + PyiCloudServiceUnavailable, + match="Notes service not available", + ): + _ = pyicloud_service.notes + + +def test_notes_raises_on_not_activated_exception( + pyicloud_service: PyiCloudService, +) -> None: + """Notes wraps missing ckdatabasews activation as service unavailable.""" + with patch.object( + pyicloud_service, + "get_webservice_url", + side_effect=PyiCloudServiceNotActivatedException("error"), + ): + pyicloud_service._notes = None + with pytest.raises( + PyiCloudServiceUnavailable, + match="Notes service not available", + ): + _ = pyicloud_service.notes + + def test_setup_cookie_directory_with_custom_path( pyicloud_service: PyiCloudService, ) -> None: diff --git a/tests/test_example_reminders_delta.py b/tests/test_example_reminders_delta.py new file mode 100644 index 00000000..ffb27754 --- /dev/null +++ b/tests/test_example_reminders_delta.py @@ -0,0 +1,70 @@ +import importlib.util +import os +import sys +import unittest +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +SCRIPT_PATH = os.path.join( + os.path.dirname(__file__), + "..", + "example_reminders_delta.py", +) + + +def _load_example_reminders_delta(): + spec = importlib.util.spec_from_file_location( + "pyicloud_example_reminders_delta", + SCRIPT_PATH, + ) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + return module + + +class TestExampleRemindersDelta(unittest.TestCase): + def test_authenticate_uses_security_key_when_fido2_devices_are_available(self): + module = 
_load_example_reminders_delta() + api = MagicMock() + devices = [object(), object()] + api.requires_2fa = True + api.requires_2sa = False + api.fido2_devices = devices + api.is_trusted_session = False + + with ( + patch.object(module, "resolve_credentials", return_value=("u", "p")), + patch.object(module, "PyiCloudService", return_value=api), + patch("builtins.input", return_value="1"), + ): + result = module.authenticate(SimpleNamespace()) + + self.assertIs(result, api) + api.confirm_security_key.assert_called_once_with(devices[1]) + api.validate_2fa_code.assert_not_called() + api.trust_session.assert_called_once_with() + + def test_authenticate_2sa_uses_selected_trusted_device(self): + module = _load_example_reminders_delta() + api = MagicMock() + devices = [ + {"id": "device-0"}, + {"id": "device-1"}, + ] + api.requires_2fa = False + api.requires_2sa = True + api.trusted_devices = devices + + with ( + patch.object(module, "resolve_credentials", return_value=("u", "p")), + patch.object(module, "PyiCloudService", return_value=api), + patch("builtins.input", side_effect=["1", "123456"]), + ): + result = module.authenticate(SimpleNamespace()) + + self.assertIs(result, api) + api.send_verification_code.assert_called_once_with(devices[1]) + api.validate_verification_code.assert_called_once_with(devices[1], "123456") diff --git a/tests/test_notes.py b/tests/test_notes.py new file mode 100644 index 00000000..b8294fa7 --- /dev/null +++ b/tests/test_notes.py @@ -0,0 +1,523 @@ +"""Tests for the Notes service.""" + +import importlib +import os +import tempfile +import unittest +from datetime import datetime +from typing import Annotated +from unittest.mock import MagicMock, patch + +from pydantic import BaseModel, BeforeValidator, ValidationError + +from pyicloud.common.cloudkit import CKLookupResponse +from pyicloud.common.cloudkit.base import resolve_cloudkit_validation_extra +from pyicloud.common.cloudkit.models import ( + CKParticipant, + CKParticipantProtectionInfo, + 
CKPCSInfo, + CKRecord, + CKUserIdentity, + _from_millis_or_none, + _from_secs_or_millis, +) +from pyicloud.services.notes import AttachmentId, Note, NotesService, NoteSummary +from pyicloud.services.notes.client import ( + CloudKitNotesClient, + NotesApiError, +) +from pyicloud.services.notes.client import NotesError as ClientNotesError +from pyicloud.services.notes.client import ( + _CloudKitClient, +) +from pyicloud.services.notes.rendering.exporter import decode_and_parse_note, write_html +from pyicloud.services.notes.service import NoteNotFound + + +class NotesServiceTest(unittest.TestCase): + """Tests for the Notes service.""" + + def setUp(self): + """Set up the test case.""" + self.service = NotesService( + service_root="https://example.com", + session=MagicMock(), + params={}, + ) + + def test_get_note(self): + """Test getting a note.""" + self.skipTest("TODO: implement once representative note fixture is available") + + def test_notes_domain_models_are_pydantic(self): + """Notes public models expose Pydantic serialization.""" + summary = NoteSummary( + id="note-1", + title="Hello", + snippet="World", + modified_at=None, + folder_id="folder-1", + folder_name="Inbox", + is_deleted=False, + is_locked=False, + ) + attachment_id = AttachmentId(identifier="att-1", type_uti="public.jpeg") + + self.assertEqual(summary.model_dump()["id"], "note-1") + self.assertEqual(attachment_id.model_dump()["type_uti"], "public.jpeg") + + def test_note_has_attachments_is_in_model_dump(self): + note = Note( + id="note-1", + title="Hello", + snippet="World", + modified_at=None, + folder_id="folder-1", + folder_name="Inbox", + is_deleted=False, + is_locked=False, + text="Body", + attachments=[], + ) + + self.assertFalse(note.model_dump()["has_attachments"]) + + def test_notes_domain_models_forbid_unknown_fields(self): + with self.assertRaises(ValidationError): + NoteSummary( + id="note-1", + title="Hello", + snippet="World", + modified_at=None, + folder_id="folder-1", + 
folder_name="Inbox", + is_deleted=False, + is_locked=False, + unexpected=True, + ) + + def test_notes_domain_models_are_frozen(self): + summary = NoteSummary( + id="note-1", + title="Hello", + snippet="World", + modified_at=None, + folder_id="folder-1", + folder_name="Inbox", + is_deleted=False, + is_locked=False, + ) + + with self.assertRaises(ValidationError): + summary.title = "Updated" + + def test_resolve_cloudkit_validation_extra_defaults_to_allow(self): + with patch.dict(os.environ, {}, clear=True): + self.assertEqual(resolve_cloudkit_validation_extra(), "allow") + + def test_resolve_cloudkit_validation_extra_uses_env(self): + with patch.dict(os.environ, {"PYICLOUD_CK_EXTRA": "forbid"}, clear=True): + self.assertEqual(resolve_cloudkit_validation_extra(), "forbid") + + def test_notes_client_allows_unexpected_fields_by_default(self): + session = MagicMock() + session.post.return_value = MagicMock( + status_code=200, + json=lambda: {"records": [], "unexpectedTopLevel": {"present": True}}, + ) + client = CloudKitNotesClient( + "https://example.com", + session, + {}, + ) + + response = client.lookup(["Note/1"], desired_keys=None) + + self.assertIsInstance(response, CKLookupResponse) + self.assertEqual(response.model_extra["unexpectedTopLevel"], {"present": True}) + + def test_notes_client_uses_bounded_timeouts(self): + session = MagicMock() + session.post.return_value = MagicMock(status_code=200, json=lambda: {}) + session.get.return_value = MagicMock( + status_code=200, iter_content=lambda **_: [] + ) + client = _CloudKitClient("https://example.com", session, {}) + + client.post("/records/query", {"query": "payload"}) + list(client.get_stream("https://example.com/asset")) + + self.assertEqual(session.post.call_args.kwargs["timeout"], (10.0, 60.0)) + self.assertEqual(session.get.call_args.kwargs["timeout"], (10.0, 60.0)) + + def test_notes_client_redacts_query_strings_in_logs(self): + redacted = _CloudKitClient._redact_url( + 
"https://example.com/path?token=secret&x=1#frag" + ) + self.assertEqual(redacted, "https://example.com/path") + + def test_notes_client_strict_mode_wraps_validation_error(self): + session = MagicMock() + payload = {"records": [], "unexpectedTopLevel": {"present": True}} + session.post.return_value = MagicMock(status_code=200, json=lambda: payload) + client = CloudKitNotesClient( + "https://example.com", + session, + {}, + validation_extra="forbid", + ) + + with self.assertRaisesRegex( + NotesApiError, "Lookup response validation failed" + ) as ctx: + client.lookup(["Note/1"], desired_keys=None) + + self.assertEqual(ctx.exception.payload, payload) + self.assertIsInstance(ctx.exception.__cause__, ValidationError) + + def test_notes_client_explicit_override_wins_over_env(self): + session = MagicMock() + session.post.return_value = MagicMock( + status_code=200, + json=lambda: {"records": [], "unexpectedTopLevel": {"present": True}}, + ) + with patch.dict(os.environ, {"PYICLOUD_CK_EXTRA": "forbid"}, clear=True): + client = CloudKitNotesClient( + "https://example.com", + session, + {}, + validation_extra="allow", + ) + + response = client.lookup(["Note/1"], desired_keys=None) + + self.assertEqual(response.model_extra["unexpectedTopLevel"], {"present": True}) + + def test_notes_service_passes_through_validation_override(self): + service = NotesService( + service_root="https://example.com", + session=MagicMock(), + params={}, + cloudkit_validation_extra="ignore", + ) + + self.assertEqual(service.raw._validation_extra, "ignore") + + def test_notes_errors_share_client_base_class(self): + self.assertTrue(issubclass(NoteNotFound, ClientNotesError)) + + def test_notes_exporter_module_imports(self): + module = importlib.import_module("pyicloud.services.notes.rendering.exporter") + + self.assertTrue(hasattr(module, "NoteExporter")) + + def test_notes_service_render_note_uses_lazy_importer(self): + record = CKRecord.model_validate( + {"recordName": "Note/1", "recordType": "Note", 
"fields": {}} + ) + self.service.raw.lookup = MagicMock(return_value=MagicMock(records=[record])) + + with ( + patch( + "pyicloud.services.notes.rendering.exporter.decode_and_parse_note", + return_value=MagicMock(name="note"), + ), + patch( + "pyicloud.services.notes.rendering.exporter.build_datasource", + return_value=(MagicMock(name="datasource"), []), + ), + patch( + "pyicloud.services.notes.rendering.renderer.NoteRenderer.render", + return_value="

        rendered

        ", + ) as mock_render, + ): + rendered = self.service.render_note("Note/1") + + self.assertEqual(rendered, "

        rendered

        ") + mock_render.assert_called_once() + + def test_notes_service_export_note_uses_lazy_importer(self): + record = CKRecord.model_validate( + {"recordName": "Note/1", "recordType": "Note", "fields": {}} + ) + self.service.raw.lookup = MagicMock(return_value=MagicMock(records=[record])) + output_dir = os.path.join( + tempfile.gettempdir(), + "python-test-results", + "notes-export", + ) + output_path = os.path.join(output_dir, "note.html") + + with patch( + "pyicloud.services.notes.rendering.exporter.NoteExporter.export", + return_value=output_path, + ) as mock_export: + exported = self.service.export_note("Note/1", output_dir) + + self.assertEqual(exported, output_path) + mock_export.assert_called_once() + + def test_notes_service_attachment_lookup_prefers_canonical_record_names(self): + note_record = CKRecord.model_validate( + { + "recordName": "Note/1", + "recordType": "Note", + "fields": { + "Attachments": { + "type": "REFERENCE_LIST", + "value": [ + { + "recordName": "Attachment/CANONICAL", + "action": "VALIDATE", + } + ], + } + }, + } + ) + attachment_record = CKRecord.model_validate( + { + "recordName": "Attachment/CANONICAL", + "recordType": "Attachment", + "fields": { + "AttachmentIdentifier": {"type": "STRING", "value": "ALIAS-1"}, + "AttachmentUTI": {"type": "STRING", "value": "public.url"}, + "PrimaryAsset": { + "type": "ASSETID", + "value": {"downloadURL": "https://example.com/file"}, + }, + }, + } + ) + self.service.raw.lookup = MagicMock( + return_value=CKLookupResponse(records=[attachment_record]) + ) + + attachments = self.service._resolve_attachments_for_record( + note_record, + attachment_ids=[AttachmentId(identifier="ALIAS-1")], + ) + + self.assertEqual( + self.service.raw.lookup.call_args.args[0], + ["Attachment/CANONICAL"], + ) + self.assertEqual(len(attachments), 1) + self.assertEqual(attachments[0].id, "Attachment/CANONICAL") + self.assertIs(self.service._attachment_meta_cache["ALIAS-1"], attachments[0]) + + def 
test_write_html_rejects_filename_escape(self): + out_dir = os.path.join( + tempfile.gettempdir(), + "python-test-results", + "notes-export-write-html", + ) + with self.assertRaisesRegex(ValueError, "filename must stay within out_dir"): + write_html( + "Title", + "

        rendered

        ", + out_dir, + filename="../escape.html", + ) + + def test_decode_and_parse_note_returns_none_on_parse_failure(self): + record = CKRecord.model_validate( + { + "recordName": "Note/1", + "recordType": "Note", + "fields": { + "TextDataEncrypted": { + "type": "ENCRYPTED_BYTES", + "value": "aGVsbG8=", + } + }, + } + ) + + with ( + patch( + "pyicloud.services.notes.rendering.exporter.BodyDecoder.decode", + return_value=MagicMock(bytes=b"broken"), + ), + patch( + "pyicloud.services.notes.rendering.exporter.pb.NoteStoreProto.ParseFromString", + side_effect=ValueError("bad proto"), + ), + ): + self.assertIsNone(decode_and_parse_note(record)) + + def test_note_body_text_defaults_to_none(self): + from pyicloud.services.notes.domain import NoteBody + + body = NoteBody(bytes=b"hello") + self.assertIsNone(body.text) + + def test_shared_cloudkit_signed_string_timestamps_are_tolerated(self): + created = _from_millis_or_none(" 1735689600000 ") + + self.assertIsNotNone(created) + self.assertEqual(created.isoformat(), "2025-01-01T00:00:00+00:00") + self.assertIsNone(_from_secs_or_millis("999999999999999999999999")) + + def test_shared_cloudkit_invalid_timestamp_types_raise_validation_error(self): + class Demo(BaseModel): + created: Annotated[datetime, BeforeValidator(_from_millis_or_none)] + expires: Annotated[datetime, BeforeValidator(_from_secs_or_millis)] + + with self.assertRaises(ValidationError): + Demo.model_validate( + { + "created": object(), + "expires": object(), + } + ) + + def test_shared_cloudkit_share_allows_encrypted_string_fields(self): + """Shared cloudkit.share records may expose STRING + isEncrypted fields.""" + record = CKRecord.model_validate( + { + "recordName": "Share-123", + "recordType": "cloudkit.share", + "fields": { + "SnippetEncrypted": { + "value": "Shared snippet", + "type": "STRING", + "isEncrypted": True, + } + }, + } + ) + + self.assertEqual(record.fields.get_value("SnippetEncrypted"), "Shared snippet") + self.assertEqual( + 
NotesService._decode_encrypted(record.fields.get_value("SnippetEncrypted")), + "Shared snippet", + ) + + def test_shared_cloudkit_share_participant_surfaces_are_typed(self): + """Shared-record participant and PCS surfaces parse into structured models.""" + record = CKRecord.model_validate( + { + "recordName": "Share-123", + "recordType": "cloudkit.share", + "publicPermission": "NONE", + "participants": [ + { + "participantId": "owner-1", + "userIdentity": { + "userRecordName": "_owner", + "nameComponents": { + "givenName": "Jacob", + "familyName": "Arnould", + }, + "lookupInfo": { + "emailAddress": "jacob@example.com", + }, + }, + "type": "OWNER", + "acceptanceStatus": "ACCEPTED", + "permission": "READ_WRITE", + "customRole": "", + "isApprovedRequester": False, + "orgUser": False, + "publicKeyVersion": 1, + "outOfNetworkPrivateKey": "", + "outOfNetworkKeyType": 0, + "protectionInfo": { + "bytes": "aGVsbG8=", + "pcsChangeTag": "owner-tag", + }, + } + ], + "requesters": [], + "blocked": [], + "owner": { + "participantId": "owner-1", + "userIdentity": { + "userRecordName": "_owner", + }, + "type": "OWNER", + "permission": "READ_WRITE", + "protectionInfo": { + "bytes": "aGVsbG8=", + "pcsChangeTag": "owner-tag", + }, + }, + "currentUserParticipant": { + "participantId": "user-1", + "userIdentity": { + "userRecordName": "_user", + "lookupInfo": { + "phoneNumber": "352621583784", + }, + }, + "type": "ADMINISTRATOR", + "acceptanceStatus": "ACCEPTED", + "permission": "READ_WRITE", + "protectionInfo": { + "bytes": "d29ybGQ=", + "pcsChangeTag": "user-tag", + }, + }, + "invitedPCS": { + "bytes": "aW52aXRlZA==", + "pcsChangeTag": "invited-tag", + }, + "selfAddedPCS": { + "bytes": "c2VsZg==", + "pcsChangeTag": "self-tag", + }, + "fields": { + "SnippetEncrypted": { + "value": "Shared snippet", + "type": "STRING", + "isEncrypted": True, + } + }, + } + ) + + self.assertIsInstance(record.participants, list) + self.assertIsInstance(record.participants[0], CKParticipant) + 
self.assertIsInstance(record.participants[0].userIdentity, CKUserIdentity) + self.assertEqual( + record.participants[0].userIdentity.nameComponents.givenName, "Jacob" + ) + self.assertIsInstance( + record.participants[0].protectionInfo, CKParticipantProtectionInfo + ) + self.assertIsInstance(record.owner, CKParticipant) + self.assertIsInstance(record.currentUserParticipant, CKParticipant) + self.assertEqual( + record.currentUserParticipant.userIdentity.lookupInfo.phoneNumber, + "352621583784", + ) + self.assertIsInstance(record.invitedPCS, CKPCSInfo) + self.assertEqual(record.invitedPCS.pcsChangeTag, "invited-tag") + self.assertIsInstance(record.selfAddedPCS, CKPCSInfo) + self.assertEqual(record.selfAddedPCS.pcsChangeTag, "self-tag") + + def test_encrypted_string_fields_without_flag_are_rejected(self): + """STRING wrappers on *Encrypted fields must carry isEncrypted=true.""" + with self.assertRaises(ValidationError): + CKRecord.model_validate( + { + "recordName": "Share-123", + "recordType": "cloudkit.share", + "fields": { + "SnippetEncrypted": { + "value": "Shared snippet", + "type": "STRING", + } + }, + } + ) + + def test_decode_encrypted_bytes_and_strings(self): + """Notes encrypted decoder handles both bytes and string field values.""" + self.assertEqual(NotesService._decode_encrypted(b"hello"), "hello") + self.assertEqual(NotesService._decode_encrypted("bonjour"), "bonjour") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_notes_cli.py b/tests/test_notes_cli.py new file mode 100644 index 00000000..6fb47d2b --- /dev/null +++ b/tests/test_notes_cli.py @@ -0,0 +1,193 @@ +import argparse +import importlib.util +import os +import sys +import unittest +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +CLI_PATH = os.path.join(os.path.dirname(__file__), "..", "examples", "notes_cli.py") + + +def _load_notes_cli(): + spec = importlib.util.spec_from_file_location( + "pyicloud_examples_notes_cli", CLI_PATH + ) + 
assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +class TestNotesCli(unittest.TestCase): + def _output_dir(self, name): + path = os.path.join("/tmp/python-test-results", "notes-cli", name) + os.makedirs(path, exist_ok=True) + return path + + def test_parse_args_rejects_removed_download_assets_flag(self): + module = _load_notes_cli() + + with patch.object( + sys, + "argv", + ["notes_cli.py", "--username", "user@example.com", "--download-assets"], + ): + with self.assertRaises(SystemExit): + module.parse_args() + + def test_main_requests_titleencrypted_and_maps_export_config(self): + module = _load_notes_cli() + dummy_ckrecord = type("DummyCKRecord", (), {}) + note_record = dummy_ckrecord() + note_record.recordName = "note-1" + + note_item = SimpleNamespace(id="note-1", title="Wanted", modified_at=None) + raw = MagicMock() + raw.lookup.return_value = SimpleNamespace(records=[note_record]) + + notes = MagicMock() + notes.recents.return_value = [note_item] + notes.iter_all.return_value = [] + notes.raw = raw + + api = MagicMock() + api.notes = notes + + exporter = MagicMock() + args = argparse.Namespace( + username="user@example.com", + verbose=False, + cookie_dir="", + china_mainland=False, + max_items=1, + title="Wanted", + title_contains="", + output_dir="", + full_page=True, + dump_runs=False, + assets_dir="", + export_mode="lightweight", + notes_debug=True, + preview_appearance="dark", + pdf_height=777, + ) + + tmpdir = self._output_dir("main-config") + args.output_dir = tmpdir + args.assets_dir = os.path.join(tmpdir, "assets") + exporter.export.return_value = os.path.join(tmpdir, "note.html") + + with ( + patch.object(module, "parse_args", return_value=args), + patch.object(module, "get_password", return_value="pw"), + patch.object(module, "PyiCloudService", return_value=api), + patch.object(module, "ensure_auth"), + patch.object(module, 
"decode_and_parse_note", return_value=MagicMock()), + patch.object(module, "console", MagicMock()), + patch.object(module, "CKRecord", dummy_ckrecord), + patch( + "pyicloud.services.notes.rendering.exporter.NoteExporter", + return_value=exporter, + ) as mock_exporter_cls, + ): + module.main() + + self.assertEqual( + raw.lookup.call_args.kwargs["desired_keys"], + ["TextDataEncrypted", "Attachments", "TitleEncrypted"], + ) + + config = mock_exporter_cls.call_args.kwargs["config"] + self.assertEqual(config.export_mode, "lightweight") + self.assertEqual(config.assets_dir, args.assets_dir) + self.assertTrue(config.full_page) + self.assertTrue(config.debug) + self.assertEqual(config.preview_appearance, "dark") + self.assertEqual(config.pdf_object_height, 777) + + def test_parse_args_rejects_removed_password_flag(self): + module = _load_notes_cli() + + with patch.object( + sys, + "argv", + ["notes_cli.py", "--username", "user@example.com", "--password", "pw"], + ): + with self.assertRaises(SystemExit): + module.parse_args() + + def test_main_suppresses_note_dumps_without_debug_flags(self): + module = _load_notes_cli() + dummy_ckrecord = type("DummyCKRecord", (), {}) + note_record = dummy_ckrecord() + note_record.recordName = "note-1" + + note_item = SimpleNamespace(id="note-1", title="Wanted", modified_at=None) + raw = MagicMock() + raw.lookup.return_value = SimpleNamespace(records=[note_record]) + + notes = MagicMock() + notes.recents.return_value = [note_item] + notes.iter_all.return_value = [] + notes.raw = raw + + api = MagicMock() + api.notes = notes + + exporter = MagicMock() + args = argparse.Namespace( + username="user@example.com", + verbose=False, + cookie_dir="", + china_mainland=False, + max_items=1, + title="Wanted", + title_contains="", + output_dir=self._output_dir("main-no-debug"), + full_page=False, + dump_runs=False, + assets_dir="", + export_mode="lightweight", + notes_debug=False, + preview_appearance="light", + pdf_height=600, + ) + 
exporter.export.return_value = os.path.join(args.output_dir, "note.html") + console = MagicMock() + + with ( + patch.object(module, "parse_args", return_value=args), + patch.object(module, "get_password", return_value="pw"), + patch.object(module, "PyiCloudService", return_value=api), + patch.object(module, "ensure_auth"), + patch.object(module, "decode_and_parse_note", return_value=MagicMock()), + patch.object(module, "console", console), + patch.object(module, "CKRecord", dummy_ckrecord), + patch( + "pyicloud.services.notes.rendering.exporter.NoteExporter", + return_value=exporter, + ), + ): + module.main() + + console.rule.assert_not_called() + printed = [call.args[0] for call in console.print.call_args_list if call.args] + self.assertNotIn("proto_note:", printed) + + def test_ensure_auth_uses_security_key_when_fido2_devices_are_available(self): + module = _load_notes_cli() + api = MagicMock() + devices = [object(), object()] + api.requires_2fa = True + api.requires_2sa = False + api.fido2_devices = devices + api.is_trusted_session = False + + with patch("builtins.input", return_value="1"): + module.ensure_auth(api) + + api.confirm_security_key.assert_called_once_with(devices[1]) + api.validate_2fa_code.assert_not_called() + api.trust_session.assert_called_once_with() diff --git a/tests/test_notes_rendering.py b/tests/test_notes_rendering.py new file mode 100644 index 00000000..eaea776f --- /dev/null +++ b/tests/test_notes_rendering.py @@ -0,0 +1,578 @@ +import json +import os +import tempfile +import unittest +from types import SimpleNamespace +from unittest.mock import MagicMock, Mock, patch + +from pyicloud.services.notes.rendering.attachments import ( + AttachmentContext, + _safe_url, + render_attachment, +) +from pyicloud.services.notes.rendering.ck_datasource import CloudKitNoteDataSource +from pyicloud.services.notes.rendering.exporter import ( + NoteExporter, + decode_and_parse_note, + download_image_assets, +) +from 
pyicloud.services.notes.rendering.options import ExportConfig +from pyicloud.services.notes.rendering.renderer import NoteRenderer, _safe_anchor_href +from pyicloud.services.notes.rendering.table_builder import ( + TableBuilder, + render_table_from_mergeable, +) + +FIXTURE_PATH = os.path.join(os.path.dirname(__file__), "fixtures", "note_fixture.json") +with open(FIXTURE_PATH, "r", encoding="utf-8") as fixture_file: + NOTE_FIXTURE = json.load(fixture_file) + + +class _Field: + def __init__(self, value): + self.value = value + + +class _Fields: + def __init__(self, values): + self.values = values + + def get_value(self, key): + return self.values.get(key) + + def get_field(self, key): + if key not in self.values: + return None + return _Field(self.values[key]) + + +class _Record: + def __init__(self, record_name, fields): + self.recordName = record_name + self.recordType = "Attachment" + self.fields = _Fields(fields) + + +FIXTURE_PATH = os.path.join(os.path.dirname(__file__), "fixtures", "note_fixture.json") +with open(FIXTURE_PATH, "r", encoding="utf-8") as fixture_file: + NOTE_FIXTURE = json.load(fixture_file) + + +class _Field: + def __init__(self, value): + self.value = value + + +class _Fields: + def __init__(self, values): + self.values = values + + def get_value(self, key): + return self.values.get(key) + + def get_field(self, key): + if key not in self.values: + return None + return _Field(self.values[key]) + + +class _Record: + def __init__(self, record_name, fields): + self.recordName = record_name + self.recordType = "Attachment" + self.fields = _Fields(fields) + + +class TestNoteRendering(unittest.TestCase): + def setUp(self): + self.fixture = NOTE_FIXTURE + + def _reconstruct_record(self, data): + # Helper to rebuild a pseudo-CKRecord from the JSON dict + # We need to minimally satisfy what build_datasource expects (fields.get_value) + class MockFields: + def __init__(self, fields_dict): + self.d = fields_dict + + def get_value(self, key): + val = 
self.d.get(key) + if isinstance(val, dict) and "__bytes__" in val: + import base64 + + return base64.b64decode(val["__bytes__"]) + return val + + def get_field(self, key): + # Needed for some checks like Attachments + # For references, we might need more complex reconstruction if the code checks types + # But let's start simple. + return None + + rec = Mock() + rec.recordName = data["recordName"] + rec.recordType = data["recordType"] + rec.fields = MockFields(data["fields"]) + return rec + + def test_render_fixture_output(self): + """Ensure the fixture note renders to HTML without crashing.""" + note_data = self.fixture["note"] + note_rec = self._reconstruct_record(note_data) + + # Manual decode to skip isinstance check causing issues with simple mocks + from pyicloud.services.notes.decoding import BodyDecoder + from pyicloud.services.notes.protobuf import notes_pb2 + + raw_cypher = note_rec.fields.get_value("TextDataEncrypted") + nb = BodyDecoder().decode(raw_cypher) + self.assertIsNotNone(nb, "Failed to BodyDecoder.decode fixture data") + + msg = notes_pb2.NoteStoreProto() + msg.ParseFromString(nb.bytes) + note = getattr(getattr(msg, "document", None), "note", None) + + # Mock datasource hydration + # We manually populate the datasource with the attachment records from the fixture + ds = CloudKitNoteDataSource() + att_data_list = self.fixture["attachments"] + for att_data in att_data_list: + att_rec = self._reconstruct_record(att_data) + ds.add_attachment_record(att_rec) + + renderer = NoteRenderer() + html = renderer.render(note, datasource=ds) + + # Verify basic structure + self.assertIn( + "checklist", + html.lower(), + "Should contain checkbox logic if note has checklist", + ) + # The test note had "pyicloud notes service test" in title, likely not in body. + # But we expect SOME content. 
+ self.assertTrue(len(html) > 0) + + print("\n--- Rendered HTML Preview (First 500 chars) ---") + print(html[:500]) + print("-----------------------------------------------") + + def test_public_url_attachment_keeps_useful_title_and_href(self): + ds = CloudKitNoteDataSource() + ds.add_attachment_record( + _Record( + "url-1", + { + "UTI": "public.url", + "SummaryEncrypted": b"Discord Notes Link", + "URLStringEncrypted": b"https://discord.example.com/channel/1", + }, + ) + ) + + html = render_attachment( + AttachmentContext( + id="url-1", + uti=ds.get_attachment_uti("url-1") or "", + title=ds.get_title("url-1"), + primary_url=ds.get_primary_asset_url("url-1"), + thumb_url=ds.get_thumbnail_url("url-1"), + mergeable_gz=ds.get_mergeable_gz("url-1"), + ), + lambda _: "", + ) + + self.assertIn("Discord Notes Link", html) + self.assertIn('href="https://discord.example.com/channel/1"', html) + + def test_image_attachment_does_not_use_signed_url_as_alt_text(self): + signed_url = "https://cvws.icloud-content.com/B/example-signed-asset" + ds = CloudKitNoteDataSource() + ds.add_attachment_record( + _Record( + "img-1", + { + "UTI": "com.apple.paper", + "PreviewImages": [SimpleNamespace(downloadURL=signed_url)], + }, + ) + ) + + self.assertIsNone(ds.get_title("img-1")) + + html = render_attachment( + AttachmentContext( + id="img-1", + uti=ds.get_attachment_uti("img-1") or "", + title=ds.get_title("img-1"), + primary_url=ds.get_primary_asset_url("img-1"), + thumb_url=ds.get_thumbnail_url("img-1"), + mergeable_gz=ds.get_mergeable_gz("img-1"), + ), + lambda _: "", + ) + + self.assertIn(f'src="{signed_url}"', html) + self.assertNotIn(f'alt="{signed_url}"', html) + + def test_default_renderer_keeps_safe_relative_href(self): + html = render_attachment( + AttachmentContext( + id="att-1", + uti="com.example.unknown", + title="Attachment", + primary_url="assets/note/file.bin", + thumb_url=None, + mergeable_gz=None, + ), + lambda _: "", + ) + + self.assertIn('href="assets/note/file.bin"', 
html) + + def test_url_renderer_rejects_unsafe_schemes(self): + html = render_attachment( + AttachmentContext( + id="att-2", + uti="public.url", + title="Unsafe", + primary_url="javascript:alert(1)", + thumb_url=None, + mergeable_gz=None, + ), + lambda _: "", + ) + + self.assertNotIn("javascript:alert", html) + self.assertNotIn("href=", html) + + def test_image_renderer_rejects_protocol_relative_urls(self): + html = render_attachment( + AttachmentContext( + id="att-3", + uti="public.image", + title="Image", + primary_url="//evil.example.com/x.png", + thumb_url=None, + mergeable_gz=None, + ), + lambda _: "", + ) + + self.assertNotIn("src=", html) + + def test_image_renderer_falls_back_to_valid_thumbnail(self): + html = render_attachment( + AttachmentContext( + id="att-4", + uti="public.image", + title="Image", + primary_url="javascript:alert(1)", + thumb_url="https://example.com/thumb.png", + mergeable_gz=None, + ), + lambda _: "", + ) + + self.assertIn('src="https://example.com/thumb.png"', html) + + def test_render_table_from_mergeable_fails_closed_on_malformed_payload(self): + self.assertIsNone( + render_table_from_mergeable(b"not-a-table", lambda _: "

        x

        ") + ) + + def test_render_table_from_mergeable_uses_later_valid_root_candidate(self): + class _FakeValue: + def __init__(self, object_index): + self.object_index = object_index + + class _FakeMapEntry: + def __init__(self, key, object_index): + self.key = key + self.value = _FakeValue(object_index) + + class _FakeRootEntry: + def __init__(self, *map_entries): + self.custom_map = SimpleNamespace(type=0, map_entry=list(map_entries)) + + def HasField(self, field_name): + return field_name == "custom_map" + + class _AxisEntry: + def __init__(self, total): + self.total = total + + class _CellEntry: + def __init__(self, html): + self.cell_html = html + + class _FakeProto: + def __init__(self): + entries = [ + _FakeRootEntry( + _FakeMapEntry(0, 2), + _FakeMapEntry(1, 3), + _FakeMapEntry(2, 4), + ), + _FakeRootEntry( + _FakeMapEntry(0, 5), + _FakeMapEntry(1, 6), + _FakeMapEntry(2, 7), + ), + _AxisEntry(0), + _AxisEntry(0), + _CellEntry(""), + _AxisEntry(1), + _AxisEntry(1), + _CellEntry("

        ok

        "), + ] + data = SimpleNamespace( + mergeable_data_object_key_item=[ + "crRows", + "crColumns", + "cellColumns", + ], + mergeable_data_object_type_item=["com.apple.notes.ICTable"], + mergeable_data_object_uuid_item=[], + mergeable_data_object_entry=entries, + ) + self.mergable_data_object = SimpleNamespace( + mergeable_data_object_data=data + ) + + def ParseFromString(self, payload): + return None + + with ( + patch( + "pyicloud.services.notes.rendering.table_builder.pb.MergableDataProto", + _FakeProto, + ), + patch.object( + TableBuilder, + "parse_rows", + lambda self, entry: setattr(self.rows, "total", entry.total), + ), + patch.object( + TableBuilder, + "parse_cols", + lambda self, entry: setattr(self.cols, "total", entry.total), + ), + patch.object( + TableBuilder, + "parse_cell_columns", + lambda self, entry: self.cells.__setitem__( + 0, + [SimpleNamespace(html=entry.cell_html)], + ) + if self.cells + else None, + ), + ): + html = render_table_from_mergeable(b"candidate-scan", lambda _: "") + + self.assertIn("
        ", html) + self.assertIn("

        ok

        ", html) + + def test_table_builder_caps_large_allocations(self): + builder = TableBuilder( + key_items=[], + type_items=[], + uuid_items=[], + entries=[], + render_note_cb=lambda _: "", + ) + builder.rows.total = 400 + builder.cols.total = 400 + + builder.init_table_buffers() + + self.assertEqual(builder.cells, []) + + def test_safe_anchor_href_allows_only_expected_schemes(self): + self.assertEqual( + _safe_anchor_href("https://example.com"), "https://example.com" + ) + self.assertEqual( + _safe_anchor_href("mailto:test@example.com"), "mailto:test@example.com" + ) + self.assertEqual(_safe_anchor_href("tel:+352123456"), "tel:+352123456") + self.assertIsNone(_safe_anchor_href("javascript:alert(1)")) + self.assertIsNone(_safe_anchor_href("data:text/html,hi")) + + def test_safe_url_rejects_unsafe_and_protocol_relative_urls(self): + self.assertEqual( + _safe_url(" https://example.com/file ", allowed_schemes={"http", "https"}), + "https://example.com/file", + ) + self.assertEqual( + _safe_url("assets/file.png", allowed_schemes={"http", "https"}), + "assets/file.png", + ) + self.assertIsNone( + _safe_url("//evil.example.com/file.png", allowed_schemes={"http", "https"}) + ) + self.assertIsNone( + _safe_url("javascript:alert(1)", allowed_schemes={"http", "https"}) + ) + + def test_export_config_is_image_uti_normalizes_config_values(self): + config = ExportConfig( + image_uti_prefixes=("Public.Image",), + image_uti_exacts=("Com.Apple.Paper",), + ) + + self.assertTrue(config.is_image_uti("public.image")) + self.assertTrue(config.is_image_uti("com.apple.paper")) + + def test_export_config_is_image_uti_rejects_invalid_config_types(self): + config = ExportConfig(image_uti_exacts=("public.jpeg", 123)) + + with self.assertRaises(TypeError): + config.is_image_uti("public.jpeg") + + +class TestNoteExporter(unittest.TestCase): + def _note_record(self, record_name="note-1", title=b"Example Title"): + return _Record(record_name, {"TitleEncrypted": title}) + + def 
_output_dir(self, name): + path = os.path.join( + tempfile.gettempdir(), + "python-test-results", + "notes-rendering", + name, + ) + os.makedirs(path, exist_ok=True) + return path + + def test_export_archival_mode_downloads_assets_into_custom_assets_dir(self): + client = MagicMock() + datasource = MagicMock(name="datasource") + note_record = self._note_record() + config = ExportConfig( + export_mode="archival", + assets_dir=os.path.join( + tempfile.gettempdir(), + "python-test-results", + "notes-rendering", + "shared-assets", + ), + ) + exporter = NoteExporter(client, config=config) + + tmpdir = self._output_dir("archival-mode") + with ( + patch( + "pyicloud.services.notes.rendering.exporter.decode_and_parse_note", + return_value=MagicMock(name="note"), + ), + patch( + "pyicloud.services.notes.rendering.exporter.build_datasource", + return_value=(datasource, ["att-1"]), + ), + patch.object(exporter.renderer, "render", return_value="

        rendered

        "), + patch( + "pyicloud.services.notes.rendering.exporter.download_pdf_assets" + ) as mock_pdf, + patch( + "pyicloud.services.notes.rendering.exporter.download_image_assets" + ) as mock_img, + patch( + "pyicloud.services.notes.rendering.exporter.download_av_assets" + ) as mock_av, + patch( + "pyicloud.services.notes.rendering.exporter.download_vcard_assets" + ) as mock_vcard, + ): + path = exporter.export(note_record, output_dir=tmpdir, filename="note.html") + + expected_assets_dir = os.path.join(config.assets_dir, "note-1") + expected = { + "assets_dir": expected_assets_dir, + "out_dir": tmpdir, + "config": config, + } + + mock_pdf.assert_called_once_with(client, datasource, ["att-1"], **expected) + mock_img.assert_called_once_with(client, datasource, ["att-1"], **expected) + mock_av.assert_called_once_with(client, datasource, ["att-1"], **expected) + mock_vcard.assert_called_once_with(client, datasource, ["att-1"], **expected) + + with open(path, "r", encoding="utf-8") as handle: + html = handle.read() + + self.assertIn("", html) + self.assertIn("Example Title", html) + + def test_export_lightweight_mode_skips_downloads_and_writes_fragment(self): + client = MagicMock() + datasource = MagicMock(name="datasource") + note_record = self._note_record(title=b"Fragment Title") + config = ExportConfig(export_mode="lightweight", full_page=False) + exporter = NoteExporter(client, config=config) + + tmpdir = self._output_dir("lightweight-mode") + with ( + patch( + "pyicloud.services.notes.rendering.exporter.decode_and_parse_note", + return_value=MagicMock(name="note"), + ), + patch( + "pyicloud.services.notes.rendering.exporter.build_datasource", + return_value=(datasource, ["att-1"]), + ), + patch.object(exporter.renderer, "render", return_value="

        rendered

        "), + patch( + "pyicloud.services.notes.rendering.exporter.download_pdf_assets" + ) as mock_pdf, + patch( + "pyicloud.services.notes.rendering.exporter.download_image_assets" + ) as mock_img, + patch( + "pyicloud.services.notes.rendering.exporter.download_av_assets" + ) as mock_av, + patch( + "pyicloud.services.notes.rendering.exporter.download_vcard_assets" + ) as mock_vcard, + ): + path = exporter.export(note_record, output_dir=tmpdir, filename="note.html") + + with open(path, "r", encoding="utf-8") as handle: + html = handle.read() + + mock_pdf.assert_not_called() + mock_img.assert_not_called() + mock_av.assert_not_called() + mock_vcard.assert_not_called() + self.assertEqual(html, "

        rendered

        ") + + def test_decode_and_parse_note_returns_none_for_invalid_record_type(self): + self.assertIsNone(decode_and_parse_note(object())) + + def test_download_image_assets_uses_caller_config(self): + ck_client = MagicMock() + ds = MagicMock() + ds.get_attachment_uti.return_value = "com.apple.paper" + ds.get_primary_asset_url.return_value = ( + "https://cvws.icloud-content.com/B/image" + ) + ds.get_thumbnail_url.return_value = None + + config = ExportConfig(image_uti_exacts=()) + + tmpdir = self._output_dir("download-image-config") + updated = download_image_assets( + ck_client, + ds, + ["img-1"], + assets_dir=os.path.join(tmpdir, "assets"), + out_dir=tmpdir, + config=config, + ) + + ck_client.download_asset_to.assert_not_called() + self.assertEqual(updated, {}) + + +if __name__ == "__main__": + unittest.main() From d752fd39d103444e8e7800d2051390d187054e36 Mon Sep 17 00:00:00 2001 From: mrjarnould Date: Sun, 22 Mar 2026 23:12:05 +0100 Subject: [PATCH 2/3] Avoid logging trusted device metadata --- example_reminders.py | 12 ++---------- example_reminders_delta.py | 12 ++---------- examples/notes_cli.py | 7 ++----- 3 files changed, 6 insertions(+), 25 deletions(-) diff --git a/example_reminders.py b/example_reminders.py index ba2bd360..d0e1c8b5 100644 --- a/example_reminders.py +++ b/example_reminders.py @@ -176,14 +176,6 @@ def _prompt_selection( return selected_index -def _trusted_device_label(device: dict[str, Any]) -> str: - if device.get("phoneNumber"): - return "SMS trusted device" - if device.get("deviceName") or device.get("id"): - return "Trusted device" - return "Unknown trusted device" - - def _raw_token(value: str) -> str: if "/" not in value: return value @@ -225,8 +217,8 @@ def authenticate(args: argparse.Namespace) -> PyiCloudService: raise RuntimeError("2SA required but no trusted devices were returned.") print("Trusted devices:") - for index, device in enumerate(devices): - print(f" {index}: {_trusted_device_label(device)}") + for index, _device 
in enumerate(devices): + print(f" {index}: Trusted device") selected_index = _prompt_selection( "Select trusted device", diff --git a/example_reminders_delta.py b/example_reminders_delta.py index 7fba10cd..4c09638d 100644 --- a/example_reminders_delta.py +++ b/example_reminders_delta.py @@ -135,14 +135,6 @@ def _prompt_selection( return selected_index -def _trusted_device_label(device: dict[str, Any]) -> str: - if device.get("phoneNumber"): - return "SMS trusted device" - if device.get("deviceName") or device.get("id"): - return "Trusted device" - return "Unknown trusted device" - - def authenticate(args: argparse.Namespace) -> PyiCloudService: username, password = resolve_credentials(args) print("Authenticating with iCloud...") @@ -177,8 +169,8 @@ def authenticate(args: argparse.Namespace) -> PyiCloudService: raise RuntimeError("2SA required but no trusted devices were returned.") print("Trusted devices:") - for index, device in enumerate(devices): - print(f" {index}: {_trusted_device_label(device)}") + for index, _device in enumerate(devices): + print(f" {index}: Trusted device") selected_index = _prompt_selection( "Select trusted device", diff --git a/examples/notes_cli.py b/examples/notes_cli.py index 4dae7868..00f473e0 100644 --- a/examples/notes_cli.py +++ b/examples/notes_cli.py @@ -167,11 +167,8 @@ def ensure_auth(api: PyiCloudService) -> None: devices: List[dict[str, Any]] = api.trusted_devices if not devices: raise RuntimeError("No trusted devices available for 2SA") - for i, device in enumerate(devices): - label = ( - "SMS trusted device" if device.get("phoneNumber") else "Trusted device" - ) - logger.info(" %d: %s", i, label) + for i, _device in enumerate(devices): + logger.info(" %d: Trusted device", i) sel = input("Select device index [0]: ").strip() try: idx = int(sel) if sel else 0 From 39e754162cb8e9a85888090f3c468b8e29cc2c22 Mon Sep 17 00:00:00 2001 From: mrjarnould Date: Mon, 23 Mar 2026 18:34:46 +0100 Subject: [PATCH 3/3] Trim PR scope to notes 
and reminders --- .gitignore | 8 - README.md | 370 ---------- .../models/services/account/account_models.py | 420 ----------- .../services/calendar/missing_operations.md | 128 ---- .../hidemyemail/hidemyemail_models.py | 661 ------------------ .../services/hidemyemail/hme_list_test.py | 52 -- 6 files changed, 1639 deletions(-) delete mode 100644 pyicloud/models/services/account/account_models.py delete mode 100644 pyicloud/models/services/calendar/missing_operations.md delete mode 100644 pyicloud/models/services/hidemyemail/hidemyemail_models.py delete mode 100644 pyicloud/models/services/hidemyemail/hme_list_test.py diff --git a/.gitignore b/.gitignore index 93697ee2..c63c729a 100644 --- a/.gitignore +++ b/.gitignore @@ -78,11 +78,3 @@ uv.lock fetch_devices_*.py *.jpg /test*.py - -# Workspace -/workspace/ - -# Security excludes -*_examples/ -sample_*.json -test_real_data.py diff --git a/README.md b/README.md index 206b5cbc..f6836411 100644 --- a/README.md +++ b/README.md @@ -1307,376 +1307,6 @@ Important CLI flags: `--download-assets` is no longer supported in the example CLI. Use `--export-mode` to choose between archival and lightweight export behavior. -## Reminders - -You can access your iCloud Reminders through the `reminders` property: - -```python -reminders = api.reminders -``` - -The high-level Reminders service exposes typed list, reminder, alarm, hashtag, -attachment, and recurrence-rule models for both snapshot reads and mutations. 
- -_List reminder lists:_ - -```python -for lst in api.reminders.lists(): - print(lst.id, lst.title, lst.color, lst.count) -``` - -_List reminders globally or within one list:_ - -```python -reminders = api.reminders - -target_list = next(iter(reminders.lists()), None) -if target_list: - for reminder in reminders.reminders(list_id=target_list.id): - print(reminder.id, reminder.title, reminder.completed) - -for reminder in reminders.reminders(): - print(reminder.title) -``` - -_Fetch one reminder by ID:_ - -```python -reminder_id = "YOUR_REMINDER_ID" -reminder = api.reminders.get(reminder_id) - -print(reminder.title) -print(reminder.desc) -print(reminder.due_date) -``` - -_Create, update, and delete a reminder:_ - -```python -from datetime import datetime, timedelta, timezone - -reminders = api.reminders -target_list = next(iter(reminders.lists())) - -created = reminders.create( - list_id=target_list.id, - title="Buy milk", - desc="2 percent", - due_date=datetime.now(timezone.utc) + timedelta(days=1), - priority=1, - flagged=True, -) - -created.desc = "2 percent organic" -created.completed = True -reminders.update(created) - -fresh = reminders.get(created.id) -reminders.delete(fresh) -``` - -`priority` uses Apple's numeric values. Common values are `0` (none), `1` -(high), `5` (medium), and `9` (low). 
- -_Work with a compound list snapshot:_ - -```python -reminders = api.reminders -target_list = next(iter(reminders.lists())) - -result = api.reminders.list_reminders( - list_id=target_list.id, - include_completed=True, - results_limit=200, -) - -print(len(result.reminders)) -print(result.alarms.keys()) -print(result.attachments.keys()) -print(result.hashtags.keys()) -``` - -`list_reminders()` returns a `ListRemindersResult` containing: - -- `reminders` -- `alarms` -- `triggers` -- `attachments` -- `hashtags` -- `recurrence_rules` - -_Track incremental changes:_ - -```python -reminders = api.reminders - -cursor = reminders.sync_cursor() - -for event in reminders.iter_changes(since=cursor): - print(event.type, event.reminder_id) - if event.reminder is not None: - print(event.reminder.title) -``` - -`iter_changes(since=...)` yields `ReminderChangeEvent` objects. Updated -reminders include a hydrated `reminder` payload; deleted reminders only include -the `reminder_id`. - -_Add location triggers and inspect alarms:_ - -```python -from pyicloud.services.reminders.models import Proximity - -reminders = api.reminders -reminder = next(iter(reminders.reminders())) - -alarm, trigger = reminders.add_location_trigger( - reminder, - title="Office", - address="1 Infinite Loop, Cupertino, CA", - latitude=37.3318, - longitude=-122.0312, - radius=150.0, - proximity=Proximity.ARRIVING, -) - -for row in reminders.alarms_for(reminder): - print(row.alarm.id, row.trigger.id if row.trigger else None) -``` - -_Add hashtags, URL attachments, and recurrence rules:_ - -```python -from pyicloud.services.reminders.models import RecurrenceFrequency - -reminders = api.reminders -reminder = next(iter(reminders.reminders())) - -hashtag = reminders.create_hashtag(reminder, "errands") -attachment = reminders.create_url_attachment( - reminder, - url="https://example.com/checklist", -) -rule = reminders.create_recurrence_rule( - reminder, - frequency=RecurrenceFrequency.WEEKLY, - interval=1, -) - 
-print(reminders.tags_for(reminder)) -print(reminders.attachments_for(reminder)) -print(reminders.recurrence_rules_for(reminder)) -``` - -You can also update and delete related records: - -```python -reminders.update_attachment(attachment, url="https://example.org/checklist") -reminders.update_recurrence_rule(rule, interval=2) -reminders.delete_hashtag(reminder, hashtag) -reminders.delete_attachment(reminder, attachment) -reminders.delete_recurrence_rule(reminder, rule) -``` - -Reminders caveats: - -- Reminder mutations operate on typed models. The normal pattern is to fetch a - reminder, mutate fields locally, then call `update(reminder)`. -- Naive `datetime` values passed to `create()` are interpreted as UTC by the - service. -- `update_hashtag()` exists, but the iCloud Reminders web app currently treats - hashtag names as effectively read-only in some live flows, so rename behavior - may not be reflected consistently outside the API. - -### Reminders Example Scripts - -[`example_reminders.py`](example_reminders.py) is a comprehensive live -integration validator for the Reminders service. It exercises list discovery, -read paths, write paths, location triggers, hashtags, attachments, recurrence -rules, and delete flows against a real iCloud account. - -[`example_reminders_delta.py`](example_reminders_delta.py) is a smaller live -validator focused on `sync_cursor()` and `iter_changes(since=...)`. - -## Notes - -You can access your iCloud Notes through the `notes` property: - -```python -notes = api.notes -``` - -The high-level Notes service exposes typed note, folder, and attachment models -for common workflows such as recent-note listings, full-note retrieval, HTML -rendering, and on-disk exports. Prefer `api.notes` for normal use and treat -`api.notes.raw` as an advanced/debug escape hatch when you need direct access to -the underlying CloudKit client. 
- -_List recent notes:_ - -```python -notes = api.notes - -for summary in notes.recents(limit=10): - print(summary.id, summary.title, summary.modified_at) -``` - -_Iterate folders and list notes in one folder:_ - -```python -notes = api.notes - -folder = next(iter(notes.folders()), None) -if folder: - print(folder.id, folder.name, folder.has_subfolders) - for summary in notes.in_folder(folder.id, limit=5): - print(summary.title) -``` - -_Iterate all notes or capture a sync cursor for later incremental work:_ - -```python -notes = api.notes - -for summary in notes.iter_all(): - print(summary.id, summary.title) - -cursor = notes.sync_cursor() -print(cursor) -``` - -Persist the sync cursor from `sync_cursor()` and pass it back to -`iter_all(since=...)` or `iter_changes(since=...)` on a later run to enumerate -only newer changes. - -_Fetch a full note with attachment metadata:_ - -```python -note_id = "YOUR_NOTE_ID" -note = api.notes.get(note_id, with_attachments=True) - -print(note.title) -print(note.text) - -for attachment in note.attachments or []: - print(attachment.id, attachment.filename, attachment.uti, attachment.size) -``` - -_Render a note to an HTML fragment:_ - -```python -html_fragment = api.notes.render_note( - note_id, - preview_appearance="light", - pdf_object_height=600, -) - -print(html_fragment[:200]) -``` - -`render_note()` returns an HTML fragment string and does not download assets or -write files to disk. - -_Export a note to HTML on disk:_ - -```python -path = api.notes.export_note( - note_id, - "./exports/notes_html", - export_mode="archival", - assets_dir="./exports/assets", - full_page=True, -) - -print(path) -``` - -`export_note()` accepts `ExportConfig` keyword arguments such as -`export_mode`, `assets_dir`, `full_page`, `preview_appearance`, and -`pdf_object_height`. - -- `export_mode="archival"` downloads assets locally and rewrites the HTML to - use local file references for stable, offline-friendly output. 
-- `export_mode="lightweight"` skips local downloads and keeps remote/preview - asset references for quick inspection. - -_Save or stream an attachment:_ - -```python -note = api.notes.get(note_id, with_attachments=True) -attachment = next(iter(note.attachments or []), None) - -if attachment: - saved_path = attachment.save_to("./exports/notes_attachments", service=api.notes) - print(saved_path) - - with open("./attachment-copy.bin", "wb") as file_out: - for chunk in attachment.stream(service=api.notes): - file_out.write(chunk) -``` - -Notes caveats: - -- `get()` raises `NoteLockedError` for passphrase-locked notes whose content - cannot be read. -- `get()`, `render_note()`, and `export_note()` raise `NoteNotFound` when the - note ID does not exist. -- `api.notes.raw` is available for advanced/debug workflows, but it is not the - primary Notes API surface. - -### Notes CLI Example - -[`examples/notes_cli.py`](examples/notes_cli.py) is a local developer utility -built on top of `api.notes`. It is useful for searching notes, inspecting the -rendering pipeline, and exporting HTML, but its selection heuristics and debug -output are convenience behavior rather than part of the Notes service contract. - -_Archival export (downloads local assets):_ - -```bash -uv run python examples/notes_cli.py \ - --username you@example.com \ - --title "My Note" \ - --max 1 \ - --output-dir ./exports/notes_html \ - --assets-dir ./exports/assets \ - --export-mode archival \ - --full-page -``` - -_Lightweight export (skips local asset downloads):_ - -```bash -uv run python examples/notes_cli.py \ - --username you@example.com \ - --title-contains "meeting" \ - --max 3 \ - --output-dir ./exports/notes_html \ - --export-mode lightweight -``` - -Important CLI flags: - -- `--title` filters by exact note title. -- `--title-contains` filters by case-insensitive title substring. -- `--max` limits how many matching notes are exported. -- `--output-dir` selects the directory for saved HTML output. 
-- `--export-mode archival|lightweight` controls whether assets are downloaded - locally (`archival`) or left as remote/preview references (`lightweight`). -- `--assets-dir` selects the base directory for downloaded assets in archival - mode. -- `--full-page` wraps saved output in a complete HTML page. If omitted, the CLI - saves an HTML fragment. -- `--notes-debug` enables verbose Notes/export debugging. -- `--dump-runs` prints attribute runs and writes an annotated mapping under - `workspace/notes_runs`. -- `--preview-appearance light|dark` selects the preferred preview variant when - multiple appearances are available. -- `--pdf-height` sets the pixel height for embedded PDF `` elements. - -`--download-assets` is no longer supported in the example CLI. Use -`--export-mode` to choose between archival and lightweight export behavior. - ## Examples If you want to see some code samples, see the [examples](examples.py). diff --git a/pyicloud/models/services/account/account_models.py b/pyicloud/models/services/account/account_models.py deleted file mode 100644 index 129e6eef..00000000 --- a/pyicloud/models/services/account/account_models.py +++ /dev/null @@ -1,420 +0,0 @@ -""" -Pydantic models for the Account service. 
- -Models for these operations: - - {self.service_root}/setup/web/device/getDevices - - {self.service_root}/setup/web/family/getFamilyDetails - - {self.service_root}/setup/ws/1/storageUsageInfo - - {self._gateway_root}/v1/accounts/{dsid}/plans/icloud/pricing - - {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features/cloud.storage/plan-summary - - {self._gateway_root}/v1/accounts/{dsid}/plans/next-larger-plan - - {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features - - {self._gateway_root}/v4/accounts/{dsid}/subscriptions/features - - -""" - -from datetime import datetime -from typing import List, Literal, Optional - -from dateutil.parser import isoparse -from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator - -from pyicloud.utils import underscore_to_camelcase - - -# ─── Base and Shared Config ────────────────────────────────────────────────── -class ConfigModel(BaseModel): - """Base class providing camel-case aliases, population by name, and allowing extra fields.""" - - model_config = ConfigDict( - alias_generator=underscore_to_camelcase, populate_by_name=True, extra="allow" - ) - - -# ─── Constants ─────────────────────────────────────────────────────────────── - -# Example constants (anonymized) -EXAMPLE_SERIAL_MAC = "●●●●●XXXXX" -EXAMPLE_DEVICE_NAME_MAC = "User's MacBook Pro" - -# Device specification constants -EXAMPLE_MAC_OS_VERSION = "OSX;15.5" -EXAMPLE_MAC_MODEL = "MacBookPro18,4" -EXAMPLE_MAC_DISPLAY_NAME = 'MacBook Pro 14"' - - -# --- {self.service_root}/setup/web/device/getDevices ─────────────────────── - - -class AccountDevice(ConfigModel): - """Model for any account device.""" - - # Fields that are ALWAYS present (from sample data) - serial_number: str - """Device serial number (privacy-masked)""" - os_version: str - """Operating system and version (format: 'OS;version')""" - name: str - """User-assigned device name""" - model: str - """Apple's internal model identifier""" - udid: str - """Universally unique 
device identifier""" - model_display_name: str - """Human-readable model name""" - - # Device images (always present) - Keep manual aliases because uses "URL" not "Url" - model_large_photo_url1x: HttpUrl = Field(alias="modelLargePhotoURL1x") - """URL of large photo (1x)""" - model_large_photo_url2x: HttpUrl = Field(alias="modelLargePhotoURL2x") - """URL of large photo (2x)""" - model_small_photo_url1x: HttpUrl = Field(alias="modelSmallPhotoURL1x") - """URL of small photo (1x)""" - model_small_photo_url2x: HttpUrl = Field(alias="modelSmallPhotoURL2x") - """URL of small photo (2x)""" - - # Fields that MIGHT be present (observed in sample) - imei: Optional[str] = None - """International Mobile Equipment Identity (privacy-masked)""" - latest_backup: Optional[datetime] = None - """ISO timestamp of most recent backup""" - payment_methods: Optional[List[str]] = None - """List of payment method IDs associated with this device""" - - @field_validator("latest_backup", mode="before") - @classmethod - def _parse_latest_backup(cls, v): - """Parse ISO 8601 datetime string to datetime object.""" - if isinstance(v, str): - # Use dateutil for proper ISO 8601 parsing (handles "Z" suffix in Python 3.10+) - return isoparse(v) - return v - - # extra="allow" handles any other device-specific fields automatically - - -class AccountPaymentMethod(ConfigModel): - """Model for an account payment method.""" - - last_four_digits: str - """Last four digits of card/account number""" - balance_status: Literal["UNAVAILABLE", "NOTAPPLICABLE", "AVAILABLE"] - """Current balance status of the payment method""" - suspension_reason: Literal["ACTIVE", "SUSPENDED", "INACTIVE"] - """Current suspension status""" - id: str - """Unique payment method identifier""" - type: str - """Descriptive name of payment method""" - is_car_key: bool - """Whether this method can be used as a car key""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "lastFourDigits": "XXXX", 
- "balanceStatus": "UNAVAILABLE", - "suspensionReason": "ACTIVE", - "id": "redacted", - "type": "Revolut Mastercard", - "isCarKey": False, - } - } - ) - - -class GetDevicesResponse(ConfigModel): - """Response model for the Get Devices operation.""" - - devices: List[AccountDevice] - """List of devices associated with the account""" - payment_methods: List[AccountPaymentMethod] - """List of payment methods associated with the account""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "devices": [ - { - "serialNumber": EXAMPLE_SERIAL_MAC, - "osVersion": EXAMPLE_MAC_OS_VERSION, - "name": EXAMPLE_DEVICE_NAME_MAC, - "model": EXAMPLE_MAC_MODEL, - "modelDisplayName": EXAMPLE_MAC_DISPLAY_NAME, - } - ], - "paymentMethods": [ - { - "lastFourDigits": "XXXX", - "balanceStatus": "UNAVAILABLE", - "suspensionReason": "ACTIVE", - "id": "redacted", - "type": "Revolut Mastercard", - "isCarKey": False, - } - ], - } - } - ) - - -# ─── {self.service_root}/setup/web/family/getFamilyDetails ────────────────────────────────────────────────────── - - -class FamilyMember(ConfigModel): - """Model for a family member.""" - - last_name: str - """Family member's last name""" - dsid: str - """Apple ID Directory Services identifier""" - original_invitation_email: str - """Email address used for the original family invitation""" - full_name: str - """Complete name of the family member""" - age_classification: Literal["ADULT", "CHILD", "TEEN"] - """Age classification category""" - apple_id_for_purchases: str - """Apple ID used for purchases""" - apple_id: str - """Primary Apple ID""" - family_id: str - """Identifier of the family group""" - first_name: str - """Family member's first name""" - has_parental_privileges: bool - """Whether this member has parental control privileges""" - has_screen_time_enabled: bool - """Whether Screen Time is enabled for this member""" - has_ask_to_buy_enabled: bool - """Whether Ask to Buy is enabled for this member""" 
- has_share_purchases_enabled: bool - """Whether purchase sharing is enabled""" - has_share_my_location_enabled: bool - """Whether location sharing is enabled""" - dsid_for_purchases: str - """Directory Services ID used for purchases""" - - # Optional field - only appears for some family members - share_my_location_enabled_family_members: Optional[List[str]] = None - """List of family member DSIDs for whom location sharing is enabled""" - - -class Family(ConfigModel): - """Model for family group information.""" - - family_id: str - """Unique identifier for the family group""" - transfer_requests: List[str] - """List of pending transfer requests""" - invitations: List[str] - """List of pending family invitations""" - organizer: str - """DSID of the family organizer""" - members: List[str] - """List of family member DSIDs""" - outgoing_transfer_requests: List[str] - """List of outgoing transfer requests""" - etag: str - """Entity tag for caching/versioning""" - - -class GetFamilyDetailsResponse(ConfigModel): - """Response model for the Get Family Details operation.""" - - status_message: str = Field(alias="status-message") - """Human-readable status message""" - family_invitations: List[str] - """List of pending family invitations""" - outgoing_transfer_requests: List[str] - """List of outgoing transfer requests""" - is_member_of_family: bool - """Whether the current user is a family member""" - family: Family - """Family group information""" - family_members: List[FamilyMember] - """List of all family members""" - status: int - """Numeric status code""" - show_add_member_button: bool - """Whether to show the add member button in UI""" - - -# ─── {self.service_root}/setup/ws/1/storageUsageInfo ────────────────────────────────────────────────── - - -class StorageUsageByMedia(ConfigModel): - """Model for storage usage by media type.""" - - media_key: str - """Media type identifier (e.g., 'photos', 'backup', 'docs')""" - display_label: str - """Human-readable label for 
the media type""" - display_color: str - """Hex color code for UI display (without #)""" - usage_in_bytes: int - """Storage used by this media type in bytes""" - - -class StorageUsageInfo(ConfigModel): - """Model for overall storage usage information.""" - - comp_storage_in_bytes: int - """Complementary storage in bytes""" - used_storage_in_bytes: int - """Total used storage in bytes""" - total_storage_in_bytes: int - """Total available storage in bytes""" - commerce_storage_in_bytes: int - """Commercial storage allocation in bytes""" - - -class QuotaStatus(ConfigModel): - """Model for storage quota status information.""" - - over_quota: bool - """Whether the user is over their storage quota""" - have_max_quota_tier: bool - """Whether the user has the maximum quota tier""" - almost_full: bool = Field(alias="almost-full") - """Whether the storage is almost full""" - paid_quota: bool - """Whether the user has a paid storage quota""" - - -class StorageUsageInfoResponse(ConfigModel): - """Response model for the Get Storage Usage Info operation.""" - - storage_usage_by_media: List[StorageUsageByMedia] - """Breakdown of storage usage by media type""" - storage_usage_info: StorageUsageInfo - """Overall storage usage statistics""" - quota_status: QuotaStatus - """Storage quota status information""" - - -# --- {self._gateway_root}/v1/accounts/{dsid}/plans/icloud/pricing ────────────────────────────────────────────────────────── - - -class PricingPlansResponse(ConfigModel): - """Response model for the Get Pricing Plans operation.""" - - paid_plan: bool - """Whether this is a paid plan""" - price_for_display: str - """Formatted price string for display (e.g., '$9.99')""" - renewal_period: Literal["MONTHLY", "YEARLY"] - """Billing cycle frequency""" - - -# --- {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features/cloud.storage/plan-summary ────────────────────────────────────────────────────────── - - -class PlanInclusion(ConfigModel): - """Model for plan inclusion 
information.""" - - included_in_plan: bool - """Whether the feature is included in this plan""" - limit: Optional[int] = None - """Storage limit amount (if applicable)""" - limit_units: Optional[str] = None - """Storage limit units (e.g., 'TIB', 'GIB')""" - - -class PlanSummaryResponse(ConfigModel): - """Response model for the Get Plan Summary operation.""" - - feature_key: str - """Feature identifier (e.g., 'cloud.storage')""" - summary: PlanInclusion - """Main plan summary information""" - included_with_account_purchased_plan: PlanInclusion - """Inclusion details for account purchased plan""" - included_with_apple_one_plan: PlanInclusion - """Inclusion details for Apple One plan""" - included_with_shared_plan: PlanInclusion - """Inclusion details for shared plan""" - included_with_comped_plan: PlanInclusion - """Inclusion details for complimentary plan""" - included_with_managed_plan: PlanInclusion - """Inclusion details for managed plan""" - - -# --- {self._gateway_root}/v1/accounts/{dsid}/plans/next-larger-plan ────────────────────────────────────────────────────────── - - -class NextLargerPlanResponse(ConfigModel): - """Response model for the Get Next Larger Plan operation.""" - - parameters: str - """URL-encoded parameters for the plan purchase""" - interrupted_buy_error_codes: str - """JSON-encoded array of error codes as string""" - price_for_display: str - """Formatted price string for display (e.g., '$29.99')""" - plan_size_in_bytes: int - """Storage plan size in bytes""" - plan_name: str - """Human-readable plan name (e.g., '6 TB')""" - highest_tier_plan_name: str - """Name of the highest available tier plan""" - user_eligible_for_offer: bool - """Whether the user is eligible for this offer""" - - -# --- {self._gateway_root}/v3/accounts/{dsid}/subscriptions/features ────────────────────────────────────────────────────────── - - -class SubscriptionV3Feature(ConfigModel): - """Model for an individual subscription feature.""" - - feature_key: str - 
"""Feature identifier (e.g., 'cloud.storage', 'home.cameras')""" - can_use: bool - """Whether the user can use this feature""" - cache_till: datetime - """ISO timestamp when this feature data expires from cache""" - limit: Optional[int] = None - """Feature limit amount (if applicable)""" - limit_units: Optional[str] = None - """Feature limit units (e.g., 'TIB', 'GIB')""" - - @field_validator("cache_till", mode="before") - @classmethod - def _parse_cache_till(cls, v): - """Parse ISO 8601 datetime string to datetime object.""" - if isinstance(v, str): - return isoparse(v) - return v - - -# Type alias for the subscription features response (array of features) -SubscriptionFeaturesResponse = List[SubscriptionV3Feature] - - -# --- {self._gateway_root}/v4/accounts/{dsid}/subscriptions/features ────────────────────────────────────────────────────────── - - -class SubscriptionV4Feature(ConfigModel): - """Model for version 4 subscription features.""" - - feature_key: str - """Feature identifier (e.g., 'apps.rsvp.create-event')""" - can_use: bool - """Whether the user can use this feature""" - cache_till: datetime - """ISO timestamp when this feature data expires from cache""" - access_token: str - """JWT access token for this feature""" - - @field_validator("cache_till", mode="before") - @classmethod - def _parse_cache_till(cls, v): - """Parse ISO 8601 datetime string to datetime object.""" - if isinstance(v, str): - return isoparse(v) - return v - - -# Type alias for the v4 subscription features response (array of features) -SubscriptionV4FeaturesResponse = List[SubscriptionV4Feature] diff --git a/pyicloud/models/services/calendar/missing_operations.md b/pyicloud/models/services/calendar/missing_operations.md deleted file mode 100644 index c2659af5..00000000 --- a/pyicloud/models/services/calendar/missing_operations.md +++ /dev/null @@ -1,128 +0,0 @@ -# HTTP Operations not yet implemented in calendar.py service - 
-================================================================================ - -1. update_calendar - -================================================================================ - -PURPOSE: Updates an existing calendar -METHOD: POST -URL: {service_root}/ca/collections/{calendar.guid} -PATH PARAMS: - #TODO: determine path params for update_calendar -QUERY PARAMS: - #TODO: determine query params for update_calendar -PAYLOAD: #TODO: determine payload format for update_calendar -RESPONSE: #TODO: determine response format for update_calendar - -================================================================================ - -update_event - -================================================================================ - -PURPOSE: Updates an existing event -METHOD: POST -URL: {service_root}/ca/events/{event.pguid}/{event.guid} -PATH PARAMS: - #TODO: determine path params for update_event -QUERY PARAMS: - #TODO: determine query params for update_event -PAYLOAD: #TODO: determine payload format for update_event -RESPONSE: #TODO: determine response format for update_event - -================================================================================ - -Idle - -================================================================================ - -PURPOSE: Unknown purpose -METHOD: POST -URL: {service_root}/ca/idle -PATH PARAMS: - #TODO: determine path params for idle -QUERY PARAMS: - #TODO: determine query params for idle -PAYLOAD: #TODO: determine payload format for idle -RESPONSE: #TODO: determine response format for idle - -================================================================================ - -alarmtriggers - -================================================================================ - -PURPOSE: Unknown purpose -METHOD: GET -URL: {service_root}/alarmtriggers -PATH PARAMS: - #TODO: determine path params for alarmtriggers -QUERY PARAMS: - #TODO: determine query params for alarmtriggers -PAYLOAD: #TODO: determine payload format for 
alarmtriggers -RESPONSE: #TODO: determine response format for alarmtriggers - -================================================================================ - -State - -================================================================================ - -PURPOSE: Unknown purpose -METHOD: GET -URL: {service_root}/ca/state -PATH PARAMS: - #TODO: determine path params for State -QUERY PARAMS: - #TODO: determine query params for State -PAYLOAD: #TODO: determine payload format for State -RESPONSE: #TODO: determine response format for State - -================================================================================ - -serverpreferences - -================================================================================ - -PURPOSE: Unknown purpose -METHOD: POST -URL: {service_root}/ca/serverpreferences -PATH PARAMS: - #TODO: determine path params for serverpreferences -QUERY PARAMS: - #TODO: determine query params for serverpreferences -PAYLOAD: #TODO: determine payload format for serverpreferences -RESPONSE: #TODO: determine response format for serverpreferences - -================================================================================ - -Remove all events from a recurring event - -================================================================================ - -PURPOSE: Removes all events from a recurring event -METHOD: POST -URL: {service_root}/ca/events/{event.pguid}/{event.guid}\_\_20250802T100000Z/all -PATH PARAMS: - #TODO: determine path params for remove all events from a recurring event -QUERY PARAMS: - #TODO: determine query params for remove all events from a recurring event -PAYLOAD: #TODO: determine payload format for remove all events from a recurring event -RESPONSE: #TODO: determine response format for remove all events from a recurring event - -================================================================================ - -attachment - -================================================================================ - -PURPOSE: 
attach a file to an event -METHOD: POST -URL: {service_root}/ca/attachment/{event.pguid}/{event.guid} -PATH PARAMS: - #TODO: determine path params for attachment -QUERY PARAMS: - #TODO: determine query params for attachment - example: -[ -{'X-name': 'folo_logo.png'}, -{'X-type': 'image%2Fpng'}, -{'ctag': 'HwoQEgwAAQPbkyhd0AAAAAAYAxgAIhUIzZ7FhsqY69QyEPbZ8rWG86q0pAEoAEgA'}, -{'lang': 'en-US'}, -{'usertz': 'Europe%2FParis'}, -{'requestID': '132'}, -{'ifMatch': 'mdp8vzll'}, -{'startDate': '2025-07-26'}, -{'endDate': '2025-09-06'}, -{'clientBuildNumber': '2526Project38'}, -{'clientMasteringNumber': '2526B20'}, -{'clientId': '93cf465f-eb5a-4f4a-8043-f7bcbd9b57ac'}, -{'dsid': '10927495723'} -] -PAYLOAD: #TODO: determine payload format for attachment -RESPONSE: #TODO: determine response format for attachment diff --git a/pyicloud/models/services/hidemyemail/hidemyemail_models.py b/pyicloud/models/services/hidemyemail/hidemyemail_models.py deleted file mode 100644 index f2739f32..00000000 --- a/pyicloud/models/services/hidemyemail/hidemyemail_models.py +++ /dev/null @@ -1,661 +0,0 @@ -# Start of Selection -""" -Pydantic models for the HideMyEmail service. 
- -Models for these operations: - - Generate new email aliases - - Reserve specific aliases - - List all existing aliases - - Get alias details by ID - - Update alias metadata (label, note) - - Delete aliases - - Deactivate aliases - - Reactivate aliases -""" - -from datetime import datetime, timezone -from typing import Annotated, Literal, Union - -from pydantic import BaseModel, ConfigDict, EmailStr, Field, field_validator - -from pyicloud.utils import underscore_to_camelcase - - -# ─── Base and Shared Config ────────────────────────────────────────────────── -class ConfigModel(BaseModel): - """Base class providing camel-case aliases, population by name, and allowing extra fields.""" - - model_config = ConfigDict( - alias_generator=underscore_to_camelcase, - populate_by_name=True, - extra="allow", - json_encoders={ - datetime: lambda dt: int(dt.replace(tzinfo=timezone.utc).timestamp()) - }, - ) - - -# Example constants (anonymized) -EXAMPLE_FORWARD_EMAIL = "user@example.com" -EXAMPLE_ALIAS_ON_DEMAND = "alias-example-1a@icloud.com" -EXAMPLE_ALIAS_IN_APP = "alias-inapp-2b@icloud.com" -EXAMPLE_ALIAS_GENERATED = "alias-generated-3c@icloud.com" -EXAMPLE_ALIAS_RESERVE = "alias-reserve-4d@icloud.com" -EXAMPLE_LABEL = "Project Signup" -EXAMPLE_RESERVED_LABEL = "Reserved Label" - - -# ─── Shared building-blocks ───────────────────────────────────────────────── -class HideMyEmailByIdRequest(ConfigModel): - """Request payload for single-alias operations by anonymousId.""" - - anonymous_id: str = Field(..., alias="anonymousId") - """Anonymous ID of the alias.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={"example": {"anonymousId": "abc123anonymous"}} - ) - - -class MessageResult(ConfigModel): - """Generic result payload containing only a `message` field.""" - - message: str = Field(...) 
- """Result message, e.g., 'success'.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={"example": {"message": "success"}} - ) - - -# ─── Alias models ─────────────────────────────────────────────────────────── -class HideMyEmailBase(ConfigModel): - """Common fields for Hide My Email entries.""" - - origin: Literal["ON_DEMAND", "IN_APP"] - """The origin of the alias, either "ON_DEMAND" or "IN_APP".""" - - anonymous_id: str = Field(..., alias="anonymousId") - """Anonymous ID of the alias.""" - - domain: str - """The domain associated with the alias.""" - - forward_to_email: EmailStr = Field(..., alias="forwardToEmail") - """The email address to which emails are forwarded.""" - - hme: str - """The Hide My Email address.""" - - label: str - """The label for the alias.""" - - note: str - """The note for the alias.""" - - create_timestamp: datetime = Field(..., alias="createTimestamp") - """Creation timestamp as a datetime object.""" - - is_active: bool = Field(..., alias="isActive") - """Whether the alias is active.""" - - recipient_mail_id: str = Field(..., alias="recipientMailId") - """The recipient mail ID.""" - - @field_validator("create_timestamp", mode="before") - @classmethod - def _parse_create_timestamp(cls, v): - # API returns milliseconds since epoch - if isinstance(v, int): - return datetime.fromtimestamp(v / 1000, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "forwardToEmail": EXAMPLE_FORWARD_EMAIL, - "hme": EXAMPLE_ALIAS_ON_DEMAND, - "label": EXAMPLE_LABEL, - "note": "", - "createTimestamp": 1700000000000, - "isActive": True, - "recipientMailId": "", - } - } - ) - - -class HideMyEmailOnDemand(HideMyEmailBase): - """Alias created on demand via iCloud settings.""" - - origin: Literal["ON_DEMAND"] - """The origin of the alias, always "ON_DEMAND".""" - - model_config = 
ConfigModel.model_config - - -class HideMyEmailInApp(HideMyEmailBase): - """Alias created within a third-party app supporting Hide My Email.""" - - origin: Literal["IN_APP"] - """The origin of the alias, always "IN_APP".""" - - origin_app_name: str = Field(..., alias="originAppName") - """The name of the originating app.""" - - app_bundle_id: str = Field(..., alias="appBundleId") - """The bundle ID of the originating app.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "origin": "IN_APP", - "anonymousId": "uvw111anon", - "domain": "com.example.app", - "forwardToEmail": EXAMPLE_FORWARD_EMAIL, - "hme": EXAMPLE_ALIAS_IN_APP, - "label": "App Feature", - "note": "Generated by App", - "createTimestamp": 1700000001234, - "isActive": True, - "recipientMailId": "", - "originAppName": "ExampleApp", - "appBundleId": "com.example.app", - } - } - ) - - -HideMyEmail = Annotated[ - Union[HideMyEmailOnDemand, HideMyEmailInApp], - Field(discriminator="origin"), -] - - -# ─── List endpoint ────────────────────────────────────────────────────────── -class HideMyEmailListResult(ConfigModel): - """Container for the result of a Hide My Email list operation.""" - - forward_to_emails: list[EmailStr] = Field( - default_factory=list, alias="forwardToEmails" - ) - """List of email addresses to which emails are forwarded.""" - - hme_emails: list[HideMyEmail] = Field(default_factory=list, alias="hmeEmails") - """List of Hide My Email aliases.""" - - selected_forward_to: EmailStr = Field(..., alias="selectedForwardTo") - """The currently selected forward-to email address.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "forwardToEmails": [EXAMPLE_FORWARD_EMAIL], - "hmeEmails": [ - { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "forwardToEmail": EXAMPLE_FORWARD_EMAIL, - "hme": EXAMPLE_ALIAS_ON_DEMAND, - "label": EXAMPLE_LABEL, - "note": "", - "createTimestamp": 
1700000000000, - "isActive": True, - "recipientMailId": "", - } - ], - "selectedForwardTo": EXAMPLE_FORWARD_EMAIL, - } - } - ) - - -class HideMyEmailListResponse(ConfigModel): - """Full response model for the Hide My Email 'list' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: HideMyEmailListResult - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000000, - "result": { - "forwardToEmails": [EXAMPLE_FORWARD_EMAIL], - "hmeEmails": [ - { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "forwardToEmail": EXAMPLE_FORWARD_EMAIL, - "hme": EXAMPLE_ALIAS_ON_DEMAND, - "label": EXAMPLE_LABEL, - "note": "", - "createTimestamp": 1700000000000, - "isActive": True, - "recipientMailId": "", - } - ], - "selectedForwardTo": EXAMPLE_FORWARD_EMAIL, - }, - } - } - ) - - -# ─── Generate endpoint ───────────────────────────────────────────────────── -class HideMyEmailGenerateRequest(ConfigModel): - """Request payload for generating a new Hide My Email address.""" - - lang_code: str = Field(..., alias="langCode") - """Language code for the request.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={"example": {"langCode": "en-us"}} - ) - - -class HideMyEmailGenerateResult(ConfigModel): - """Result payload containing the newly generated Hide My Email address.""" - - hme: str - """The newly generated Hide My Email address.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={"example": {"hme": EXAMPLE_ALIAS_GENERATED}} - ) - - -class HideMyEmailGenerateResponse(ConfigModel): - """Full response model for 
the Hide My Email 'generate' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: HideMyEmailGenerateResult = Field(...) - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000100, - "result": {"hme": EXAMPLE_ALIAS_GENERATED}, - } - } - ) - - -# ─── Reserve endpoint ──────────────────────────────────────────────────────── - - -class HideMyEmailReserveRequest(ConfigModel): - """Request payload for reserving an existing Hide My Email alias.""" - - hme: str - """The Hide My Email address to reserve.""" - - label: str - """The label for the reserved alias.""" - - note: str - """The note for the reserved alias.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "hme": EXAMPLE_ALIAS_RESERVE, - "label": EXAMPLE_RESERVED_LABEL, - "note": "", - } - } - ) - - -class HideMyEmailReserveOnly(ConfigModel): - """Slim alias model for the 'reserve' operation (no forwardToEmail).""" - - origin: Literal["ON_DEMAND"] - """The origin of the alias, always "ON_DEMAND".""" - - anonymous_id: str = Field(..., alias="anonymousId") - """Anonymous ID of the alias.""" - - domain: str - """The domain associated with the alias.""" - - hme: str - """The Hide My Email address.""" - - label: str - """The label for the alias.""" - - note: str - """The note for the alias.""" - - create_timestamp: datetime = Field(..., alias="createTimestamp") - """Creation timestamp as a datetime object.""" - - is_active: bool = Field(..., alias="isActive") - """Whether the alias is active.""" - - recipient_mail_id: str = Field(..., alias="recipientMailId") - """The recipient 
mail ID.""" - - @field_validator("create_timestamp", mode="before") - @classmethod - def _parse_create_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v / 1000, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "hme": { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "hme": EXAMPLE_ALIAS_RESERVE, - "label": EXAMPLE_RESERVED_LABEL, - "note": "", - "createTimestamp": 1700000200, - "isActive": True, - "recipientMailId": "", - } - } - } - ) - - -class HideMyEmailReserveResponse(ConfigModel): - """Full response model for the Hide My Email 'reserve' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: HideMyEmailReserveOnly = Field(...) - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000200, - "result": { - "hme": { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "hme": EXAMPLE_ALIAS_RESERVE, - "label": EXAMPLE_RESERVED_LABEL, - "note": "", - "createTimestamp": 1700000200, - "isActive": True, - "recipientMailId": "", - } - }, - } - } - ) - - -# ─── Get endpoint ────────────────────────────────────────────────────────── - - -class HideMyEmailGetRequest(HideMyEmailByIdRequest): - """Request model for the Hide My Email 'get' API operation.""" - - pass - - -class HideMyEmailGetResponse(ConfigModel): - """Response model for the Hide My Email 'get' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: HideMyEmailBase = 
Field(...) - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000300, - "result": { - "origin": "ON_DEMAND", - "anonymousId": "xyz000anon", - "domain": "", - "forwardToEmail": EXAMPLE_FORWARD_EMAIL, - "hme": EXAMPLE_ALIAS_ON_DEMAND, - "label": EXAMPLE_LABEL, - "note": "", - "createTimestamp": 1700000000000, - "isActive": True, - "recipientMailId": "", - }, - } - } - ) - - -# ─── Update endpoint ──────────────────────────────────────────────────────── - - -class HideMyEmailUpdateRequest(HideMyEmailByIdRequest): - """Request model for the Hide My Email 'update' API operation.""" - - label: str - """The new label for the alias.""" - - note: str - """The new note for the alias.""" - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "anonymousId": "abc123anonymous", - "label": EXAMPLE_LABEL, - "note": "Updated note", - } - } - ) - - -class HideMyEmailUpdateResponse(ConfigModel): - """Response model for the Hide My Email 'update' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: MessageResult = Field(...) 
- """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000400, - "result": {"message": "success"}, - } - } - ) - - -# ─── Delete endpoint ───────────────────────────────────────────────────────── - - -class HideMyEmailDeleteRequest(HideMyEmailByIdRequest): - """Request model for the Hide My Email 'delete' API operation.""" - - pass - - -class HideMyEmailDeleteResponse(ConfigModel): - """Response model for the Hide My Email 'delete' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: MessageResult = Field(...) - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000500, - "result": {"message": "success"}, - } - } - ) - - -# ─── Deactivate endpoint ───────────────────────────────────────────────────── - - -class HideMyEmailDeactivateRequest(HideMyEmailByIdRequest): - """Request model for the Hide My Email 'deactivate' API operation.""" - - pass - - -class HideMyEmailDeactivateResponse(ConfigModel): - """Response model for the Hide My Email 'deactivate' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: MessageResult = Field(...) 
- """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000600, - "result": {"message": "success"}, - } - } - ) - - -# ─── Reactivate endpoint ──────────────────────────────────────────────────── - - -class HideMyEmailReactivateRequest(HideMyEmailByIdRequest): - """Request model for the Hide My Email 'reactivate' API operation.""" - - pass - - -class HideMyEmailReactivateResponse(ConfigModel): - """Response model for the Hide My Email 'reactivate' API operation.""" - - success: bool - """Whether the API call was successful.""" - - timestamp: datetime - """Server timestamp as datetime object.""" - - result: MessageResult = Field(...) - """The result payload.""" - - @field_validator("timestamp", mode="before") - @classmethod - def _parse_timestamp(cls, v): - if isinstance(v, int): - return datetime.fromtimestamp(v, tz=timezone.utc) - return v - - model_config = ConfigModel.model_config | ConfigDict( - json_schema_extra={ - "example": { - "success": True, - "timestamp": 1700000700, - "result": {"message": "success"}, - } - } - ) - - -# End of Selection diff --git a/pyicloud/models/services/hidemyemail/hme_list_test.py b/pyicloud/models/services/hidemyemail/hme_list_test.py deleted file mode 100644 index 2bc679ec..00000000 --- a/pyicloud/models/services/hidemyemail/hme_list_test.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Demo script to load a JSON file containing a Hide My Email "list" endpoint response -and demonstrate datetime parsing for timestamps and create_timestamps. 
- -Usage: - python load_list_response_demo.py path/to/list_response.json -""" - -import argparse -import json - -from rich import pretty -from rich.console import Console -from rich.traceback import install - -# Import the Pydantic model (ensure your models are on PYTHONPATH or adjust import) -from pyicloud.models.services.hidemyemail.hidemyemail_models import ( - HideMyEmailListResponse, -) - -install(show_locals=True) -pretty.install() - -console = Console() - - -def main(): - """ - Demo script to load a JSON file containing a Hide My Email "list" endpoint response - """ - parser = argparse.ArgumentParser( - description="Load and validate a Hide My Email list response, printing datetime fields." - ) - parser.add_argument( - "json_path", - help="Path to the JSON file with the 'list' endpoint response", - ) - args = parser.parse_args() - - # Load raw JSON - with open(args.json_path, "r", encoding="utf-8") as f: - data = json.load(f) - - # Validate and parse into Pydantic model - response = HideMyEmailListResponse.model_validate(data) - - console.rule("Response") - console.print(response) - - -if __name__ == "__main__": - main()