diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 65431ea90..bca4ddfa6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,7 +22,7 @@ jobs: - name: Install Node.js, NPM and Yarn uses: actions/setup-node@v1 with: - node-version: 16 + node-version: 22 - name: Build/release Electron app uses: samuelmeuli/action-electron-builder@v1 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 374956371..c1a9d40d1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -23,7 +23,7 @@ jobs: - name: Install Node.js, NPM and Yarn uses: actions/setup-node@v1 with: - node-version: 16 + node-version: 22 - name: Get yarn cache directory path id: yarn-cache-dir-path @@ -57,7 +57,7 @@ jobs: - name: Install Node.js, NPM and Yarn uses: actions/setup-node@v1 with: - node-version: 16 + node-version: 22 github_token: ${{ secrets.github_token }} - name: Get yarn cache directory path diff --git a/common/core.ts b/common/core.ts index 052339248..84103406a 100644 --- a/common/core.ts +++ b/common/core.ts @@ -70,3 +70,20 @@ export function normalizeBase(str: string): string { .replace(/[\u0300-\u036f]/g, '') .toLowerCase(); } + +/** Returns the date at 00:00 today */ +export function getToday(): Date { + const today = new Date(); + today.setHours(0); + today.setMinutes(0); + today.setSeconds(0, 0); + return today; +} + +/** Returns the date at the start of the current week (Sunday at 00:00) */ +export function getWeekStart(): Date { + const date = getToday(); + const dayOfWeek = date.getDay(); + date.setDate(date.getDate() - dayOfWeek); + return date; +} diff --git a/src/frontend/TimingManager.ts b/common/debug.ts similarity index 79% rename from src/frontend/TimingManager.ts rename to common/debug.ts index 34a5b4e1d..9e45c0ad3 100644 --- a/src/frontend/TimingManager.ts +++ b/common/debug.ts @@ -66,3 +66,25 @@ export class TimingManager { this.timings.clear(); } } + +export const 
debugShallowCompare = <T extends Record<string, unknown>>( + componentName = 'MemoComponent', +) => { + return (prev: T, next: T): boolean => { + let areEqual = true; + + const keys = new Set([...Object.keys(prev), ...Object.keys(next)]); + + for (const key of keys) { + if (!Object.is(prev[key], next[key])) { + areEqual = false; + console.log(`[${componentName}] prop changed → "${key}"`, { + from: prev[key], + to: next[key], + }); + } + } + + return areEqual; + }; +}; diff --git a/common/promise.ts b/common/promise.ts index b6dea6d32..3a1bb71bd 100644 --- a/common/promise.ts +++ b/common/promise.ts @@ -87,3 +87,52 @@ export function promiseAllLimit( return pendingPromise; } + +export type BatchFetcher<T, NextOpts> = (opts?: NextOpts) => Promise<{ + items: T[]; + nextOpts?: NextOpts; +}>; + +export type BatchProcessor<T, Acc> = (batch: T[], acc: Acc) => Promise<Acc> | Acc; + +/** + * Iterates over paginated resources and applies a processing function to each batch. + * @template T - The type of items retrieved. + * @template NextOpts - The type of the cursor for pagination or any options to compute the next batch. + * @template Acc - The type of the result accumulator. + * @param fetchBatch - Function to retrieve the next batch of data. + * @param processBatch - Function to apply logic to the current batch and update the accumulator. + * @param initialAcc - The starting value for the accumulation. + * @param cancel - Optional callback to abort the process prematurely. + * @returns A promise that resolves to the final accumulated value. 
+ */ +export async function batchReducer<T, NextOpts, Acc>( + fetchBatch: BatchFetcher<T, NextOpts>, + processBatch: BatchProcessor<T, Acc>, + initialAcc: Acc, + cancel?: () => boolean, +): Promise<Acc> { + let opts: NextOpts | undefined; + let acc = initialAcc; + + while (true) { + if (cancel?.()) { + console.log('CANCELLING!'); + break; + } + const { items, nextOpts } = await fetchBatch(opts); + + if (!items.length) { + break; + } + + acc = await processBatch(items, acc); + + if (!nextOpts) { + break; + } + opts = nextOpts; + } + + return acc; +} diff --git a/package.json b/package.json index e41cd8fca..4f933d0e3 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,7 @@ "package": "yarn build && electron-builder", "logo": "ncp ./resources/logo/icns/allusion-logomark-fc.icns ./build/icon.icns && ncp ./resources/logo/ico/allusion-logomark-fc-256x256.ico ./build/icon.ico", "build": "rimraf dist && yarn production && yarn logo", + "postinstall": "electron-rebuild -f -w better-sqlite3", "pack": "electron-builder --dir", "dist": "electron-builder", "build:masonry": "cd wasm/wasm-build && cargo run masonry masonry/masonry-scalar && cargo run masonry masonry/masonry-simd -- -C target-feature=+simd128", @@ -77,6 +78,11 @@ "node_modules/picomatch/**/*", "node_modules/node-addon-api/**/*", "node_modules/is-extglob/**/*", + + "node_modules/better-sqlite3/**/*", + "node_modules/bindings/**/*", + "node_modules/file-uri-to-path/**/*", + "build/**/*", "package.json" ], @@ -94,6 +100,7 @@ "homepage": "https://github.com/RafaUC/Allusion/Allusion#readme", "devDependencies": { "@svgr/webpack": "^6.5.1", + "@types/better-sqlite3": "^7.6.13", "@types/chrome": "^0.0.195", "@types/fs-extra": "^11.0.1", "@types/jest": "^29.5.1", @@ -107,6 +114,7 @@ "css-loader": "^6.7.3", "electron": "21.3.0", "electron-builder": "23.6.0", + "electron-rebuild": "^3.2.9", "eslint": "^8.34.0", "eslint-config-prettier": "^8.6.0", "eslint-plugin-prettier": "^4.2.1", @@ -135,11 +143,12 @@ "@floating-ui/react-dom": "^1.3.0", "@parcel/watcher": 
"^2.5.1", "ag-psd": "^15.0.0", + "better-sqlite3": "9.6.0", + "chokidar": "^3.5.3", "comlink": "^4.4.1", - "dexie": "^3.2.3", - "dexie-export-import": "^1.0.3", "electron-updater": "^5.3.0", "fs-extra": "^11.1.0", + "kysely": "^0.28.7", "mobx": "^6.8.0", "mobx-react-lite": "^3.4.0", "node-exiftool": "^2.3.0", diff --git a/resources/style/advanced-search.scss b/resources/style/advanced-search.scss index 4f95d03df..e284c79b3 100644 --- a/resources/style/advanced-search.scss +++ b/resources/style/advanced-search.scss @@ -2,7 +2,7 @@ display: flex; flex-direction: column; // changed from min-width since large tag names expand the dialog width which looks bad - width: min(85ch, 80vw); + width: min(100ch, 80vw); margin-inline: auto; .criteria-input, @@ -19,46 +19,114 @@ } } +#criteria-builder-label { + display: flex; + padding-bottom: 0.4rem; +} + #criteria-builder { display: grid; - grid-template-columns: 1fr 1fr 1fr min-content; + grid-template-columns: 3.5rem 3fr 3fr 4fr min-content; gap: 0.25rem 0.5rem; align-items: center; label { text-transform: uppercase; font-size: smaller; + padding-left: 1ch; + align-self: end; } } #query-editor-container { overflow: hidden auto; - padding: 2px 0; + padding: 2px; // dialog height - height of basically everything except the container max-height: calc(80vh - 17.25rem); } -#query-editor { +#query-editor-container-label { + display: flex; + padding-block: 0.4rem; +} + +.query-editor { + display: grid; + grid-template-columns: 3.5rem 3fr 3fr 4fr min-content; + gap: 0.25rem 0.5rem; + align-items: center; width: 100%; border-spacing: 0; - tr { - margin-bottom: 0.25rem; + .criteria-input, + [role='combobox'] { + //min-width: 175px; + width: 100%; + } + + .group-containter { + grid-column: 1 / -1; + padding: 0.5rem; + border: 1px dashed var(--background-color-selected); + border-radius: 0.25rem; + } + + .separator-line, + .criteria-separator, + .separator { + grid-column: 1 / -1; + display: flex; + align-items: center; + text-align: 
center; + color: var(--text-color-muted); + opacity: 0.5; + + &::before, + &::after { + content: ""; + flex: 1; + height: 1px; + margin: 0 8px; + } + + .conjunction-input { + width: fit-content; + background: var(--background-color); + } } - td, - th { - padding: 0; - padding-bottom: 0.25rem; + .separator-line { + &::before, + &::after { + background: var(--text-color-muted); + } } - td { - padding-inline-start: 0.5rem; + .criteria-separator { + margin: -2.5px; + font-size: 0.55rem; + + .conjunction-input { + height: 0.78rem; + min-height: unset; + } } - .criteria-input, - [role='combobox'] { - min-width: 175px; - width: 100%; + .group-controls { + display: contents; + + .btn-icon, + .criteria-input { + height: 1.1rem !important; + min-height: unset !important; + margin-bottom: 2px; + overflow: visible; + color: var(--accent-color-orange); + } + + // shiulw be 3 cell width + .group-name-input { + grid-column: 2 / 5; + } } } diff --git a/resources/style/app-toolbar.scss b/resources/style/app-toolbar.scss index 0d8a36df1..ce5d71a93 100644 --- a/resources/style/app-toolbar.scss +++ b/resources/style/app-toolbar.scss @@ -2,6 +2,8 @@ justify-content: stretch; .toolbar-button { + flex-shrink: 0; + &[aria-pressed='true'], &[aria-checked='true'] { color: var(--text-color-strong); diff --git a/resources/style/content.scss b/resources/style/content.scss index 2a25ad4bf..cc5aeff8a 100644 --- a/resources/style/content.scss +++ b/resources/style/content.scss @@ -83,11 +83,14 @@ background-color: var(--accent-color-yellow); :is(img, video) { - clip-path: inset(0.175rem round calc(var(--thumbnail-radius) - 0.175rem)); + clip-path: inset(0.2rem round calc(var(--thumbnail-radius) - 0.2rem)); } } [aria-selected=true] > & > :is(img, video, .image-error){ - clip-path: inset(0.175rem round calc(var(--thumbnail-radius) - 0.175rem)); + clip-path: inset(0.2rem round calc(var(--thumbnail-radius) - 0.2rem)); + } + :not([aria-selected=true]) > & > :is(img, video, .image-error){ + transition: 
all 0ms; } } diff --git a/src/api/data-backup.ts b/src/api/data-backup.ts index cf95b6d45..54e45c5d5 100644 --- a/src/api/data-backup.ts +++ b/src/api/data-backup.ts @@ -5,5 +5,6 @@ export interface DataBackup { schedule(): void; backupToFile(path: string): Promise; restoreFromFile(path: string): Promise; + restoreEmpty(): Promise; peekFile(path: string): Promise<[numTags: number, numFiles: number]>; } diff --git a/src/api/data-storage-search.ts b/src/api/data-storage-search.ts index 0b22523f8..c7b82cfbd 100644 --- a/src/api/data-storage-search.ts +++ b/src/api/data-storage-search.ts @@ -1,3 +1,5 @@ +import { ID } from './id'; + export type PropertyKeys = { [K in keyof T]: K extends string ? K : never; }[keyof T]; @@ -13,8 +15,18 @@ export const enum OrderDirection { Desc, } +export type IndexableType = number | string | Date | Array | Uint8Array; + +export type PaginationDirection = 'after' | 'before'; + +export type Cursor = { id: ID; orderValue: number | string | bigint | null }; + // General search criteria for a database entity // FFR: Boolean keys are not supported in IndexedDB/Dexie - must store booleans as 0/1 +export type ConditionGroupDTO = { + conjunction: SearchConjunction; + children: Array | ConditionDTO>; +}; export type ConditionDTO = | ArrayConditionDTO @@ -53,6 +65,9 @@ export type BaseIndexSignature = { [key: string]: any }; // Trick for converting array to type https://stackoverflow.com/a/49529930/2350481 +export const SearchConjunctions = ['and', 'or'] as const; +export type SearchConjunction = (typeof SearchConjunctions)[number]; + export const NumberOperators = [ 'equals', 'notEqual', diff --git a/src/api/data-storage.ts b/src/api/data-storage.ts index bbb167175..11c09161e 100644 --- a/src/api/data-storage.ts +++ b/src/api/data-storage.ts @@ -1,11 +1,18 @@ -import { IndexableType } from 'dexie'; -import { ConditionDTO, OrderBy, OrderDirection } from './data-storage-search'; -import { FileDTO } from './file'; +import { + ConditionGroupDTO, + 
Cursor, + IndexableType, + OrderBy, + OrderDirection, + PaginationDirection, +} from './data-storage-search'; +import { FileDTO, FileStats } from './file'; import { FileSearchDTO } from './file-search'; import { ID } from './id'; import { LocationDTO } from './location'; import { TagDTO } from './tag'; import { ExtraPropertyDTO } from './extraProperty'; +import { BatchFetcher } from 'common/promise'; /** * The user generated persisted data edited or viewed by one or multiple actors (users, multiple devices etc.). @@ -22,6 +29,9 @@ export interface DataStorage { order: OrderBy, fileOrder: OrderDirection, useNaturalOrdering: boolean, + limit?: number, + pagination?: PaginationDirection, + cursor?: Cursor, extraPropertyID?: ID, ): Promise; fetchFilesByID(ids: ID[]): Promise; @@ -30,12 +40,14 @@ export interface DataStorage { fetchSearches(): Promise; fetchExtraProperties(): Promise; searchFiles( - criteria: ConditionDTO | [ConditionDTO, ...ConditionDTO[]], + criteria: ConditionGroupDTO | undefined, order: OrderBy, fileOrder: OrderDirection, useNaturalOrdering: boolean, + limit?: number, + pagination?: PaginationDirection, + cursor?: Cursor, extraPropertyID?: ID, - matchAny?: boolean, ): Promise; createTag(tag: TagDTO): Promise; createFilesFromPath(path: string, files: FileDTO[]): Promise; @@ -53,6 +65,36 @@ export interface DataStorage { removeLocation(location: ID): Promise; removeSearch(search: ID): Promise; removeExtraProperties(extraProperty: ID[]): Promise; - countFiles(): Promise<[fileCount: number, untaggedFileCount: number]>; + addTagsToFiles(tagIds: ID[], criteria?: ConditionGroupDTO): Promise; + removeTagsFromFiles(tagIds: ID[], criteria?: ConditionGroupDTO): Promise; + clearTagsFromFiles(criteria?: ConditionGroupDTO): Promise; + countFiles( + options?: { files: boolean; untagged: boolean }, + criteria?: ConditionGroupDTO, + ): Promise<[fileCount: number | undefined, untaggedFileCount: number | undefined]>; + compareFiles( + locationId: ID, + diskFiles: 
FileStats[], + ): Promise<{ createdStats: FileStats[]; missingFiles: FileDTO[] }>; + findMissingDBMatches( + missingFiles: FileDTO[], + ): Promise>; clear(): Promise; + setSeed(seed?: number): Promise; +} + +export function makeFileBatchFetcher( + backend: DataStorage, + n: number, + filter?: ConditionGroupDTO, +): BatchFetcher { + return async (cursor?: Cursor) => { + // eslint-disable-next-line prettier/prettier + const items = await backend.searchFiles(filter, 'absolutePath', OrderDirection.Desc, false, n, 'after', cursor); + const cursorItem = items.at(-1); + return { + items, + nextOpts: cursorItem ? { id: cursorItem.id, orderValue: cursorItem.absolutePath } : undefined, + }; + }; } diff --git a/src/api/extraProperty.ts b/src/api/extraProperty.ts index 964cf37ce..bc16ce793 100644 --- a/src/api/extraProperty.ts +++ b/src/api/extraProperty.ts @@ -3,6 +3,7 @@ import { ID } from './id'; export enum ExtraPropertyType { text = 'text', number = 'number', + //timestamp = 'timestamp', } //ToDo: Only support number and string for now, more types could be added in the future. 
diff --git a/src/api/file-search.ts b/src/api/file-search.ts index c678b8b5d..3e84cbba1 100644 --- a/src/api/file-search.ts +++ b/src/api/file-search.ts @@ -1,10 +1,17 @@ +import { SearchConjunction } from './data-storage-search'; import { ID } from './id'; import { SearchCriteria } from './search-criteria'; export type FileSearchDTO = { id: ID; name: string; - criteria: SearchCriteria[]; - matchAny?: boolean; index: number; + rootGroup: SearchGroupDTO; +}; + +export type SearchGroupDTO = { + id: ID; + name: string; + conjunction: SearchConjunction; + children: Array; }; diff --git a/src/api/file.ts b/src/api/file.ts index b51191074..f2459a797 100644 --- a/src/api/file.ts +++ b/src/api/file.ts @@ -1,6 +1,18 @@ import { ID } from './id'; import { ExtraProperties } from './extraProperty'; +export type FileStats = { + absolutePath: string; + /** When file was last modified on disk */ + dateModified: Date; + /** When file was created on disk */ + dateCreated: Date; + /** Current size of the file in bytes */ + size: number; + /** A unique identifier of the file created by the OS, stays identical even when renaming/moving files */ + ino: string; +}; + export type FileDTO = { id: ID; /** Identifier for a file that persists after renaming/moving (retrieved from fs.Stats.ino) */ @@ -10,18 +22,17 @@ export type FileDTO = { relativePath: string; absolutePath: string; tags: ID[]; - /** used only for index on dexie */ - extraPropertyIDs: ID[]; + tagSorting: FILE_TAGS_SORTING_TYPE; extraProperties: ExtraProperties; /** When the file was imported into Allusion */ dateAdded: Date; /** When the file was modified in Allusion, not related to OS modified date */ dateModified: Date; - /** Original dateModified for checking when searching for overwritten files + /** Original OS dateModified for checking when searching for overwritten files * If the system's modified date is not the same, it means the file has been overwritten or another file with the same name * overwritten in place of 
the previous one, and the thumbnail and metadata needs to be updated. */ - OrigDateModified: Date; + dateModifiedOS: Date; /** * When the file was last indexed in Allusion: concerning the metadata and thumbnail. * If the system's modified date of the file exceeds this date, those properties shoudld be re-initialized @@ -67,3 +78,6 @@ export const IMG_EXTENSIONS = [ 'ogg', ] as const; export type IMG_EXTENSIONS_TYPE = (typeof IMG_EXTENSIONS)[number]; + +export const FILE_TAGS_SORTING = ['insertion', 'hierarchy'] as const; +export type FILE_TAGS_SORTING_TYPE = (typeof FILE_TAGS_SORTING)[number]; diff --git a/src/api/location.ts b/src/api/location.ts index e80bb70bc..fe1bd155b 100644 --- a/src/api/location.ts +++ b/src/api/location.ts @@ -11,6 +11,7 @@ export type LocationDTO = { }; export type SubLocationDTO = { + id: ID; name: string; isExcluded: boolean; subLocations: SubLocationDTO[]; diff --git a/src/api/search-criteria.ts b/src/api/search-criteria.ts index 46d2d9c91..a9c7249f6 100644 --- a/src/api/search-criteria.ts +++ b/src/api/search-criteria.ts @@ -25,10 +25,13 @@ export type OperatorType = | StringOperatorType | BinaryOperatorType; +export type CriteriaValueType = 'number' | 'date' | 'string' | 'array' | 'indexSignature'; + // FFR: Boolean keys are not supported in IndexedDB/Dexie - must store booleans as 0/1 export interface IBaseSearchCriteria { + id: ID; key: keyof FileDTO; - valueType: 'number' | 'date' | 'string' | 'array' | 'indexSignature'; + valueType: CriteriaValueType; readonly operator: OperatorType; } diff --git a/src/api/tag.ts b/src/api/tag.ts index e02777ad8..bc0f1aa92 100644 --- a/src/api/tag.ts +++ b/src/api/tag.ts @@ -16,4 +16,6 @@ export type TagDTO = { isHeader: boolean; aliases: string[]; description: string; + fileCount: number; + isFileCountDirty: boolean; }; diff --git a/src/backend/README.md b/src/backend/README.md index 98fc7402e..046ec752a 100644 --- a/src/backend/README.md +++ b/src/backend/README.md @@ -1,10 +1,10 @@ # Backend 
Although we call this our "backend", this section of code all runs in the Electron's renderer process. -This is where image and tag data is persisted to a database (IndexedDB). +This is where image and tag data is persisted to a database ~~(IndexedDB)~~ SQLite. The database is exposed to the web application through the `Backend.ts`, which acts as an API to the database. The idea behind this backend was to create a separation between the back- and frontend you usually find in web applications. If we ever want to change the type of database we use, this would make it relatively straightforward. -This set-up is not optimal for performant fetching of large amounts of items from the database. +~~This set-up is not optimal for performant fetching of large amounts of items from the database.~~ diff --git a/src/backend/_deprecated/backend.ts b/src/backend/_deprecated/backend.ts new file mode 100644 index 000000000..1d4f3011b --- /dev/null +++ b/src/backend/_deprecated/backend.ts @@ -0,0 +1,850 @@ +// @ts-nocheck +import Dexie, { Collection, IndexableType, Table, WhereClause } from 'dexie'; + +import { retainArray, shuffleArray } from '../../../common/core'; +import { DataStorage } from '../../api/data-storage'; +import { + ArrayConditionDTO, + BaseIndexSignature, + ConditionDTO, + DateConditionDTO, + IndexSignatureConditionDTO, + NumberConditionDTO, + OrderBy, + OrderDirection, + PropertyKeys, + StringConditionDTO, + StringProperties, + isExtraPropertyOperatorType, + isNumberOperator, + isStringOperator, +} from '../../api/data-storage-search'; +import { FileDTO } from '../../api/file'; +import { FileSearchDTO } from '../../api/file-search'; +import { ID } from '../../api/id'; +import { LocationDTO } from '../../api/location'; +import { ROOT_TAG_ID, TagDTO } from '../../api/tag'; +import { ExtraPropertyDTO, ExtraPropertyType } from '../../api/extraProperty'; + +const USE_TIMING_PROXY = false; + +/** + * @deprecated + * The backend of the application serves as an API, even 
though it runs on the same machine. + * This helps code organization by enforcing a clear separation between backend/frontend logic. + * Whenever we want to change things in the backend, this should have no consequences in the frontend. + * The backend has access to the database, which is exposed to the frontend through a set of endpoints. + */ +export default class Backend implements DataStorage { + #files: Table; + #tags: Table; + #locations: Table; + #searches: Table; + #extraProperties: Table; + #db: Dexie; + #notifyChange: () => void; + + constructor(db: Dexie, notifyChange: () => void) { + console.info(`IndexedDB: Initializing database "${db.name}"...`); + // Initialize database tables + this.#files = db.table('files'); + this.#tags = db.table('tags'); + this.#locations = db.table('locations'); + this.#searches = db.table('searches'); + this.#extraProperties = db.table('extraProperties'); + this.#db = db; + this.#notifyChange = notifyChange; + } + + static async init(db: Dexie, notifyChange: () => void): Promise { + const backend = new Backend(db, notifyChange); + // Create a root tag if it does not exist + const tags = backend.#tags; + await db.transaction('rw', tags, async () => { + const tagCount = await tags.count(); + if (tagCount === 0) { + await tags.put({ + id: ROOT_TAG_ID, + name: 'Root', + dateAdded: new Date(), + subTags: [], + impliedTags: [], + color: '', + isHidden: false, + isVisibleInherited: false, + aliases: [], + description: '', + isHeader: false, + }); + } + }); + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + return USE_TIMING_PROXY ? 
createTimingProxy(backend) : backend; + } + + async fetchTags(): Promise { + console.info('IndexedDB: Fetching tags...'); + return this.#tags.toArray(); + } + + async fetchFiles( + order: OrderBy, + fileOrder: OrderDirection, + useNaturalOrdering: boolean, + extraPropertyID?: ID, + ): Promise { + console.info('IndexedDB: Fetching files...'); + if (order === 'random') { + return shuffleArray(await this.#files.toArray()); + } + if (order === 'extraProperty') { + order = 'dateAdded'; + if (extraPropertyID) { + const extraProperty = await this.#extraProperties.get(extraPropertyID); + if (extraProperty) { + return await orderByExtraProperty( + this.#files.orderBy(order), + fileOrder, + extraProperty, + useNaturalOrdering, + ); + } else { + console.error(`IndexedDB: Custom field with ID "${extraPropertyID}" not found.`); + } + } + } + + let items; + if (useNaturalOrdering && isFileDTOPropString(order)) { + const key = order as StringProperties; + items = (await this.#files.toArray()).sort((a: FileDTO, b: FileDTO) => + a[key].localeCompare(b[key], undefined, { numeric: true, sensitivity: 'base' }), + ); + } else { + const collection = this.#files.orderBy(order); + items = await collection.toArray(); + } + + if (fileOrder === OrderDirection.Desc) { + return items.reverse(); + } else { + return items; + } + } + + async fetchFilesByID(ids: ID[]): Promise { + console.info('IndexedDB: Fetching files by ID...'); + const files = await this.#files.bulkGet(ids); + retainArray(files, (file) => file !== undefined); + return files as FileDTO[]; + } + + async fetchFilesByKey(key: keyof FileDTO, value: IndexableType): Promise { + console.info('IndexedDB: Fetching files by key/value...', { key, value }); + return this.#files.where(key).equals(value).toArray(); + } + + async fetchLocations(): Promise { + console.info('IndexedDB: Fetching locations...'); + return this.#locations.orderBy('dateAdded').toArray(); + } + + async fetchSearches(): Promise { + console.info('IndexedDB: Fetching 
searches...'); + return this.#searches.toArray(); + } + + async fetchExtraProperties(): Promise { + console.info('IndexedDB: Fetching extra properties...'); + return this.#extraProperties.orderBy('name').toArray(); + } + + async searchFiles( + criteria: ConditionDTO | [ConditionDTO, ...ConditionDTO[]], + order: OrderBy, + fileOrder: OrderDirection, + useNaturalOrdering: boolean, + extraPropertyID?: ID, + matchAny?: boolean, + ): Promise { + console.info('IndexedDB: Searching files...', { criteria, matchAny }); + const criterias = Array.isArray(criteria) ? criteria : ([criteria] as [ConditionDTO]); + const collection = await filter(this.#files, criterias, matchAny ? 'or' : 'and'); + + if (order === 'random') { + return shuffleArray(await collection.toArray()); + } + if (order === 'extraProperty') { + order = 'dateAdded'; + if (extraPropertyID) { + const extraProperty = await this.#extraProperties.get(extraPropertyID); + if (extraProperty) { + return await orderByExtraProperty( + collection, + fileOrder, + extraProperty, + useNaturalOrdering, + ); + } else { + console.error(`IndexedDB: Custom field with ID "${extraPropertyID}" not found.`); + } + } + } + // table.reverse() can be an order of magnitude slower than a javascript .reverse() call + // (tested at ~5000 items, 500ms instead of 100ms) + // easy to verify here https://jsfiddle.net/dfahlander/xf2zrL4p + let items; + if (useNaturalOrdering && isFileDTOPropString(order)) { + const key = order as StringProperties; + items = (await collection.toArray()).sort((a: FileDTO, b: FileDTO) => + a[key].localeCompare(b[key], undefined, { numeric: true, sensitivity: 'base' }), + ); + } else { + items = await collection.sortBy(order); + } + + if (fileOrder === OrderDirection.Desc) { + return items.reverse(); + } else { + return items; + } + } + + async createTag(tag: TagDTO): Promise { + console.info('IndexedDB: Creating tag...', tag); + await this.#tags.add(tag); + this.#notifyChange(); + } + + async 
createLocation(location: LocationDTO): Promise { + console.info('IndexedDB: Creating location...', location); + await this.#locations.add(location); + this.#notifyChange(); + } + + async createSearch(search: FileSearchDTO): Promise { + console.info('IndexedDB: Creating search...', search); + await this.#searches.add(search); + this.#notifyChange(); + } + + async createExtraProperty(extraProperty: ExtraPropertyDTO): Promise { + console.info('IndexedDB: Creating extra property...', extraProperty); + await this.#extraProperties.add(extraProperty); + this.#notifyChange(); + } + + async saveTag(tag: TagDTO): Promise { + console.info('IndexedDB: Saving tag...', tag); + await this.#tags.put(tag); + this.#notifyChange(); + } + + async saveFiles(files: FileDTO[]): Promise { + console.info('IndexedDB: Saving files...', files); + await this.#files.bulkPut(files); + this.#notifyChange(); + } + + async saveLocation(location: LocationDTO): Promise { + console.info('IndexedDB: Saving location...', location); + await this.#locations.put(location); + this.#notifyChange(); + } + + async saveSearch(search: FileSearchDTO): Promise { + console.info('IndexedDB: Saving search...', search); + await this.#searches.put(search); + this.#notifyChange(); + } + + async saveExtraProperty(extraProperty: ExtraPropertyDTO): Promise { + console.info('IndexedDB: Saving extra property...', extraProperty); + await this.#extraProperties.put(extraProperty); + this.#notifyChange(); + } + + async removeTags(tags: ID[]): Promise { + console.info('IndexedDB: Removing tags...', tags); + await this.#db.transaction('rw', this.#files, this.#tags, () => { + const deletedTags = new Set(tags); + retainArray(tags, (tag) => deletedTags.has(tag)); + // We have to make sure files tagged with these tags should be untagged + this.#files + // Get all files with these tags + .where('tags') + .anyOf(tags) + .distinct() + // Remove tags from files + .modify((file) => retainArray(file.tags, (tag) => !deletedTags.has(tag))); + 
// Remove tag from db + this.#tags.bulkDelete(tags); + }); + this.#notifyChange(); + } + + async mergeTags(tagToBeRemoved: ID, tagToMergeWith: ID): Promise { + console.info('IndexedDB: Merging tags...', tagToBeRemoved, tagToMergeWith); + await this.#db.transaction('rw', this.#files, this.#tags, () => { + // Replace tag on all files with the tag to be removed + this.#files + .where('tags') + .anyOf(tagToBeRemoved) + .modify((file) => { + const tagToBeRemovedIndex = file.tags.findIndex((tag) => tag === tagToBeRemoved); + + if (tagToBeRemovedIndex !== -1) { + file.tags[tagToBeRemovedIndex] = tagToMergeWith; + // Might contain duplicates if the tag to be merged with was already on the file, so remove duplicates. + retainArray( + file.tags.slice(tagToBeRemovedIndex + 1), + (tag) => tag !== tagToMergeWith || tag !== tagToBeRemoved, + ); + } + }); + // Remove tag from DB + this.#tags.delete(tagToBeRemoved); + }); + this.#notifyChange(); + } + + async removeFiles(files: ID[]): Promise { + console.info('IndexedDB: Removing files...', files); + await this.#files.bulkDelete(files); + this.#notifyChange(); + } + + async removeLocation(location: ID): Promise { + console.info('IndexedDB: Removing location...', location); + await this.#db.transaction('rw', this.#files, this.#locations, () => { + this.#files.where('locationId').equals(location).delete(); + this.#locations.delete(location); + }); + this.#notifyChange(); + } + + async removeSearch(search: ID): Promise { + console.info('IndexedDB: Removing search...', search); + await this.#searches.delete(search); + this.#notifyChange(); + } + + async removeExtraProperties(extraPropertyIDs: ID[]): Promise { + console.info('IndexedDB: Removing extra properties...', extraPropertyIDs); + await this.#db.transaction('rw', this.#files, this.#extraProperties, async () => { + await this.#files + .where('extraPropertyIDs') + .anyOf(extraPropertyIDs) + .distinct() + .modify((file) => { + for (const id of extraPropertyIDs) { + delete 
file.extraProperties[id]; + } + //retainArray(file.extraPropertyIDs, (id) => !extraPropertyIDs.includes(id)); + }); + + await this.#extraProperties.bulkDelete(extraPropertyIDs); + }); + + this.#notifyChange(); + } + + async countFiles(): Promise<[fileCount: number, untaggedFileCount: number]> { + console.info('IndexedDB: Getting number stats of files...'); + return this.#db.transaction('r', this.#files, async () => { + // Aparently converting the whole table into array and check tags in a for loop is a lot faster than using a where tags filter followed by unique(). + const files = await this.#files.toArray(); + let unTaggedFileCount = 0; + for (let i = 0; i < files.length; i++) { + if (files[i].tags.length === 0) { + unTaggedFileCount++; + } + } + return [files.length, unTaggedFileCount]; + }); + } + + // Creates many files at once, and checks for duplicates in the path they are in + async createFilesFromPath(path: string, files: FileDTO[]): Promise { + console.info('IndexedDB: Creating files...', path, files); + await this.#db.transaction('rw', this.#files, async () => { + // previously we did filter getting all the paths that start with the base path using where('absolutePath').startsWith(path).keys() + // but converting to an array and extracting the paths is significantly faster than .keys() + // Also, for small batches of new files, checking each path individually is faster. + console.debug('Filtering files...'); + if (files.length > 500) { + // When creating a large number of files (likely adding a big location), + // it's faster to fetch all existing paths starting with the given base path. + const existingFilePaths = new Set( + (await this.#files.where('absolutePath').startsWith(path).toArray()).map( + (f) => f.absolutePath, + ), + ); + retainArray(files, (file) => !existingFilePaths.has(file.absolutePath)); + } else { + // For small batches, check each file path individually. 
+ const checks = await Promise.all( + files.map(async (file) => { + const count = await this.#files.where('absolutePath').equals(file.absolutePath).count(); + return count === 0; + }), + ); + retainArray(files, (_, i) => checks[i]); + } + console.debug('Creating files...'); + this.#files.bulkAdd(files); + }); + console.debug('Done!'); + this.#notifyChange(); + } + + async clear(): Promise { + console.info('IndexedDB: Clearing database...'); + Dexie.delete(this.#db.name); + } +} + +// Creates a proxy that wraps the Backend instance to log the execution time of its methods. +function createTimingProxy(obj: Backend): Backend { + console.log('Creating timing proxy for Backend'); + return new Proxy(obj, { + get(target, prop, receiver) { + const original = Reflect.get(target, prop, receiver); + if (typeof original === 'function') { + return (...args: any[]) => { + const startTime = performance.now(); + const result = original.apply(target, args); + // Ensure both synchronous and asynchronous results are handled uniformly + return Promise.resolve(result).then((res) => { + const endTime = performance.now(); + console.log(`[Timing] ${String(prop)} took ${(endTime - startTime).toFixed(2)}ms`); + return res; + }); + }; + } + return original; + }, + }); +} + +const exampleFileDTO: FileDTO = { + id: '', + ino: '', + name: '', + relativePath: '', + absolutePath: '', + locationId: '', + extension: 'jpg', + tagsSorting: 'hierarchy', + size: 0, + width: 0, + height: 0, + dateAdded: new Date(), + dateCreated: new Date(), + dateLastIndexed: new Date(), + dateModified: new Date(), + dateModifiedOS: new Date(), + extraProperties: {}, + //extraPropertyIDs: [], + tags: [], +}; + +function isFileDTOPropString(prop: PropertyKeys): prop is StringProperties { + return typeof exampleFileDTO[prop] === 'string'; +} + +async function orderByExtraProperty( + collection: Dexie.Collection, + fileOrder: OrderDirection, + extraProperty: ExtraPropertyDTO, + useNaturalOrdering: boolean, +): Promise { + 
switch (extraProperty.type) { + case ExtraPropertyType.number: + return orderByCustomNumberField(collection, extraProperty.id, fileOrder); + case ExtraPropertyType.text: + return orderByCustomTextField(collection, extraProperty.id, fileOrder, useNaturalOrdering); + default: + throw new Error(`Unsupported custom field type: ${extraProperty.type}`); + } +} + +function castOrDefault(value: unknown, fallback: T, expectedType: string): T { + return typeof value === expectedType ? (value as T) : fallback; +} + +async function orderByCustomNumberField( + collection: Dexie.Collection, + extraPropertyID: ID, + fileOrder: OrderDirection, +): Promise { + const files = await collection.toArray(); + const fallback = fileOrder === OrderDirection.Desc ? -Infinity : Infinity; + files.sort((a, b) => { + const valueA: number = castOrDefault(a.extraProperties[extraPropertyID], fallback, 'number'); + const valueB: number = castOrDefault(b.extraProperties[extraPropertyID], fallback, 'number'); + return fileOrder === OrderDirection.Desc ? valueB - valueA : valueA - valueB; + }); + return files; +} + +async function orderByCustomTextField( + collection: Dexie.Collection, + extraPropertyID: ID, + fileOrder: OrderDirection, + numeric: boolean, +): Promise { + const files = await collection.toArray(); + const fallback = fileOrder === OrderDirection.Desc ? '\u0000' : '\uffff'; + files.sort((a, b) => { + const valueA: string = castOrDefault(a.extraProperties[extraPropertyID], fallback, 'string'); + const valueB: string = castOrDefault(b.extraProperties[extraPropertyID], fallback, 'string'); + + return fileOrder === OrderDirection.Desc + ? 
valueB.localeCompare(valueA, undefined, { numeric: numeric, sensitivity: 'base' }) + : valueA.localeCompare(valueB, undefined, { numeric: numeric, sensitivity: 'base' }); + }); + return files; +} + +type SearchConjunction = 'and' | 'or'; + +async function filter( + collection: Dexie.Table, + criterias: [ConditionDTO, ...ConditionDTO[]], + conjunction: SearchConjunction, +): Promise> { + // Searching with multiple 'wheres': https://stackoverflow.com/questions/35679590/dexiejs-indexeddb-chain-multiple-where-clauses + // Unfortunately doesn't work out of the box. + // It's one of the things they are working on, looks much better: https://github.com/dfahlander/Dexie.js/issues/427 + // We'll have to mostly rely on naive filter function (lambdas) + + if (criterias.length > 1 && conjunction === 'or') { + // OR: We can only chain ORs if all filters can be "where" functions - else we do an ugly .some() check on every document + + let allWheres = true; + let table: Dexie.Collection | undefined = undefined; + for (const crit of criterias) { + const where: WhereClause = !table + ? collection.where(crit.key) + : table.or(crit.key); + const tableOrFilter = filterWhere(where, crit); + + if (typeof tableOrFilter === 'function') { + allWheres = false; + break; + } else { + table = tableOrFilter; + } + } + + if (allWheres && table) { + return table; + } else { + const critLambdas = criterias.map(filterLambda); + return collection.filter((t) => critLambdas.some((lambda) => lambda(t))); + } + } + + // AND: We can get some efficiency for ANDS by separating the first crit from the rest... + // Dexie can use a fast "where" search for the initial search + // For consecutive "and" conjunctions, a lambda function must be used + // Since not all operators we need are supported by "where" filters, _filterWhere can also return a lambda. 
+ const [firstCrit, ...otherCrits] = criterias; + + const where = collection.where(firstCrit.key); + const whereOrFilter = filterWhere(where, firstCrit); + let table = + typeof whereOrFilter !== 'function' ? whereOrFilter : collection.filter(whereOrFilter); + + // Then just chain a loop of and() calls. A .every() feels more efficient than chaining table.and() calls + if (otherCrits.length) { + const critLambdas = otherCrits.map(filterLambda); + table = table.and((item) => critLambdas.every((lambda) => lambda(item))); + } + // for (const crit of otherCrits) { + // table = table.and(this._filterLambda(crit)); + // } + return table; +} + +/////////////////////////////// +////// FILTERING METHODS ////// +/////////////////////////////// +// There are 'where' and 'lambda filter functions: +// - where: For filtering by a single criteria and for 'or' conjunctions, Dexie exposes indexeddb-accelerated functions. +// Since some of our search operations are not supported by Dexie, some _where functions return a lambda. +// - lambda: For 'and' conjunctions, a naive filter function (lambda) must be used. 
+ +function filterWhere( + where: WhereClause, + crit: ConditionDTO, +): Collection | ((val: T) => boolean) { + switch (crit.valueType) { + case 'array': + return filterArrayWhere(where, crit); + case 'string': + return filterStringWhere(where, crit); + case 'number': + return filterNumberWhere(where, crit); + case 'date': + return filterDateWhere(where, crit); + case 'indexSignature': + return filterIndexSignatureLambda(crit); + } +} + +function filterLambda(crit: ConditionDTO): (val: T) => boolean { + switch (crit.valueType) { + case 'array': + return filterArrayLambda(crit); + case 'string': + return filterStringLambda(crit); + case 'number': + return filterNumberLambda(crit); + case 'date': + return filterDateLambda(crit); + case 'indexSignature': + return filterIndexSignatureLambda(crit); + } +} + +function filterArrayWhere( + where: WhereClause, + crit: ArrayConditionDTO, +): Collection | ((val: T) => boolean) { + // Querying array props: https://dexie.org/docs/MultiEntry-Index + // Check whether to search for empty arrays (e.g. no tags) + if (crit.value.length === 0) { + return crit.operator === 'contains' + ? (val: T): boolean => (val as any)[crit.key].length === 0 + : (val: T): boolean => (val as any)[crit.key].length !== 0; + } else { + // contains/notContains 1 or more elements + if (crit.operator === 'contains') { + return where.anyOf(crit.value).distinct(); + } else { + // not contains: there as a noneOf() function we used to use, but it matches every item individually, e.g. + // an item with tags "Apple, Pear" is matched twice: once as Apple, once as Pear; A "notContains Apple" still matches for Pear + return (val: T): boolean => + (val as any)[crit.key].every((val: string) => !crit.value.includes(val)); + } + } +} + +function filterArrayLambda(crit: ArrayConditionDTO): (val: T) => boolean { + if (crit.operator === 'contains') { + // Check whether to search for empty arrays (e.g. no tags) + return crit.value.length === 0 + ? 
(val: T): boolean => (val as any)[crit.key].length === 0 + : (val: T): boolean => crit.value.some((item) => (val as any)[crit.key].indexOf(item) !== -1); + } else { + // not contains + return crit.value.length === 0 + ? (val: T): boolean => (val as any)[crit.key].length !== 0 + : (val: T): boolean => + crit.value.every((item) => (val as any)[crit.key].indexOf(item) === -1); + } +} + +function filterStringWhere( + where: WhereClause, + crit: StringConditionDTO, +): Collection | ((t: any) => boolean) { + const dbStringOperators = [ + 'equalsIgnoreCase', + 'equals', + 'notEqual', + 'startsWithIgnoreCase', + 'startsWith', + ] as const; + + if ((dbStringOperators as readonly string[]).includes(crit.operator)) { + const funcName = crit.operator as unknown as (typeof dbStringOperators)[number]; + return where[funcName](crit.value); + } + // Use normal string filter as fallback for functions not supported by the DB + return filterStringLambda(crit); +} + +function filterStringLambda(crit: StringConditionDTO): (t: any) => boolean { + const { key, value } = crit; + const valLow = value.toLowerCase(); + + switch (crit.operator) { + case 'equals': + return (t: any) => (t[key] as string) === crit.value; + case 'equalsIgnoreCase': + return (t: any) => (t[key] as string).toLowerCase() === valLow; + case 'notEqual': + return (t: any) => (t[key] as string).toLowerCase() !== valLow; + case 'contains': + return (t: any) => (t[key] as string).toLowerCase().includes(valLow); + case 'notContains': + return (t: any) => !(t[key] as string).toLowerCase().includes(valLow); + case 'startsWith': + return (t: any) => (t[key] as string).startsWith(crit.value); + case 'startsWithIgnoreCase': + return (t: any) => (t[key] as string).toLowerCase().startsWith(valLow); + case 'notStartsWith': + return (t: any) => !(t[key] as string).toLowerCase().startsWith(valLow); + default: + console.log('String operator not allowed:', crit.operator); + return () => false; + } +} + +function filterNumberWhere( + 
where: WhereClause, + crit: NumberConditionDTO, +): Collection { + switch (crit.operator) { + case 'equals': + return where.equals(crit.value); + case 'notEqual': + return where.notEqual(crit.value); + case 'smallerThan': + return where.below(crit.value); + case 'smallerThanOrEquals': + return where.belowOrEqual(crit.value); + case 'greaterThan': + return where.above(crit.value); + case 'greaterThanOrEquals': + return where.aboveOrEqual(crit.value); + default: + const _exhaustiveCheck: never = crit.operator; + return _exhaustiveCheck; + } +} + +function filterNumberLambda(crit: NumberConditionDTO): (t: any) => boolean { + const { key, value } = crit; + + switch (crit.operator) { + case 'equals': + return (t: any) => t[key] === value; + case 'notEqual': + return (t: any) => t[key] !== value; + case 'smallerThan': + return (t: any) => t[key] < value; + case 'smallerThanOrEquals': + return (t: any) => t[key] <= value; + case 'greaterThan': + return (t: any) => t[key] > value; + case 'greaterThanOrEquals': + return (t: any) => t[key] >= value; + default: + const _exhaustiveCheck: never = crit.operator; + return _exhaustiveCheck; + } +} + +function filterIndexSignatureLambda( + crit: IndexSignatureConditionDTO, +): (t: any) => boolean { + const { + value: [keyIS, valueIS], + } = crit; + + if (isExtraPropertyOperatorType(crit.operator)) { + switch (crit.operator) { + case 'existsInFile': + return (t: any) => t[crit.key][keyIS] !== undefined; + case 'notExistsInFile': + return (t: any) => t[crit.key][keyIS] === undefined; + default: + const _exhaustiveCheck: never = crit.operator; + return _exhaustiveCheck; + } + } + switch (typeof valueIS) { + case 'number': + if (isNumberOperator(crit.operator)) { + const numberCrit: NumberConditionDTO = { + key: keyIS, + operator: crit.operator, + value: valueIS, + valueType: 'number', + }; + const lamda = filterNumberLambda(numberCrit); + return (t: any) => { + const obj = t[crit.key]; + return typeof obj[keyIS] === 'number' ? 
lamda(obj) : false; + }; + } + return () => false; + case 'string': + if (isStringOperator(crit.operator)) { + const stringCrit: StringConditionDTO = { + key: keyIS, + operator: crit.operator, + value: valueIS, + valueType: 'string', + }; + const lamda = filterStringLambda(stringCrit); + return (t: any) => { + const obj = t[crit.key]; + return typeof obj[keyIS] === 'string' ? lamda(obj) : false; + }; + } + return () => false; + default: + return () => false; + } +} + +function filterDateWhere( + where: WhereClause, + crit: DateConditionDTO, +): Collection { + const dateStart = new Date(crit.value); + dateStart.setHours(0, 0, 0); + const dateEnd = new Date(crit.value); + dateEnd.setHours(23, 59, 59); + + switch (crit.operator) { + // equal to this day, so between 0:00 and 23:59 + case 'equals': + return where.between(dateStart, dateEnd); + case 'smallerThan': + return where.below(dateStart); + case 'smallerThanOrEquals': + return where.below(dateEnd); + case 'greaterThan': + return where.above(dateEnd); + case 'greaterThanOrEquals': + return where.above(dateStart); + // not equal to this day, so before 0:00 or after 23:59 + case 'notEqual': + return where.below(dateStart).or(crit.key).above(dateEnd); + default: + const _exhaustiveCheck: never = crit.operator; + return _exhaustiveCheck; + } +} + +function filterDateLambda(crit: DateConditionDTO): (t: any) => boolean { + const { key } = crit; + const start = new Date(crit.value); + start.setHours(0, 0, 0); + const end = new Date(crit.value); + end.setHours(23, 59, 59); + + switch (crit.operator) { + case 'equals': + return (t: any) => t[key] >= start || t[key] <= end; + case 'notEqual': + return (t: any) => t[key] < start || t[key] > end; + case 'smallerThan': + return (t: any) => t[key] < start; + case 'smallerThanOrEquals': + return (t: any) => t[key] <= end; + case 'greaterThan': + return (t: any) => t[key] > end; + case 'greaterThanOrEquals': + return (t: any) => t[key] >= start; + default: + const 
_exhaustiveCheck: never = crit.operator; + return _exhaustiveCheck; + } +} diff --git a/src/backend/_deprecated/backup-scheduler.ts b/src/backend/_deprecated/backup-scheduler.ts new file mode 100644 index 000000000..b5517b9a3 --- /dev/null +++ b/src/backend/_deprecated/backup-scheduler.ts @@ -0,0 +1,131 @@ +// @ts-nocheck +import Dexie from 'dexie'; +import { exportDB, importDB, peakImportFile } from 'dexie-export-import'; +import fse from 'fs-extra'; +import path from 'path'; + +import { debounce } from '../../../common/timeout'; +import { DataBackup } from '../../api/data-backup'; +import { AUTO_BACKUP_TIMEOUT, NUM_AUTO_BACKUPS } from './config'; +import { getToday, getWeekStart } from 'common/core'; + +/** @deprecated */ +export default class BackupScheduler implements DataBackup { + #db: Dexie; + #backupDirectory: string = ''; + #lastBackupIndex: number = 0; + #lastBackupDate: Date = new Date(0); + + constructor(db: Dexie, directory: string) { + this.#db = db; + this.#backupDirectory = directory; + } + + restoreEmpty(): Promise { + throw new Error('Method not implemented.'); + } + + static async init(db: Dexie, backupDirectory: string): Promise { + await fse.ensureDir(backupDirectory); + return new BackupScheduler(db, backupDirectory); + } + + schedule(): void { + if (new Date().getTime() > this.#lastBackupDate.getTime() + AUTO_BACKUP_TIMEOUT) { + this.#createPeriodicBackup(); + } + } + + /** Creates a copy of a backup file, when the target file creation date is less than the provided date */ + static async #copyFileIfCreatedBeforeDate( + srcPath: string, + targetPath: string, + dateToCheck: Date, + ): Promise { + let createBackup = false; + try { + // If file creation date is less than provided date, create a back-up + const stats = await fse.stat(targetPath); + createBackup = stats.ctime < dateToCheck; + } catch (e) { + // File not found + createBackup = true; + } + if (createBackup) { + try { + await fse.copyFile(srcPath, targetPath); + console.log('Created 
backup', targetPath); + return true; + } catch (e) { + console.error('Could not create backup', targetPath, e); + } + } + return false; + } + + // Wait 10 seconds after a change for any other changes before creating a backup. + #createPeriodicBackup = debounce(async (): Promise => { + const filePath = path.join(this.#backupDirectory, `auto-backup-${this.#lastBackupIndex}.json`); + + this.#lastBackupDate = new Date(); + this.#lastBackupIndex = (this.#lastBackupIndex + 1) % NUM_AUTO_BACKUPS; + + try { + await this.backupToFile(filePath); + + console.log('Created automatic backup', filePath); + + // Check for daily backup + await BackupScheduler.#copyFileIfCreatedBeforeDate( + filePath, + path.join(this.#backupDirectory, 'daily.json'), + getToday(), + ); + + // Check for weekly backup + await BackupScheduler.#copyFileIfCreatedBeforeDate( + filePath, + path.join(this.#backupDirectory, 'weekly.json'), + getWeekStart(), + ); + } catch (e) { + console.error('Could not create periodic backup', filePath, e); + } + }, 10000); + + async backupToFile(path: string): Promise { + console.info('IndexedDB: Exporting database backup...', path); + + const blob = await exportDB(this.#db, { prettyJson: false }); + // might be nice to zip it and encode as base64 to save space. 
Keeping it simple for now + await fse.ensureFile(path); + await fse.writeFile(path, await blob.text()); + } + + async restoreFromFile(path: string): Promise { + console.info('IndexedDB: Importing database backup...', path); + + const buffer = await fse.readFile(path); + const blob = new Blob([buffer]); + + console.debug('Clearing database...'); + Dexie.delete(this.#db.name); + + await importDB(blob); + // There also is "importInto" which as an "clearTablesBeforeImport" option, + // but that didn't seem to work correctly (files were always re-created after restarting for some reason) + } + + async peekFile(path: string): Promise<[numTags: number, numFiles: number]> { + console.info('IndexedDB: Peeking database backup...', path); + const buffer = await fse.readFile(path); + const blob = new Blob([buffer]); + const metadata = await peakImportFile(blob); // heh, they made a typo + const tagsTable = metadata.data.tables.find((t) => t.name === 'tags'); + const filesTable = metadata.data.tables.find((t) => t.name === 'files'); + if (tagsTable && filesTable) { + return [tagsTable.rowCount, filesTable.rowCount]; + } + throw new Error('Database does not contain a table for files and/or tags'); + } +} diff --git a/src/backend/_deprecated/config.ts b/src/backend/_deprecated/config.ts new file mode 100644 index 000000000..f54165730 --- /dev/null +++ b/src/backend/_deprecated/config.ts @@ -0,0 +1,296 @@ +// @ts-nocheck +import Dexie, { Transaction } from 'dexie'; +import fse from 'fs-extra'; + +import { FileDTO } from '../../api/file'; +import { TagDTO } from 'src/api/tag'; +import { ID } from '../../api/id'; +import { ExtraProperties, ExtraPropertyType } from 'src/api/extraProperty'; +import { LocationDTO, SubLocationDTO } from 'src/api/location'; + +// The name of the IndexedDB +export const DB_NAME = 'Allusion'; + +export const NUM_AUTO_BACKUPS = 6; + +export const AUTO_BACKUP_TIMEOUT = 1000 * 60 * 10; // 10 minutes + +// Schema based on 
https://dexie.org/docs/Version/Version.stores()#schema-syntax +// Only for the indexes of the DB, not all fields +// Versions help with upgrading DB to new configurations: +// https://dexie.org/docs/Tutorial/Design#database-versioning +const dbConfig: DBVersioningConfig[] = [ + { + // Version 4, 19-9-20: Added system created date + version: 4, + collections: [ + { + name: 'files', + schema: + '++id, locationId, *tags, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', + }, + { + name: 'tags', + schema: '++id', + }, + { + name: 'locations', + schema: '++id, dateAdded', + }, + ], + }, + { + // Version 5, 29-5-21: Added sub-locations + version: 5, + collections: [], + upgrade: (tx: Transaction): void => { + tx.table('locations') + .toCollection() + .modify((location: any) => { + location.subLocations = []; + return location; + }); + }, + }, + { + // Version 6, 13-11-21: Added lastIndexed date to File for recreating thumbnails + version: 6, + collections: [], + upgrade: (tx: Transaction): void => { + tx.table('files') + .toCollection() + .modify((file: FileDTO) => { + file.dateLastIndexed = file.dateAdded; + return file; + }); + }, + }, + { + // Version 7, 4-1-22: Added saved searches + version: 7, + collections: [ + { + name: 'searches', + schema: '++id', + }, + ], + }, + { + // Version 8, 9-1-22: Added ino to file for detecting added/removed files as a single rename/move event + version: 8, + collections: [ + { + name: 'files', + schema: + '++id, ino, locationId, *tags, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', + }, + ], + upgrade: (tx: Transaction): void => { + tx.table('files') + .toCollection() + .modify((file: FileDTO) => { + try { + // apparently you can't do async stuff here, even though it is typed to return a PromiseLike :/ + const stats = fse.statSync(file.absolutePath); + // fallback to random value so that it won't be recognized as 
identical file to others where no ino could be found + file.ino = stats.ino.toString() || Math.random().toString(); + } catch (e) { + console.warn(`Could not get ino for ${file.absolutePath}`); + } + return file; + }); + }, + }, + { + version: 9, + collections: [ + { + name: 'tags', + schema: '++id', + }, + ], + upgrade: (tx: Transaction): void => { + tx.table('tags') + .toCollection() + .modify((tag: TagDTO) => { + tag.impliedTags = []; + return tag; + }); + }, + }, + { + // Version 10, 6-3-25: Added scores and .scores to file + version: 10, + collections: [ + { + name: 'scores', + schema: '++id, name, dateCreated, dateModified', + }, + { + name: 'files', + schema: + '++id, ino, locationId, *tags, scores, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', + }, + ], + upgrade: (tx: Transaction): void => { + tx.table('files') + .toCollection() + .modify((file: any) => { + file.scores = new Map(); + return file; + }); + }, + }, + { + // Version 11, Added OrigDateModified date to File for recreating thumbnails and metadata + version: 11, + collections: [], + upgrade: (tx: Transaction): void => { + tx.table('files') + .toCollection() + .modify((file: FileDTO) => { + file.dateModifiedOS = file.dateAdded; + return file; + }); + }, + }, + { + // Version 12 29-5-25: Rename table Scores to extraProperties, redefine scores in files to extraProperties, add isVisibleInherited: bool to tags and add tags to locations. 
+ version: 12, + collections: [ + { + name: 'extraProperties', + schema: '++id, name', + }, + { + name: 'files', + schema: + '++id, ino, locationId, *tags, *extraPropertyIDs, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated, OrigDateModified', + }, + ], + upgrade: (tx: Transaction): void => { + // Migrate "scores" to "extraProperties" + const oldScores = tx.table('scores'); + const extraProperties = tx.table('extraProperties'); + + oldScores.toArray().then((records) => { + const transformed = records.map((oldRecord: any) => { + return { + ...oldRecord, + type: ExtraPropertyType.number, + dateAdded: oldRecord.dateCreated, + dateCreated: undefined, + dateModified: undefined, + }; + }); + const cleaned = transformed.map((r) => { + delete r.dateCreated; + delete r.dateModified; + return r; + }); + + return extraProperties.bulkAdd(cleaned); + }); + + // Migrate property "scores" in files to "extraProperties" + tx.table('files') + .toCollection() + .modify((file: any) => { + if (file.scores instanceof Map) { + file.extraPropertyIDs = Array.from(file.scores.keys()); + file.extraProperties = Object.fromEntries(file.scores) as ExtraProperties; + } else { + file.extraPropertyIDs = []; + file.extraProperties = {}; + } + delete file.scores; + return file; + }); + + // Add isVisibleInherited to tags + tx.table('tags') + .toCollection() + .modify((tag: any) => { + tag.isVisibleInherited = true; + return tag; + }); + + // Add tags to locations and sublocations + function addTagsRecursively(location: any): any { + location.tags = []; + if (Array.isArray(location.subLocations)) { + location.subLocations = location.subLocations.map((sublocation: any) => + addTagsRecursively({ ...sublocation }), + ); + } + return location; + } + + tx.table('locations') + .toCollection() + .modify((location: any) => { + return addTagsRecursively(location); + }); + }, + }, + { + // Version 13 29-5-25: Drop table scores + version: 13, + collections: 
[ + { + name: 'scores', + schema: null, + }, + ], + }, + { + // Version 14 05-08-25: Added isHeader, aliases and description to tags. + version: 14, + collections: [], + upgrade: (tx: Transaction): void => { + tx.table('tags') + .toCollection() + .modify((tag: TagDTO) => { + tag.isHeader = false; + tag.aliases = []; + tag.description = ''; + return tag; + }); + // Add + tx.table('locations') + .toCollection() + .modify((location: LocationDTO) => { + location.isWatchingFiles = true; + return location; + }); + }, + }, +]; + +type DBVersioningConfig = { + version: number; + collections: Array<{ name: string; schema: string | null }>; + upgrade?: (tx: Transaction) => void | Promise; +}; + +/** + * @deprecated + * A function that should be called before using the database. + * It initializes the object stores + */ +export function dbInit(dbName: string): Dexie { + const db = new Dexie(dbName); + + // Initialize for each DB version: https://dexie.org/docs/Tutorial/Design#database-versioning + for (const config of dbConfig) { + const { version, collections, upgrade } = config; + const dbSchema: { [key: string]: string | null } = {}; + collections.forEach(({ name, schema }) => (dbSchema[name] = schema)); + const stores = db.version(version).stores(dbSchema); + if (upgrade) { + stores.upgrade(upgrade); + } + } + + return db; +} diff --git a/src/backend/backend.ts b/src/backend/backend.ts index 1218d271b..5a57bcbfc 100644 --- a/src/backend/backend.ts +++ b/src/backend/backend.ts @@ -1,404 +1,1167 @@ -import Dexie, { Collection, IndexableType, Table, WhereClause } from 'dexie'; - -import { retainArray, shuffleArray } from '../../common/core'; -import { DataStorage } from '../api/data-storage'; import { - ArrayConditionDTO, - BaseIndexSignature, - ConditionDTO, - DateConditionDTO, - IndexSignatureConditionDTO, - NumberConditionDTO, + AllusionDB_SQL, + deserializeBoolean, + deserializeDate, + EpValues, + Files, + LocationNodes, + Locations, + LocationTags, + serializeBoolean, + 
serializeDate, + SubLocations, + TagAliases, + TagImplications, + ExtraProperties as DbExtraProperties, + SavedSearches, + SearchCriteria, + FileTags, + SubTags, + SearchGroups, +} from './schemaTypes'; +import SQLite from 'better-sqlite3'; +import { + Kysely, + SqliteDialect, + ParseJSONResultsPlugin, + CamelCasePlugin, + sql, + SelectQueryBuilder, + SqlBool, + ExpressionBuilder, + OrderByDirection, + AnyColumn, + Insertable, + Expression, + RawBuilder, +} from 'kysely'; +import { kyselyLogger, migrateToLatest, PAD_STRING_LENGTH } from './config'; +import { DataStorage } from 'src/api/data-storage'; +import { OrderBy, OrderDirection, - PropertyKeys, - StringConditionDTO, - StringProperties, - isExtraPropertyOperatorType, + ConditionDTO, + StringOperatorType, + NumberOperatorType, + ArrayOperatorType, + ExtraPropertyOperatorType, isNumberOperator, isStringOperator, -} from '../api/data-storage-search'; -import { FileDTO } from '../api/file'; -import { FileSearchDTO } from '../api/file-search'; -import { ID } from '../api/id'; -import { LocationDTO } from '../api/location'; -import { ROOT_TAG_ID, TagDTO } from '../api/tag'; -import { ExtraPropertyDTO, ExtraPropertyType } from '../api/extraProperty'; - -const USE_TIMING_PROXY = false; + PropertyKeys, + StringProperties, + SearchConjunction, + ConditionGroupDTO, + PaginationDirection, + Cursor, + IndexableType, +} from 'src/api/data-storage-search'; +import { ExtraProperties, ExtraPropertyDTO } from 'src/api/extraProperty'; +import { FileDTO, FileStats } from 'src/api/file'; +import { FileSearchDTO, SearchGroupDTO } from 'src/api/file-search'; +import { generateId, ID } from 'src/api/id'; +import { LocationDTO, SubLocationDTO } from 'src/api/location'; +import { ROOT_TAG_ID, TagDTO } from 'src/api/tag'; +import { jsonArrayFrom } from 'kysely/helpers/sqlite'; +import { IS_DEV } from 'common/process'; +import { UpdateObject } from 'kysely/dist/cjs/parser/update-set-parser'; + +// Use to debug perfomance. 
+const USE_TIMING_PROXY = IS_DEV; +const USE_QUERY_LOGGER = false ? IS_DEV : false; -/** - * The backend of the application serves as an API, even though it runs on the same machine. - * This helps code organization by enforcing a clear separation between backend/frontend logic. - * Whenever we want to change things in the backend, this should have no consequences in the frontend. - * The backend has access to the database, which is exposed to the frontend through a set of endpoints. - */ export default class Backend implements DataStorage { - #files: Table; - #tags: Table; - #locations: Table; - #searches: Table; - #extraProperties: Table; - #db: Dexie; - #notifyChange: () => void; - - constructor(db: Dexie, notifyChange: () => void) { - console.info(`IndexedDB: Initializing database "${db.name}"...`); - // Initialize database tables - this.#files = db.table('files'); - this.#tags = db.table('tags'); - this.#locations = db.table('locations'); - this.#searches = db.table('searches'); - this.#extraProperties = db.table('extraProperties'); + readonly MAX_VARS!: number; + #db!: Kysely; + #dbPath!: string; + #notifyChange!: () => void; + #restoreEmpty!: () => Promise; + /** State variable that indicates if we need to recompute preAggregateJSON */ + #isQueryDirty: boolean = true; + // Seed used to have deterministic order when order by random + #seed: number = generateSeed(); + + constructor() { + // Must call init() before using to init the properties. + return USE_TIMING_PROXY ? 
createTimingProxy(this) : this; + } + + async init( + dbPath: string, + jsonToImport: string | undefined, + notifyChange: () => void, + restoreEmpty: () => Promise, + mode: 'default' | 'migrate' | 'readonly' = 'default', + ): Promise { + console.info(`SQLite3: Initializing database "${dbPath}"...`); + // For some reason, if initializing the better-sqlite3 db with readonly true, later when disposing the instance, + // it does not remove the WAL files, which is bothersome to leave in the backup directory. + //const isReadOnly = mode === 'readonly'; + const database = new SQLite(dbPath, { timeout: 50000 }); //, readonly: isReadOnly }); + + // HACK Use a padded string to do natural sorting + database.function('pad_string', { deterministic: true }, PadString); + database.function('stable_hash', { deterministic: true }, stableHash); + + const dialect = new SqliteDialect({ database }); + const db = new Kysely({ + dialect: dialect, + plugins: [new ParseJSONResultsPlugin(), new CamelCasePlugin()], + log: USE_QUERY_LOGGER ? kyselyLogger : undefined, // Used only for debugging. + }); + + // Instead of initializing this through the constructor, set the class properties here, + // this allows us to use the class as a worker having async await calls at init. 
this.#db = db; + this.#dbPath = dbPath; this.#notifyChange = notifyChange; - } + this.#restoreEmpty = restoreEmpty; + (this as any).MAX_VARS = await getSqliteMaxVariables(db); - static async init(db: Dexie, notifyChange: () => void): Promise { - const backend = new Backend(db, notifyChange); - // Create a root tag if it does not exist - const tags = backend.#tags; - await db.transaction('rw', tags, async () => { - const tagCount = await tags.count(); - if (tagCount === 0) { - await tags.put({ + // Run migrations if required + if (mode === 'default' || mode === 'migrate') { + await migrateToLatest(db, { jsonToImport }); + } + + if (mode === 'migrate' || mode === 'readonly') { + return; + } + // Configure PRAGMA settings (these can create WAL/SHM files) + // Enable WAL mode to not wait for writes and optimize database + await sql`PRAGMA journal_mode = WAL;`.execute(db); + await sql`PRAGMA case_sensitive_like = ON;`.execute(db); + await sql`PRAGMA synchronous = NORMAL;`.execute(db); + await sql`PRAGMA temp_store = MEMORY;`.execute(db); + await sql`PRAGMA automatic_index = ON;`.execute(db); + await sql`PRAGMA cache_size = -64000;`.execute(db); + await sql`PRAGMA OPTIMIZE;`.execute(db); + + // Create Root Tag if not exists. + const rootTag = await db + .selectFrom('tags') + .selectAll() + .where('id', '=', ROOT_TAG_ID) + .executeTakeFirst(); + if (!rootTag) { + await db + .insertInto('tags') + .values({ id: ROOT_TAG_ID, name: 'Root', - dateAdded: new Date(), - subTags: [], - impliedTags: [], + dateAdded: serializeDate(new Date()), color: '', - isHidden: false, - isVisibleInherited: false, - aliases: [], + isHidden: serializeBoolean(false), + isVisibleInherited: serializeBoolean(false), description: '', - isHeader: false, - }); - } - }); - // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition - return USE_TIMING_PROXY ? 
createTimingProxy(backend) : backend; + isHeader: serializeBoolean(false), + fileCount: 0, + isFileCountDirty: serializeBoolean(true), + }) + .execute(); + } + await this.preAggregateJSON(); + } + + async setSeed(seed?: number): Promise { + this.#seed = seed ?? generateSeed(); } async fetchTags(): Promise { - console.info('IndexedDB: Fetching tags...'); - return this.#tags.toArray(); + console.info('SQLite: Fetching tags...'); + const tags = ( + await this.#db + .selectFrom('tags') + .selectAll('tags') + .select((eb) => [ + jsonArrayFrom( + eb + .selectFrom('subTags') + .select('subTags.subTagId') + .whereRef('subTags.tagId', '=', 'tags.id') + .orderBy('subTags.idx'), + ).as('subTags'), + jsonArrayFrom( + eb + .selectFrom('tagImplications') + .select('tagImplications.impliedTagId') + .whereRef('tagImplications.tagId', '=', 'tags.id'), + ).as('impliedTags'), + jsonArrayFrom( + eb + .selectFrom('tagAliases') + .select('tagAliases.alias') + .whereRef('tagAliases.tagId', '=', 'tags.id'), + ).as('aliases'), + ]) + .execute() + ) + // convert data into TagDTO format + .map((dbTag) => ({ + id: dbTag.id, + name: dbTag.name, + dateAdded: deserializeDate(dbTag.dateAdded), + color: dbTag.color, + subTags: dbTag.subTags.map((st) => st.subTagId), + impliedTags: dbTag.impliedTags.map((it) => it.impliedTagId), + isHidden: deserializeBoolean(dbTag.isHidden), + isVisibleInherited: deserializeBoolean(dbTag.isVisibleInherited), + isHeader: deserializeBoolean(dbTag.isHeader), + aliases: dbTag.aliases.map((a) => a.alias), + description: dbTag.description, + fileCount: dbTag.fileCount, + isFileCountDirty: deserializeBoolean(dbTag.isFileCountDirty), + })); + return tags; + } + + // Original implementation by Pianissi + // Because creating the jsons takes a lot of time, let's preaggregate them everytime we save our files. 
+ async preAggregateJSON(): Promise { + console.info('SQLite: Updating temp aggregates...'); + await sql` + DROP TABLE IF EXISTS file_tag_aggregates_temp; + `.execute(this.#db); + await sql` + DROP TABLE IF EXISTS file_ep_aggregates_temp; + `.execute(this.#db); + + await sql` + CREATE TEMPORARY TABLE IF NOT EXISTS file_tag_aggregates_temp AS + SELECT + file_id, + json_group_array(tag_id) AS tags + FROM file_tags + GROUP BY file_id; + `.execute(this.#db); + await sql` + CREATE TEMPORARY TABLE IF NOT EXISTS file_ep_aggregates_temp AS + SELECT + file_id, + json_group_array(json_object( + 'file_id', file_id, + 'ep_id', ep_id, + 'text_value', text_value, + 'number_value', number_value, + 'timestamp_value', timestamp_value)) + as extra_properties + FROM ep_values + GROUP BY file_id; + `.execute(this.#db); + + await sql` + CREATE INDEX IF NOT EXISTS idx_file_tag_aggregates_temp_file ON file_tag_aggregates_temp(file_id); + `.execute(this.#db); + await sql` + CREATE INDEX IF NOT EXISTS idx_file_ep_aggregates_temp_file ON file_ep_aggregates_temp(file_id); + `.execute(this.#db); + this.#isQueryDirty = false; + } + + async queryFiles>( + criteria: ConditionGroupDTO = { conjunction: 'and', children: [] }, + pagOptions: PaginationOptions, + modifyQuery?: (qb: Q) => Q, + ): Promise { + pagOptions.seed = this.#seed; + if (this.#isQueryDirty) { + await this.preAggregateJSON(); + } + const dbWithTemp = this.#db.withTables<{ + fileTagAggregatesTemp: { + fileId: ID; + tags: ID[]; + }; + fileEpAggregatesTemp: { + fileId: ID; + extraProperties: EpValues[]; + }; + }>(); + // Apply the filter criterias expressions to the files QueryBuilder and execute the query. 
+ let query; + query = dbWithTemp + .selectFrom('files') + .leftJoin('fileTagAggregatesTemp as ft', 'ft.fileId', 'files.id') + .leftJoin('fileEpAggregatesTemp as fe', 'fe.fileId', 'files.id') + .selectAll('files') + .select(['ft.tags', 'fe.extraProperties']); + query = applyFileFilters(query, criteria); + query = await applyPagination(this.#db, query, pagOptions); + if (modifyQuery) { + query = modifyQuery(query as any); + } + + const files = (await query.execute()).map(mapToDTO); + const shouldReverse = pagOptions.pagination === 'before' && pagOptions.cursor !== undefined; + return shouldReverse ? files.reverse() : files; } async fetchFiles( order: OrderBy, fileOrder: OrderDirection, useNaturalOrdering: boolean, + limit?: number, + pagination?: PaginationDirection, + cursor?: Cursor, extraPropertyID?: ID, ): Promise { - console.info('IndexedDB: Fetching files...'); - if (order === 'random') { - return shuffleArray(await this.#files.toArray()); - } - if (order === 'extraProperty') { - order = 'dateAdded'; - if (extraPropertyID) { - const extraProperty = await this.#extraProperties.get(extraPropertyID); - if (extraProperty) { - return await orderByExtraProperty( - this.#files.orderBy(order), - fileOrder, - extraProperty, - useNaturalOrdering, - ); - } else { - console.error(`IndexedDB: Custom field with ID "${extraPropertyID}" not found.`); - } - } - } - - let items; - if (useNaturalOrdering && isFileDTOPropString(order)) { - const key = order as StringProperties; - items = (await this.#files.toArray()).sort((a: FileDTO, b: FileDTO) => - a[key].localeCompare(b[key], undefined, { numeric: true, sensitivity: 'base' }), - ); - } else { - const collection = this.#files.orderBy(order); - items = await collection.toArray(); - } + console.info('SQLite: Fetching all files...', cursor); + return this.queryFiles(undefined, { + order, + direction: fileOrder, + useNaturalOrdering, + limit, + pagination, + cursor, + extraPropertyID, + }); + } - if (fileOrder === 
OrderDirection.Desc) { - return items.reverse(); - } else { - return items; - } + async searchFiles( + criteria: ConditionGroupDTO | undefined, + order: OrderBy, + fileOrder: OrderDirection, + useNaturalOrdering: boolean, + limit?: number, + pagination?: PaginationDirection, + cursor?: Cursor, + extraPropertyID?: ID, + ): Promise { + console.info('SQLite: Searching files...', cursor, criteria); + return this.queryFiles(criteria, { + order, + direction: fileOrder, + useNaturalOrdering, + limit, + pagination, + cursor, + extraPropertyID, + }); } async fetchFilesByID(ids: ID[]): Promise { - console.info('IndexedDB: Fetching files by ID...'); - const files = await this.#files.bulkGet(ids); - retainArray(files, (file) => file !== undefined); - return files as FileDTO[]; + console.info('SQLite: Fetching files by ID...', ids); + return this.queryFiles(undefined, { order: 'dateAdded' }, (query) => + query.where('id', 'in', ids), + ); } - async fetchFilesByKey(key: keyof FileDTO, value: IndexableType): Promise { - console.info('IndexedDB: Fetching files by key/value...', { key, value }); - return this.#files.where(key).equals(value).toArray(); + async fetchFilesByKey(key: keyof FileDTO, values: IndexableType): Promise { + console.info('SQLite: Fetching files by key...'); + if (!['tags', 'extraProperties', 'extraPropertyIDs'].includes(key)) { + if (!Array.isArray(values)) { + values = [values as string | number | Date]; + } + return this.queryFiles(undefined, { order: 'dateAdded' }, (query) => + query.where(key, 'in', values), + ); + } + console.error('fetchFilesByKey error: Key or values not supported.'); + return []; } async fetchLocations(): Promise { - console.info('IndexedDB: Fetching locations...'); - return this.#locations.orderBy('dateAdded').toArray(); + console.info('SQLite: Fetching locations...'); + /** Map to quicly find a node and his parent */ + const locationNodesMap = new Map(); + const locations: LocationDTO[] = ( + await this.#db + .selectFrom('locations') 
+ .innerJoin('locationNodes as node', 'node.id', 'locations.nodeId') + .selectAll() + .select((eb) => [ + jsonArrayFrom( + eb + .selectFrom('locationTags') + .select('locationTags.tagId') + .whereRef('locationTags.nodeId', '=', 'locations.nodeId'), + ).as('tags'), + ]) + .execute() + ).map((dbLoc) => { + // convert data into LocationDTO format + const lc: LocationDTO = { + id: dbLoc.id, + path: dbLoc.path, + dateAdded: deserializeDate(dbLoc.dateAdded), + subLocations: [], + tags: dbLoc.tags.map((t) => t.tagId), + index: dbLoc.idx, + isWatchingFiles: deserializeBoolean(dbLoc.isWatchingFiles), + }; + locationNodesMap.set(dbLoc.id, [lc, dbLoc.parentId]); + return lc; + }); + const subLocations: SubLocationDTO[] = ( + await this.#db + .selectFrom('subLocations') + .innerJoin('locationNodes as node', 'node.id', 'subLocations.nodeId') + .selectAll() + .select((eb) => [ + jsonArrayFrom( + eb + .selectFrom('locationTags') + .select('locationTags.tagId') + .whereRef('locationTags.nodeId', '=', 'subLocations.nodeId'), + ).as('tags'), + ]) + .execute() + ).map((dbLoc) => { + // convert data into SubLocationDTO format + const slc: SubLocationDTO = { + id: dbLoc.id, + name: dbLoc.path, + subLocations: [], + tags: dbLoc.tags.map((t) => t.tagId), + isExcluded: deserializeBoolean(dbLoc.isExcluded), + }; + locationNodesMap.set(dbLoc.id, [slc, dbLoc.parentId]); + return slc; + }); + // Insert sublocations into their parents + for (const subLocation of subLocations) { + const parent = locationNodesMap.get(locationNodesMap.get(subLocation.id)?.[1] ?? '')?.[0]; + if (parent) { + parent.subLocations.push(subLocation); + } + } + return locations; } async fetchSearches(): Promise { - console.info('IndexedDB: Fetching searches...'); - return this.#searches.toArray(); - } + console.info('SQLite: Fetching saved searches...'); + const groupsMap = new Map(); + // 1. 
Fetch searches + const savedSearches = await this.#db.selectFrom('savedSearches').selectAll().execute(); + if (!savedSearches.length) { + return []; + } + const savedSearchIds = savedSearches.map((s) => s.id); + + // 2. Fetch groups + const dbGroups = await this.#db + .selectFrom('searchGroups') + .select(['id', 'name', 'savedSearchId', 'parentGroupId', 'idx', 'conjunction']) + .where('savedSearchId', 'in', savedSearchIds) + .orderBy('savedSearchId') + .orderBy('parentGroupId') + .orderBy('idx') + .execute(); + + for (const grp of dbGroups) { + groupsMap.set(grp.id, { + id: grp.id, + name: grp.name, + conjunction: grp.conjunction, + children: [], + parentGroupId: grp.parentGroupId, + }); + } - async fetchExtraProperties(): Promise { - console.info('IndexedDB: Fetching extra properties...'); - return this.#extraProperties.orderBy('name').toArray(); - } + // 3. Fetch criteria + const dbCriteria = await this.#db + .selectFrom('searchCriteria') + .select(['id', 'groupId', 'idx', 'key', 'valueType', 'operator', 'jsonValue']) + .where('groupId', 'in', Array.from(groupsMap.keys())) + .orderBy('groupId') + .orderBy('idx') + .execute(); + + // 4. Attach criteria to their groups + for (const crit of dbCriteria) { + const parent = groupsMap.get(crit.groupId); + if (!parent) { + continue; + } - async searchFiles( - criteria: ConditionDTO | [ConditionDTO, ...ConditionDTO[]], - order: OrderBy, - fileOrder: OrderDirection, - useNaturalOrdering: boolean, - extraPropertyID?: ID, - matchAny?: boolean, - ): Promise { - console.info('IndexedDB: Searching files...', { criteria, matchAny }); - const criterias = Array.isArray(criteria) ? criteria : ([criteria] as [ConditionDTO]); - const collection = await filter(this.#files, criterias, matchAny ? 'or' : 'and'); + parent.children.push({ + id: crit.id, + key: crit.key, + operator: crit.operator, + valueType: crit.valueType, + value: + // the ParseJSONResultsPlugin already parses the arrays but not strings + crit.valueType === 'string' ? 
JSON.parse(crit.jsonValue as string) : crit.jsonValue, + }); + } - if (order === 'random') { - return shuffleArray(await collection.toArray()); - } - if (order === 'extraProperty') { - order = 'dateAdded'; - if (extraPropertyID) { - const extraProperty = await this.#extraProperties.get(extraPropertyID); - if (extraProperty) { - return await orderByExtraProperty( - collection, - fileOrder, - extraProperty, - useNaturalOrdering, - ); - } else { - console.error(`IndexedDB: Custom field with ID "${extraPropertyID}" not found.`); + // Attach child groups to their parents + const rootGroupsBySearch = new Map(); + + for (const [groupId, group] of groupsMap) { + if (group.parentGroupId) { + const parent = groupsMap.get(group.parentGroupId); + if (parent) { + parent.children.push(group); + } + } else { + // Root group + const dbGroup = dbGroups.find((g) => g.id === groupId); + if (dbGroup) { + rootGroupsBySearch.set(dbGroup.savedSearchId, group); } } } - // table.reverse() can be an order of magnitude slower than a javascript .reverse() call - // (tested at ~5000 items, 500ms instead of 100ms) - // easy to verify here https://jsfiddle.net/dfahlander/xf2zrL4p - let items; - if (useNaturalOrdering && isFileDTOPropString(order)) { - const key = order as StringProperties; - items = (await collection.toArray()).sort((a: FileDTO, b: FileDTO) => - a[key].localeCompare(b[key], undefined, { numeric: true, sensitivity: 'base' }), - ); - } else { - items = await collection.sortBy(order); - } - if (fileOrder === OrderDirection.Desc) { - return items.reverse(); - } else { - return items; - } + // 6. Build final DTOs + const searches: FileSearchDTO[] = savedSearches.map((search) => ({ + id: search.id, + name: search.name, + index: search.idx, + rootGroup: rootGroupsBySearch.get(search.id) ?? 
{ + id: 'root-' + search.id, + name: 'root-' + search.name, + conjunction: 'and', + children: [], + }, + })); + + return searches; + } + + async fetchExtraProperties(): Promise { + console.info('SQLite: Fetching extra properties...'); + const eProperties = ( + await this.#db.selectFrom('extraProperties').selectAll().orderBy('name').execute() + ).map( + (dbEp): ExtraPropertyDTO => ({ + id: dbEp.id, + type: dbEp.type, + name: dbEp.name, + dateAdded: deserializeDate(dbEp.dateAdded), + }), + ); + return eProperties; } async createTag(tag: TagDTO): Promise { - console.info('IndexedDB: Creating tag...', tag); - await this.#tags.add(tag); + console.info('SQLite: Creating tag...', tag); + return this.upsertTag(tag); + } + + // Creates many files at once, and checks for duplicates in the path they are in + async createFilesFromPath(path: string, filesDTO: FileDTO[]): Promise { + console.info('SQLite: Creating files...', path, filesDTO.length); + + if (filesDTO.length === 0) { + return; + } + const { files } = normalizeFiles(filesDTO); + const FILES_BATCH_SIZE = computeBatchSize(this.MAX_VARS, files[0]); + await this.#db.transaction().execute(async (trx) => { + for (let i = 0; i < files.length; i += FILES_BATCH_SIZE) { + const batch = files.slice(i, i + FILES_BATCH_SIZE); + try { + await trx + .insertInto('files') + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(); + } catch (error) { + console.error(`Failed to insert files batch at index ${i}:`, error); + } + } + }); + this.#isQueryDirty = true; this.#notifyChange(); + console.info('SQLite: Files created successfully'); } async createLocation(location: LocationDTO): Promise { - console.info('IndexedDB: Creating location...', location); - await this.#locations.add(location); - this.#notifyChange(); + console.info('SQLite: Creating location...', location); + return this.upsertLocation(location); } async createSearch(search: FileSearchDTO): Promise { - console.info('IndexedDB: Creating search...', search); - 
await this.#searches.add(search); - this.#notifyChange(); + console.info('SQLite: Creating search...', search); + return this.upsertSearch(search); } async createExtraProperty(extraProperty: ExtraPropertyDTO): Promise { - console.info('IndexedDB: Creating extra property...', extraProperty); - await this.#extraProperties.add(extraProperty); - this.#notifyChange(); + console.info('SQLite: Creating extra property...', extraProperty); + return this.upsertExtraProperty(extraProperty); } async saveTag(tag: TagDTO): Promise { - console.info('IndexedDB: Saving tag...', tag); - await this.#tags.put(tag); + console.info('SQLite: Saving tag...', tag); + return this.upsertTag(tag); + } + + async upsertTag(tag: TagDTO): Promise { + const { tagIds, tags, subTags, tagImplications, tagAliases } = normalizeTags([tag]); + if (tags.length === 0) { + return; + } + await this.#db.transaction().execute(async (trx) => { + await trx.deleteFrom('subTags').where('tagId', 'in', tagIds).execute(); + await trx.deleteFrom('tagImplications').where('tagId', 'in', tagIds).execute(); + await trx.deleteFrom('tagAliases').where('tagId', 'in', tagIds).execute(); + await upsertTable(this.MAX_VARS, trx, 'tags', tags, ['id'], ['dateAdded']); + if (subTags.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'subTags', subTags, ['tagId', 'subTagId']); + } + if (tagImplications.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'tagImplications', tagImplications, ['tagId', 'impliedTagId']); // eslint-disable-line prettier/prettier + } + if (tagAliases.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'tagAliases', tagAliases, ['tagId', 'alias']); + } + }); this.#notifyChange(); } - async saveFiles(files: FileDTO[]): Promise { - console.info('IndexedDB: Saving files...', files); - await this.#files.bulkPut(files); + async saveFiles(filesDTO: FileDTO[]): Promise { + console.info('SQLite: Saving files...', filesDTO); + if (filesDTO.length === 0) { + return; + } + + const { fileIds, files, fileTags, 
epVal } = normalizeFiles(filesDTO); + + // Compute batch sizes. To use the maximum number of vars SQLite can handle per query. + const DELETE_BATCH_SIZE = this.MAX_VARS; + const FILES_BATCH_SIZE = computeBatchSize(this.MAX_VARS, files[0]); + const FILE_TAGS_BATCH_SIZE = computeBatchSize(this.MAX_VARS, fileTags[0]); + const EP_VALUES_BATCH_SIZE = computeBatchSize(this.MAX_VARS, epVal[0]); + + await this.#db.transaction().execute(async (trx) => { + // Create unique temp table names. + const tempSuffix = generateId(); + const tempFiles = `files_temp_${tempSuffix}`; + const tempFileTags = `file_tags_temp_${tempSuffix}`; + const tempEpValues = `ep_values_temp_${tempSuffix}`; + + try { + // Create temp tables form a copy of the actual tables. + await sql`CREATE TEMP TABLE ${sql.id(tempFiles)} AS SELECT * FROM files WHERE 0`.execute( + trx, + ); + await sql`CREATE TEMP TABLE ${sql.id( + tempFileTags, + )} AS SELECT * FROM file_tags WHERE 0`.execute(trx); + await sql`CREATE TEMP TABLE ${sql.id( + tempEpValues, + )} AS SELECT * FROM ep_values WHERE 0`.execute(trx); + // Insert files into temp files table + for (let i = 0; i < files.length; i += FILES_BATCH_SIZE) { + const batch = files.slice(i, i + FILES_BATCH_SIZE); + await trx + .insertInto(tempFiles as any) + .values(batch) + .execute(); + } + // Delete previous fileTags and epValues, it is quicker to delete all from related files and insert them in bulk. 
+ if (fileIds.length > 0) { + for (let i = 0; i < fileIds.length; i += DELETE_BATCH_SIZE) { + const batchIds = fileIds.slice(i, i + DELETE_BATCH_SIZE); + await trx.deleteFrom('fileTags').where('fileId', 'in', batchIds).execute(); + await trx.deleteFrom('epValues').where('fileId', 'in', batchIds).execute(); + } + } + // Insert fileTags into temp table + if (fileTags.length > 0) { + for (let i = 0; i < fileTags.length; i += FILE_TAGS_BATCH_SIZE) { + const batch = fileTags.slice(i, i + FILE_TAGS_BATCH_SIZE); + await trx + .insertInto(tempFileTags as any) + .values(batch) + .execute(); + } + } + // Insert epValues into temp table + if (epVal.length > 0) { + for (let i = 0; i < epVal.length; i += EP_VALUES_BATCH_SIZE) { + const batch = epVal.slice(i, i + EP_VALUES_BATCH_SIZE); + await trx + .insertInto(tempEpValues as any) + .values(batch) + .execute(); + } + } + // Transfer from temp tables + // Upsert FILES + upsertTable( + this.MAX_VARS, + trx, + 'files', + sql`SELECT * FROM ${sql.id(tempFiles)} WHERE true`, + ['id'], + ['dateAdded'], + files[0], + ); + // Insert FileTags + if (fileTags.length > 0) { + await sql` + INSERT INTO file_tags + SELECT * FROM ${sql.id(tempFileTags)} + `.execute(trx); + } + // Insert EpValues + if (epVal.length > 0) { + await sql` + INSERT INTO ep_values + SELECT * FROM ${sql.id(tempEpValues)} + `.execute(trx); + } + this.#isQueryDirty = true; + console.info('SQLite: Files saved successfully'); + } finally { + // Clean temp table. 
+ await sql`DROP TABLE IF EXISTS ${sql.id(tempFiles)}`.execute(trx); + await sql`DROP TABLE IF EXISTS ${sql.id(tempFileTags)}`.execute(trx); + await sql`DROP TABLE IF EXISTS ${sql.id(tempEpValues)}`.execute(trx); + } + }); this.#notifyChange(); } async saveLocation(location: LocationDTO): Promise { - console.info('IndexedDB: Saving location...', location); - await this.#locations.put(location); + console.info('SQLite: Saving location...', location); + return this.upsertLocation(location); + } + + async upsertLocation(location: LocationDTO): Promise { + const { nodeIds, locationNodes, locations, subLocations, locationTags } = normalizeLocations([ + location, + ]); + if (locationNodes.length === 0) { + return; + } + await this.#db.transaction().execute(async (trx) => { + await trx.deleteFrom('locationTags').where('nodeId', 'in', nodeIds).execute(); + await trx.deleteFrom('locationNodes').where('parentId', 'in', nodeIds).execute(); + await upsertTable(this.MAX_VARS, trx, 'locationNodes', locationNodes, ['id']); + if (locations.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'locations', locations, ['nodeId'], ['dateAdded']); + } + if (subLocations.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'subLocations', subLocations, ['nodeId']); + } + if (locationTags.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'locationTags', locationTags, ['nodeId', 'tagId']); + } + }); this.#notifyChange(); } async saveSearch(search: FileSearchDTO): Promise { - console.info('IndexedDB: Saving search...', search); - await this.#searches.put(search); + console.info('SQLite: Saving search...', search); + return this.upsertSearch(search); + } + + async upsertSearch(search: FileSearchDTO): Promise { + const { savedSearchesIds, savedSearches, searchGroups, searchCriteria } = + normalizeSavedSearches([search]); + if (savedSearches.length === 0) { + return; + } + await this.#db.transaction().execute(async (trx) => { + await trx.deleteFrom('searchGroups').where('savedSearchId', 
'in', savedSearchesIds).execute(); + await upsertTable(this.MAX_VARS, trx, 'savedSearches', savedSearches, ['id']); + if (searchGroups.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'searchGroups', searchGroups, ['id']); + } + if (searchCriteria.length > 0) { + await upsertTable(this.MAX_VARS, trx, 'searchCriteria', searchCriteria, ['id']); + } + }); this.#notifyChange(); } async saveExtraProperty(extraProperty: ExtraPropertyDTO): Promise { - console.info('IndexedDB: Saving extra property...', extraProperty); - await this.#extraProperties.put(extraProperty); - this.#notifyChange(); + console.info('SQLite: Saving extra property...', extraProperty); + return this.upsertExtraProperty(extraProperty); } - async removeTags(tags: ID[]): Promise { - console.info('IndexedDB: Removing tags...', tags); - await this.#db.transaction('rw', this.#files, this.#tags, () => { - const deletedTags = new Set(tags); - retainArray(tags, (tag) => deletedTags.has(tag)); - // We have to make sure files tagged with these tags should be untagged - this.#files - // Get all files with these tags - .where('tags') - .anyOf(tags) - .distinct() - // Remove tags from files - .modify((file) => retainArray(file.tags, (tag) => !deletedTags.has(tag))); - // Remove tag from db - this.#tags.bulkDelete(tags); + async upsertExtraProperty(extraProperty: ExtraPropertyDTO): Promise { + const extraProperties: Insertable[] = [extraProperty].map((ep) => ({ + id: ep.id, + type: ep.type, + name: ep.name, + dateAdded: serializeDate(ep.dateAdded), + })); + await this.#db.transaction().execute(async (trx) => { + await upsertTable(this.MAX_VARS, trx, 'extraProperties', extraProperties, ['id'], ['dateAdded']); // eslint-disable-line prettier/prettier }); this.#notifyChange(); } async mergeTags(tagToBeRemoved: ID, tagToMergeWith: ID): Promise { - console.info('IndexedDB: Merging tags...', tagToBeRemoved, tagToMergeWith); - await this.#db.transaction('rw', this.#files, this.#tags, () => { - // Replace tag on all 
files with the tag to be removed - this.#files - .where('tags') - .anyOf(tagToBeRemoved) - .modify((file) => { - const tagToBeRemovedIndex = file.tags.findIndex((tag) => tag === tagToBeRemoved); - - if (tagToBeRemovedIndex !== -1) { - file.tags[tagToBeRemovedIndex] = tagToMergeWith; - // Might contain duplicates if the tag to be merged with was already on the file, so remove duplicates. - retainArray( - file.tags.slice(tagToBeRemovedIndex + 1), - (tag) => tag !== tagToMergeWith || tag !== tagToBeRemoved, - ); - } - }); - // Remove tag from DB - this.#tags.delete(tagToBeRemoved); + console.info('SQLite: Merging tags...', tagToBeRemoved, tagToMergeWith); + + await this.#db.transaction().execute(async (trx) => { + // Merge in FileTags + // first delete the records that would make a duplicate + await trx + .deleteFrom('fileTags') + .where('tagId', '=', tagToBeRemoved) + .where('fileId', 'in', (eb) => + eb.selectFrom('fileTags').select('fileId').where('tagId', '=', tagToMergeWith), + ) + .execute(); + // Update the thag ids + await trx + .updateTable('fileTags') + .set({ tagId: tagToMergeWith }) + .where('tagId', '=', tagToBeRemoved) + .execute(); + // Merge in locationTags + await trx + .deleteFrom('locationTags') + .where('tagId', '=', tagToBeRemoved) + .where('nodeId', 'in', (eb) => + eb.selectFrom('locationTags').select('nodeId').where('tagId', '=', tagToMergeWith), + ) + .execute(); + await trx + .updateTable('locationTags') + .set({ tagId: tagToMergeWith }) + .where('tagId', '=', tagToBeRemoved) + .execute(); + + // delete the tag + await trx.deleteFrom('tags').where('id', '=', tagToBeRemoved).execute(); }); this.#notifyChange(); } + async removeTags(tags: ID[]): Promise { + console.info('SQLite: Removing tags...', tags); + // Cascade delte in other tables deleting from tags table. 
+ await this.#db.deleteFrom('tags').where('id', 'in', tags).execute(); + this.#notifyChange(); + } + async removeFiles(files: ID[]): Promise { - console.info('IndexedDB: Removing files...', files); - await this.#files.bulkDelete(files); + console.info('SQLite: Removing files...', files); + // Cascade delte in other tables deleting from files table. + await this.#db.deleteFrom('files').where('id', 'in', files).execute(); this.#notifyChange(); } async removeLocation(location: ID): Promise { - console.info('IndexedDB: Removing location...', location); - await this.#db.transaction('rw', this.#files, this.#locations, () => { - this.#files.where('locationId').equals(location).delete(); - this.#locations.delete(location); - }); + console.info('SQLite: Removing location...', location); + // Cascade delte in other tables deleting from locationNodes table. + await this.#db.deleteFrom('locationNodes').where('id', '=', location).execute(); + // Run VACUUM to free disk space after large deletions. + await sql`VACUUM;`.execute(this.#db); this.#notifyChange(); } async removeSearch(search: ID): Promise { - console.info('IndexedDB: Removing search...', search); - await this.#searches.delete(search); + console.info('SQLite: Removing search...', search); + // Cascade delte in other tables deleting from savedSearches table. 
+ await this.#db.deleteFrom('savedSearches').where('id', '=', search).execute(); this.#notifyChange(); } async removeExtraProperties(extraPropertyIDs: ID[]): Promise { - console.info('IndexedDB: Removing extra properties...', extraPropertyIDs); - await this.#db.transaction('rw', this.#files, this.#extraProperties, async () => { - await this.#files - .where('extraPropertyIDs') - .anyOf(extraPropertyIDs) - .distinct() - .modify((file) => { - for (const id of extraPropertyIDs) { - delete file.extraProperties[id]; - } - retainArray(file.extraPropertyIDs, (id) => !extraPropertyIDs.includes(id)); - }); + console.info('SQLite: Removing extra properties...', extraPropertyIDs); + // Cascade delte in other tables deleting from extraProperties table. + await this.#db.deleteFrom('extraProperties').where('id', 'in', extraPropertyIDs).execute(); + this.#notifyChange(); + } - await this.#extraProperties.bulkDelete(extraPropertyIDs); - }); + async addTagsToFiles(tagIds: ID[], criteria?: ConditionGroupDTO): Promise { + console.info('SQLite: Add tags to filtered files...', criteria, tagIds); + // Subquery tipado correctamente + let fileSubquery = this.#db.selectFrom('files').select('files.id as fileId'); + fileSubquery = applyFileFilters(fileSubquery, criteria); + + // Crear valores de tags como CTE o subquery + await this.#db + .insertInto('fileTags') + .columns(['fileId', 'tagId']) + .expression(() => { + // Usar raw SQL para el cross join con los valores + const tagValues = tagIds.map((id) => `SELECT '${id}' as tag_id`).join(' UNION ALL '); + + return this.#db + .selectFrom(fileSubquery.as('matchedFiles')) + .crossJoin(sql`(${sql.raw(tagValues)})`.as('tagValues')) + .select(['matchedFiles.fileId', sql`tag_values.tag_id`.as('tagId')]) + .where(sql`true`); + }) + .onConflict((oc) => oc.doNothing()) + .execute(); + + this.#isQueryDirty = true; + } - this.#notifyChange(); + async removeTagsFromFiles(tagIds: ID[], criteria?: ConditionGroupDTO): Promise { + console.info('SQLite: Remove 
tags from filtered files...', criteria, tagIds); + + let fileSubquery = this.#db.selectFrom('files').select('files.id'); + fileSubquery = applyFileFilters(fileSubquery, criteria); + + await this.#db + .deleteFrom('fileTags') + .where('fileId', 'in', fileSubquery) + .where('tagId', 'in', tagIds) + .execute(); + + this.#isQueryDirty = true; + } + + async clearTagsFromFiles(criteria?: ConditionGroupDTO): Promise { + let fileSubquery = this.#db.selectFrom('files').select('files.id'); + fileSubquery = applyFileFilters(fileSubquery, criteria); + + await this.#db.deleteFrom('fileTags').where('fileId', 'in', fileSubquery).execute(); + + this.#isQueryDirty = true; } - async countFiles(): Promise<[fileCount: number, untaggedFileCount: number]> { - console.info('IndexedDB: Getting number stats of files...'); - return this.#db.transaction('r', this.#files, async () => { - // Aparently converting the whole table into array and check tags in a for loop is a lot faster than using a where tags filter followed by unique(). - const files = await this.#files.toArray(); - let unTaggedFileCount = 0; - for (let i = 0; i < files.length; i++) { - if (files[i].tags.length === 0) { - unTaggedFileCount++; + async countFiles( + options?: { files?: boolean; untagged?: boolean }, + criteria?: ConditionGroupDTO, + ): Promise<[fileCount: number | undefined, untaggedFileCount: number | undefined]> { + console.info('SQLite: Counting files...', options, criteria); + const result: [number | undefined, number | undefined] = [undefined, undefined]; + if (options?.files) { + let totalQuery = this.#db + .selectFrom('files') + .select(({ fn }) => fn.count('files.id').as('count')); + totalQuery = criteria ? applyFileFilters(totalQuery, criteria) : totalQuery; + const totalResult = await totalQuery.executeTakeFirst(); + result[0] = totalResult?.count ?? 
0; + } + + if (options?.untagged) { + let untaggedQuery = this.#db + .selectFrom('files') + .leftJoin('fileTags as ft', 'ft.fileId', 'files.id') + .where('ft.fileId', 'is', null) + .select(({ fn }) => fn.count('files.id').as('count')); + untaggedQuery = criteria ? applyFileFilters(untaggedQuery, criteria) : untaggedQuery; + const untaggedResult = await untaggedQuery.executeTakeFirst(); + result[1] = untaggedResult?.count ?? 0; + } + return result; + } + + /** Compare the given disk files with the database files for the given location. */ + async compareFiles( + locationId: ID, + diskFiles: FileStats[], + ): Promise<{ createdStats: FileStats[]; missingFiles: FileDTO[] }> { + const dbWithTemp = this.#db.withTables<{ + tempDiskFiles: Omit & { + dateModified: number; + dateCreated: number; + }; + }>(); + // first insert all missing files into a temp table for easier and db optimized querying + // use unique table name for concurrency + const tempSuffix = generateId(); + const tempDiskFilesName = `temp_disk_files_${tempSuffix}`; + const tempDiskFiles = sql + .table(tempDiskFilesName) + .as('tempDiskFiles') as unknown as 'tempDiskFiles'; + + await sql` + CREATE TEMP TABLE ${sql.id(tempDiskFilesName)} ( + absolute_path TEXT PRIMARY KEY, + ino TEXT NOT NULL, + size INTEGER NOT NULL, + date_modified INTEGER NOT NULL, + date_created INTEGER NOT NULL + ) WITHOUT ROWID; + `.execute(this.#db); + + const DISK_FILES_BATCH_SIZE = computeBatchSize(this.MAX_VARS, diskFiles[0]); + await dbWithTemp.transaction().execute(async (trx) => { + for (let i = 0; i < diskFiles.length; i += DISK_FILES_BATCH_SIZE) { + const batch = []; + const end = Math.min(i + DISK_FILES_BATCH_SIZE, diskFiles.length); + for (let j = i; j < end; j++) { + const f = diskFiles[j]; + batch.push({ + absolutePath: f.absolutePath, + ino: f.ino, + size: f.size, + dateModified: serializeDate(f.dateModified), + dateCreated: serializeDate(f.dateCreated), + }); } + await trx + .insertInto(tempDiskFilesName as 
'tempDiskFiles') + .values(batch) + .execute(); } - return [files.length, unTaggedFileCount]; }); + + // find created files, (the ones present in disk but not in db) + const createdStats: FileStats[] = ( + await dbWithTemp + .selectFrom(tempDiskFiles) + .leftJoin('files', (join) => + join + .onRef('files.absolutePath', '=', 'tempDiskFiles.absolutePath') + .on('files.locationId', '=', locationId), + ) + .where('files.id', 'is', null) + .selectAll('tempDiskFiles') + .execute() + ).map((df) => ({ + absolutePath: df.absolutePath, + ino: df.ino, + size: df.size, + dateModified: deserializeDate(df.dateModified), + dateCreated: deserializeDate(df.dateCreated), + })); + + // find missing files, (the ones present in db but not in disk) + const missingFiles = await this.queryFiles( + undefined, + { order: 'id' }, + (query: SelectQueryBuilder) => { + return query + .leftJoin(tempDiskFiles, (join) => + join.onRef('tempDiskFiles.absolutePath', '=', 'files.absolutePath'), + ) + .where('files.locationId', '=', locationId) + .where('tempDiskFiles.absolutePath', 'is', null); + }, + ); + // clean temp table + await sql`DROP TABLE IF EXISTS ${sql.id(tempDiskFilesName)}`.execute(this.#db); + + return { createdStats, missingFiles }; } - // Creates many files at once, and checks for duplicates in the path they are in - async createFilesFromPath(path: string, files: FileDTO[]): Promise { - console.info('IndexedDB: Creating files...', path, files); - await this.#db.transaction('rw', this.#files, async () => { - // previously we did filter getting all the paths that start with the base path using where('absolutePath').startsWith(path).keys() - // but converting to an array and extracting the paths is significantly faster than .keys() - // Also, for small batches of new files, checking each path individually is faster. 
- console.debug('Filtering files...'); - if (files.length > 500) { - // When creating a large number of files (likely adding a big location), - // it's faster to fetch all existing paths starting with the given base path. - const existingFilePaths = new Set( - (await this.#files.where('absolutePath').startsWith(path).toArray()).map( - (f) => f.absolutePath, - ), - ); - retainArray(files, (file) => !existingFilePaths.has(file.absolutePath)); - } else { - // For small batches, check each file path individually. - const checks = await Promise.all( - files.map(async (file) => { - const count = await this.#files.where('absolutePath').equals(file.absolutePath).count(); - return count === 0; - }), - ); - retainArray(files, (_, i) => checks[i]); + /** Find possible matches in the database for the given missing files based on their metadata. */ + async findMissingDBMatches( + missingFiles: FileDTO[], + ): Promise> { + if (missingFiles.length === 0) { + return []; + } + + const dbWithTemp = this.#db.withTables<{ + tempMissingFiles: { + id: string; + name: string; + ino: string; + width: number | null; + height: number | null; + dateCreated: number; + }; + fileTagAggregatesTemp: { + fileId: ID; + tags: ID[]; + }; + fileEpAggregatesTemp: { + fileId: ID; + extraProperties: EpValues[]; + }; + }>(); + + // first insert all missing files into a temp table for easier and db optimized querying + // use unique table name for concurrency + const tempMissingName = `temp_missing_files_${generateId()}`; + const tempMissingFiles = sql + .table(tempMissingName) + .as('tempMissingFiles') as unknown as 'tempMissingFiles'; + + await sql` + CREATE TEMP TABLE ${sql.id(tempMissingName)} ( + id TEXT PRIMARY KEY, + name TEXT, + ino TEXT, + width INTEGER, + height INTEGER, + date_created INTEGER + ) WITHOUT ROWID; + `.execute(this.#db); + + const BATCH_SIZE = computeBatchSize(this.MAX_VARS, missingFiles[0]); + await dbWithTemp.transaction().execute(async (trx) => { + for (let i = 0; i < 
missingFiles.length; i += BATCH_SIZE) { + const batch = []; + const end = Math.min(i + BATCH_SIZE, missingFiles.length); + for (let j = i; j < end; j++) { + const f = missingFiles[j]; + batch.push({ + id: f.id, + name: f.name, + ino: f.ino, + width: f.width, + height: f.height, + dateCreated: serializeDate(f.dateCreated), + }); + } + await trx + .insertInto(tempMissingName as 'tempMissingFiles') + .values(batch) + .execute(); } - console.debug('Creating files...'); - this.#files.bulkAdd(files); }); - console.debug('Done!'); - this.#notifyChange(); + + // Compare metadata of two files to determine whether the files are (likely to be) identical + // same logic as areFilesIdenticalBesidesName but in DB for optimization to trasverse all files. + const matches = await dbWithTemp + .selectFrom(tempMissingFiles) + .innerJoin('files', (join) => + join + .onRef('files.id', '!=', 'tempMissingFiles.id') + .on((eb) => + eb.or([ + eb('files.ino', '=', eb.ref('tempMissingFiles.ino')), + eb.and([ + eb('files.width', '=', eb.ref('tempMissingFiles.width')), + eb('files.height', '=', eb.ref('tempMissingFiles.height')), + eb('files.dateCreated', '=', eb.ref('tempMissingFiles.dateCreated')), + ]), + ]), + ), + ) + .leftJoin('fileTagAggregatesTemp as ft', 'ft.fileId', 'files.id') + .leftJoin('fileEpAggregatesTemp as fe', 'fe.fileId', 'files.id') + .selectAll('files') + .select(['ft.tags', 'fe.extraProperties', 'tempMissingFiles.id as missingSourceId']) + // prioritize matches by name first, then by id to have a stable order + .orderBy('tempMissingFiles.id') + .orderBy(sql`CASE WHEN files.name = ${sql.ref('tempMissingFiles.name')} THEN 0 ELSE 1 END`) + .execute(); + + // clean temp table + await sql`DROP TABLE IF EXISTS ${sql.id(tempMissingName)}`.execute(this.#db); + + // multiple matches can be found for the same missing file, keep the best one (first by name) + const uniqueMatches = new Map(); + for (const row of matches) { + if (!uniqueMatches.has(row.missingSourceId as ID)) { + 
const { missingSourceId, ...fileData } = row; + uniqueMatches.set(missingSourceId as ID, mapToDTO(fileData)); + } + } + + // return entries for compatibility with worker mode. + return Array.from(uniqueMatches.entries()).map(([missingId, matchedFile]) => [ + missingId, + matchedFile, + ]); } async clear(): Promise { - console.info('IndexedDB: Clearing database...'); - Dexie.delete(this.#db.name); + console.info('SQLite: Clearing database...'); + /* + const tables = await this.#db + .selectFrom('sqlite_master' as any) + .select('name') + .where('type', '=', 'table') + .where('name', 'not like', 'sqlite_%') + .execute(); + + for (const { name } of tables) { + if (name === 'kysely_migration' || name === 'kysely_migration_lock') { + continue; + } + await this.#db.deleteFrom(name as any).execute(); + } */ + + // Empy the tables with a large database takes too long, instead create an emprty DB, + // reinit and restore it at startup relying in the backup-scheduler checkAndRestoreDB behaviour. + await this.#restoreEmpty(); } } @@ -425,6 +1188,90 @@ function createTimingProxy(obj: Backend): Backend { }); } +function mapToDTO(dbFile: FileDTO | { [x: string]: any }): FileDTO { + // convert data into FileDTO format + const extraPropertyIDs: ID[] = []; + const extraProperties: ExtraProperties = {}; + for (const ep of dbFile.extraProperties ?? []) { + extraPropertyIDs.push(ep.epId); + const val = ep.textValue ?? ep.numberValue; // ?? 
ep.timestampValue; + if (val !== null) { + extraProperties[ep.epId] = val; + } + } + return { + id: dbFile.id, + ino: dbFile.ino, + locationId: dbFile.locationId, + relativePath: dbFile.relativePath, + absolutePath: dbFile.absolutePath, + tagSorting: dbFile.tagSorting, + dateAdded: deserializeDate(dbFile.dateAdded), + dateModified: deserializeDate(dbFile.dateModified), + dateModifiedOS: deserializeDate(dbFile.dateModifiedOS), + dateLastIndexed: deserializeDate(dbFile.dateLastIndexed), + dateCreated: deserializeDate(dbFile.dateCreated), + name: dbFile.name, + extension: dbFile.extension, + size: dbFile.size, + width: dbFile.width, + height: dbFile.height, + tags: dbFile.tags ?? [], + extraProperties: extraProperties, + }; +} + +export async function getSqliteMaxVariables(db: Kysely): Promise { + const rows = (await sql`PRAGMA compile_options`.execute(db)).rows; + const opt: any = rows.find((r: any) => r.compileOptions?.includes('MAX_VARIABLE_NUMBER')); + if (!opt) { + console.warn('MAX_VARIABLE_NUMBER not found, using 22766'); + return 22766; + } + const maxVars = parseInt(opt.compileOptions.split('=')[1], 10); + return isNaN(maxVars) ? 
22766 : maxVars; +} + +export function computeBatchSize(maxVars: number, sampleObject?: Record): number { + if (!sampleObject) { + return 501; + } + const numCols = Object.keys(sampleObject).length; + return Math.floor(maxVars / numCols); +} + +function isValidCursor(cursor: any): cursor is Cursor { + if (typeof cursor === 'object' && 'orderValue' in cursor && 'id' in cursor) { + if (typeof cursor.id === 'string' && cursor.orderValue !== undefined) { + return true; + } + } + return false; +} + +function PadString(str: string): string { + return str.replace(/\d+/g, (num: string) => num.padStart(PAD_STRING_LENGTH, '0')); +} + +function stableHash(id: string, seed: number): number { + let h = seed | 0; + + for (let i = 0; i < id.length; i++) { + h = Math.imul(h ^ id.charCodeAt(i), 0x5bd1e995); + h ^= h >>> 15; + } + + return h >>> 0; +} + +function generateSeed() { + return Date.now() >>> 0; +} + +/////////////////// +///// SORTING ///// +/////////////////// + const exampleFileDTO: FileDTO = { id: '', ino: '', @@ -433,6 +1280,7 @@ const exampleFileDTO: FileDTO = { absolutePath: '', locationId: '', extension: 'jpg', + tagSorting: 'hierarchy', size: 0, width: 0, height: 0, @@ -440,9 +1288,8 @@ const exampleFileDTO: FileDTO = { dateCreated: new Date(), dateLastIndexed: new Date(), dateModified: new Date(), - OrigDateModified: new Date(), + dateModifiedOS: new Date(), extraProperties: {}, - extraPropertyIDs: [], tags: [], }; @@ -450,398 +1297,736 @@ function isFileDTOPropString(prop: PropertyKeys): prop is StringPropert return typeof exampleFileDTO[prop] === 'string'; } -async function orderByExtraProperty( - collection: Dexie.Collection, - fileOrder: OrderDirection, - extraProperty: ExtraPropertyDTO, - useNaturalOrdering: boolean, -): Promise { - switch (extraProperty.type) { - case ExtraPropertyType.number: - return orderByCustomNumberField(collection, extraProperty.id, fileOrder); - case ExtraPropertyType.text: - return orderByCustomTextField(collection, 
extraProperty.id, fileOrder, useNaturalOrdering); - default: - throw new Error(`Unsupported custom field type: ${extraProperty.type}`); - } -} - -function castOrDefault(value: unknown, fallback: T, expectedType: string): T { - return typeof value === expectedType ? (value as T) : fallback; -} - -async function orderByCustomNumberField( - collection: Dexie.Collection, - extraPropertyID: ID, - fileOrder: OrderDirection, -): Promise { - const files = await collection.toArray(); - const fallback = fileOrder === OrderDirection.Desc ? -Infinity : Infinity; - files.sort((a, b) => { - const valueA: number = castOrDefault(a.extraProperties[extraPropertyID], fallback, 'number'); - const valueB: number = castOrDefault(b.extraProperties[extraPropertyID], fallback, 'number'); - return fileOrder === OrderDirection.Desc ? valueB - valueA : valueA - valueB; - }); - return files; -} +type PaginationOptions = { + order: OrderBy; + direction?: OrderDirection; + useNaturalOrdering?: boolean; + limit?: number; + pagination?: PaginationDirection; + cursor?: Cursor; + extraPropertyID?: string; + seed?: number; +}; -async function orderByCustomTextField( - collection: Dexie.Collection, - extraPropertyID: ID, - fileOrder: OrderDirection, - numeric: boolean, -): Promise { - const files = await collection.toArray(); - const fallback = fileOrder === OrderDirection.Desc ? '\u0000' : '\uffff'; - files.sort((a, b) => { - const valueA: string = castOrDefault(a.extraProperties[extraPropertyID], fallback, 'string'); - const valueB: string = castOrDefault(b.extraProperties[extraPropertyID], fallback, 'string'); - - return fileOrder === OrderDirection.Desc - ? 
valueB.localeCompare(valueA, undefined, { numeric: numeric, sensitivity: 'base' }) - : valueA.localeCompare(valueB, undefined, { numeric: numeric, sensitivity: 'base' }); - }); - return files; -} +// Original implementation by Pianissi +async function applyPagination( + db: Kysely, + q: SelectQueryBuilder, + pagOptions: PaginationOptions, +): Promise> { + const { direction, useNaturalOrdering, extraPropertyID } = pagOptions; + const { pagination, cursor, limit } = pagOptions; + const { order } = pagOptions; + + let sqlDirection: OrderByDirection = direction === OrderDirection.Asc ? 'asc' : 'desc'; + let orderColumn: string | RawBuilder = + order === 'extraProperty' ? 'sortValue' : `files.${order}`; + let type: 'text' | 'number' = + order !== 'extraProperty' && order !== 'random' && isFileDTOPropString(order) + ? 'text' + : 'number'; + // Compute pagination consts + const isAfter = pagination === 'after'; + const isAsc = sqlDirection === 'asc'; + const operator = isAfter === isAsc ? '>' : '<'; + const isValidPagination = isValidCursor(cursor) && pagination; + // alter sqlDirection only if a valid pagination applies + if (isValidPagination) { + // if pagination === 'before' invert direction to fetch adjacent elements, then after executing the query apply a reverse to the result data. + sqlDirection = !isAfter ? (isAsc ? 'desc' : 'asc') : sqlDirection; + } -type SearchConjunction = 'and' | 'or'; - -async function filter( - collection: Dexie.Table, - criterias: [ConditionDTO, ...ConditionDTO[]], - conjunction: SearchConjunction, -): Promise> { - // Searching with multiple 'wheres': https://stackoverflow.com/questions/35679590/dexiejs-indexeddb-chain-multiple-where-clauses - // Unfortunately doesn't work out of the box. 
- // It's one of the things they are working on, looks much better: https://github.com/dfahlander/Dexie.js/issues/427 - // We'll have to mostly rely on naive filter function (lambdas) - - if (criterias.length > 1 && conjunction === 'or') { - // OR: We can only chain ORs if all filters can be "where" functions - else we do an ugly .some() check on every document - - let allWheres = true; - let table: Dexie.Collection | undefined = undefined; - for (const crit of criterias) { - const where: WhereClause = !table - ? collection.where(crit.key) - : table.or(crit.key); - const tableOrFilter = filterWhere(where, crit); - - if (typeof tableOrFilter === 'function') { - allWheres = false; - break; - } else { - table = tableOrFilter; - } + /// add extraproperty optional value /// + // because of how the joined table is returned as, we need to aggregate a sort value in the joined table which can be used as a key + if (order === 'extraProperty') { + const extraProp = await db + .selectFrom('extraProperties' as any) + .select('type') + .where('id' as any, '=', extraPropertyID) + .executeTakeFirst(); + + if (!extraPropertyID || !extraProp) { + q = q.select(sql`NULL`.as('sortValue')); + } else { + // maping value type to column + // TODO: add timestamp mapping when implementing that extra property + const valueColumn = extraProp.type === 'text' ? 'textValue' : 'numberValue'; + type = extraProp.type === 'text' ? 
'text' : 'number'; + // Left join the corresponding extraProperty value and select it as sortValue + q = q + .leftJoin('epValues', (join) => + join.onRef('epValues.fileId', '=', 'files.id').on('epValues.epId', '=', extraPropertyID), + ) + .select(`epValues.${valueColumn} as sortValue` as any) as any; } + } - if (allWheres && table) { - return table; + // convert columns to handle nulls in pagination this also applies the natural ordering formating + const { safeColumn, safeOrderValue } = getOrderColumnExpression( + orderColumn, + type, + cursor?.orderValue, + direction, // use original direction since sqlDirection can be altered for pagination + useNaturalOrdering, + order === 'extraProperty', + ); + orderColumn = safeColumn; + + // PAGINATION LOGIC + if (isValidPagination) { + const { id } = cursor; + + if (order === 'random') { + // In random we use a pseudo random but stable hash value based on the cursor, this allow us to use pagination while order by random + const seed = pagOptions.seed ?? 0; + const cursorHash = stableHash(id, seed); + q = q.where((eb) => + eb.or([ + eb(sql`stable_hash(files.id, ${seed})`, operator, cursorHash), + eb.and([ + eb(sql`stable_hash(files.id, ${seed})`, '=', cursorHash), + eb('files.id', operator, id), + ]), + ]), + ); } else { - const critLambdas = criterias.map(filterLambda); - return collection.filter((t) => critLambdas.some((lambda) => lambda(t))); + // Standard pagination: (orderColumn, id) > (orderValue, id) + q = q.where((eb) => + eb.or([ + eb(orderColumn as any, operator, safeOrderValue), + eb.and([eb(orderColumn as any, '=', safeOrderValue), eb('files.id', operator, id)]), + ]), + ); } } + //PAGINATION LOGIC END - // AND: We can get some efficiency for ANDS by separating the first crit from the rest... 
- // Dexie can use a fast "where" search for the initial search - // For consecutive "and" conjunctions, a lambda function must be used - // Since not all operators we need are supported by "where" filters, _filterWhere can also return a lambda. - const [firstCrit, ...otherCrits] = criterias; + // Apply Ordering + if (order === 'random') { + const seed = pagOptions.seed ?? 0; + q = q.orderBy(sql`stable_hash(files.id, ${seed})`, sqlDirection); + } else { + // Default + q = q.orderBy(orderColumn as any, sqlDirection); + } - const where = collection.where(firstCrit.key); - const whereOrFilter = filterWhere(where, firstCrit); - let table = - typeof whereOrFilter !== 'function' ? whereOrFilter : collection.filter(whereOrFilter); + // Allways append order by some unique value, required for pagination + q = q.orderBy('files.id', sqlDirection); - // Then just chain a loop of and() calls. A .every() feels more efficient than chaining table.and() calls - if (otherCrits.length) { - const critLambdas = otherCrits.map(filterLambda); - table = table.and((item) => critLambdas.every((lambda) => lambda(item))); + // Apply limit + if (limit) { + q = q.limit(limit); } - // for (const crit of otherCrits) { - // table = table.and(this._filterLambda(crit)); - // } - return table; -} -/////////////////////////////// -////// FILTERING METHODS ////// -/////////////////////////////// -// There are 'where' and 'lambda filter functions: -// - where: For filtering by a single criteria and for 'or' conjunctions, Dexie exposes indexeddb-accelerated functions. -// Since some of our search operations are not supported by Dexie, some _where functions return a lambda. -// - lambda: For 'and' conjunctions, a naive filter function (lambda) must be used. 
- -function filterWhere( - where: WhereClause, - crit: ConditionDTO, -): Collection | ((val: T) => boolean) { - switch (crit.valueType) { - case 'array': - return filterArrayWhere(where, crit); - case 'string': - return filterStringWhere(where, crit); - case 'number': - return filterNumberWhere(where, crit); - case 'date': - return filterDateWhere(where, crit); - case 'indexSignature': - return filterIndexSignatureLambda(crit); - } + return q; } -function filterLambda(crit: ConditionDTO): (val: T) => boolean { - switch (crit.valueType) { - case 'array': - return filterArrayLambda(crit); - case 'string': - return filterStringLambda(crit); - case 'number': - return filterNumberLambda(crit); - case 'date': - return filterDateLambda(crit); - case 'indexSignature': - return filterIndexSignatureLambda(crit); +/** + * Normalizes a column and its cursor value for consistent sorting. + * Handles natural ordering via padding and provides fallback values + * for null/undefined to ensure stable pagination. + */ +export function getOrderColumnExpression( + columnName: string, + type: 'text' | 'number', + orderValue: unknown, + direction?: OrderDirection, + useNaturalOrdering?: boolean, + useNullFallback?: boolean, +): { safeColumn: RawBuilder; safeOrderValue: unknown } { + const isAsc = direction === OrderDirection.Asc; + const isText = type === 'text'; + + // Set a fallback value per data type, Date is managed as number + let fallbackValue; + if (isText) { + fallbackValue = isAsc ? '\uffff\uffff\uffff' : ''; + } else { + fallbackValue = isAsc ? Number.MAX_SAFE_INTEGER : -Number.MAX_SAFE_INTEGER; } -} -function filterArrayWhere( - where: WhereClause, - crit: ArrayConditionDTO, -): Collection | ((val: T) => boolean) { - // Querying array props: https://dexie.org/docs/MultiEntry-Index - // Check whether to search for empty arrays (e.g. no tags) - if (crit.value.length === 0) { - return crit.operator === 'contains' - ? 
(val: T): boolean => (val as any)[crit.key].length === 0 - : (val: T): boolean => (val as any)[crit.key].length !== 0; - } else { - // contains/notContains 1 or more elements - if (crit.operator === 'contains') { - return where.anyOf(crit.value).distinct(); - } else { - // not contains: there as a noneOf() function we used to use, but it matches every item individually, e.g. - // an item with tags "Apple, Pear" is matched twice: once as Apple, once as Pear; A "notContains Apple" still matches for Pear - return (val: T): boolean => - (val as any)[crit.key].every((val: string) => !crit.value.includes(val)); - } + let safeOrderValue = + useNullFallback && (orderValue === null || orderValue === undefined) + ? fallbackValue + : orderValue; + let colExpression = sql.ref(columnName); + // Add PAD_STRING if needed + if (isText && useNaturalOrdering) { + safeOrderValue = PadString(String(safeOrderValue)); + colExpression = sql`PAD_STRING(${colExpression})`; } + const safeColumn = useNullFallback + ? sql`COALESCE(${colExpression}, ${fallbackValue})` + : colExpression; + + return { safeColumn, safeOrderValue }; } -function filterArrayLambda(crit: ArrayConditionDTO): (val: T) => boolean { - if (crit.operator === 'contains') { - // Check whether to search for empty arrays (e.g. no tags) - return crit.value.length === 0 - ? (val: T): boolean => (val as any)[crit.key].length === 0 - : (val: T): boolean => crit.value.some((item) => (val as any)[crit.key].indexOf(item) !== -1); - } else { - // not contains - return crit.value.length === 0 - ? (val: T): boolean => (val as any)[crit.key].length !== 0 - : (val: T): boolean => - crit.value.every((item) => (val as any)[crit.key].indexOf(item) === -1); +/////////////////////////// +///////// FILTERS ///////// +/////////////////////////// + +type MustIncludeFiles = 'files' extends T ? 
T : never; + +export type ConditionWithConjunction = ConditionDTO & { + conjunction?: SearchConjunction; +}; + +function applyFileFilters, O>( + q: SelectQueryBuilder, + criteria?: ConditionGroupDTO, +): SelectQueryBuilder { + if (!criteria || criteria.children.length === 0) { + return q; } + return q.where((eb) => + expressionFromNode( + eb as ExpressionBuilder, + criteria as unknown as ConditionGroupDTO, + ), + ); } -function filterStringWhere( - where: WhereClause, - crit: StringConditionDTO, -): Collection | ((t: any) => boolean) { - const dbStringOperators = [ - 'equalsIgnoreCase', - 'equals', - 'notEqual', - 'startsWithIgnoreCase', - 'startsWith', - ] as const; - - if ((dbStringOperators as readonly string[]).includes(crit.operator)) { - const funcName = crit.operator as unknown as (typeof dbStringOperators)[number]; - return where[funcName](crit.value); - } - // Use normal string filter as fallback for functions not supported by the DB - return filterStringLambda(crit); +function expressionFromNode( + eb: ExpressionBuilder, + node: ConditionGroupDTO | ConditionDTO, +): ReturnType | ReturnType { + // if it's a condition + if (!('children' in node)) { + return expressionFromCriteria(eb, node); + } + // if it's a group recursively apply criterias + const expressions = node.children.map((child) => expressionFromNode(eb, child)).filter(Boolean); + // if no expressions return true for this criteria node + if (expressions.length === 0) { + return sql`TRUE`; + } + return node.conjunction === 'or' ? 
eb.or(expressions) : eb.and(expressions); } -function filterStringLambda(crit: StringConditionDTO): (t: any) => boolean { - const { key, value } = crit; - const valLow = value.toLowerCase(); +const expressionFromCriteria = ( + eb: ExpressionBuilder, + crit: ConditionDTO, +) => { + switch (crit.valueType) { + case 'string': + return applyStringCondition(eb, crit.key, crit.operator, crit.value); + case 'number': + return applyNumberCondition(eb, crit.key, crit.operator, crit.value); + case 'date': + return applyDateCondition(eb, crit.key, crit.operator, crit.value); + case 'array': + return applyTagArrayCondition(eb, crit.key, crit.operator, crit.value); + case 'indexSignature': + return applyExtraPropertyCondition(eb, crit.key, crit.operator, crit.value); + } +}; - switch (crit.operator) { +function applyStringCondition( + eb: ExpressionBuilder, + key: keyof Files, + operator: StringOperatorType, + value: string, +) { + switch (operator) { case 'equals': - return (t: any) => (t[key] as string) === crit.value; + return eb(`files.${key}`, '=', value); case 'equalsIgnoreCase': - return (t: any) => (t[key] as string).toLowerCase() === valLow; + return eb(sql`lower(${sql.ref(`files.${key}`)})`, '=', value.toLowerCase()); case 'notEqual': - return (t: any) => (t[key] as string).toLowerCase() !== valLow; + return eb(`files.${key}`, '!=', value); case 'contains': - return (t: any) => (t[key] as string).toLowerCase().includes(valLow); + return eb(`files.${key}`, 'like', `%${value}%`); case 'notContains': - return (t: any) => !(t[key] as string).toLowerCase().includes(valLow); + // use NOT LIKE + return eb(`files.${key}`, 'not like', `%${value}%`); case 'startsWith': - return (t: any) => (t[key] as string).startsWith(crit.value); + return eb(`files.${key}`, 'like', `${value}%`); case 'startsWithIgnoreCase': - return (t: any) => (t[key] as string).toLowerCase().startsWith(valLow); + return eb(sql`lower(${sql.ref(`files.${key}`)})`, 'like', `${value.toLowerCase()}%`); case 
'notStartsWith': - return (t: any) => !(t[key] as string).toLowerCase().startsWith(valLow); + return eb(`files.${key}`, 'not like', `${value}%`); default: - console.log('String operator not allowed:', crit.operator); - return () => false; + const _exhaustiveCheck: never = operator; + return _exhaustiveCheck; } } -function filterNumberWhere( - where: WhereClause, - crit: NumberConditionDTO, -): Collection { - switch (crit.operator) { +function applyNumberCondition( + eb: ExpressionBuilder, + key: keyof Files, + operator: NumberOperatorType, + value: number, +) { + switch (operator) { case 'equals': - return where.equals(crit.value); + return eb(`files.${key}`, '=', value); case 'notEqual': - return where.notEqual(crit.value); + return eb(`files.${key}`, '!=', value); case 'smallerThan': - return where.below(crit.value); + return eb(`files.${key}`, '<', value); case 'smallerThanOrEquals': - return where.belowOrEqual(crit.value); + return eb(`files.${key}`, '<=', value); case 'greaterThan': - return where.above(crit.value); + return eb(`files.${key}`, '>', value); case 'greaterThanOrEquals': - return where.aboveOrEqual(crit.value); + return eb(`files.${key}`, '>=', value); default: - const _exhaustiveCheck: never = crit.operator; + const _exhaustiveCheck: never = operator; return _exhaustiveCheck; } } -function filterNumberLambda(crit: NumberConditionDTO): (t: any) => boolean { - const { key, value } = crit; - - switch (crit.operator) { +function applyDateCondition( + eb: ExpressionBuilder, + key: keyof Files, + operator: NumberOperatorType, + value: Date, +) { + // In DB dates are DateAsNumber, convert Date to number. 
+ const startOfDay = new Date(value); + startOfDay.setHours(0, 0, 0, 0); + const endOfDay = new Date(value); + endOfDay.setHours(23, 59, 59, 999); + const s = serializeDate(startOfDay); + const e = serializeDate(endOfDay); + + switch (operator) { case 'equals': - return (t: any) => t[key] === value; + // equal to this day, so between 0:00 and 23:59 + return eb(`files.${key}`, '>=', s).and(`files.${key}`, '<=', e); case 'notEqual': - return (t: any) => t[key] !== value; + // not equal to this day, so before 0:00 or after 23:59 + return eb.or([eb(`files.${key}`, '<', s), eb(`files.${key}`, '>', e)]); case 'smallerThan': - return (t: any) => t[key] < value; + return eb(`files.${key}`, '<', s); case 'smallerThanOrEquals': - return (t: any) => t[key] <= value; + return eb(`files.${key}`, '<=', e); case 'greaterThan': - return (t: any) => t[key] > value; + return eb(`files.${key}`, '>', e); case 'greaterThanOrEquals': - return (t: any) => t[key] >= value; + return eb(`files.${key}`, '>=', s); default: - const _exhaustiveCheck: never = crit.operator; + const _exhaustiveCheck: never = operator; return _exhaustiveCheck; } } -function filterIndexSignatureLambda( - crit: IndexSignatureConditionDTO, -): (t: any) => boolean { - const { - value: [keyIS, valueIS], - } = crit; - - if (isExtraPropertyOperatorType(crit.operator)) { - switch (crit.operator) { - case 'existsInFile': - return (t: any) => t[crit.key][keyIS] !== undefined; - case 'notExistsInFile': - return (t: any) => t[crit.key][keyIS] === undefined; - default: - const _exhaustiveCheck: never = crit.operator; - return _exhaustiveCheck; - } - } - switch (typeof valueIS) { - case 'number': - if (isNumberOperator(crit.operator)) { - const numberCrit: NumberConditionDTO = { - key: keyIS, - operator: crit.operator, - value: valueIS, - valueType: 'number', - }; - const lamda = filterNumberLambda(numberCrit); - return (t: any) => { - const obj = t[crit.key]; - return typeof obj[keyIS] === 'number' ? 
lamda(obj) : false; - }; +/** + * Note / TODO: + * Array and IndexSignature condition appliers would work the same way as the next two examples. + * They could be used for any array or index signature property, but since those properties + * only exist in the DTO objects (not in the raw fetched data from the database) and are instead + * represented through relation tables, a mapping between the DTO property key and the corresponding + * subquery table must be defined. + * + * Currently, since only the "tags" and "extraProperties" properties use these conditions, + * the mapping is hard-coded to those specific database tables in each case. + */ + +function applyTagArrayCondition( + eb: ExpressionBuilder, + key: keyof FileDTO, + operator: ArrayOperatorType, + values: any[], +) { + // If the key is not tags return a neutral condition (always true) to avoid breaking + // the WHERE clause when no filter is applied + if (key !== 'tags') { + return sql`TRUE`; + } + if (values.length === 0) { + const anyTagFiles = eb.selectFrom('fileTags').select('fileId').distinct(); + if (operator === 'contains') { + // files with 0 tags -> NOT EXISTS fileTags for this file + return eb.not(eb('files.id', 'in', anyTagFiles)); + } else { + // notContains empty -> files which have at least one tag + return eb('files.id', 'in', anyTagFiles); + } + } else { + const matchingFiles = eb + .selectFrom('fileTags') + .select('fileId') + .where('tagId', 'in', values) + .distinct(); + if (operator === 'contains') { + return eb('files.id', 'in', matchingFiles); + } else { + // notContains: ensure NOT EXISTS any tag in the list for that file + return eb.not(eb('files.id', 'in', matchingFiles)); + } + } +} + +function applyExtraPropertyCondition( + eb: ExpressionBuilder, + key: keyof FileDTO, + operator: NumberOperatorType | StringOperatorType | ExtraPropertyOperatorType, + valueTuple: [string, any], +) { + // If the key is not extraProperties return a neutral condition (always true) + // to avoid 
breaking the WHERE clause when no filter is applied + if (key !== 'extraProperties') { + return sql`TRUE`; + } + const [epID, innerValue] = valueTuple; + let subquery = eb + .selectFrom('extraProperties') + .innerJoin('epValues', 'extraProperties.id', 'epValues.epId') + .select('epValues.fileId') + .distinct() + .where('extraProperties.id', '=', epID); + //.whereRef('epValues.fileId', '=', sql.ref('files.id')); + + if (operator === 'existsInFile') { + return eb('files.id', 'in', subquery); + } + + if (operator === 'notExistsInFile') { + return eb.not(eb('files.id', 'in', subquery)); + } + + // For typed comparisons add an extra filter to the subquery + if (typeof innerValue === 'number' && isNumberOperator(operator)) { + // prettier-ignore + // use epValues.numberValue + switch (operator) { + case 'equals': + subquery = subquery.where('epValues.numberValue', '=', innerValue); + break; + case 'notEqual': + subquery = subquery.where('epValues.numberValue', '!=', innerValue); + break; + case 'greaterThan': + subquery = subquery.where('epValues.numberValue', '>', innerValue); + break; + case 'greaterThanOrEquals': + subquery = subquery.where('epValues.numberValue', '>=', innerValue); + break; + case 'smallerThan': + subquery = subquery.where('epValues.numberValue', '<', innerValue); + break; + case 'smallerThanOrEquals': + subquery = subquery.where('epValues.numberValue', '<=', innerValue); + break; + default: + const _exhaustiveCheck: never = operator; + return _exhaustiveCheck; } - return () => false; - case 'string': - if (isStringOperator(crit.operator)) { - const stringCrit: StringConditionDTO = { - key: keyIS, - operator: crit.operator, - value: valueIS, - valueType: 'string', - }; - const lamda = filterStringLambda(stringCrit); - return (t: any) => { - const obj = t[crit.key]; - return typeof obj[keyIS] === 'string' ? 
lamda(obj) : false; - }; + } else if (typeof innerValue === 'string' && isStringOperator(operator)) { + // prettier-ignore + // use epValues.textValue + switch (operator) { + case 'equals': + subquery = subquery.where('epValues.textValue', '=', innerValue); + break; + case 'equalsIgnoreCase': + subquery = subquery.where(sql`LOWER(${sql.ref('epValues.textValue')})`, '=', innerValue.toLowerCase()); + break; + case 'notEqual': + subquery = subquery.where('epValues.textValue', '!=', innerValue); + break; + case 'contains': + subquery = subquery.where('epValues.textValue', 'like', `%${innerValue}%`); + break; + case 'notContains': + subquery = subquery.where('epValues.textValue', 'not like', `%${innerValue}%`); + break; + case 'startsWith': + subquery = subquery.where('epValues.textValue', 'like', `${innerValue}%`); + break; + case 'notStartsWith': + subquery = subquery.where('epValues.textValue', 'not like', `${innerValue}%`); + break; + case 'startsWithIgnoreCase': + subquery = subquery.where(sql`LOWER(${sql.ref('epValues.textValue')})`, 'like', `${innerValue.toLowerCase()}%`); + break; + default: + const _exhaustiveCheck: never = operator; + return _exhaustiveCheck; } - return () => false; - default: - return () => false; + } else { + throw new Error('Unsupported indexSignature value type'); } + // Return the expression + return eb('files.id', 'in', subquery); } -function filterDateWhere( - where: WhereClause, - crit: DateConditionDTO, -): Collection { - const dateStart = new Date(crit.value); - dateStart.setHours(0, 0, 0); - const dateEnd = new Date(crit.value); - dateEnd.setHours(23, 59, 59); - - switch (crit.operator) { - // equal to this day, so between 0:00 and 23:59 - case 'equals': - return where.between(dateStart, dateEnd); - case 'smallerThan': - return where.below(dateStart); - case 'smallerThanOrEquals': - return where.below(dateEnd); - case 'greaterThan': - return where.above(dateEnd); - case 'greaterThanOrEquals': - return where.above(dateStart); - // not 
equal to this day, so before 0:00 or after 23:59 - case 'notEqual': - return where.below(dateStart).or(crit.key).above(dateEnd); - default: - const _exhaustiveCheck: never = crit.operator; - return _exhaustiveCheck; +/////////////////// +///// HELPERS ///// +/////////////////// + +async function upsertTable< + Table extends keyof AllusionDB_SQL, + Columns extends ReadonlyArray>, +>( + maxVars: number, + db: Kysely, + table: Table, + values: Insertable[] | Expression, + conflictColumns: Columns, + excludeFromUpdate?: (keyof Insertable)[], + sampleObject?: Insertable, +) { + const isExpression = !Array.isArray(values); + if (!isExpression && values.length === 0) { + return; + } + + // Infer Columns + const referenceRow = (isExpression ? sampleObject : sampleObject || values[0]) as Record< + string, + unknown + >; + if (isExpression && !sampleObject) { + throw new Error( + `sampleObject is required when using SQL expressions for table ${String(table)}`, + ); + } + const columnsToUpdate = Object.keys(referenceRow).filter( + (key) => + !conflictColumns.includes(key as any) && + (!excludeFromUpdate || !excludeFromUpdate.includes(key as any)), + ); + const updateSet = columnsToUpdate.reduce((acc, column) => { + acc[column] = (eb: any) => eb.ref(`excluded.${column}`); + return acc; + }, {} as Record) as UpdateObject; + + let query; + if (isExpression) { + query = db.insertInto(table as keyof AllusionDB_SQL & string).expression(values as any); + } else { + query = db.insertInto(table as keyof AllusionDB_SQL & string); + } + + if (columnsToUpdate.length === 0) { + query = query.onConflict((oc) => oc.columns(conflictColumns as any).doNothing()); + } else { + query = query.onConflict((oc) => + oc.columns(conflictColumns as any).doUpdateSet(updateSet as any), + ); + } + + if (isExpression) { + return query.execute(); + } + + // batching logic for arrays + const batchSize = computeBatchSize(maxVars, referenceRow); + const results = []; + + for (let i = 0; i < values.length; i += 
batchSize) { + const batch = values.slice(i, i + batchSize); + const batchQuery = query.values(batch as any); + results.push(await batchQuery.execute()); } + + return results; } -function filterDateLambda(crit: DateConditionDTO): (t: any) => boolean { - const { key } = crit; - const start = new Date(crit.value); - start.setHours(0, 0, 0); - const end = new Date(crit.value); - end.setHours(23, 59, 59); +function normalizeTags(tags: TagDTO[]) { + const tagIds: ID[] = []; + const subTags: Insertable[] = []; + const tagImplications: Insertable[] = []; + const tagAliases: Insertable[] = []; - switch (crit.operator) { - case 'equals': - return (t: any) => t[key] >= start || t[key] <= end; - case 'notEqual': - return (t: any) => t[key] < start || t[key] > end; - case 'smallerThan': - return (t: any) => t[key] < start; - case 'smallerThanOrEquals': - return (t: any) => t[key] <= end; - case 'greaterThan': - return (t: any) => t[key] > end; - case 'greaterThanOrEquals': - return (t: any) => t[key] >= start; - default: - const _exhaustiveCheck: never = crit.operator; - return _exhaustiveCheck; + for (const tag of tags) { + tagIds.push(tag.id); + for (const [index, subTagId] of (Array.isArray(tag.subTags) ? tag.subTags : []).entries()) { + subTags.push({ tagId: tag.id, subTagId: subTagId, idx: index }); + } + for (const impliedTagId of Array.isArray(tag.impliedTags) ? tag.impliedTags : []) { + tagImplications.push({ tagId: tag.id, impliedTagId: impliedTagId }); + } + // Convert to Set to get rid of duplicates. + const aliases = new Set(Array.isArray(tag.aliases) ? 
tag.aliases : []); + for (const alias of aliases) { + tagAliases.push({ tagId: tag.id, alias: alias }); + } + } + + const normalizedTags = tags.map((tag) => ({ + id: tag.id, + name: tag.name, + color: tag.color, + isHidden: serializeBoolean(tag.isHidden), + isVisibleInherited: serializeBoolean(tag.isVisibleInherited), + isHeader: serializeBoolean(tag.isHeader), + description: tag.description, + dateAdded: serializeDate(tag.dateAdded), + fileCount: tag.fileCount, + isFileCountDirty: serializeBoolean(tag.isFileCountDirty), + })); + + return { tagIds, tags: normalizedTags, subTags, tagImplications, tagAliases }; +} + +function normalizeLocations(sourcelocations: LocationDTO[]) { + const locationNodes: Insertable[] = []; + const locations: Insertable[] = []; + const subLocations: Insertable[] = []; + const locationTags: Insertable[] = []; + const nodeIds: ID[] = []; + + function normalizeLocationNodeRecursive( + node: LocationDTO | SubLocationDTO, + parentId: ID | null, + isRoot: boolean, + ) { + const parentIdvalue = isRoot ? null : parentId; + const pathValue = 'path' in node ? node.path : node.name; + nodeIds.push(node.id); + locationNodes.push({ + id: node.id, + parentId: parentIdvalue, + path: pathValue, + }); + if (isRoot) { + const location = node as LocationDTO; + locations.push({ + nodeId: node.id, + idx: location.index, + isWatchingFiles: serializeBoolean(!!location.isWatchingFiles), + dateAdded: serializeDate(new Date(location.dateAdded)), + }); + } else { + const subLocation = node as SubLocationDTO; + subLocations.push({ + nodeId: node.id, + isExcluded: serializeBoolean(subLocation.isExcluded), + }); + } + // Insert tags + for (const tagId of Array.isArray(node.tags) ? node.tags : []) { + locationTags.push({ + nodeId: node.id, + tagId: tagId, + }); + } + // Recurse for sublocations + for (const sub of Array.isArray(node.subLocations) ? 
node.subLocations : []) { + normalizeLocationNodeRecursive(sub, node.id, false); + } + } + + for (const loc of sourcelocations) { + normalizeLocationNodeRecursive(loc, null, true); + } + return { nodeIds, locationNodes, locations, subLocations, locationTags }; +} + +function normalizeSavedSearches(sourceSearches: FileSearchDTO[]) { + const savedSearchesIds: ID[] = []; + const savedSearches: Insertable[] = []; + const searchGroups: Insertable[] = []; + const searchCriteria: Insertable[] = []; + + function normalizeGroupRecursive( + group: SearchGroupDTO, + savedSearchId: ID, + parentGroupId: ID | null, + ) { + // Insert group + searchGroups.push({ + id: group.id, + name: group.name, + savedSearchId: savedSearchId, + parentGroupId: parentGroupId, + idx: 0, // currently this is static, (insertion order) + conjunction: group.conjunction, + }); + let idx = 0; + for (const child of group.children) { + // if group recurse + if ('children' in child) { + normalizeGroupRecursive(child, savedSearchId, group.id); + } + // id criteria + else { + searchCriteria.push({ + id: child.id, + groupId: group.id, + idx: idx++, + key: child.key, + valueType: child.valueType, + operator: child.operator, + jsonValue: JSON.stringify(child.value), + }); + } + } + } + for (const search of sourceSearches) { + savedSearchesIds.push(search.id); + savedSearches.push({ + id: search.id, + name: search.name, + idx: search.index, + }); + normalizeGroupRecursive(search.rootGroup, search.id, null); + } + return { + savedSearchesIds, + savedSearches, + searchGroups, + searchCriteria, + }; +} + +function normalizeFiles(sourceFiles: FileDTO[]) { + const fileIds: ID[] = []; + const files: Insertable[] = []; + const fileTags: Insertable[] = []; + const epVal: Insertable[] = []; + + for (const file of sourceFiles) { + const fileId = file.id; + fileIds.push(fileId); + files.push({ + id: fileId, + ino: file.ino, + locationId: file.locationId, + relativePath: file.relativePath, + absolutePath: file.absolutePath, 
+ tagSorting: file.tagSorting, + name: file.name, + extension: file.extension, + size: file.size, + width: file.width, + height: file.height, + dateAdded: serializeDate(file.dateAdded), + dateModified: serializeDate(file.dateModified), + dateModifiedOS: serializeDate(file.dateModifiedOS), + dateLastIndexed: serializeDate(file.dateLastIndexed), + dateCreated: serializeDate(file.dateCreated), + }); + // file_tags (tags relations) + for (const tagId of Array.isArray(file.tags) ? file.tags : []) { + fileTags.push({ + fileId: fileId, + tagId: tagId, + }); + } + // ep_values (extra properties relations) + for (const [epId, value] of Object.entries(file.extraProperties)) { + // TODO: Maybe should fetch the ExtraProperties types to assign the type based on + // the extra property definition, but since the DTO types do not overlap for now, this + // is good enough. + if (typeof value === 'number') { + epVal.push({ + fileId, + epId, + numberValue: value, + }); + } else { + epVal.push({ + fileId, + epId, + textValue: value, + }); + } + } } + return { fileIds, files, fileTags, epVal }; } diff --git a/src/backend/backup-scheduler.ts b/src/backend/backup-scheduler.ts index 6e4d9983d..fb1d2920a 100644 --- a/src/backend/backup-scheduler.ts +++ b/src/backend/backup-scheduler.ts @@ -1,43 +1,86 @@ -import Dexie from 'dexie'; -import { exportDB, importDB, peakImportFile } from 'dexie-export-import'; +import { promises as fs } from 'fs'; import fse from 'fs-extra'; import path from 'path'; - -import { debounce } from '../../common/timeout'; -import { DataBackup } from '../api/data-backup'; -import { AUTO_BACKUP_TIMEOUT, NUM_AUTO_BACKUPS } from './config'; - -/** Returns the date at 00:00 today */ -function getToday(): Date { - const today = new Date(); - today.setHours(0); - today.setMinutes(0); - today.setSeconds(0, 0); - return today; -} - -/** Returns the date at the start of the current week (Sunday at 00:00) */ -function getWeekStart(): Date { - const date = getToday(); - const 
dayOfWeek = date.getDay(); - date.setDate(date.getDate() - dayOfWeek); - return date; -} +import Backend from './backend'; +import { AUTO_BACKUP_TIMEOUT, DB_TO_IMPORT_NAME, NUM_AUTO_BACKUPS } from './config'; +import { DataBackup } from 'src/api/data-backup'; +import SQLite from 'better-sqlite3'; +import { debounce } from 'common/timeout'; +import { getToday, getWeekStart } from 'common/core'; export default class BackupScheduler implements DataBackup { - #db: Dexie; + #db!: SQLite.Database; #backupDirectory: string = ''; + #batabaseDirectory: string = ''; #lastBackupIndex: number = 0; #lastBackupDate: Date = new Date(0); - constructor(db: Dexie, directory: string) { - this.#db = db; - this.#backupDirectory = directory; - } + async init( + databasePath: string, + batabaseDirectory: string, + backupDirectory: string, + ): Promise { + this.#batabaseDirectory = batabaseDirectory; + this.#backupDirectory = backupDirectory; - static async init(db: Dexie, backupDirectory: string): Promise { await fse.ensureDir(backupDirectory); - return new BackupScheduler(db, backupDirectory); + await fse.ensureDir(batabaseDirectory); + + const tempJsonToImport = await BackupScheduler.checkAndRestoreDB( + databasePath, + batabaseDirectory, + backupDirectory, + ); + await fse.ensureFile(databasePath); + + this.#db = new SQLite(databasePath, { readonly: true }); + + return tempJsonToImport; + } + + private static async getLastJsonBackupPath(backupDirectory: string): Promise { + const files = await fse.readdir(backupDirectory); + const jsonFiles = files.filter((f) => f.endsWith('.json')); + if (!jsonFiles.length) { + return undefined; + } + const stats = await Promise.all( + jsonFiles.map(async (f) => ({ + path: path.join(backupDirectory, f), + mtime: (await fse.stat(path.join(backupDirectory, f))).mtime, + })), + ); + return stats.reduce((a, b) => (a.mtime > b.mtime ? 
a : b)).path; + } + + // Check if the DB to import exists, + // if it does and its a json we delete the old DB and return the json path to import. + // if it is a sqlite file we replace the old DB with the new file without opening it. + private static async checkAndRestoreDB( + databasePath: string, + batabaseDirectory: string, + backupDirectory: string, + ): Promise { + const importJsonPath = path.join(batabaseDirectory, `${DB_TO_IMPORT_NAME}.json`); + const importDbPath = path.join(batabaseDirectory, `${DB_TO_IMPORT_NAME}.sqlite`); + try { + if ((await fse.pathExists(importJsonPath)) || (await fse.pathExists(importDbPath))) { + console.info('BackupScheduler: Remove previous DB', databasePath); + await fse.remove(databasePath); + await fse.remove(`${databasePath}-shm`); + await fse.remove(`${databasePath}-wal`); + } + if (await fse.pathExists(importJsonPath)) { + return importJsonPath; + } + if (await fse.pathExists(importDbPath)) { + await fse.move(importDbPath, databasePath, { overwrite: true }); + return undefined; + } + } catch (error) { + console.error(error); + } + return this.getLastJsonBackupPath(backupDirectory); } schedule(): void { @@ -75,7 +118,10 @@ export default class BackupScheduler implements DataBackup { // Wait 10 seconds after a change for any other changes before creating a backup. 
#createPeriodicBackup = debounce(async (): Promise => { - const filePath = path.join(this.#backupDirectory, `auto-backup-${this.#lastBackupIndex}.json`); + const filePath = path.join( + this.#backupDirectory, + `auto-backup-${this.#lastBackupIndex}.sqlite`, + ); this.#lastBackupDate = new Date(); this.#lastBackupIndex = (this.#lastBackupIndex + 1) % NUM_AUTO_BACKUPS; @@ -88,14 +134,14 @@ export default class BackupScheduler implements DataBackup { // Check for daily backup await BackupScheduler.#copyFileIfCreatedBeforeDate( filePath, - path.join(this.#backupDirectory, 'daily.json'), + path.join(this.#backupDirectory, 'daily.sqlite'), getToday(), ); // Check for weekly backup await BackupScheduler.#copyFileIfCreatedBeforeDate( filePath, - path.join(this.#backupDirectory, 'weekly.json'), + path.join(this.#backupDirectory, 'weekly.sqlite'), getWeekStart(), ); } catch (e) { @@ -104,38 +150,73 @@ export default class BackupScheduler implements DataBackup { }, 10000); async backupToFile(path: string): Promise { - console.info('IndexedDB: Exporting database backup...', path); - - const blob = await exportDB(this.#db, { prettyJson: false }); - // might be nice to zip it and encode as base64 to save space. 
Keeping it simple for now - await fse.ensureFile(path); - await fse.writeFile(path, await blob.text()); + console.info('SQLite: Exporting database backup...', path); + await this.#db.backup(path); } - async restoreFromFile(path: string): Promise { - console.info('IndexedDB: Importing database backup...', path); - - const buffer = await fse.readFile(path); - const blob = new Blob([buffer]); + async restoreFromFile(sourcePath: string): Promise { + console.info('SQLite: Importing database backup...', sourcePath); - console.debug('Clearing database...'); - Dexie.delete(this.#db.name); + if (!(await fse.pathExists(sourcePath))) { + throw new Error(`Backup file not found: ${sourcePath}`); + } + const ext = path.extname(sourcePath); + const destPath = path.join(this.#batabaseDirectory, `${DB_TO_IMPORT_NAME}${ext}`); + // Replace file to import if exists. + await fse.remove(destPath); + await fse.copyFile(sourcePath, destPath); + console.info(`SQLite: Backup file copied to ${destPath}`); + } - await importDB(blob); - // There also is "importInto" which as an "clearTablesBeforeImport" option, - // but that didn't seem to work correctly (files were always re-created after restarting for some reason) + async restoreEmpty(): Promise { + const emptyDBPath = path.join(this.#batabaseDirectory, `${DB_TO_IMPORT_NAME}.sqlite`); + await fse.remove(emptyDBPath); + await fse.ensureFile(emptyDBPath); + const db = new Backend(); + // Init the DB to apply the migrations but passing an empty string to not import data from backup folder. 
+ await db.init( + emptyDBPath, + '', + () => {}, + async () => {}, + 'migrate', + ); } - async peekFile(path: string): Promise<[numTags: number, numFiles: number]> { - console.info('IndexedDB: Peeking database backup...', path); - const buffer = await fse.readFile(path); - const blob = new Blob([buffer]); - const metadata = await peakImportFile(blob); // heh, they made a typo - const tagsTable = metadata.data.tables.find((t) => t.name === 'tags'); - const filesTable = metadata.data.tables.find((t) => t.name === 'files'); - if (tagsTable && filesTable) { - return [tagsTable.rowCount, filesTable.rowCount]; + async peekFile(sourcePath: string): Promise<[numTags: number, numFiles: number]> { + console.info('SQLite: Peeking database backup...', sourcePath); + const ext = path.extname(sourcePath); + if (ext === '.json') { + const content = await fs.readFile(sourcePath, 'utf8'); + const json = JSON.parse(content); + if (json.formatName !== 'dexie') { + throw new Error('Invalid backup format (expected dexie .json)'); + } + const tables = Object.fromEntries( + json.data.data.map((table: any) => [table.tableName, table.rows]), + ); + return [tables.tags.length, tables.files.length]; + } + if (ext === '.sqlite') { + let db = null; + db = new Backend(); + await db.init( + sourcePath, + '', + () => {}, + async () => {}, + 'readonly', + ); + const tags = (await db.fetchTags()).length; + const files = (await db.countFiles({ files: true }))[0] ?? 0; + db = null; + if (global.gc) { + // Remove the backend instance to get rid of any WAL file. 
+ console.log('Forcing Garbage Collection'); + global.gc(); + } + return [tags, files]; } - throw new Error('Database does not contain a table for files and/or tags'); + throw new Error('Invalid backup format (expected dexie .json or .sqlite)'); } } diff --git a/src/backend/config.ts b/src/backend/config.ts index 268f7a4c5..281f6b3b5 100644 --- a/src/backend/config.ts +++ b/src/backend/config.ts @@ -1,294 +1,69 @@ -import Dexie, { Transaction } from 'dexie'; -import fse from 'fs-extra'; +import { Kysely, Migrator, Migration, MigrationProvider, Logger, LogEvent } from 'kysely'; +import { AllusionDB_SQL } from './schemaTypes'; -import { FileDTO } from '../api/file'; -import { TagDTO } from 'src/api/tag'; -import { ID } from '../api/id'; -import { ExtraProperties, ExtraPropertyType } from 'src/api/extraProperty'; -import { LocationDTO, SubLocationDTO } from 'src/api/location'; - -// The name of the IndexedDB export const DB_NAME = 'Allusion'; +export const DB_TO_IMPORT_NAME = 'DB_TO_IMPORT'; + export const NUM_AUTO_BACKUPS = 6; export const AUTO_BACKUP_TIMEOUT = 1000 * 60 * 10; // 10 minutes -// Schema based on https://dexie.org/docs/Version/Version.stores()#schema-syntax -// Only for the indexes of the DB, not all fields -// Versions help with upgrading DB to new configurations: -// https://dexie.org/docs/Tutorial/Design#database-versioning -const dbConfig: DBVersioningConfig[] = [ - { - // Version 4, 19-9-20: Added system created date - version: 4, - collections: [ - { - name: 'files', - schema: - '++id, locationId, *tags, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', - }, - { - name: 'tags', - schema: '++id', - }, - { - name: 'locations', - schema: '++id, dateAdded', - }, - ], - }, - { - // Version 5, 29-5-21: Added sub-locations - version: 5, - collections: [], - upgrade: (tx: Transaction): void => { - tx.table('locations') - .toCollection() - .modify((location: any) => { - location.subLocations = []; - 
return location; - }); - }, - }, - { - // Version 6, 13-11-21: Added lastIndexed date to File for recreating thumbnails - version: 6, - collections: [], - upgrade: (tx: Transaction): void => { - tx.table('files') - .toCollection() - .modify((file: FileDTO) => { - file.dateLastIndexed = file.dateAdded; - return file; - }); - }, - }, - { - // Version 7, 4-1-22: Added saved searches - version: 7, - collections: [ - { - name: 'searches', - schema: '++id', - }, - ], - }, - { - // Version 8, 9-1-22: Added ino to file for detecting added/removed files as a single rename/move event - version: 8, - collections: [ - { - name: 'files', - schema: - '++id, ino, locationId, *tags, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', - }, - ], - upgrade: (tx: Transaction): void => { - tx.table('files') - .toCollection() - .modify((file: FileDTO) => { - try { - // apparently you can't do async stuff here, even though it is typed to return a PromiseLike :/ - const stats = fse.statSync(file.absolutePath); - // fallback to random value so that it won't be recognized as identical file to others where no ino could be found - file.ino = stats.ino.toString() || Math.random().toString(); - } catch (e) { - console.warn(`Could not get ino for ${file.absolutePath}`); - } - return file; - }); - }, - }, - { - version: 9, - collections: [ - { - name: 'tags', - schema: '++id', - }, - ], - upgrade: (tx: Transaction): void => { - tx.table('tags') - .toCollection() - .modify((tag: TagDTO) => { - tag.impliedTags = []; - return tag; - }); - }, - }, - { - // Version 10, 6-3-25: Added scores and .scores to file - version: 10, - collections: [ - { - name: 'scores', - schema: '++id, name, dateCreated, dateModified', - }, - { - name: 'files', - schema: - '++id, ino, locationId, *tags, scores, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated', - }, - ], - upgrade: (tx: Transaction): void => { - 
tx.table('files') - .toCollection() - .modify((file: any) => { - file.scores = new Map(); - return file; - }); - }, - }, - { - // Version 11, Added OrigDateModified date to File for recreating thumbnails and metadata - version: 11, - collections: [], - upgrade: (tx: Transaction): void => { - tx.table('files') - .toCollection() - .modify((file: FileDTO) => { - file.OrigDateModified = file.dateAdded; - return file; - }); - }, - }, - { - // Version 12 29-5-25: Rename table Scores to extraProperties, redefine scores in files to extraProperties, add isVisibleInherited: bool to tags and add tags to locations. - version: 12, - collections: [ - { - name: 'extraProperties', - schema: '++id, name', - }, - { - name: 'files', - schema: - '++id, ino, locationId, *tags, *extraPropertyIDs, relativePath, &absolutePath, name, extension, size, width, height, dateAdded, dateModified, dateCreated, OrigDateModified', - }, - ], - upgrade: (tx: Transaction): void => { - // Migrate "scores" to "extraProperties" - const oldScores = tx.table('scores'); - const extraProperties = tx.table('extraProperties'); +export const USE_BACKEND_AS_WORKER = true; // easier to debug when false - oldScores.toArray().then((records) => { - const transformed = records.map((oldRecord: any) => { - return { - ...oldRecord, - type: ExtraPropertyType.number, - dateAdded: oldRecord.dateCreated, - dateCreated: undefined, - dateModified: undefined, - }; - }); - const cleaned = transformed.map((r) => { - delete r.dateCreated; - delete r.dateModified; - return r; - }); +export const PAD_STRING_LENGTH = 10; - return extraProperties.bulkAdd(cleaned); - }); +//Register the migrations here. 
+class InlineMigrationProvider implements MigrationProvider { + #context: Record; - // Migrate property "scores" in files to "extraProperties" - tx.table('files') - .toCollection() - .modify((file: any) => { - if (file.scores instanceof Map) { - file.extraPropertyIDs = Array.from(file.scores.keys()); - file.extraProperties = Object.fromEntries(file.scores) as ExtraProperties; - } else { - file.extraPropertyIDs = []; - file.extraProperties = {}; - } - delete file.scores; - return file; - }); - - // Add isVisibleInherited to tags - tx.table('tags') - .toCollection() - .modify((tag: any) => { - tag.isVisibleInherited = true; - return tag; - }); + constructor(context: Record = {}) { + this.#context = context; + } + async getMigrations(): Promise> { + const context = this.#context; + return { + '000_initial': await import('./migrations/000_initial'), + '001_migrateJSON': (await import('./migrations/001_migrateJSON')).default(context), + }; + } +} - // Add tags to locations and sublocations - function addTagsRecursively(location: any): any { - location.tags = []; - if (Array.isArray(location.subLocations)) { - location.subLocations = location.subLocations.map((sublocation: any) => - addTagsRecursively({ ...sublocation }), - ); - } - return location; - } +export async function migrateToLatest( + db: Kysely, + context: { jsonToImport: string | undefined }, +): Promise { + const migrator = new Migrator({ + db, + provider: new InlineMigrationProvider(context), + }); - tx.table('locations') - .toCollection() - .modify((location: any) => { - return addTagsRecursively(location); - }); - }, - }, - { - // Version 13 29-5-25: Drop table scores - version: 13, - collections: [ - { - name: 'scores', - schema: null, - }, - ], - }, - { - // Version 14 05-08-25: Added isHeader, aliases and description to tags. 
- version: 14, - collections: [], - upgrade: (tx: Transaction): void => { - tx.table('tags') - .toCollection() - .modify((tag: TagDTO) => { - tag.isHeader = false; - tag.aliases = []; - tag.description = ''; - return tag; - }); - // Add - tx.table('locations') - .toCollection() - .modify((location: LocationDTO) => { - location.isWatchingFiles = true; - return location; - }); - }, - }, -]; + const { error, results } = await migrator.migrateToLatest(); -type DBVersioningConfig = { - version: number; - collections: Array<{ name: string; schema: string | null }>; - upgrade?: (tx: Transaction) => void | Promise; -}; + results?.forEach((it) => { + if (it.status === 'Success') { + console.log(`migration "${it.migrationName}" was executed successfully`); + } else if (it.status === 'Error') { + console.error(`failed to execute migration "${it.migrationName}"`); + } + }); -/** - * A function that should be called before using the database. - * It initializes the object stores - */ -export function dbInit(dbName: string): Dexie { - const db = new Dexie(dbName); + if (error) { + console.error('failed to migrate'); + console.error(error); + } +} - // Initialize for each DB version: https://dexie.org/docs/Tutorial/Design#database-versioning - for (const config of dbConfig) { - const { version, collections, upgrade } = config; - const dbSchema: { [key: string]: string | null } = {}; - collections.forEach(({ name, schema }) => (dbSchema[name] = schema)); - const stores = db.version(version).stores(dbSchema); - if (upgrade) { - stores.upgrade(upgrade); - } +export const kyselyLogger: Logger = (event: LogEvent): void => { + if (event.level === 'query') { + console.log('SQL:', event.query.sql); + console.log('Parameters:', event.query.parameters); + console.log('Duration:', event.queryDurationMillis, 'ms'); } - return db; -} + if (event.level === 'error') { + console.error('SQL Error:', event.error); + console.error('Failed Query:', event.query.sql); + console.error('Parameters:', 
event.query.parameters); + } +}; diff --git a/src/backend/migrations/000_initial.ts b/src/backend/migrations/000_initial.ts new file mode 100644 index 000000000..e2e344919 --- /dev/null +++ b/src/backend/migrations/000_initial.ts @@ -0,0 +1,203 @@ +/* eslint-disable prettier/prettier */ +import { Kysely, sql } from 'kysely'; + +/* +Migration to create the SQLite database. Note that SQL table and column names +are in snake_case, which will later be converted to camelCase +by the Kysely camel case plugin. +*/ + +export async function up(db: Kysely): Promise { + //// TAGS //// + await db.schema + .createTable('tags') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('name', 'text', (col) => col.notNull()) + .addColumn('date_added', 'timestamp', (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`)) + .addColumn('color', 'text') + .addColumn('is_hidden', 'boolean', (col) => col.notNull().defaultTo(0)) + .addColumn('is_visible_inherited', 'boolean', (col) => col.notNull().defaultTo(1)) + .addColumn('is_header', 'boolean', (col) => col.notNull().defaultTo(0)) + .addColumn('description', 'text') + .addColumn('file_count', 'integer', (col) => col.notNull().defaultTo(0)) + .addColumn('is_file_count_dirty', 'boolean', (col) => col.notNull().defaultTo(1)) + .execute(); + + await db.schema + .createTable('sub_tags') + .addColumn('tag_id', 'text', (col) => col.notNull()) + .addColumn('sub_tag_id', 'text', (col) => col.notNull()) + .addColumn('idx', 'integer', (col) => col.notNull()) + .addPrimaryKeyConstraint('pk_tag_implications', ['tag_id', 'sub_tag_id']) + .addForeignKeyConstraint('fk_tag_implications_tag', ['tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_tag_implications_implied', ['sub_tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .addUniqueConstraint('uq_sub_tags_sub_tag', ['sub_tag_id']) + .execute(); + + await db.schema + .createTable('tag_implications') + .addColumn('tag_id', 'text', 
(col) => col.notNull()) + .addColumn('implied_tag_id', 'text', (col) => col.notNull()) + .addPrimaryKeyConstraint('pk_tag_implications', ['tag_id', 'implied_tag_id']) + .addForeignKeyConstraint('fk_tag_implications_tag', ['tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_tag_implications_implied', ['implied_tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); + + await db.schema + .createTable('tag_aliases') + .addColumn('tag_id', 'text', (col) => col.notNull()) + .addColumn('alias', 'text', (col) => col.notNull()) + .addPrimaryKeyConstraint('pk_tag_aliases', ['tag_id', 'alias']) + .addForeignKeyConstraint('fk_tag_aliases_tag', ['tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); + + //// LOCATIONS //// + await db.schema + .createTable('location_nodes') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('parent_id', 'text') + .addColumn('path', 'text', (col) => col.notNull()) + .addForeignKeyConstraint('fk_location_node_parent', ['parent_id'], 'location_nodes', ['id'], (cb) => cb.onDelete('cascade')) + .addUniqueConstraint('uq_location_node_parent_path', ['parent_id', 'path']) + .execute(); + + await db.schema + .createTable('locations') + .addColumn('node_id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('date_added', 'timestamp', (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`)) + .addColumn('idx', 'integer', (col) => col.notNull()) + .addColumn('is_watching_files', 'boolean', (col) => col.notNull().defaultTo(0)) + .addForeignKeyConstraint('fk_location_node', ['node_id'], 'location_nodes', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); + + await db.schema + .createTable('sub_locations') + .addColumn('node_id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('is_excluded', 'boolean', (col) => col.notNull().defaultTo(0)) + .addForeignKeyConstraint('fk_sub_location_node', ['node_id'], 'location_nodes', ['id'], (cb) => 
cb.onDelete('cascade')) + .execute(); + + await db.schema + .createTable('location_tags') + .addColumn('node_id', 'text', (col) => col.notNull()) + .addColumn('tag_id', 'text', (col) => col.notNull()) + .addPrimaryKeyConstraint('pk_location_tags', ['node_id', 'tag_id']) + .addForeignKeyConstraint('fk_location_tags_node', ['node_id'], 'location_nodes', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_location_tags_tag', ['tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); + + //// FILES //// + await db.schema + .createTable('files') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('ino', 'text', (col) => col.notNull()) + .addColumn('location_id', 'text', (col) => col.notNull()) + .addColumn('relative_path', 'text', (col) => col.notNull()) + .addColumn('absolute_path', 'text', (col) => col.notNull().unique()) + .addColumn('tag_sorting', 'text', (col) => col.notNull()) + .addColumn('date_added', 'timestamp', (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`)) + .addColumn('date_modified', 'timestamp') + .addColumn('date_modified_os', 'timestamp') + .addColumn('date_last_indexed', 'timestamp') + .addColumn('name', 'text', (col) => col.notNull()) + .addColumn('extension', 'text') + .addColumn('size', 'integer') + .addColumn('width', 'integer') + .addColumn('height', 'integer') + .addColumn('date_created', 'timestamp') + .addForeignKeyConstraint('fk_files_location', ['location_id'], 'locations', ['node_id'], (cb) => cb.onDelete('cascade')) + .execute(); + + await db.schema + .createTable('file_tags') + .addColumn('file_id', 'text', (col) => col.notNull()) + .addColumn('tag_id', 'text', (col) => col.notNull()) + .addPrimaryKeyConstraint('pk_file_tags', ['file_id', 'tag_id']) + .addForeignKeyConstraint('fk_file_tags_file', ['file_id'], 'files', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_file_tags_tag', ['tag_id'], 'tags', ['id'], (cb) => cb.onDelete('cascade')) + 
.execute(); + await db.schema.createIndex('idx_file_tags_tag').on('file_tags').column('tag_id').execute(); + await db.schema.createIndex('idx_file_tags_file').on('file_tags').column('file_id').execute(); + + //// EXTRA PROPERTIES //// + await db.schema + .createTable('extra_properties') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('type', 'text', (col) => col.notNull()) + .addColumn('name', 'text', (col) => col.notNull()) + .addColumn('date_added', 'timestamp', (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`)) + .execute(); + + await db.schema + .createTable('ep_values') + .addColumn('file_id', 'text', (col) => col.notNull()) + .addColumn('ep_id', 'text', (col) => col.notNull()) + .addColumn('text_value', 'text') + .addColumn('number_value', 'integer') + .addColumn('timestamp_value', 'timestamp') + .addPrimaryKeyConstraint('pk_ep_values_text', ['file_id', 'ep_id']) + .addForeignKeyConstraint('fk_ep_values_text_file', ['file_id'], 'files', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_ep_values_text_ep', ['ep_id'], 'extra_properties', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); + await db.schema.createIndex('idx_ep_values_text_value').ifNotExists().on('ep_values').column('text_value').execute(); + await db.schema.createIndex('idx_ep_values_number_value').ifNotExists().on('ep_values').column('number_value').execute(); + await db.schema.createIndex('idx_ep_values_timestamp_value').ifNotExists().on('ep_values').column('timestamp_value').execute(); + + //// SAVED SEARCHES //// +await db.schema + .createTable('saved_searches') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('name', 'text', (col) => col.notNull()) + .addColumn('idx', 'integer', (col) => col.notNull()) + .execute(); + +await db.schema + .createTable('search_groups') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('name', 'text', (col) => col.notNull()) + 
.addColumn('saved_search_id', 'text', (col) => col.notNull()) + .addColumn('parent_group_id', 'text') + .addColumn('idx', 'integer', (col) => col.notNull()) + .addColumn('conjunction', 'text', (col) => col.notNull()) + .addForeignKeyConstraint('fk_search_groups_saved_search', ['saved_search_id'], 'saved_searches', ['id'], (cb) => cb.onDelete('cascade')) + .addForeignKeyConstraint('fk_search_groups_parent', ['parent_group_id'], 'search_groups', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); +await db.schema.createIndex('idx_search_groups_saved_search').on('search_groups').column('saved_search_id').execute(); +await db.schema.createIndex('idx_search_groups_parent').on('search_groups').column('parent_group_id').execute(); + +await db.schema + .createTable('search_criteria') + .addColumn('id', 'text', (col) => col.primaryKey().notNull()) + .addColumn('group_id', 'text', (col) => col.notNull()) + .addColumn('idx', 'integer', (col) => col.notNull()) + .addColumn('key', 'text', (col) => col.notNull()) + .addColumn('value_type', 'text', (col) => col.notNull()) + .addColumn('operator', 'text', (col) => col.notNull()) + .addColumn('json_value', 'text', (col) => col.notNull()) + .addForeignKeyConstraint('fk_search_criteria_group', ['group_id'], 'search_groups', ['id'], (cb) => cb.onDelete('cascade')) + .execute(); +await db.schema.createIndex('idx_search_criteria_group').on('search_criteria').column('group_id').execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema.dropIndex('idx_ep_values_text_value').execute(); + await db.schema.dropIndex('idx_ep_values_number_value').execute(); + await db.schema.dropIndex('idx_ep_values_timestamp_value').execute(); + await db.schema.dropIndex('idx_file_tags_file').execute(); + await db.schema.dropIndex('idx_file_tags_tag').execute(); + await db.schema.dropTable('search_criteria').execute(); + await db.schema.dropTable('saved_searches').execute(); + await 
db.schema.dropTable('ep_values').execute(); // fix: 'up' creates a single ep_values table, not per-type tables
+  await db.schema.dropTable('search_groups').execute(); // fix: was missing from down(), created by up()
+  await db.schema.dropTable('sub_tags').execute(); // fix: was missing from down(), created by up()
+  await db.schema.dropTable('extra_properties').execute();
+  await db.schema.dropTable('file_tags').execute();
+  await db.schema.dropTable('files').execute();
+  await db.schema.dropTable('location_tags').execute();
+  await db.schema.dropTable('sub_locations').execute();
+  await db.schema.dropTable('locations').execute();
+  await db.schema.dropTable('location_nodes').execute();
+  await db.schema.dropTable('tag_aliases').execute();
+  await db.schema.dropTable('tag_implications').execute();
+  await db.schema.dropTable('tags').execute();
+}
diff --git a/src/backend/migrations/001_migrateJSON.ts b/src/backend/migrations/001_migrateJSON.ts
new file mode 100644
index 000000000..240c08cf0
--- /dev/null
+++ b/src/backend/migrations/001_migrateJSON.ts
@@ -0,0 +1,380 @@
+import { promises as fs } from 'fs';
+import { Insertable, InsertObject, Kysely, sql } from 'kysely';
+import { generateId, ID } from 'src/api/id';
+import { setTimeout as delay } from 'node:timers/promises';
+import {
+  AllusionDB_SQL,
+  ExtraProperties,
+  EpValues,
+  Files,
+  FileTags,
+  LocationNodes,
+  Locations,
+  LocationTags,
+  SavedSearches,
+  serializeBoolean,
+  serializeDate,
+  SubLocations,
+  SearchCriteria,
+  TagImplications,
+  TagAliases,
+  SubTags,
+  Tags,
+  SearchGroups,
+} from '../schemaTypes';
+import { ExtraPropertyType } from 'src/api/extraProperty';
+import { computeBatchSize, getSqliteMaxVariables } from 'src/backend/backend';
+
+export default (context: { jsonToImport?: string }) => ({
+  async up(db: Kysely): Promise {
+    const jsonToImport = context.jsonToImport;
+    await restoreFromOldJsonFormat(db, jsonToImport);
+  },
+  async down(_: Kysely): Promise {
+    // No rollback for imports, maybe delete all the data
+    void _;
+  },
+});
+
+export async function restoreFromOldJsonFormat(
+  db: Kysely,
+  backupFilePath: string |
undefined, +): Promise { + if (backupFilePath === undefined) { + return; + } + const content = await fs.readFile(backupFilePath, 'utf8'); + const json = JSON.parse(content); + console.info('===================================================='); + console.info('-> Importing Dexie backup from', backupFilePath); + if (json.formatName !== 'dexie') { + throw new Error('Invalid backup format (expected dexie)'); + } + + const tables = Object.fromEntries( + json.data.data.map((table: any) => [table.tableName, table.rows]), + ); + + const MAX_VARS = await getSqliteMaxVariables(db); + console.info(`MAX_VARS: ${MAX_VARS}`); + + const saveEntries = async ( + entityName: TableName, + entries: InsertObject[], + ) => { + let errors = 0; + const batchSize = computeBatchSize(MAX_VARS, entries.find(Boolean)); + const MAX_RETRIES = 5; + const BASE_DELAY_MS = 100; + console.info( + `Importing ${entries.length} ${entityName} from old format. (Batch size: ${batchSize})`, + ); + await db.transaction().execute(async (trx) => { + for (let i = 0; i < entries.length; i += batchSize) { + const batch = entries.slice(i, i + batchSize); + + let attempt = 0; + while (true) { + try { + await trx + .insertInto(entityName) + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(); + // If success, break the while + break; + } catch (err: any) { + if (err.code === 'SQLITE_BUSY' && attempt < MAX_RETRIES) { + const wait = BASE_DELAY_MS * Math.pow(2, attempt); + console.warn( + `SQLITE_BUSY on ${entityName} (batch ${ + i / batchSize + 1 + }). Retrying in ${wait} ms... 
(attempt ${attempt + 1}/${MAX_RETRIES})`, + ); + attempt++; + await delay(wait); + continue; // retry + } + + console.warn(`❌ Error while inserting ${entityName}`, err); + errors += batchSize; + break; // stop retry loop for this batch + } + } + } + }); + console.info(`Finished importing ${entityName}: ${errors} errors.`); + }; + + // Disable foreign key constraints + await sql`PRAGMA foreign_keys = OFF;`.execute(db); + + /// IMPORTING DATA /// + + // Import tags + const { tags, subTags, tagImplications, tagAliases } = normalizeTags(tables.tags ?? []); + + await saveEntries('tags', tags); + await saveEntries('subTags', subTags); + await saveEntries('tagImplications', tagImplications); + await saveEntries('tagAliases', tagAliases); + + // Import locations + const { locationNodes, locations, subLocations } = normalizeLocations(tables.locations ?? []); + + await saveEntries('locationNodes', locationNodes); + await saveEntries('locations', locations); + await saveEntries('subLocations', subLocations); + + // Import extra properties definitions + const extraProperties: Insertable[] = ( + tables.extraProperties ? (tables.extraProperties as Array) : [] + ).map((ep) => ({ + id: ep.id ?? generateId(), + type: ep.type ?? ExtraPropertyType.text, + name: ep.name ?? '(unnamed)', + dateAdded: serializeDate(ep.dateAdded ? new Date(ep.dateAdded) : new Date()), + })); + + await saveEntries('extraProperties', extraProperties); + + // Import files + const { files, fileTags, epVal } = normalizeFiles(tables.files ?? [], extraProperties); + + await saveEntries('files', files); + await saveEntries('fileTags', fileTags); + await saveEntries('epValues', epVal); + + // Import seved searches + const { savedSearches, searchGroups, searchCriteria } = normalizeSavedSearches( + tables.searches ?? 
[], + ); + await saveEntries('savedSearches', savedSearches); + await saveEntries('searchGroups', searchGroups); + await saveEntries('searchCriteria', searchCriteria); + + // Re-enable foreign keys + await sql`PRAGMA foreign_keys = ON;`.execute(db); + + // Validate foreign keys + const fkCheck = await sql`PRAGMA foreign_key_check;`.execute(db); + if (fkCheck.rows.length > 0) { + console.warn('Foreign key issues found:', fkCheck.rows); + // optional cleanup: remove invalid references + await sql`DELETE FROM files WHERE location_id NOT IN (SELECT node_id FROM locations);`.execute( + db, + ); + await sql`DELETE FROM file_tags WHERE tag_id NOT IN (SELECT id FROM tags);`.execute(db); + } else { + console.info('Complete succes! no foreign key issues found:', fkCheck.rows); + } + + console.info('Dexie backup import completed successfully.'); + console.info('===================================================='); +} + +function normalizeTags(tags: any[]) { + const subTags: Insertable[] = []; + const tagImplications: Insertable[] = []; + const tagAliases: Insertable[] = []; + + for (const tag of tags) { + for (const [index, subTagId] of (Array.isArray(tag.subTags) ? tag.subTags : []).entries()) { + subTags.push({ tagId: tag.id, subTagId: subTagId, idx: index }); + } + + for (const impliedTagId of Array.isArray(tag.impliedTags) ? tag.impliedTags : []) { + tagImplications.push({ tagId: tag.id, impliedTagId: impliedTagId }); + } + + // Convert to Set to get rid of duplicates. + const aliases = new Set(Array.isArray(tag.aliases) ? tag.aliases : []); + for (const alias of aliases) { + tagAliases.push({ tagId: tag.id, alias: alias }); + } + } + + const normalizedTags: Insertable[] = tags.map((tag) => ({ + id: tag.id ?? generateId(), + name: tag.name ?? '(untitled)', + color: tag.color ?? '', + isHidden: serializeBoolean(!!tag.isHidden), + isVisibleInherited: serializeBoolean(!!tag.isVisibleInherited), + isHeader: serializeBoolean(!!tag.isHeader), + description: tag.description ?? 
'', + dateAdded: serializeDate(tag.dateAdded ? new Date(tag.dateAdded) : new Date()), + fileCount: tag.fileCount ?? 0, + isFileCountDirty: serializeBoolean(tag.isFileCountDirty ?? true), + })); + + return { tags: normalizedTags, subTags, tagImplications, tagAliases }; +} + +function normalizeLocations(sourcelocations: any[]) { + const locationNodes: Insertable[] = []; + const locations: Insertable[] = []; + const subLocations: Insertable[] = []; + const locationTags: Insertable[] = []; + + function normalizeLocationNodeRecursive( + node: any, //LocationDTO | SubLocationDTO, + parentId: ID, + isRoot: boolean, + ) { + const nodeId = node.id ?? generateId(); + const parentIdvalue = isRoot ? null : parentId; + const pathValue = isRoot ? node.path ?? '' : node.name ?? ''; + // Insert into locationNodes + locationNodes.push({ + id: nodeId, + parentId: parentIdvalue, + path: pathValue, + }); + if (isRoot) { + locations.push({ + nodeId: nodeId, + idx: node.index ?? 0, + isWatchingFiles: serializeBoolean(!!node.isWatchingFiles), + dateAdded: serializeDate(node.dateAdded ? new Date(node.dateAdded) : new Date()), + }); + } else { + // Insert into sub_location + subLocations.push({ + nodeId: nodeId, + isExcluded: serializeBoolean(!!node.isExcluded), + }); + } + // Insert tags + for (const tagId of Array.isArray(node.tags) ? node.tags : []) { + locationTags.push({ + nodeId: nodeId, + tagId: tagId, + }); + } + // Recurse for sublocations + for (const sub of Array.isArray(node.subLocations) ? node.subLocations : []) { + normalizeLocationNodeRecursive(sub, nodeId, false); + } + } + + for (const loc of sourcelocations) { + normalizeLocationNodeRecursive(loc, loc.id ?? 
generateId(), true); + } + return { locationNodes, locations, subLocations }; +} + +function normalizeFiles(sourceFiles: any[], extraProperties: Insertable[]) { + const files: Insertable[] = []; + const fileTags: Insertable[] = []; + const epVal: Insertable[] = []; + + for (const file of sourceFiles) { + const fileId = file.id ?? generateId(); + files.push({ + id: fileId, + ino: file.ino ?? '', + locationId: file.locationId, + relativePath: file.relativePath ?? '', + absolutePath: file.absolutePath ?? '', + tagSorting: file.tagsSorting ?? 'none', + name: file.name ?? '(unnamed)', + extension: file.extension ?? '', + size: file.size ?? 10, + width: file.width ?? 10, + height: file.height ?? 10, + dateAdded: serializeDate(file.dateAdded ? new Date(file.dateAdded) : new Date()), + dateModified: serializeDate(file.dateModified ? new Date(file.dateModified) : new Date()), + dateModifiedOS: serializeDate( + file.OrigDateModified + ? new Date(file.OrigDateModified) + : file.dateModifiedOS + ? new Date(file.dateModifiedOS) + : new Date(), + ), + dateLastIndexed: serializeDate( + file.dateLastIndexed ? new Date(file.dateLastIndexed) : new Date(), + ), + dateCreated: serializeDate(file.dateCreated ? new Date(file.dateCreated) : new Date()), + }); + + // file_tags (tags relations) + for (const tagId of Array.isArray(file.tags) ? file.tags : []) { + fileTags.push({ + fileId: fileId, + tagId: tagId, + }); + } + + // ep_values (extra properties relations) + if (file.extraPropertyIDs) { + for (const epId of Array.isArray(file.extraPropertyIDs) ? file.extraPropertyIDs : []) { + const epRow = extraProperties.find((ep: any) => ep.id === epId); + + const value = file.extraProperties?.[epId]; + if (value !== undefined && value !== null) { + const epType = epRow?.type ?? 
typeof value; + if (epType === 'number') { + epVal.push({ + fileId, + epId, + numberValue: value, + }); + } else { + epVal.push({ + fileId, + epId, + textValue: value, + }); + } + } + } + } + } + return { files, fileTags, epVal }; +} + +function normalizeSavedSearches(sourceSearches: any[]) { + const savedSearches: Insertable[] = []; + const searchGroups: Insertable[] = []; + const searchCriteria: Insertable[] = []; + + for (const search of sourceSearches) { + const searchId = search.id ?? generateId(); + // Extract saved search + savedSearches.push({ + id: searchId, + name: search.name ?? '(unnamed search)', + idx: search.index ?? 0, + }); + // Root group + const rootGroupId = generateId(); + searchGroups.push({ + id: rootGroupId, + name: '', + savedSearchId: searchId, + parentGroupId: null, + idx: 0, + conjunction: search.matchAny ? 'or' : 'and', + }); + //Extract Criterias + const criteriaArray = Array.isArray(search.criteria) ? search.criteria : []; + for (const [idx, crit] of criteriaArray.entries()) { + const criteriaId = generateId(); + searchCriteria.push({ + id: criteriaId, + groupId: rootGroupId, + idx: idx, + key: crit.key ?? 'name', + valueType: crit.valueType ?? 'string', + operator: crit.operator ?? 'equals', + jsonValue: JSON.stringify(crit.value ?? 'error'), + }); + } + } + + return { + savedSearches, + searchGroups, + searchCriteria, + }; +} diff --git a/src/backend/schemaTypes.ts b/src/backend/schemaTypes.ts new file mode 100644 index 000000000..d95a3394b --- /dev/null +++ b/src/backend/schemaTypes.ts @@ -0,0 +1,176 @@ +/** + * In this file we define the types that Kysely will use to provide typing and build SQL queries. + * These types are a type-level equivalent representation of the actual SQLite database schema. + * + * Each exported interface represents a table in the SQLite database. Some schemas differ from + * Allusion's DTO API in favor of better normalization, avoiding nulls, and ensuring query-building compatibility. 
+ * The serialization to and from the DTO API is handled by the data-storage implementation (backend) class.
+ *
+ * Note: These are only TypeScript types. Updating them will not update the database automatically.
+ * To apply changes to the actual schema you must manually write Kysely migrations,
+ * ensuring that the database schema is kept in sync with these definitions.
+ *
+ * Note: All index properties are named idx because index is a reserved keyword in SQLite.
+ */
+
+import { ColumnType } from 'kysely';
+import { ID } from '../api/id';
+import { CriteriaValueType, OperatorType } from 'src/api/search-criteria';
+import { FILE_TAGS_SORTING_TYPE, FileDTO, IMG_EXTENSIONS_TYPE } from 'src/api/file';
+import { ExtraPropertyType } from 'src/api/extraProperty';
+import { SearchConjunction } from 'src/api/data-storage-search';
+
+export type BooleanAsNumber = number;
+export const serializeBoolean = (value: boolean): number => (value ? 1 : 0);
+export const deserializeBoolean = (value: number): boolean => value === 1;
+export type DateAsNumber = number;
+export const serializeDate = (value: Date): number => value.getTime();
+export const deserializeDate = (value: number): Date => new Date(value);
+
+export type AllusionDB_SQL = {
+  tags: Tags;
+  subTags: SubTags;
+  tagImplications: TagImplications;
+  tagAliases: TagAliases;
+  locationNodes: LocationNodes;
+  locations: Locations;
+  subLocations: SubLocations;
+  locationTags: LocationTags;
+  files: Files;
+  fileTags: FileTags;
+  extraProperties: ExtraProperties;
+  epValues: EpValues;
+  savedSearches: SavedSearches;
+  searchGroups: SearchGroups;
+  searchCriteria: SearchCriteria;
+};
+
+///// TAGS /////
+
+export type Tags = {
+  id: ColumnType; //pk
+  name: string;
+  dateAdded: ColumnType;
+  color: string;
+  isHidden: BooleanAsNumber;
+  isVisibleInherited: BooleanAsNumber;
+  isHeader: BooleanAsNumber;
+  description: string;
+  fileCount: number;
+  isFileCountDirty: BooleanAsNumber;
+};
+
+export type SubTags = {
+  tagId: 
ID; //pk fk + subTagId: ID; //pk fk + idx: number; +}; + +export type TagImplications = { + tagId: ID; //pk fk + impliedTagId: ID; //pk fk +}; + +export type TagAliases = { + tagId: ID; //pk + alias: string; //pk +}; + +/// LOCATIONS /// + +export type LocationNodes = { + id: ColumnType; //pk + parentId: ID | null; //fk + path: string; +}; + +export type Locations = { + nodeId: ID; //pk fk + dateAdded: ColumnType; + idx: number; + isWatchingFiles: BooleanAsNumber; +}; + +export type SubLocations = { + nodeId: ID; //pk fk + isExcluded: BooleanAsNumber; +}; + +export type LocationTags = { + nodeId: ID; //pk fk + tagId: ID; //pk fk +}; + +/// FILES /// + +export type Files = { + id: ColumnType; //pk + ino: string; + locationId: ID; //fk - to Location, not node table + relativePath: string; + absolutePath: string; + tagSorting: FILE_TAGS_SORTING_TYPE; + dateAdded: ColumnType; + dateModified: DateAsNumber; + dateModifiedOS: DateAsNumber; + dateLastIndexed: DateAsNumber; + name: string; + extension: IMG_EXTENSIONS_TYPE; + size: number; + width: number; + height: number; + dateCreated: DateAsNumber; +}; + +export type FileTags = { + fileId: ID; //pk fk + tagId: ID; //pk fk +}; + +/// EXTRA PROPERTIES /// + +export type ExtraProperties = { + id: ColumnType; //pk + type: ExtraPropertyType; + name: string; + dateAdded: ColumnType; +}; + +export type EpValues = { + fileId: ID; //pk fk + epId: ID; //pk fk + textValue: string | null; + numberValue: number | null; + timestampValue: DateAsNumber | null; +}; + +/// SAVED SEARCHES /// + +export type SavedSearches = { + id: ColumnType; //pk + name: string; + idx: number; +}; + +export type SearchGroups = { + id: ColumnType; + name: string; + savedSearchId: ID; + parentGroupId: ID | null; + idx: number; + conjunction: SearchConjunction; +}; + +export type SearchCriteria = { + id: ColumnType; //pk + groupId: ID; //fk + idx: number; + key: keyof FileDTO; + valueType: CriteriaValueType; + operator: OperatorType; + // Since we only need 
to filter by saved_search_id and not by individual value types, + // all values are stored as stringified JSON regardless of type. + // This simplifies the schema (single column) and querying. The type check is managed + // inside the app logic in the searchStore and thir related api types. + jsonValue: string; +}; diff --git a/src/frontend/Preview.tsx b/src/frontend/Preview.tsx index 2f336062f..fef66e165 100644 --- a/src/frontend/Preview.tsx +++ b/src/frontend/Preview.tsx @@ -18,12 +18,12 @@ const PreviewApp = observer(() => { useEffect(() => uiStore.enableSlideMode(), [uiStore]); const handleLeftButton = useCallback( - () => uiStore.setFirstItem(Math.max(0, uiStore.firstItem - 1)), + () => uiStore.setFirstItem(Math.max(0, uiStore.firstItemIndex - 1)), [uiStore], ); const handleRightButton = useCallback( - () => uiStore.setFirstItem(Math.min(uiStore.firstItem + 1, fileStore.fileList.length - 1)), + () => uiStore.setFirstItem(Math.min(uiStore.firstItemIndex + 1, fileStore.fileList.length - 1)), [fileStore.fileList.length, uiStore], ); @@ -32,7 +32,7 @@ const PreviewApp = observer(() => { useEffect(() => { setIsInitializing(true); setTimeout(() => setIsInitializing(false), 1000); - }, [fileStore.fileListLayoutLastModified]); + }, [fileStore.fileListLastRefetch]); return (
{ icon={IconSet.ARROW_LEFT} text="Previous Image" onClick={handleLeftButton} - disabled={uiStore.firstItem === 0} + disabled={uiStore.firstItemIndex === 0} /> Full size diff --git a/src/frontend/components/ExtraPropertySelector.tsx b/src/frontend/components/ExtraPropertySelector.tsx index 36e478531..efc0b4378 100644 --- a/src/frontend/components/ExtraPropertySelector.tsx +++ b/src/frontend/components/ExtraPropertySelector.tsx @@ -11,7 +11,7 @@ import { ClientExtraProperty } from '../entities/ExtraProperty'; import { Grid, GridCell, Row, RowSeparator, useGridFocus } from 'widgets/combobox/Grid'; import { observer } from 'mobx-react-lite'; import { useStore } from '../contexts/StoreContext'; -import { computed, runInAction } from 'mobx'; +import { computed } from 'mobx'; import { IconSet } from 'widgets/icons'; import { debounce } from 'common/timeout'; import { useGalleryInputKeydownHandler } from '../hooks/useHandleInputKeydown'; @@ -261,7 +261,9 @@ const CreateOptions = ({ inputText, hasMatches, resetTextBox }: CreateOptionProp const createExtraProperty = useCallback( async (type: ExtraPropertyType) => { const newExtraProperty = await extraPropertyStore.createExtraProperty(inputText, type); - runInAction(() => uiStore.fileSelection.forEach((f) => f.setExtraProperty(newExtraProperty))); + uiStore.dispatchToFileSelection(async (files) => + files.forEach((f) => f.setExtraProperty(newExtraProperty)), + ); resetTextBox(); }, [extraPropertyStore, inputText, resetTextBox, uiStore], diff --git a/src/frontend/components/FileExtraPropertiesEditor.tsx b/src/frontend/components/FileExtraPropertiesEditor.tsx index e5c13a891..13011b7f4 100644 --- a/src/frontend/components/FileExtraPropertiesEditor.tsx +++ b/src/frontend/components/FileExtraPropertiesEditor.tsx @@ -39,7 +39,7 @@ interface FileExtraPropertiesEditorProps { export const FileExtraPropertiesEditor = observer( ({ id, file, addButtonContainerID, menuPlacement }: FileExtraPropertiesEditorProps) => { - const { uiStore, 
fileStore } = useStore(); + const { uiStore, fileStore, extraPropertyStore } = useStore(); const [deletableExtraProperty, setDeletableExtraProperty] = useState(); const [removableExtraProperty, setRemovableExtraProperty] = useState<{ files: ClientFile[]; @@ -80,15 +80,12 @@ export const FileExtraPropertiesEditor = observer( // Create a copy of the selected files to ensure that callbacks // retain the original file selection if it changes between call and execution/confirmation. const files = Array.from(uiStore.fileSelection); + const onSelect = useCallback( (extraProperty: ClientExtraProperty) => { - files.forEach((f) => { - if (!f.extraProperties.has(extraProperty)) { - f.setExtraProperty(extraProperty); - } - }); + extraPropertyStore.dispatchOnFiles(files, extraProperty, undefined, false); }, - [files], + [extraPropertyStore, files], ); const onUpdate = useCallback( (extraProperty: ClientExtraProperty, value: ExtraPropertyValue) => { @@ -233,7 +230,11 @@ export const FileExtraPropertiesEditor = observer(
{uiStore.fileSelection.size === 0 && ( -
No files selected
// eslint-disable-line prettier/prettier +
+ + No files selected + +
// eslint-disable-line prettier/prettier )} { - const { uiStore } = useStore(); + const { uiStore, fileStore } = useStore(); const extraProperties = Array.from(counter.get()).sort(compareByExtraPropertyName); - const SelectionSize = uiStore.fileSelection.size; + const selectionSize = uiStore.fileSelection.size; + const filteredCount = fileStore.numFilteredFiles; + const isAllFilesSelected = uiStore.isAllFilesSelected && selectionSize !== filteredCount; const handleKeyDown = useGalleryInputKeydownHandler(); const handleRename = useCallback( (extraProperty: ClientExtraProperty) => dispatch(Factory.enableEditing(extraProperty.id)), @@ -439,7 +442,13 @@ const ExtraPropertyListEditor = observer( 1 ? `${count}/${SelectionSize}` : ''} + count={ + selectionSize > 1 + ? `${isAllFilesSelected ? '?' : count}/${ + isAllFilesSelected ? filteredCount : selectionSize + }` + : '' + } value={val} onUpdate={onUpdate} isEditingName={editorState.editableNode === extraProperty.id} diff --git a/src/frontend/components/FileTagsEditor.tsx b/src/frontend/components/FileTagsEditor.tsx index 3966fa8e2..250e3e6b5 100644 --- a/src/frontend/components/FileTagsEditor.tsx +++ b/src/frontend/components/FileTagsEditor.tsx @@ -1,4 +1,4 @@ -import { action, computed, IComputedValue, runInAction } from 'mobx'; +import { action, computed, IComputedValue } from 'mobx'; import { observer } from 'mobx-react-lite'; import React, { ForwardedRef, @@ -29,7 +29,6 @@ import { useTabTagAutocomplete, } from './TagSelector'; import { useStore } from '../contexts/StoreContext'; -import { ClientFile } from '../entities/File'; import { ClientTag } from '../entities/Tag'; import { useAction, useAutorun, useComputed } from '../hooks/mobx'; import { Menu, useContextMenu } from 'widgets/menus'; @@ -60,7 +59,7 @@ export const FileTagsEditor = observer(() => { const counter = useComputed(() => { const fileSelection = Array.from(uiStore.fileSelection); - const isTooMany = fileSelection.length > 1000; + const isTooMany = 
uiStore.isAllFilesSelected || fileSelection.length > 1000; // Count how often tags are used // Aded last bool value indicating if is an explicit tag -> should show delete button; const counter = new Map(); for (const file of fileSelection) { @@ -206,10 +205,8 @@ export const FileTagsEditor = observer(() => { } }, [clearInputOnSelect]); - const removeTag = useAction((tag: ClientTag) => { - for (const f of uiStore.fileSelection) { - f.removeTag(tag); - } + const removeTag = useAction(async (tag: ClientTag) => { + await uiStore.removeTagsFromSelectedFiles([tag]); inputRef.current?.focus(); }); @@ -345,28 +342,46 @@ const MatchingTagsList = observer( } }, [getTabMatchTagRef, matches]); + // When selecting all filles there's no way to know the true selected statos so instead + // we use a map to track the checked status. + // reset it using usingmemo each time isAllFilesSelected changes + const allSelectedToggleStatus = useMemo(() => { + if (uiStore.isAllFilesSelected) { + return new Map(); + } else { + return undefined; + } + }, [uiStore.isAllFilesSelected]); + // eslint-disable-next-line react-hooks/exhaustive-deps const toggleSelection = useCallback( - action((isSelected: boolean, tag: ClientTag) => { - const operation = isSelected - ? (f: ClientFile) => f.removeTag(tag) - : (f: ClientFile) => f.addTag(tag); - uiStore.fileSelection.forEach(operation); + action(async (isSelected: boolean, tag: ClientTag) => { + if (isSelected) { + await uiStore.removeTagsFromSelectedFiles([tag]); + allSelectedToggleStatus?.set(tag.id, false); + } else { + await uiStore.addTagsToSelectedFiles([tag]); + allSelectedToggleStatus?.set(tag.id, true); + } resetTextBox(); }), - [resetTextBox], + [resetTextBox, allSelectedToggleStatus], ); const isSelected: isTagSelected = useCallback( - // If all selected files have the tag mark it as selected, - // else if partially in selected files return undefined, else mark it as not selected. 
+ // define the selected satus: + // - if any file has it, mark it as explicit + // - if not all selected files have the tag or is selecting all filtered + // files and its allSelectedToggleStatus is false, mark it as partial (tag: ClientTag) => { const tagRecord = counter.get().get(tag); const isExplicit = tagRecord?.[1] ?? false; - const isPartial = tagRecord?.[0] !== uiStore.fileSelection.size; + const isPartial = + tagRecord?.[0] !== uiStore.fileSelection.size || + (allSelectedToggleStatus && !allSelectedToggleStatus.get(tag.id)); return [tagRecord !== undefined && !isPartial, isExplicit]; }, - [counter, uiStore], + [allSelectedToggleStatus, counter, uiStore], ); const VirtualizableTagOption = useMemo( () => @@ -439,11 +454,7 @@ const CreateOption = ({ inputText, hasMatches, resetTextBox, style, index }: Cre const createTag = useCallback(async () => { const newTag = await tagStore.create(tagStore.root, inputText); - runInAction(() => { - for (const f of uiStore.fileSelection) { - f.addTag(newTag); - } - }); + await uiStore.addTagsToSelectedFiles([newTag]); resetTextBox(); // eslint-disable-next-line react-hooks/exhaustive-deps }, [inputText, resetTextBox]); @@ -508,8 +519,9 @@ interface IncrementalTagItemsProps { } export const IncrementalTagItems = observer((props: IncrementalTagItemsProps) => { - const { uiStore } = useStore(); + const { uiStore, fileStore } = useStore(); const isMultiSelection = uiStore.fileSelection.size > 1; + const isAllFilesSelected = uiStore.isAllFilesSelected; const { tags, counter, removeTag, onContextMenu, chunkSize = 5 } = props; const [visibleTags, setVisibleTags] = useState([]); @@ -542,12 +554,16 @@ export const IncrementalTagItems = observer((props: IncrementalTagItemsProps) => // eslint-disable-next-line react-hooks/exhaustive-deps }, [tags]); + const isAllfilesText = + isAllFilesSelected && uiStore.fileSelection.size !== fileStore.numFilteredFiles; + const isMultiText = counter && isMultiSelection; + const RenderTag = useMemo( 
() => observer(({ tag }: { tag: ClientTag }) => ( onContextMenu={onContextMenu ? (e) => onContextMenu(e, tag) : undefined} /> )), - [counter, isMultiSelection, onContextMenu, removeTag], + [counter, isAllfilesText, isMultiText, onContextMenu, removeTag], ); return ( diff --git a/src/frontend/components/RemovalAlert.tsx b/src/frontend/components/RemovalAlert.tsx index 6a322f322..6a5ae0b2d 100644 --- a/src/frontend/components/RemovalAlert.tsx +++ b/src/frontend/components/RemovalAlert.tsx @@ -110,8 +110,10 @@ export const ExtraPropertyUnAssign = observer( extraProperty: ClientExtraProperty; }>, ) => { - const { extraPropertyStore } = useStore(); - const fileCount = props.object.files.length; + const { extraPropertyStore, uiStore, fileStore } = useStore(); + const fileCount = uiStore.isAllFilesSelected + ? fileStore.numFilteredFiles + : props.object.files.length; //If the file selection has less than 2 files auto confirm useEffect(() => { if (fileCount < 2) { @@ -148,13 +150,15 @@ export const ExtraPropertyOverwrite = observer( value: ExtraPropertyValue; }>, ) => { - const { extraPropertyStore } = useStore(); - const fileCount = props.object.files.length; + const { extraPropertyStore, uiStore, fileStore } = useStore(); + const fileCount = uiStore.isAllFilesSelected + ? 
fileStore.numFilteredFiles + : props.object.files.length; //If the file selection has less than 2 files auto confirm useEffect(() => { if (fileCount < 2) { props.onClose(); - extraPropertyStore.setOnFiles( + extraPropertyStore.dispatchOnFiles( props.object.files, props.object.extraProperty, props.object.value, @@ -175,7 +179,7 @@ export const ExtraPropertyOverwrite = observer( onCancel={props.onClose} onConfirm={() => { props.onClose(); - extraPropertyStore.setOnFiles( + extraPropertyStore.dispatchOnFiles( props.object.files, props.object.extraProperty, props.object.value, diff --git a/src/frontend/containers/AdvancedSearch/CriteriaBuilder.tsx b/src/frontend/containers/AdvancedSearch/CriteriaBuilder.tsx index 564ca13a9..275ddc202 100644 --- a/src/frontend/containers/AdvancedSearch/CriteriaBuilder.tsx +++ b/src/frontend/containers/AdvancedSearch/CriteriaBuilder.tsx @@ -3,9 +3,8 @@ import React, { RefObject, memo, useMemo, useState } from 'react'; import { IconButton } from 'widgets/button'; import { IconSet } from 'widgets/icons'; import { InfoButton } from 'widgets/notifications'; -import { KeySelector, OperatorSelector, ValueInput } from './Inputs'; -import { QueryDispatch } from './QueryEditor'; -import { defaultQuery, generateCriteriaId } from './data'; +import { IndexInput, KeySelector, OperatorSelector, ValueInput } from './Inputs'; +import { appendCriteriaByIndexPath, CritIndexPath, defaultQuery, QueryDispatch } from './data'; import { useStore } from 'src/frontend/contexts/StoreContext'; export interface QueryBuilderProps { @@ -14,6 +13,7 @@ export interface QueryBuilderProps { } const CriteriaBuilder = memo(function QueryBuilder({ keySelector, dispatch }: QueryBuilderProps) { + const [path, setPath] = useState([]); const [criteria, setCriteria] = useState(defaultQuery('tags')); const { extraPropertyStore } = useStore(); const epID = 'extraProperty' in criteria ? 
criteria.extraProperty : undefined; @@ -23,7 +23,7 @@ const CriteriaBuilder = memo(function QueryBuilder({ keySelector, dispatch }: Qu ); const add = () => { - dispatch((query) => new Map(query.set(generateCriteriaId(), criteria))); + dispatch((query) => appendCriteriaByIndexPath(query, criteria, path)); setCriteria(defaultQuery('tags')); keySelector.current?.focus(); }; @@ -35,6 +35,10 @@ const CriteriaBuilder = memo(function QueryBuilder({ keySelector, dispatch }: Qu A criteria is made of three components:
    +
  • + nesting (decides in which group the criteria will be added. If empty, it will + be added to the root group), +
  • key (a property of the image file),
  • @@ -57,11 +61,17 @@ const CriteriaBuilder = memo(function QueryBuilder({ keySelector, dispatch }: Qu
    + + v + 1) + .join('.'); +} + +interface IndexInputProps { + path: string; + setValue: (toIndexPat: CritIndexPath) => void; + labelledby?: string; +} + +export const IndexInput = ({ path, setValue, labelledby }: IndexInputProps) => { + const [text, setText] = useState(formatPath(path)); + + useEffect(() => { + setText(formatPath(path)); + }, [path]); + + const debounceSetValue = useMemo(() => debounce(setValue, 0), [setValue]); + const commit = useCallback( + (raw: string) => { + // allow to set incomplete paths while typing but not commit them + if (raw.endsWith('.')) { + return; + } + const parts = raw ? raw.split('.') : []; + const path: number[] = []; + for (const part of parts) { + if (part === '') { + return; + } + let index = parseInt(part, 10); + if (isNaN(index)) { + return; + } + // UI is 1-based + index = Math.max(index, 1); + path.push(index - 1); + } + + const normalizedText = path.map((v) => v + 1).join('.'); + setText(normalizedText); + debounceSetValue(path); + }, + [debounceSetValue], + ); + + const handleInput = useCallback((e: React.ChangeEvent) => { + const raw = e.target.value; + if (!/^\d*(\.\d*)*$/.test(raw)) { + return; + } + setText(raw); + //commit(raw); + }, []); + + const handleKeydown = useCallback( + (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + commit(text); + } + }, + [commit, text], + ); + + const handleBlur = () => { + commit(text); + }; + + return ( + + ); +}; + +export interface ConjuctionSelectorProps { + labelledby: string; + value: SearchConjunction; + setConjunction: (conjunction: SearchConjunction) => void; +} + +export const ConjuctionSelector = ({ + labelledby, + value, + setConjunction, +}: ConjuctionSelectorProps) => { + const handleChange = (e: React.ChangeEvent) => { + const conjunction = e.target.value as SearchConjunction; + setConjunction(conjunction); + }; + + return ( + + ); +}; type SetCriteria = (fn: (criteria: Criteria) => Criteria) => void; @@ -444,7 +572,7 @@ function getOperatorOptions(key: 
Key, extraPropertyType?: ExtraPropertyType) { return []; } -const toOperatorOption = (o: T, labels?: Record) => ( +export const toOperatorOption = (o: T, labels?: Record) => ( diff --git a/src/frontend/containers/AdvancedSearch/QueryEditor.tsx b/src/frontend/containers/AdvancedSearch/QueryEditor.tsx index 484eb58b0..4fee9bf28 100644 --- a/src/frontend/containers/AdvancedSearch/QueryEditor.tsx +++ b/src/frontend/containers/AdvancedSearch/QueryEditor.tsx @@ -1,15 +1,32 @@ -import React, { memo, useMemo } from 'react'; +import React, { memo, useCallback, useMemo } from 'react'; import { ID } from 'src/api/id'; import { IconSet } from 'widgets/icons'; import { Callout, InfoButton } from 'widgets/notifications'; import { Radio, RadioGroup } from 'widgets/radio'; -import { KeySelector, OperatorSelector, ValueInput } from './Inputs'; -import { Criteria } from './data'; +import { + ConjuctionSelector, + ConjuctionSelectorProps, + IndexInput, + KeySelector, + OperatorSelector, + ValueInput, +} from './Inputs'; +import { + Criteria, + CriteriaGroup, + CritIndexPath, + deleteNode, + getPathByIndexPath, + isCriteriaGroup, + moveNodeByIndexPath, + parseIndexPath, + Query, + QueryDispatch, + updateNode, +} from './data'; import { useStore } from 'src/frontend/contexts/StoreContext'; - -export type Query = Map; -export type QueryDispatch = React.Dispatch>; +import { SearchConjunction } from 'src/api/data-storage-search'; export interface QueryEditorProps { query: Query; @@ -24,8 +41,8 @@ export const QueryEditor = memo(function QueryEditor({ }: QueryEditorProps) { return (
    -
    - Query Editor + + Query Editor A query is a list of criterias.
    @@ -38,56 +55,200 @@ export const QueryEditor = memo(function QueryEditor({ icon button next to the inputs.

    - Additionally, there is Match option that decides whether all criterias must match - or just one. + To change the order of criteria and groups, or move them to different groups, modify their + nesting position using the index/nesting input at the start of each criterion row. +
    +
    + To change how criteria are evaluated together, change the conjunction (AND/OR) between + groups/criteria. +
    +
    + You can set a name for groups for better identification. If defined, it will replace the + labels in the search bar and saved searches hierarchy. +
    +
    -
    - {query.size === 0 ? ( + + {query.children.size === 0 ? ( Your query is currently empty. Create a criteria above to enable the{' '} {submissionButtonText} button. ) : undefined}
    - - - - - - - - - - - - {Array.from(query.entries(), ([id, query], index) => ( - - ))} - -
    KeyOperatorValueRemove
    +
    ); }); +export const GroupConjunctionEditor = (props: ConjuctionSelectorProps & { className?: string }) => { + const { labelledby, value, setConjunction, className } = props; + return ( +
    + +
    + ); +}; + +export const CriteriaSeparator = ({ text, className }: { text: string; className?: string }) => { + return
    {text}
    ; +}; + +export const EditableGroupControls = (props: EditableCriteriaGroupProps) => { + const { groupId, group, path, setQuery } = props; + const handleChangeGroupIndex = useCallback( + (toIndexPat: CritIndexPath) => { + const groupIndexPat = parseIndexPath(path); + setQuery((query) => moveNodeByIndexPath(query, groupIndexPat, toIndexPat)); + }, + [path, setQuery], + ); + const handelDelete = useCallback(() => { + setQuery((query) => { + const critPath = getPathByIndexPath(query, parseIndexPath(path)); + if (!critPath) { + return query; + } + return deleteNode(query, critPath); + }); + }, [path, setQuery]); + const handelChangeName = useCallback( + (newName: string) => { + setQuery((query) => { + const critPath = getPathByIndexPath(query, parseIndexPath(path)); + if (!critPath) { + return query; + } + return updateNode(query, critPath, (node) => (node ? { ...node, name: newName } : null)); + }); + }, + [path, setQuery], + ); + return ( +
    + + handelChangeName(e.target.value)} + /> + +
    + ); +}; + +export interface EditableCriteriaGroupProps { + groupId: string; + group: CriteriaGroup; + path: string; + setQuery: QueryDispatch; +} + +export const EditableCriteriaGroup = React.memo(function EditableCriteriaGroup( + props: EditableCriteriaGroupProps, +) { + const { group, groupId, path, setQuery } = props; + const handleChangeConjunction = useCallback( + (conjunction: SearchConjunction) => { + setQuery((query) => { + const critPath = getPathByIndexPath(query, parseIndexPath(path)); + if (!critPath) { + return query; + } + return updateNode(query, critPath, (node) => (node ? { ...node, conjunction } : null)); + }); + }, + [path, setQuery], + ); + return ( +
    + {path !== '' && ( + + )} + {Array.from(group.children.entries(), ([nodeCompId, node], nodeIndex) => ( + + {nodeIndex > 0 && ( + + )} + {/*critIndex > 1 && ( + + )*/} + {isCriteriaGroup(node) ? ( + + ) : ( + + )} + + ))} +
    + ); +}); + export interface EditableCriteriaProps { - index: number; - id: ID; + critId: ID; criteria: Criteria; + path: string; dispatch: QueryDispatch; } // The main Criteria component, finds whatever input fields for the key should be rendered -export const EditableCriteria = ({ index, id, criteria, dispatch }: EditableCriteriaProps) => { +export const EditableCriteria = React.memo(function EditableCriteria(props: EditableCriteriaProps) { + const { critId, criteria, path, dispatch } = props; const setCriteria = (fn: (criteria: Criteria) => Criteria) => { - const c = fn(criteria); - dispatch((query) => new Map(query.set(id, c))); + dispatch((query) => { + const critPath = getPathByIndexPath(query, parseIndexPath(path)); + if (!critPath) { + return query; + } + return updateNode(query, critPath, (node) => + node ? (!isCriteriaGroup(node) ? { ...node, ...fn(node) } : { ...node }) : null, + ); + }); + }; + const setIndex = (toIndexPat: CritIndexPath) => { + const critIndexPat = parseIndexPath(path); + dispatch((query) => moveNodeByIndexPath(query, critIndexPat, toIndexPat)); }; const { extraPropertyStore } = useStore(); const epID = 'extraProperty' in criteria ? criteria.extraProperty : undefined; @@ -97,57 +258,55 @@ export const EditableCriteria = ({ index, id, criteria, dispatch }: EditableCrit ); return ( - - - {index + 1} - - - - - - - - - - - - - - +
    + + + + + +
    ); -}; +}); type QueryMatchProps = { searchMatchAny: boolean; diff --git a/src/frontend/containers/AdvancedSearch/SearchItemDialog.tsx b/src/frontend/containers/AdvancedSearch/SearchItemDialog.tsx index 947ec590c..590224b87 100644 --- a/src/frontend/containers/AdvancedSearch/SearchItemDialog.tsx +++ b/src/frontend/containers/AdvancedSearch/SearchItemDialog.tsx @@ -4,13 +4,12 @@ import React, { useCallback, useRef, useState } from 'react'; import { Button } from 'widgets/button'; import { IconSet } from 'widgets/icons'; import { Dialog } from 'widgets/popovers'; -import { ID } from '../../../api/id'; import { useStore } from '../../contexts/StoreContext'; import { ClientFileSearchItem } from '../../entities/SearchItem'; import { useAutorun } from '../../hooks/mobx'; import CriteriaBuilder from './CriteriaBuilder'; -import { QueryEditor, QueryMatch } from './QueryEditor'; -import { Criteria, fromCriteria, intoCriteria } from './data'; +import { QueryEditor } from './QueryEditor'; +import { queryFromCriteria, intoGroup, Query, getemptyQuery } from './data'; interface ISearchItemDialogProps { searchItem: ClientFileSearchItem; @@ -24,21 +23,15 @@ const SearchItemDialog = observer(({ searchItem, onClose // Copy state of search item: only update the ClientSearchItem on submit. const [name, setName] = useState(searchItem.name); - const [searchMatchAny, setSearchMatchAny] = useState(searchItem.matchAny); - const toggle = useCallback(() => setSearchMatchAny((v) => !v), []); - const [query, setQuery] = useState(new Map()); + const [query, setQuery] = useState(getemptyQuery()); const keySelector = useRef(null); const nameInput = useRef(null); // Initialize form with current queries. When the form is closed, all inputs // are unmounted to save memory. 
useAutorun(() => { - const map = new Map(); - for (const criteria of searchItem.criteria) { - const [id, query] = fromCriteria(criteria); - map.set(id, query); - } + const map = queryFromCriteria(searchItem.rootGroup); // Focus and select the input text so the user can rename immediately after creating a new search item requestAnimationFrame(() => requestAnimationFrame(() => { @@ -50,12 +43,11 @@ const SearchItemDialog = observer(({ searchItem, onClose }); const handleSubmit = useCallback(async () => { + searchItem.setRootGroup(intoGroup(query, tagStore)); searchItem.setName(name); - searchItem.setMatchAny(searchMatchAny); - searchItem.setCriteria(Array.from(query.values(), (vals) => intoCriteria(vals, tagStore))); searchStore.save(searchItem); onClose(); - }, [name, onClose, query, searchItem, searchMatchAny, searchStore, tagStore]); + }, [name, onClose, query, searchItem, searchStore, tagStore]); return ( (({ searchItem, onClose - -
    diff --git a/src/frontend/containers/AdvancedSearch/data.ts b/src/frontend/containers/AdvancedSearch/data.ts index 26ef348cf..52e4e7d59 100644 --- a/src/frontend/containers/AdvancedSearch/data.ts +++ b/src/frontend/containers/AdvancedSearch/data.ts @@ -2,10 +2,11 @@ import { generateWidgetId } from 'widgets/utility'; import { ExtraPropertyOperatorType, NumberOperatorType, + SearchConjunction, StringOperatorType, } from '../../../api/data-storage-search'; import { FileDTO, IMG_EXTENSIONS } from '../../../api/file'; -import { ID } from '../../../api/id'; +import { generateId, ID } from '../../../api/id'; import { BinaryOperatorType, OperatorType, TagOperatorType } from '../../../api/search-criteria'; import { ClientDateSearchCriteria, @@ -17,11 +18,35 @@ import { } from '../../entities/SearchCriteria'; import TagStore from '../../stores/TagStore'; import { ExtraPropertyType, ExtraPropertyValue } from 'src/api/extraProperty'; +import { ClientSearchGroup, isClientSearchGroup } from 'src/frontend/entities/SearchItem'; +import { clamp } from 'common/core'; + +export type Query = CriteriaGroup; +export type QueryDispatch = React.Dispatch>; export function generateCriteriaId() { return generateWidgetId('__criteria'); } +export function generateGroupId() { + return generateWidgetId('__group'); +} + +export type CriteriaNode = Criteria | CriteriaGroup; + +export type GroupMap = Map; + +export type CriteriaGroup = { + id: ID; + name: string; + conjunction: SearchConjunction; + children: GroupMap; +}; + +export function isCriteriaGroup(obj: any): obj is CriteriaGroup { + return obj && typeof obj === 'object' && 'children' in obj; +} + export type Criteria = | Field<'name' | 'absolutePath', StringOperatorType, string> | Field<'tags', TagOperatorType, TagValue> @@ -34,6 +59,7 @@ export type Criteria = | ExtraPropertyField; interface Field { + id: ID; key: K; operator: O; value: V; @@ -64,19 +90,30 @@ export type Value = string | number | Date | TagValue | ExtraPropertyValue; 
export type TagValue = ID | undefined; export type ExtraPropertyID = ID | undefined; +export function getemptyQuery(): Query { + return { + id: generateGroupId(), + name: '', + conjunction: 'and', + children: new Map(), + }; +} + export function defaultQuery(key: Key, extraPropertyType?: ExtraPropertyType): Criteria { if (key === 'name' || key === 'absolutePath') { - return { key, operator: 'contains', value: '' }; + return { id: generateId(), key, operator: 'contains', value: '' }; } else if (key === 'tags') { - return { key, operator: 'contains', value: undefined }; + return { id: generateId(), key, operator: 'contains', value: undefined }; } else if (key === 'extension') { return { + id: generateId(), key, operator: 'equals', value: IMG_EXTENSIONS[0], }; } else if (key === 'dateAdded') { return { + id: generateId(), key, operator: 'equals', value: new Date(), @@ -85,6 +122,7 @@ export function defaultQuery(key: Key, extraPropertyType?: ExtraPropertyType): C if (extraPropertyType !== undefined) { if (extraPropertyType === ExtraPropertyType.number) { return { + id: generateId(), extraProperty: undefined, key: 'extraProperties', value: 0, @@ -92,6 +130,7 @@ export function defaultQuery(key: Key, extraPropertyType?: ExtraPropertyType): C }; } else if (extraPropertyType === ExtraPropertyType.text) { return { + id: generateId(), extraProperty: undefined, key: 'extraProperties', value: '', @@ -100,13 +139,19 @@ export function defaultQuery(key: Key, extraPropertyType?: ExtraPropertyType): C } } return { + id: generateId(), extraProperty: undefined, key: 'extraProperties', value: 0, operator: 'equals', }; } else { - return { key: key, operator: 'greaterThanOrEquals', value: 0 }; + return { + id: generateId(), + key: key, + operator: 'greaterThanOrEquals', + value: 0, + }; } } @@ -152,25 +197,300 @@ export function fromCriteria(criteria: ClientFileSearchCriteria): [ID, Criteria] return [generateCriteriaId(), query]; } +/** Converts a ClientSearchGroup tree into a Query 
tree */ +export function queryFromCriteria(criteria: ClientSearchGroup): Query { + return { + id: generateId(), + name: criteria.name, + conjunction: criteria.conjunction, + children: new Map( + criteria.children.map<[ID, Criteria | CriteriaGroup]>((child) => { + if (isClientSearchGroup(child)) { + return [generateGroupId(), queryFromCriteria(child) as CriteriaGroup]; + } else { + return fromCriteria(child); + } + }), + ), + }; +} + +//prettier-ignore export function intoCriteria(query: Criteria, tagStore: TagStore): ClientFileSearchCriteria { if (query.key === 'name' || query.key === 'absolutePath' || query.key === 'extension') { - return new ClientStringSearchCriteria(query.key, query.value, query.operator); + return new ClientStringSearchCriteria(query.id, query.key, query.value, query.operator); } else if (query.key === 'dateAdded') { - return new ClientDateSearchCriteria(query.key, query.value, query.operator); + return new ClientDateSearchCriteria(query.id, query.key, query.value, query.operator); } else if (query.key === 'size') { - return new ClientNumberSearchCriteria(query.key, query.value * BYTES_IN_MB, query.operator); + return new ClientNumberSearchCriteria(query.id, query.key, query.value * BYTES_IN_MB, query.operator); } else if (query.key === 'width' || query.key === 'height') { - return new ClientNumberSearchCriteria(query.key, query.value, query.operator); + return new ClientNumberSearchCriteria(query.id, query.key, query.value, query.operator); } else if (query.key === 'tags') { const tag = query.value !== undefined ? tagStore.get(query.value) : undefined; - return new ClientTagSearchCriteria('tags', tag?.id, query.operator); + return new ClientTagSearchCriteria(query.id, 'tags', tag?.id, query.operator); } else if (query.key === 'extraProperties') { return new ClientExtraPropertySearchCriteria( + query.id, query.key, [query.extraProperty ?? 
'', query.value], query.operator, ); } else { - return new ClientTagSearchCriteria('tags'); + return new ClientTagSearchCriteria(query.id, 'tags'); + } +} + +export function intoGroup(query: Query, tagStore: TagStore): ClientSearchGroup { + const nodeId = generateId(); + const group = new ClientSearchGroup(nodeId, query.name, query.conjunction, []); + + for (const crit of query.children.values()) { + if (isCriteriaGroup(crit)) { + group.insertNode(nodeId, intoGroup(crit, tagStore)); + } else { + group.insertNode(nodeId, intoCriteria(crit, tagStore)); + } + } + return group; +} + +export type CritPath = string[]; +export type CritIndexPath = number[]; + +export function getPathByIndexPath(query: Query, indexPath: CritIndexPath): CritPath | null { + let children = Array.from(query.children.entries()); + const path: CritPath = []; + for (const index of indexPath) { + if (index < 0 || index >= children.length) { + // if index out of range, add a new group id and return, later when used into updateNode it will get undefined node + // and will decide what to do with if in each updateNode fn argument + path.push(generateGroupId()); + return path; + } + const [groupKey, node] = children[index]; + if (isCriteriaGroup(node)) { + path.push(groupKey); + children = Array.from(node.children.entries()); + } else { + // if is not group, add the id, stop the loop and return + path.push(groupKey); + return path; + } + } + return path; +} + +export function cloneGroup(group: CriteriaGroup): CriteriaGroup { + return { + ...group, + children: new Map(group.children), + }; +} + +export function getNode( + query: Query, + path: CritPath, + limit: number = path.length, +): CriteriaGroup | Criteria | null { + if (path.length === 0) { + return query; + } + const len = path.length; + const normalizedLimit = Math.min(len, Math.max(0, limit < 0 ? 
len + limit : limit)); + + let current: CriteriaGroup | Criteria | null = query; + for (let i = 0; i < normalizedLimit; i++) { + if (!isCriteriaGroup(current)) { + return null; + } + const next = current.children.get(path[i]); + current = next ?? null; + } + + return current; +} + +/** it search for a node given a path, rebuilding the path's nodes along + * the way and update the target node with the provided updater function */ +export function updateNode( + query: Query, + path: CritPath, + fn: (node: CriteriaNode | undefined) => CriteriaNode | null = (node) => + node ? { ...node } : null, +): Query { + let children = new Map(query.children); + query.children = children; + for (const id of path) { + let node = children.get(id); + if (id === path.at(-1)) { + const updated = fn(node); + if (updated === null) { + // delete node + children.delete(id); + } else { + children.set(id, updated); + } + } else if (node && isCriteriaGroup(node)) { + node = { ...node, children: new Map(node.children) }; + children.set(id, node); + children = node.children; + } + } + if (path.length === 0) { + const updated = fn(query); + return updated && isCriteriaGroup(updated) ? 
{ ...updated } : { ...query }; + } + return { ...query }; +} + +export function deleteNode( + query: Query, + path: CritPath, + deletedCallback?: (deletedNode: CriteriaNode | undefined) => void, +): Query { + const parentPath = path.slice(0, -1); + const targetId = path.at(-1); + return updateNode(query, parentPath, (parent) => { + if (!parent) { + return null; + } + if (targetId && isCriteriaGroup(parent)) { + const newChildren = new Map(parent.children); + deletedCallback?.(newChildren.get(targetId)); + newChildren.delete(targetId); + if (newChildren.size === 0 && parentPath.length > 0) { + // prevent empty groups except for root + return null; + } + return { ...parent, children: newChildren }; + } + return { ...parent }; + }); +} + +export function insertNode( + query: Query, + path: CritPath, + node: CriteriaNode, + nodeId: string, + at?: number, + parentIndex?: number, +): Query { + const toParentId = path.at(-1); + let generatedGroupToInsert: CriteriaGroup | undefined; + query = updateNode(query, path, (parent) => { + if (isCriteriaGroup(parent)) { + // if parent is group insert into it + const newChildren = new Map(parent.children); + const entries = Array.from(newChildren.entries()); + const insertAt = clamp(at ?? entries.length, 0, entries.length); + entries.splice(insertAt, 0, [nodeId as ID, node]); + return { ...parent, children: new Map(entries) }; + } else { + // if parent is crieria insert both into new group + // if parent is null insert into new group + const entries: [string, CriteriaNode][] = parent + ? [[toParentId ?? generateCriteriaId(), parent]] + : []; + entries.splice(at ?? 
entries.length, 0, [nodeId as ID, node]); + const newGroup: CriteriaGroup = { + id: generateId(), + name: '', + conjunction: 'and', + children: new Map(entries), + }; + generatedGroupToInsert = newGroup; + // return null to delete the previous criteria node + return null; + } + }); + if (!generatedGroupToInsert) { + return query; + } + const newGroupToInsert: CriteriaGroup = { ...generatedGroupToInsert }; + // if new group was created, insert it into the query + query = insertNode(query, path.slice(0, -1), newGroupToInsert, generateGroupId(), parentIndex); + /* + query = updateNode(query, path.slice(0, -1), (parent) => { + if (!parent) { + return null; + } + if (isCriteriaGroup(parent)) { + const newChildren = new Map(parent.children); + newChildren.set(generateGroupId(), newGroupToInsert); + return { ...parent, children: newChildren }; + } + return parent; + });*/ + return query; +} + +export function moveNode( + query: Query, + fromPath: CritPath, + toPath: CritPath, + at?: number, + toParentIndex?: number, // used to preserve index when moving into a criteria node +): Query { + // get and remove from 'from parent' node: + const nodeId = fromPath.at(-1); + let nodeToMove: CriteriaNode | undefined; + query = deleteNode(query, fromPath, (deletedNode) => { + nodeToMove = deletedNode; + }); + if (!nodeToMove) { + return query; + } + const newNodeToMove: CriteriaNode = { ...nodeToMove }; + // insert into 'to parent' node: + query = insertNode( + query, + toPath, + newNodeToMove, + nodeId ?? (isCriteriaGroup(newNodeToMove) ? 
generateGroupId() : generateCriteriaId()), + at, + toParentIndex, + ); + return query; +} + +export function moveNodeByIndexPath( + query: Query, + fromIndexPath: CritIndexPath, + toIndexPath: CritIndexPath, +): Query { + if (fromIndexPath.length === 0 || toIndexPath.length === 0) { + return query; + } + const fromPath = getPathByIndexPath(query, fromIndexPath); + // ignore last index since it is the 'at' argument + const toPath = getPathByIndexPath(query, toIndexPath.slice(0, -1)); + if (!fromPath || !toPath) { + return query; + } + const at = toIndexPath[toIndexPath.length - 1]; + const parentIndex = toIndexPath[toIndexPath.length - 2]; + return moveNode(query, fromPath, toPath, at, parentIndex); +} + +export function appendCriteriaByIndexPath( + query: Query, + criteria: Criteria, + toIndexPath?: CritIndexPath, +) { + const toPath = toIndexPath ? getPathByIndexPath(query, toIndexPath) : undefined; + const critCompId = generateCriteriaId(); + if (!toPath || !toIndexPath) { + return { ...query, children: new Map(query.children.set(critCompId, criteria)) }; + } else { + const parentIndex = toIndexPath[toIndexPath.length - 1]; + return insertNode(query, toPath, criteria, critCompId, undefined, parentIndex); + } +} + +export function parseIndexPath(pathStr: string): CritIndexPath { + if (pathStr === '') { + return []; } + return pathStr.split('.').map((i) => parseInt(i, 10)); } diff --git a/src/frontend/containers/AdvancedSearch/index.tsx b/src/frontend/containers/AdvancedSearch/index.tsx index 16b1c90b3..161492ac9 100644 --- a/src/frontend/containers/AdvancedSearch/index.tsx +++ b/src/frontend/containers/AdvancedSearch/index.tsx @@ -1,41 +1,35 @@ import { observer } from 'mobx-react-lite'; import React, { useCallback, useRef, useState } from 'react'; -import { ID } from 'src/api/id'; import { useStore } from 'src/frontend/contexts/StoreContext'; import { useAutorun } from 'src/frontend/hooks/mobx'; import { Button, IconSet } from 'widgets'; import { Dialog } from 
'widgets/popovers'; import CriteriaBuilder from './CriteriaBuilder'; -import { Criteria, fromCriteria, intoCriteria } from './data'; -import { QueryEditor, QueryMatch } from './QueryEditor'; +import { queryFromCriteria, intoGroup, Query, getemptyQuery } from './data'; +import { QueryEditor } from './QueryEditor'; export const AdvancedSearchDialog = observer(() => { const { uiStore, tagStore } = useStore(); - const [query, setQuery] = useState(new Map()); + const [query, setQuery] = useState(getemptyQuery()); const keySelector = useRef(null); - // Initialize form with current queries. When the form is closed, all inputs // are unmounted to save memory. useAutorun(() => { - const map = new Map(); + let newQuery: Query = getemptyQuery(); if (uiStore.isAdvancedSearchOpen) { - for (const criteria of uiStore.searchCriteriaList) { - const [id, query] = fromCriteria(criteria); - map.set(id, query); - } + newQuery = queryFromCriteria(uiStore.searchRootGroup); requestAnimationFrame(() => requestAnimationFrame(() => keySelector.current?.focus())); } - setQuery(map); + setQuery(newQuery); }); const search = useCallback(() => { - uiStore.replaceSearchCriterias( - Array.from(query.values(), (vals) => intoCriteria(vals, tagStore)), - ); + //uiStore.replaceSearchRootConjuction(rootConjunction); + uiStore.replaceSearchCriterias(intoGroup(query, tagStore)); uiStore.closeAdvancedSearch(); }, [query, tagStore, uiStore]); - const reset = useRef(() => setQuery(new Map())).current; + const reset = useRef(() => setQuery(getemptyQuery())).current; return ( { - +
    diff --git a/src/frontend/containers/AppToolbar/PrimaryCommands.tsx b/src/frontend/containers/AppToolbar/PrimaryCommands.tsx index f873a8dbe..7a8bbf9c8 100644 --- a/src/frontend/containers/AppToolbar/PrimaryCommands.tsx +++ b/src/frontend/containers/AppToolbar/PrimaryCommands.tsx @@ -86,13 +86,14 @@ export const SlideModeCommand = observer(() => { const FileSelectionCommand = observer(() => { const { uiStore, fileStore } = useStore(); - const selectionCount = uiStore.fileSelection.size; - const fileCount = fileStore.numLoadedFiles; + const fileLoadedCount = fileStore.numLoadedFiles; + const fileCount = fileStore.showsMissingContent ? fileLoadedCount : fileStore.numFilteredFiles; + const allFilesSelected = uiStore.isAllFilesSelected; + const selectionCount = allFilesSelected ? fileCount : uiStore.fileSelection.size; - const allFilesSelected = fileCount > 0 && selectionCount === fileCount; // If everything is selected, deselect all. Else, select all const handleToggleSelect = () => { - selectionCount === fileCount ? uiStore.clearFileSelection() : uiStore.selectAllFiles(); + allFilesSelected ? uiStore.clearFileSelection() : uiStore.selectAllFiles(); }; return ( @@ -102,7 +103,7 @@ const FileSelectionCommand = observer(() => { onClick={handleToggleSelect} pressed={allFilesSelected} text={fileCount == 0 ? 
'0' : selectionCount + ' / ' + fileCount} - tooltip="Selects or deselects all images" + tooltip={`Selects or deselects all images, (${fileLoadedCount} loaded files)`} disabled={fileCount === 0} /> ); diff --git a/src/frontend/containers/AppToolbar/Searchbar.tsx b/src/frontend/containers/AppToolbar/Searchbar.tsx index a9c5842a7..a84b87045 100644 --- a/src/frontend/containers/AppToolbar/Searchbar.tsx +++ b/src/frontend/containers/AppToolbar/Searchbar.tsx @@ -6,7 +6,7 @@ const SEARCHBAR_ID = 'toolbar-searchbar'; const Searchbar = observer(() => { const { uiStore } = useStore(); - const searchCriteriaList = uiStore.searchCriteriaList; + const searchCriteriaList = uiStore.searchRootGroup.children; // Only show quick search bar when all criteria are tags, // otherwise show a search bar that opens to the advanced search form @@ -16,6 +16,7 @@ const Searchbar = observer(() => { searchCriteriaList.length === 0 || searchCriteriaList.every( (crit) => + crit instanceof ClientTagSearchCriteria && crit.key === 'tags' && crit.operator === 'containsRecursively' && (crit as ClientTagSearchCriteria).value, @@ -67,7 +68,9 @@ const QuickSearchList = observer(() => { }); const handleSelect = useAction((item: Readonly) => - uiStore.addSearchCriteria(new ClientTagSearchCriteria('tags', item.id, 'containsRecursively')), + uiStore.addSearchCriteria( + new ClientTagSearchCriteria(undefined, 'tags', item.id, 'containsRecursively'), + ), ); const handleDeselect = useAction((item: Readonly) => { @@ -96,7 +99,9 @@ const QuickSearchList = observer(() => { value={`Search in file paths for "${query}"`} onClick={() => { resetTextBox(); - uiStore.addSearchCriteria(new ClientStringSearchCriteria('absolutePath', query)); + uiStore.addSearchCriteria( + new ClientStringSearchCriteria(undefined, 'absolutePath', query), + ); }} />, { onSelect={handleSelect} onDeselect={handleDeselect} onTagClick={uiStore.toggleAdvancedSearch} - onClear={uiStore.clearSearchCriteriaList} + 
onClear={uiStore.clearSearchCriteriaTree} ignoreOnBlur={ingnoreOnBlur} renderCreateOption={renderCreateOption} extraIconButtons={} @@ -186,6 +191,7 @@ const QuickExtraPropertySearchOption = (props: QuickEPOption) => { uiStore.addSearchCriteria( new ClientExtraPropertySearchCriteria( + undefined, 'extraProperties', [eventExtraProperty.id, value], operator, @@ -258,41 +264,35 @@ const CriteriaList = observer(() => {
    - {uiStore.searchCriteriaList.map((c, i) => ( + {uiStore.searchRootGroup.getLabels(CustomKeyDict, rootStore).map((label) => ( uiStore.removeSearchCriteriaByIndex(i)} + key={`${label.id}`} + text={label.label} + onRemove={() => uiStore.removeSearchCriteriaById(label.id)} // Italicize system tags (for now only "Untagged images") - className={ - c instanceof ClientTagSearchCriteria && c.isSystemTag() ? 'italic' : undefined - } + className={label.isSystemTag ? 'italic' : undefined} /> ))}
    - {uiStore.searchCriteriaList.length > 1 ? ( - { - uiStore.toggleSearchMatchAny(); - fileStore.refetch(); - e.stopPropagation(); - e.preventDefault(); - // TODO: search input element keeps focus after click??? - }} - className="btn-icon-large" - /> - ) : ( - <> - )} + { + uiStore.toggleSearchMatchAny(); + fileStore.refetch(); + e.stopPropagation(); + e.preventDefault(); + // TODO: search input element keeps focus after click??? + }} + className="btn-icon-large" + /> { - uiStore.clearSearchCriteriaList(); + uiStore.clearSearchCriteriaTree(); e.stopPropagation(); e.preventDefault(); }} diff --git a/src/frontend/containers/ContentView/Commands.tsx b/src/frontend/containers/ContentView/Commands.tsx index 0df063ee5..213b7131c 100644 --- a/src/frontend/containers/ContentView/Commands.tsx +++ b/src/frontend/containers/ContentView/Commands.tsx @@ -278,12 +278,11 @@ export function useCommandHandler( const ctx = uiStore.getTagContextItems(dndData.source.id); // Tag all selected files - unless the file that is being tagged is not selected - const filesToTag = uiStore.fileSelection.has(dropFile) ? 
uiStore.fileSelection : [dropFile]; - - for (const tag of ctx) { - for (const file of filesToTag) { - file.addTag(tag); - } + const tagToSelection = uiStore.fileSelection.has(dropFile); + if (tagToSelection) { + uiStore.addTagsToSelectedFiles(ctx); + } else { + dropFile.addTags(ctx); } } }); diff --git a/src/frontend/containers/ContentView/ContentProgressBar.tsx b/src/frontend/containers/ContentView/ContentProgressBar.tsx index 3b4246e45..91ab1dd6d 100644 --- a/src/frontend/containers/ContentView/ContentProgressBar.tsx +++ b/src/frontend/containers/ContentView/ContentProgressBar.tsx @@ -1,4 +1,4 @@ -import React, { useRef } from 'react'; +import React from 'react'; import { observer } from 'mobx-react-lite'; import ProgressBar from 'src/frontend/components/ProgressBar'; import { useStore } from 'src/frontend/contexts/StoreContext'; @@ -36,7 +36,7 @@ const ContentProgressBar = observer(() => { } else { return null; } - let simulatedTotal = total / 2; + let simulatedTotal = 0; const AverageTime = fileStore.activeAverageFetchTime * 1.05; const current = numLoadedFiles; diff --git a/src/frontend/containers/ContentView/LayoutSwitcher.tsx b/src/frontend/containers/ContentView/LayoutSwitcher.tsx index 44a37edda..8ee2ec581 100644 --- a/src/frontend/containers/ContentView/LayoutSwitcher.tsx +++ b/src/frontend/containers/ContentView/LayoutSwitcher.tsx @@ -69,16 +69,17 @@ const Layout = ({ contentRect }: LayoutProps) => { ); // Reset selection range when number of items changes: Else you can get phantom files when continuing your selection + // This is no longer needed ? 
useEffect(() => { initialSelectionIndex.current = undefined; lastSelectionIndex.current = undefined; - }, [fileStore.fileList.length]); + }, []); //fileStore.fileList.length]); useEffect(() => { const onKeyDown = action((e: KeyboardEvent) => { let index = lastSelectionIndex.current; if (index === undefined) { - index = clamp(uiStore.firstItem, 0, fileStore.fileList.length - 1); + index = clamp(uiStore.firstItemIndex, 0, fileStore.fileList.length - 1); } if (uiStore.isSlideMode) { return; diff --git a/src/frontend/containers/ContentView/ListGallery.tsx b/src/frontend/containers/ContentView/ListGallery.tsx index b1bb63403..30190fe8a 100644 --- a/src/frontend/containers/ContentView/ListGallery.tsx +++ b/src/frontend/containers/ContentView/ListGallery.tsx @@ -69,7 +69,7 @@ const ListGallery = observer(({ contentRect, select, lastSelectionIndex }: Galle }, [index, fileSelectionSize]); // While in slide mode, scroll to last shown image if not in view, for transition back to gallery - const { isSlideMode, firstItem } = uiStore; + const { isSlideMode, firstItemIndex: firstItem } = uiStore; useLayoutEffect(() => { if (isSlideMode) { ref.current?.scrollToItem(firstItem, 'smart'); @@ -112,7 +112,7 @@ const ListGallery = observer(({ contentRect, select, lastSelectionIndex }: Galle itemKey={getItemKey} children={Row} onScroll={handleScroll} - initialScrollOffset={uiStore.firstItem * cellSize} + initialScrollOffset={uiStore.firstItemIndex * cellSize} ref={ref} outerElementType={Table} innerElementType={Body} diff --git a/src/frontend/containers/ContentView/Masonry/MasonryRenderer.tsx b/src/frontend/containers/ContentView/Masonry/MasonryRenderer.tsx index 1c5ef336f..30c7e9816 100644 --- a/src/frontend/containers/ContentView/Masonry/MasonryRenderer.tsx +++ b/src/frontend/containers/ContentView/Masonry/MasonryRenderer.tsx @@ -1,4 +1,4 @@ -import { action } from 'mobx'; +import { action, runInAction } from 'mobx'; import { observer } from 'mobx-react-lite'; import React, { 
useEffect, useRef, useState } from 'react'; import { useStore } from 'src/frontend/contexts/StoreContext'; @@ -72,10 +72,28 @@ const MasonryRenderer = observer(({ contentRect, select, lastSelectionIndex }: G } } } else if (e.key === 'Home') { + (async () => { + await fileStore.jumpToFirst(); + setTimeout(() => { + uiStore.setFirstItem(0); + setLayoutTimestamp(new Date()); + }, 200); + })(); + return; + } else if (e.key === 'End') { + (async () => { + await fileStore.jumpToLast(); + setTimeout(() => { + uiStore.setFirstItem(runInAction(() => fileStore.fileDimensions.length - 1)); + setLayoutTimestamp(new Date()); + }, 200); + })(); + return; + } else if (e.key === 'PageUp') { uiStore.setFirstItem(0); setLayoutTimestamp(new Date()); // Force scroll with a new layout timestamp return; - } else if (e.key === 'End') { + } else if (e.key === 'PageDown') { uiStore.setFirstItem(numFiles - 1); setLayoutTimestamp(new Date()); // Force scroll with a new layout timestamp return; @@ -148,7 +166,7 @@ const MasonryRenderer = observer(({ contentRect, select, lastSelectionIndex }: G })(); } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [numImages, fileStore.fileListLayoutLastModified]); + }, [numImages, fileStore.fileListLastRefetch]); const handleResize = useRef( (() => { diff --git a/src/frontend/containers/ContentView/Masonry/VirtualizedRenderer.tsx b/src/frontend/containers/ContentView/Masonry/VirtualizedRenderer.tsx index b89698893..25297d94f 100644 --- a/src/frontend/containers/ContentView/Masonry/VirtualizedRenderer.tsx +++ b/src/frontend/containers/ContentView/Masonry/VirtualizedRenderer.tsx @@ -11,6 +11,8 @@ import { MasonryCell } from '../GalleryItem'; import { Layouter, findViewportEdge } from './layout-helpers'; import { isFileExtensionVideo } from 'common/fs'; +const SCROLL_PAGE_THRESHOLD = 200; + interface IRendererProps { containerHeight: number; containerWidth: number; @@ -48,7 +50,7 @@ const VirtualizedRenderer = observer( const [startRenderIndex, 
setStartRenderIndex] = useState(0); const [endRenderIndex, setEndRenderIndex] = useState(0); const numImages = fileStore.fileDimensions.length; - const { isSlideMode, firstItem } = uiStore; + const { isSlideMode, firstItemIndex: firstItem } = uiStore; const determineRenderRegion = useCallback( (numImages: number, overdraw: number, setFirstItem = true) => { @@ -92,17 +94,6 @@ const VirtualizedRenderer = observer( // eslint-disable-next-line react-hooks/exhaustive-deps }, [numImages, containerWidth, containerHeight]); - const handleScroll = useCallback( - () => - throttledRedetermine.current( - numImages, - overscan || 0, - // dont't scroll set first item while in slide mode due to scrolling, since it's controlled over there - !isSlideMode, - ), - [numImages, overscan, isSlideMode], - ); - const scrollToIndex = useCallback( (index: number, block: 'nearest' | 'start' | 'end' | 'center' = 'nearest') => { if (!scrollAnchor.current) { @@ -126,6 +117,40 @@ const VirtualizedRenderer = observer( [layout, padding], ); + const loadingPage = useRef<'after' | 'before' | null>(null); + + const checkPagination = useCallback(() => { + const elem = wrapperRef.current; + if (!elem || hasRefreshed.current) { + return; + } + const { scrollTop, scrollHeight, clientHeight } = elem; + const needsLoadNext = scrollTop + clientHeight >= scrollHeight - SCROLL_PAGE_THRESHOLD; + const needsLoadPrev = scrollTop <= SCROLL_PAGE_THRESHOLD; + if (needsLoadNext && loadingPage.current !== 'after') { + //console.count('NEEDS NEXT PAGE'); + loadingPage.current = 'after'; + fileStore.fetchAfter(); + } + + if (needsLoadPrev && loadingPage.current !== 'before') { + //console.count('NEEDS PREV PAGE'); + loadingPage.current = 'before'; + fileStore.fetchBefore(); + } + }, [fileStore]); + + const handleScroll = useCallback(() => { + throttledRedetermine.current( + numImages, + overscan || 0, + // dont't scroll set first item while in slide mode due to scrolling, since it's controlled over there + // Also 
don't set it when loading pages + !isSlideMode && loadingPage.current === null, + ); + checkPagination(); + }, [numImages, overscan, isSlideMode, checkPagination]); + // The index currently selected image, or the "last selected" image when a range is selected, const lastSelIndex = lastSelectionIndex.current ? Math.min(lastSelectionIndex.current, numImages - 1) @@ -139,15 +164,23 @@ const VirtualizedRenderer = observer( } }, [isRefreshing]); - // When layout updates, scroll to firstImage (e.g. resize or thumbnail size changed) + // When layout updates, scroll to firstImage (e.g. pagination, resize or thumbnail size changed) + // and reset loadingPage state. // This also sets the initial scroll position on initial render, for when coming from another view mode useLayoutEffect(() => { + // If it has loaded the after pag, just reset loadingPage and return + if (loadingPage.current === 'after') { + loadingPage.current = null; + return; + } // If the gallery has been refreshed use nearest block behavior, otherwise keep the first item in view aligned at the start. - const block = hasRefreshed.current ? 'nearest' : 'start'; + const block = hasRefreshed.current || loadingPage.current ? 'nearest' : 'start'; hasRefreshed.current = false; - runInAction(() => scrollToIndex(uiStore.firstItem, block)); + runInAction(() => scrollToIndex(uiStore.firstItemIndex, block)); // Call throttledRedetermine in case no scroll has been applied. 
- throttledRedetermine.current(numImages, overscan, false); + determineRenderRegion(numImages, overscan, false); + //throttledRedetermine.current(numImages, overscan, false); + loadingPage.current = null; // eslint-disable-next-line react-hooks/exhaustive-deps }, [layoutUpdateDate, isMounted]); @@ -156,7 +189,8 @@ const VirtualizedRenderer = observer( useLayoutEffect(() => { // But don't scroll when there are no files selected: // else you will scroll when the user deselects everything - if (lastSelIndex !== undefined && fileSelectionSize > 0) { + // also don't scroll when the user select all files + if (lastSelIndex !== undefined && fileSelectionSize > 0 && fileSelectionSize < numImages) { scrollToIndex(lastSelIndex); } // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/src/frontend/containers/ContentView/Placeholder.tsx b/src/frontend/containers/ContentView/Placeholder.tsx index 41bc990ea..221f1fdc8 100644 --- a/src/frontend/containers/ContentView/Placeholder.tsx +++ b/src/frontend/containers/ContentView/Placeholder.tsx @@ -45,7 +45,7 @@ const PreviewWindowPlaceholder = observer(() => { } }, 1000); // eslint-disable-next-line react-hooks/exhaustive-deps - }, [fileStore.fileListLayoutLastModified]); + }, [fileStore.fileListLastRefetch]); if (isLoading) { return ( diff --git a/src/frontend/containers/ContentView/SlideMode/index.tsx b/src/frontend/containers/ContentView/SlideMode/index.tsx index 593257535..44daf6f26 100644 --- a/src/frontend/containers/ContentView/SlideMode/index.tsx +++ b/src/frontend/containers/ContentView/SlideMode/index.tsx @@ -47,8 +47,8 @@ const SlideView = observer(({ width, height }: SlideViewProps) => { const { uiStore, fileStore, imageLoader } = useStore(); const file = uiStore.firstFileInView; const eventManager = useMemo(() => (file ? 
new CommandDispatcher(file) : undefined), [file]); - const isFirst = useComputed(() => uiStore.firstItem === 0); - const isLast = useComputed(() => uiStore.firstItem === fileStore.fileList.length - 1); + const isFirst = useComputed(() => uiStore.firstItemIndex === 0); + const isLast = useComputed(() => uiStore.firstItemIndex === fileStore.fileList.length - 1); // Go to the first selected image on load useEffect(() => { @@ -82,14 +82,14 @@ const SlideView = observer(({ width, height }: SlideViewProps) => { }, [uiStore]); const decrImgIndex = useAction(() => { - const index = Math.max(0, uiStore.firstItem - 1); + const index = Math.max(0, uiStore.firstItemIndex - 1); uiStore.setFirstItem(index); // Select only this file: TagEditor overlay shows tags on selected images uiStore.selectFile(fileStore.fileList[index], true); }); const incrImgIndex = useAction(() => { - const index = Math.min(uiStore.firstItem + 1, fileStore.fileList.length - 1); + const index = Math.min(uiStore.firstItemIndex + 1, fileStore.fileList.length - 1); uiStore.setFirstItem(index); uiStore.selectFile(fileStore.fileList[index], true); }); @@ -121,8 +121,8 @@ const SlideView = observer(({ width, height }: SlideViewProps) => { useEffect(() => { let isEffectRunning = true; const dispose = autorun(() => { - if (!isLast.get() && uiStore.firstItem + 1 < fileStore.fileList.length) { - const nextFile = fileStore.fileList[uiStore.firstItem + 1]; + if (!isLast.get() && uiStore.firstItemIndex + 1 < fileStore.fileList.length) { + const nextFile = fileStore.fileList[uiStore.firstItemIndex + 1]; let nextImg: any; if (nextFile && isFileExtensionVideo(nextFile.extension)) { nextImg = document.createElement('video'); @@ -136,7 +136,7 @@ const SlideView = observer(({ width, height }: SlideViewProps) => { } } if (!isFirst.get() && fileStore.fileList.length > 0) { - const prevFile = fileStore.fileList[uiStore.firstItem - 1]; + const prevFile = fileStore.fileList[uiStore.firstItemIndex - 1]; let prevImg: any; if 
(prevFile && isFileExtensionVideo(prevFile.extension)) { prevImg = document.createElement('video'); diff --git a/src/frontend/containers/ContentView/menu-items.tsx b/src/frontend/containers/ContentView/menu-items.tsx index 76115c2fb..8246d1a03 100644 --- a/src/frontend/containers/ContentView/menu-items.tsx +++ b/src/frontend/containers/ContentView/menu-items.tsx @@ -32,7 +32,11 @@ export const MissingFileMenuItems = observer(() => { disabled={fileStore.showsMissingContent} /> - + ); }); @@ -45,7 +49,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { // ToDo: add a confirm dialog? uiStore.copyTagsToClipboard(); runInAction(() => { - uiStore.fileSelection.forEach((f) => f.clearTags()); + uiStore.dispatchToFileSelection(async (files) => files.forEach((f) => f.clearTags())); }); }; @@ -131,7 +135,9 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { onClick={(e) => handleSearchSimilar( e, - file.tags.toJSON().map((t) => new ClientTagSearchCriteria('tags', t.id, 'contains')), + file.tags + .toJSON() + .map((t) => new ClientTagSearchCriteria(undefined, 'tags', t.id, 'contains')), ) } text="Same Tags" @@ -142,6 +148,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { handleSearchSimilar( e, new ClientStringSearchCriteria( + undefined, 'absolutePath', SysPath.dirname(file.absolutePath) + SysPath.sep, 'startsWith', @@ -156,6 +163,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { handleSearchSimilar( e, new ClientStringSearchCriteria( + undefined, 'absolutePath', locationStore.get(file.locationId)!.path + SysPath.sep, 'startsWith', @@ -168,7 +176,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { onClick={(e) => handleSearchSimilar( e, - new ClientStringSearchCriteria('extension', file.extension, 'equals'), + new ClientStringSearchCriteria(undefined, 'extension', file.extension, 'equals'), ) } text="Same File Type" @@ -177,8 +185,8 @@ export 
const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { handleSearchSimilar(e, [ - new ClientNumberSearchCriteria('width', file.width, 'equals'), - new ClientNumberSearchCriteria('height', file.height, 'equals'), + new ClientNumberSearchCriteria(undefined, 'width', file.width, 'equals'), + new ClientNumberSearchCriteria(undefined, 'height', file.height, 'equals'), ]) } text="Same Resolution" @@ -186,7 +194,10 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { /> - handleSearchSimilar(e, new ClientNumberSearchCriteria('size', file.size, 'equals')) + handleSearchSimilar( + e, + new ClientNumberSearchCriteria(undefined, 'size', file.size, 'equals'), + ) } text="Same File Size" icon={IconSet.FILTER_FILTER_DOWN} @@ -195,7 +206,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { onClick={(e) => handleSearchSimilar( e, - new ClientDateSearchCriteria('dateCreated', file.dateCreated, 'equals'), + new ClientDateSearchCriteria(undefined, 'dateCreated', file.dateCreated, 'equals'), ) } text="Same Creation Date" @@ -205,7 +216,7 @@ export const FileViewerMenuItems = ({ file }: { file: ClientFile }) => { onClick={(e) => handleSearchSimilar( e, - new ClientDateSearchCriteria('dateModified', file.dateModified, 'equals'), + new ClientDateSearchCriteria(undefined, 'dateModified', file.dateModified, 'equals'), ) } text="Same Modification Date" @@ -296,11 +307,13 @@ export const ExternalAppMenuItems = observer(({ file }: { file: ClientFile }) => onClick={() => shell.showItemInFolder(file.absolutePath)} text="Reveal in File Browser" icon={IconSet.FOLDER_CLOSE} + disabled={file.isBroken} /> 1 ? 
's' : ''}`} icon={IconSet.DELETE} + disabled={file.isBroken} /> ); diff --git a/src/frontend/containers/ErrorBoundary.tsx b/src/frontend/containers/ErrorBoundary.tsx index 03b965fdf..4b0ecec4f 100644 --- a/src/frontend/containers/ErrorBoundary.tsx +++ b/src/frontend/containers/ErrorBoundary.tsx @@ -13,6 +13,7 @@ import { Alert, DialogButton } from 'widgets/popovers'; export const ClearDbButton = () => { const [isOpen, setIsOpen] = useState(false); const rootStore = useStore(); + const { fileStore } = rootStore; return ( <> @@ -31,6 +32,9 @@ export const ClearDbButton = () => { if (button === DialogButton.CloseButton) { setIsOpen(false); } else { + fileStore.setDirtyMissingFiles(true); + fileStore.setDirtyTotalFiles(true); + fileStore.setDirtyUntaggedFiles(true); await rootStore.clearDatabase(); rootStore.uiStore.closeSettings(); } diff --git a/src/frontend/containers/Inspector/index.tsx b/src/frontend/containers/Inspector/index.tsx index 89a670463..64d8d0218 100644 --- a/src/frontend/containers/Inspector/index.tsx +++ b/src/frontend/containers/Inspector/index.tsx @@ -12,7 +12,7 @@ import FileExtraPropertiesEditor from '../../components/FileExtraPropertiesEdito const Inspector = observer(() => { const { uiStore, fileStore } = useStore(); - if (uiStore.firstItem >= fileStore.fileList.length || !uiStore.isInspectorOpen) { + if (uiStore.firstItemIndex >= fileStore.fileList.length || !uiStore.isInspectorOpen) { return (