Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions eslint.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ export default tseslint.config(
"build/**",
"*.config.js",
"*.config.ts",
"src/**/*.test.{js,jsx,ts,tsx}",
"vite-env.d.ts",
],
},
Expand Down
7 changes: 7 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
"@ai-sdk/openai": "^1.3.22",
"@langchain/community": "^0.3.53",
"@langchain/core": "^0.3.72",
"@mediapipe/tasks-genai": "^0.10.14",
"ai": "^4.3.19",
"dedent": "^1.7.0",
"react-basic-contenteditable": "^1.0.6",
Expand Down
1 change: 1 addition & 0 deletions src/providers/index.ts
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
// Barrel file: re-exports the available model providers for consumers of "src/providers".
export { MediaPipeProvider } from "./mediaPipeProvider";
export { UserTokenProvider } from "./userTokenProvider";
4 changes: 4 additions & 0 deletions src/providers/mediaPipeProvider/cache/cache_constants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// Shared constants for MediaPipe model caching.
// Bump MODEL_CACHE_KEY_VERSION to invalidate existing logical cache entries
// without manually clearing the database.

// Name of the browser-side database used to store cached model data.
// NOTE(review): presumably an IndexedDB database name — confirm in model_cache_worker.ts.
export const MODEL_CACHE_DB_NAME = "clover-ai-mediapipe-provider-models";
// Logical cache-key version; increment when the manifest/chunk schema or caching logic changes.
export const MODEL_CACHE_KEY_VERSION = "v1"; // increment when manifest/chunk schema or logic changes
131 changes: 131 additions & 0 deletions src/providers/mediaPipeProvider/cache/model_cache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
/**
* Helper class to manage model caching with Web Worker
*/

import { get_logger } from "../logger";
const logger = get_logger("ModelCache");

/**
 * Callback reporting download/caching progress of a model.
 *
 * @param loaded  - Bytes received so far.
 * @param total   - Total bytes expected.
 * @param percent - Progress percentage; assumed integral (callers test `percent % 10`) — confirm against the worker's messages.
 * @param source  - Where the bytes are coming from: the local cache or the network. Optional.
 */
interface ProgressCallback {
  (loaded: number, total: number, percent: number, source?: "cache" | "network"): void;
}

export class ModelCache {
  // Worker performing the chunked download / cache lookup for the current load_model call.
  #chunk_worker: Worker | null = null;
  // Secondary worker slot; never assigned in this file. Kept so #cleanup()/cancel()
  // remain backward-compatible — TODO(review): remove if truly unused elsewhere.
  #worker: Worker | null = null;

  // Terminate and release the secondary worker, if any.
  #cleanup(): void {
    if (this.#worker) {
      this.#worker.terminate();
      this.#worker = null;
    }
  }

  // Terminate and release the chunk-download worker, if any.
  #cleanup_chunk(): void {
    if (this.#chunk_worker) {
      this.#chunk_worker.terminate();
      this.#chunk_worker = null;
    }
  }

  /**
   * Cancel any ongoing download and release worker resources.
   */
  cancel(): void {
    // BUG FIX: previously only #worker was terminated, but downloads run on
    // #chunk_worker, so cancel() never actually stopped an in-flight download.
    this.#cleanup();
    this.#cleanup_chunk();
  }

  /**
   * Load a model from cache or network in chunks using a Web Worker.
   *
   * Spawns a module worker (model_cache_worker.ts) and streams the chunks it
   * posts back into a ReadableStream. The returned promise resolves with the
   * stream's reader once the worker reports "complete", and rejects on any
   * worker "error" message or worker-level error event.
   *
   * @param url        - Source URL of the model file.
   * @param modelKey   - Logical cache key identifying this model.
   * @param onProgress - Optional progress callback; `source` reports whether
   *                     bytes come from the cache or the network.
   * @param chunkSize  - Chunk size in bytes requested from the worker (default 8 MiB).
   * @returns Reader over the model bytes, available after the download completes.
   */
  async load_model(
    url: string,
    modelKey: string,
    onProgress?: ProgressCallback,
    chunkSize: number = 8 * 1024 * 1024,
  ): Promise<ReadableStreamDefaultReader<Uint8Array>> {
    // Definite-assignment (!): ReadableStream calls start() synchronously in its
    // constructor, so `controller` is assigned before any use below. Without the
    // `!` this fails under strictNullChecks ("used before being assigned").
    let controller!: ReadableStreamDefaultController<Uint8Array>;
    const stream = new ReadableStream<Uint8Array>({
      start(c) {
        controller = c;
      },
      // Consumer cancelled the stream: stop the worker so the download aborts.
      cancel: () => {
        this.#cleanup_chunk();
      },
    });

    return new Promise((resolve, reject) => {
      this.#chunk_worker = new Worker(new URL("./model_cache_worker.ts", import.meta.url), {
        type: "module",
      });

      // Flips to "cache" once the worker reports a cache-hit manifest.
      let source: "cache" | "network" = "network";

      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      this.#chunk_worker.onmessage = (event: MessageEvent<any>) => {
        const msg = event.data;
        // Guard against malformed messages. BUG FIX: the original ran a second,
        // duplicated switch on msg.type *outside* this guard, which could throw
        // on a null payload; the two switches are now merged into one.
        if (!msg || !msg.type) return;

        switch (msg.type) {
          case "cache":
            logger.debug("cache-hit manifest received", msg);
            source = "cache";
            // Report 0 of totalSize so callers can initialize their progress UI.
            onProgress?.(0, msg.totalSize, 0, source);
            break;
          case "progress":
            // Log only every 10% to avoid flooding the console.
            if (msg.percent % 10 === 0) logger.debug("progress", msg.percent, "%");
            onProgress?.(msg.loaded, msg.total, msg.percent, source);
            break;
          case "chunk":
            // Log sparsely: every 100th chunk plus the final one.
            if (msg.index % 100 === 0 || msg.final)
              logger.debug("chunk", msg.index, "final=", msg.final);
            if (msg.arrayBuffer && msg.arrayBuffer.byteLength > 0) {
              controller.enqueue(new Uint8Array(msg.arrayBuffer));
            }
            // Completion is signaled by the "complete" message, not msg.final;
            // the original's `if (msg.final) break;` was a no-op and is removed.
            break;
          case "complete":
            logger.info(
              "complete",
              msg.parts,
              "parts totalMB=",
              (msg.totalSize / 1024 / 1024).toFixed(1),
            );
            controller.close();
            resolve(stream.getReader());
            this.#cleanup_chunk();
            break;
          case "error":
            logger.error("error message", msg.error);
            controller.error(new Error(msg.error));
            this.#cleanup_chunk();
            reject(new Error(msg.error));
            break;
        }
      };

      this.#chunk_worker.onerror = (e) => {
        controller.error(e);
        this.#cleanup_chunk();
        reject(new Error(`Chunk worker error: ${e.message}`));
      };

      // Kick off the download in the worker.
      this.#chunk_worker.postMessage({ type: "download", url, modelKey, chunkSize });
    });
  }
}
Loading