diff --git a/src/metamemory/index.ts b/src/metamemory/index.ts index 13a4cea..d71c193 100644 --- a/src/metamemory/index.ts +++ b/src/metamemory/index.ts @@ -15,7 +15,7 @@ import { InMemoryVectorSearch } from './utils/vector-search.js'; import type { ResponseInput, ResponseInputItem, Agent } from '@just-every/ensemble'; // Re-export types for backward compatibility -export type { MetamemoryState } from '../metamemory-old/types'; +export type { MetamemoryState } from './types/index.js'; export interface MetaMemoryOptions { config?: Partial<MetaMemoryConfig>; @@ -30,6 +30,7 @@ export class Metamemory { private tagger: MessageTagger; private compactor: ThreadCompactor; private contextAssembler: ContextAssembler; + private vectorSearch: InMemoryVectorSearch; private config: MetaMemoryConfig; private isProcessing: boolean = false; private messageQueue: ResponseInputItem[] = []; @@ -56,10 +57,10 @@ export class Metamemory { this.tagger = new MessageTagger(taggerLLM, this.config); const summarizer = options.summarizer || new LLMSummarizer(options.agent); - const vectorSearch = options.vectorSearch as InMemoryVectorSearch || new InMemoryVectorSearch(); - - this.compactor = new ThreadCompactor(this.threadManager, this.config, summarizer, vectorSearch); - this.contextAssembler = new ContextAssembler(this.threadManager, vectorSearch); + this.vectorSearch = options.vectorSearch as InMemoryVectorSearch || new InMemoryVectorSearch(); + + this.compactor = new ThreadCompactor(this.threadManager, this.config, summarizer, this.vectorSearch); + this.contextAssembler = new ContextAssembler(this.threadManager, this.vectorSearch); } /** @@ -200,7 +201,8 @@ export class Metamemory { metamemory: new Map(), // Empty for now threads: threadMap, lastProcessedIndex: this.lastProcessedIndex, - lastProcessedTime: Date.now() + lastProcessedTime: Date.now(), + vectorEmbeddings: this.vectorSearch.exportEmbeddings() }; } @@ -229,6 +231,17 @@ export class Metamemory { this.threadManager.importThreads(threads);
this.lastProcessedIndex = state.lastProcessedIndex; + + this.vectorSearch.clear(); + if (state.vectorEmbeddings) { + this.vectorSearch.loadEmbeddings(state.vectorEmbeddings); + } else { + for (const thread of Object.values(threads)) { + if (thread.state === 'archived') { + void this.vectorSearch.addThread(thread); + } + } + } } /** @@ -266,6 +279,7 @@ export function createMetamemoryState(): MetamemoryStateType { metamemory: new Map(), threads: new Map(), lastProcessedIndex: 0, - lastProcessedTime: Date.now() + lastProcessedTime: Date.now(), + vectorEmbeddings: {} }; } \ No newline at end of file diff --git a/src/metamemory/types/index.ts b/src/metamemory/types/index.ts index 7c4a605..0e68b0b 100644 --- a/src/metamemory/types/index.ts +++ b/src/metamemory/types/index.ts @@ -1,7 +1,18 @@ export type TopicState = 'core' | 'active' | 'idle' | 'archived' | 'ephemeral'; // Add the missing type for backwards compatibility -export type { MetamemoryState } from '../../metamemory-old/types.js'; +import type { MetamemoryState as LegacyMetamemoryState } from '../../metamemory-old/types.js'; + +export interface VectorEmbeddings { + [topicName: string]: { + summary: string; + embedding: number[]; + }; +} + +export interface MetamemoryState extends LegacyMetamemoryState { + vectorEmbeddings?: VectorEmbeddings; +} export interface Message { id: string; diff --git a/src/metamemory/utils/vector-search.ts b/src/metamemory/utils/vector-search.ts index 608b43b..28b8ef8 100644 --- a/src/metamemory/utils/vector-search.ts +++ b/src/metamemory/utils/vector-search.ts @@ -104,13 +104,38 @@ export class InMemoryVectorSearch implements VectorSearchInterface { clear(): void { this.embeddings.clear(); } - + /** * Get the number of indexed threads */ size(): number { return this.embeddings.size; } + + /** + * Export embeddings for persistence + */ + exportEmbeddings(): Record<string, { summary: string; embedding: number[] }> { + const data: Record<string, { summary: string; embedding: number[] }> = {}; + for (const [topicName, value] of this.embeddings) { + data[topicName] = { summary:
value.summary, embedding: value.embedding }; + } + return data; + } + + /** + * Load embeddings from persisted state + */ + loadEmbeddings(data: Record<string, { summary: string; embedding: number[] }>): void { + this.embeddings.clear(); + for (const [topicName, value] of Object.entries(data)) { + this.embeddings.set(topicName, { + topicName, + summary: value.summary, + embedding: value.embedding, + }); + } + } } /** diff --git a/test/metamemory-new.test.ts b/test/metamemory-new.test.ts index eb38453..82a4e64 100644 --- a/test/metamemory-new.test.ts +++ b/test/metamemory-new.test.ts @@ -311,5 +311,27 @@ describe('MetaMemory System', () => { const newState = metamemory.getState(); expect(newState.threads.size).toBe(1); }); + + it('should preserve vector search data after restoring state', async () => { + // Create an archived thread and index it + const manager: any = (metamemory as any).threadManager; + const search: InMemoryVectorSearch = (metamemory as any).vectorSearch; + + const thread = manager.createThread('archived_topic', 'archived'); + manager.updateThreadSummary('archived_topic', 'Discussion about caching data'); + await search.addThread(thread); + + const saved = metamemory.getState(); + + const restored = new Metamemory({ + agent: mockAgent, + taggerLLM: new MockTaggerLLM(), + summarizer: new MockSummarizer(), + }); + + restored.restoreState(saved); + const results = await (restored as any).vectorSearch.search('caching', 1); + expect(results[0].topicName).toBe('archived_topic'); + }); }); }); \ No newline at end of file