Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 21 additions & 7 deletions src/metamemory/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import { InMemoryVectorSearch } from './utils/vector-search.js';
import type { ResponseInput, ResponseInputItem, Agent } from '@just-every/ensemble';

// Re-export types for backward compatibility
export type { MetamemoryState } from '../metamemory-old/types';
export type { MetamemoryState } from './types';

export interface MetaMemoryOptions {
config?: Partial<MetaMemoryConfig>;
Expand All @@ -30,6 +30,7 @@ export class Metamemory {
private tagger: MessageTagger;
private compactor: ThreadCompactor;
private contextAssembler: ContextAssembler;
private vectorSearch: InMemoryVectorSearch;
private config: MetaMemoryConfig;
private isProcessing: boolean = false;
private messageQueue: ResponseInputItem[] = [];
Expand All @@ -56,10 +57,10 @@ export class Metamemory {
this.tagger = new MessageTagger(taggerLLM, this.config);

const summarizer = options.summarizer || new LLMSummarizer(options.agent);
const vectorSearch = options.vectorSearch as InMemoryVectorSearch || new InMemoryVectorSearch();
this.compactor = new ThreadCompactor(this.threadManager, this.config, summarizer, vectorSearch);
this.contextAssembler = new ContextAssembler(this.threadManager, vectorSearch);
this.vectorSearch = options.vectorSearch as InMemoryVectorSearch || new InMemoryVectorSearch();

this.compactor = new ThreadCompactor(this.threadManager, this.config, summarizer, this.vectorSearch);
this.contextAssembler = new ContextAssembler(this.threadManager, this.vectorSearch);
}

/**
Expand Down Expand Up @@ -200,7 +201,8 @@ export class Metamemory {
metamemory: new Map(), // Empty for now
threads: threadMap,
lastProcessedIndex: this.lastProcessedIndex,
lastProcessedTime: Date.now()
lastProcessedTime: Date.now(),
vectorEmbeddings: this.vectorSearch.exportEmbeddings()
};
}

Expand Down Expand Up @@ -229,6 +231,17 @@ export class Metamemory {

this.threadManager.importThreads(threads);
this.lastProcessedIndex = state.lastProcessedIndex;

this.vectorSearch.clear();
if (state.vectorEmbeddings) {
this.vectorSearch.loadEmbeddings(state.vectorEmbeddings);
} else {
for (const thread of Object.values(threads)) {
if (thread.state === 'archived') {
void this.vectorSearch.addThread(thread);
}
}
}
}

/**
Expand Down Expand Up @@ -266,6 +279,7 @@ export function createMetamemoryState(): MetamemoryStateType {
metamemory: new Map(),
threads: new Map(),
lastProcessedIndex: 0,
lastProcessedTime: Date.now()
lastProcessedTime: Date.now(),
vectorEmbeddings: {}
};
}
13 changes: 12 additions & 1 deletion src/metamemory/types/index.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,18 @@
export type TopicState = 'core' | 'active' | 'idle' | 'archived' | 'ephemeral';

// Add the missing type for backwards compatibility
export type { MetamemoryState } from '../../metamemory-old/types.js';
import type { MetamemoryState as LegacyMetamemoryState } from '../../metamemory-old/types.js';

export interface VectorEmbeddings {
[topicName: string]: {
summary: string;
embedding: number[];
};
}

export interface MetamemoryState extends LegacyMetamemoryState {
vectorEmbeddings?: VectorEmbeddings;
}

export interface Message {
id: string;
Expand Down
27 changes: 26 additions & 1 deletion src/metamemory/utils/vector-search.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,13 +104,38 @@ export class InMemoryVectorSearch implements VectorSearchInterface {
/**
 * Remove all stored topic embeddings from the in-memory index.
 * Used to reset the index before loading persisted embeddings.
 */
clear(): void {
this.embeddings.clear();
}

/**
 * Get the number of indexed threads.
 *
 * @returns Count of topics currently held in the embeddings map.
 */
size(): number {
return this.embeddings.size;
}

/**
 * Export embeddings for persistence.
 *
 * Produces a plain, JSON-serializable snapshot of the in-memory index,
 * keyed by topic name, suitable for round-tripping through saved state.
 *
 * @returns A plain-object copy of every stored summary/embedding pair.
 */
exportEmbeddings(): Record<string, { summary: string; embedding: number[] }> {
  const snapshot: Record<string, { summary: string; embedding: number[] }> = {};
  this.embeddings.forEach(({ summary, embedding }, topicName) => {
    snapshot[topicName] = { summary, embedding };
  });
  return snapshot;
}

/**
 * Load embeddings from persisted state.
 *
 * Replaces the entire in-memory index: any previously indexed topics are
 * dropped before the persisted entries are installed.
 *
 * @param data Plain-object snapshot previously produced by exportEmbeddings().
 */
loadEmbeddings(data: Record<string, { summary: string; embedding: number[] }>): void {
  this.embeddings.clear();
  Object.entries(data).forEach(([topicName, { summary, embedding }]) => {
    this.embeddings.set(topicName, { topicName, summary, embedding });
  });
}
}

/**
Expand Down
22 changes: 22 additions & 0 deletions test/metamemory-new.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -311,5 +311,27 @@ describe('MetaMemory System', () => {
const newState = metamemory.getState();
expect(newState.threads.size).toBe(1);
});

// Regression test: persisted state must round-trip the vector-search index,
// so a restored Metamemory can still answer semantic searches over
// previously archived threads.
it('should preserve vector search data after restoring state', async () => {
// Create an archived thread and index it
// NOTE(review): reaches into private fields via `as any` — acceptable in a
// test, but brittle if the internals are renamed.
const manager: any = (metamemory as any).threadManager;
const search: InMemoryVectorSearch = (metamemory as any).vectorSearch;

const thread = manager.createThread('archived_topic', 'archived');
manager.updateThreadSummary('archived_topic', 'Discussion about caching data');
await search.addThread(thread);

// Snapshot state (expected to include the exported embeddings).
const saved = metamemory.getState();

// Build a fresh instance with no prior index…
const restored = new Metamemory({
agent: mockAgent,
taggerLLM: new MockTaggerLLM(),
summarizer: new MockSummarizer(),
});

// …restore, then verify the archived thread is still searchable.
restored.restoreState(saved);
const results = await (restored as any).vectorSearch.search('caching', 1);
expect(results[0].topicName).toBe('archived_topic');
});
});
});