Skip to content

Commit f081c86

Browse files
add LLM-driven tool_search and tool_execute
1 parent 8c1aca8 commit f081c86

4 files changed

Lines changed: 357 additions & 0 deletions

File tree

examples/meta-tools.ts

Lines changed: 159 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,159 @@
1+
/**
2+
* This example demonstrates the meta tools pattern (tool_search + tool_execute)
3+
* for LLM-driven tool discovery and execution.
4+
*
5+
* Instead of loading all tools upfront, the LLM autonomously searches for
6+
* relevant tools and executes them — keeping token usage minimal.
7+
*
8+
* @example
9+
* ```bash
10+
* # Run with required environment variables:
11+
* STACKONE_API_KEY=your-key OPENAI_API_KEY=your-key STACKONE_ACCOUNT_ID=your-account npx tsx examples/meta-tools.ts
12+
* ```
13+
*/
14+
15+
import process from 'node:process';
16+
import { openai } from '@ai-sdk/openai';
17+
import { StackOneToolSet } from '@stackone/ai';
18+
import { generateText, stepCountIs } from 'ai';
19+
20+
const apiKey = process.env.STACKONE_API_KEY;
21+
if (!apiKey) {
22+
console.error('STACKONE_API_KEY environment variable is required');
23+
process.exit(1);
24+
}
25+
26+
if (!process.env.OPENAI_API_KEY) {
27+
console.error('OPENAI_API_KEY environment variable is required');
28+
process.exit(1);
29+
}
30+
31+
const accountId = process.env.STACKONE_ACCOUNT_ID;
32+
33+
/**
34+
* Example 1: Meta tools with Vercel AI SDK
35+
*
36+
* The LLM receives only tool_search and tool_execute — two small tool definitions
37+
* regardless of how many tools exist. It searches for what it needs and executes.
38+
*/
39+
const metaToolsWithAISDK = async (): Promise<void> => {
40+
console.log('Example 1: Meta tools with Vercel AI SDK\n');
41+
42+
const toolset = new StackOneToolSet({
43+
search: { method: 'semantic', topK: 3 },
44+
...(accountId ? { accountId } : {}),
45+
});
46+
47+
// Get meta tools — returns a Tools collection with tool_search + tool_execute
48+
const accountIds = accountId ? [accountId] : [];
49+
const metaTools = toolset.getMetaTools({ accountIds });
50+
51+
console.log(`Meta tools: ${metaTools.toArray().map((t) => t.name).join(', ')}`);
52+
console.log();
53+
54+
// Pass to the LLM — it will search for calendly tools, then execute
55+
const { text, steps } = await generateText({
56+
model: openai('gpt-4o'),
57+
tools: await metaTools.toAISDK(),
58+
prompt: 'List my upcoming Calendly events for the next week.',
59+
stopWhen: stepCountIs(10),
60+
});
61+
62+
console.log('AI Response:', text);
63+
console.log('\nSteps taken:');
64+
for (const step of steps) {
65+
for (const call of step.toolCalls ?? []) {
66+
const argsStr = call.args ? JSON.stringify(call.args).slice(0, 100) : '{}';
67+
console.log(` - ${call.toolName}(${argsStr})`);
68+
}
69+
}
70+
};
71+
72+
/**
73+
* Example 2: Meta tools with OpenAI Chat Completions
74+
*
75+
* Same pattern, different framework. The meta tools convert to any format.
76+
*/
77+
const metaToolsWithOpenAI = async (): Promise<void> => {
78+
console.log('\nExample 2: Meta tools with OpenAI Chat Completions\n');
79+
80+
const { default: OpenAI } = await import('openai');
81+
82+
const toolset = new StackOneToolSet({
83+
search: { method: 'semantic', topK: 3 },
84+
...(accountId ? { accountId } : {}),
85+
});
86+
87+
const accountIds = accountId ? [accountId] : [];
88+
const metaTools = toolset.getMetaTools({ accountIds });
89+
const openaiTools = metaTools.toOpenAI();
90+
91+
const client = new OpenAI();
92+
const messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = [
93+
{
94+
role: 'system',
95+
content:
96+
'You are a helpful scheduling assistant. Use tool_search to find relevant tools, then tool_execute to run them. Always read the parameter schemas from tool_search results carefully. If a tool needs a user URI, first search for and call a "get current user" tool to obtain it. If a tool execution fails, try different parameters or a different tool.',
97+
},
98+
{
99+
role: 'user',
100+
content: 'Check my upcoming Calendly events and list them.',
101+
},
102+
];
103+
104+
// Agent loop — let the LLM drive search and execution
105+
let continueLoop = true;
106+
while (continueLoop) {
107+
const response = await client.chat.completions.create({
108+
model: 'gpt-4o',
109+
messages,
110+
tools: openaiTools,
111+
tool_choice: 'auto',
112+
});
113+
114+
const choice = response.choices[0];
115+
116+
if (!choice.message.tool_calls?.length) {
117+
console.log('Final response:', choice.message.content);
118+
continueLoop = false;
119+
break;
120+
}
121+
122+
// Add assistant message with tool calls
123+
messages.push(choice.message);
124+
125+
// Execute each tool call
126+
for (const toolCall of choice.message.tool_calls) {
127+
console.log(`LLM called: ${toolCall.function.name}(${toolCall.function.arguments})`);
128+
129+
const tool = metaTools.getTool(toolCall.function.name);
130+
if (!tool) {
131+
messages.push({
132+
role: 'tool',
133+
tool_call_id: toolCall.id,
134+
content: JSON.stringify({ error: `Unknown tool: ${toolCall.function.name}` }),
135+
});
136+
continue;
137+
}
138+
139+
const result = await tool.execute(toolCall.function.arguments);
140+
messages.push({
141+
role: 'tool',
142+
tool_call_id: toolCall.id,
143+
content: JSON.stringify(result),
144+
});
145+
}
146+
}
147+
};
148+
149+
// Main execution
150+
const main = async (): Promise<void> => {
151+
try {
152+
await metaToolsWithAISDK();
153+
await metaToolsWithOpenAI();
154+
} catch (error) {
155+
console.error('Error running examples:', error);
156+
}
157+
};
158+
159+
await main();

src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
export { BaseTool, StackOneTool, Tools } from './tool';
66
export { createFeedbackTool } from './feedback';
7+
export { type MetaToolsOptions } from './meta-tools';
78
export { StackOneError } from './utils/error-stackone';
89
export { StackOneAPIError } from './utils/error-stackone-api';
910

src/meta-tools.ts

Lines changed: 162 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,162 @@
1+
import { z } from 'zod/v4';
2+
import { BaseTool } from './tool';
3+
import type { ExecuteOptions, JsonObject, LocalExecuteConfig, ToolParameters } from './types';
4+
import { StackOneError } from './utils/error-stackone';
5+
import { StackOneAPIError } from './utils/error-stackone-api';
6+
7+
import type { SearchMode, StackOneToolSet } from './toolsets';
8+
9+
/**
 * Options for getMetaTools().
 *
 * These act as defaults for the generated meta tools; values the LLM
 * supplies at call time (tool_search's `connector` / `top_k`) take
 * precedence over `connector` / `topK` here.
 */
export interface MetaToolsOptions {
  /** Account IDs to scope tool discovery and execution */
  accountIds?: string[];
  /** Search mode for tool discovery */
  search?: SearchMode;
  /** Optional connector filter (e.g. 'bamboohr') */
  connector?: string;
  /** Maximum number of search results. Defaults to 5. */
  topK?: number;
  /** Minimum similarity score threshold 0-1 */
  minSimilarity?: number;
}
24+
25+
const localConfig = (id: string): LocalExecuteConfig => ({
26+
kind: 'local',
27+
identifier: `meta:${id}`,
28+
});
29+
30+
// --- tool_search ---

/**
 * Runtime validation for tool_search input coming from the LLM.
 * The query is trimmed and must be non-empty; top_k, when present, must be
 * an integer within the 1-50 range advertised in searchParameters below.
 */
const searchInputSchema = z.object({
  query: z
    .string()
    .transform((v) => v.trim())
    .refine((v) => v.length > 0, { message: 'query must be a non-empty string' }),
  connector: z.string().optional(),
  top_k: z.number().int().min(1).max(50).optional(),
});
40+
41+
/**
 * JSON-schema parameter definition for tool_search, shown to the LLM.
 * Must be kept in sync with searchInputSchema, which enforces the same
 * constraints at runtime.
 */
const searchParameters = {
  type: 'object',
  properties: {
    query: {
      type: 'string',
      description:
        'Natural language description of what you need (e.g. "create an employee", "list time off requests")',
    },
    connector: {
      type: 'string',
      description: 'Optional connector filter (e.g. "bamboohr", "hibob")',
    },
    top_k: {
      type: 'integer',
      description: 'Max results to return (1-50, default 5)',
      minimum: 1,
      maximum: 50,
    },
  },
  required: ['query'],
} as const satisfies ToolParameters;
62+
63+
export function createSearchTool(toolset: StackOneToolSet, options: MetaToolsOptions = {}): BaseTool {
64+
const tool = new BaseTool(
65+
'tool_search',
66+
'Search for available tools by describing what you need. Returns matching tool names, descriptions, and parameter schemas. Use the returned parameter schemas to know exactly what to pass when calling tool_execute.',
67+
searchParameters,
68+
localConfig('search'),
69+
);
70+
71+
tool.execute = async (inputParams?: JsonObject | string): Promise<JsonObject> => {
72+
const raw = typeof inputParams === 'string' ? JSON.parse(inputParams) : inputParams || {};
73+
const parsed = searchInputSchema.parse(raw);
74+
75+
const results = await toolset.searchTools(parsed.query, {
76+
connector: parsed.connector ?? options.connector,
77+
topK: parsed.top_k ?? options.topK ?? 5,
78+
minSimilarity: options.minSimilarity,
79+
search: options.search,
80+
accountIds: options.accountIds,
81+
});
82+
83+
return {
84+
tools: results.toArray().map((t) => ({
85+
name: t.name,
86+
description: t.description,
87+
parameters: t.parameters.properties,
88+
})),
89+
total: results.length,
90+
query: parsed.query,
91+
};
92+
};
93+
94+
return tool;
95+
}
96+
97+
// --- tool_execute ---

/**
 * Runtime validation for tool_execute input coming from the LLM.
 * tool_name is trimmed and must be non-empty; parameters defaults to {}.
 */
const executeInputSchema = z.object({
  tool_name: z
    .string()
    .transform((v) => v.trim())
    .refine((v) => v.length > 0, { message: 'tool_name must be a non-empty string' }),
  parameters: z.record(z.string(), z.unknown()).optional().default({}),
});
106+
107+
/**
 * JSON-schema parameter definition for tool_execute, shown to the LLM.
 * Must be kept in sync with executeInputSchema, which enforces the same
 * contract at runtime.
 */
const executeParameters = {
  type: 'object',
  properties: {
    tool_name: {
      type: 'string',
      description: 'Exact tool name from tool_search results',
    },
    parameters: {
      type: 'object',
      description: 'Parameters for the tool. Pass an empty object {} if no parameters are needed.',
    },
  },
  required: ['tool_name'],
} as const satisfies ToolParameters;
121+
122+
export function createExecuteTool(toolset: StackOneToolSet, options: MetaToolsOptions = {}): BaseTool {
123+
const tool = new BaseTool(
124+
'tool_execute',
125+
'Execute a tool by name with the given parameters. Use tool_search first to find available tools. The parameters field must match the parameter schema returned by tool_search. Pass parameters as a nested object matching the schema structure.',
126+
executeParameters,
127+
localConfig('execute'),
128+
);
129+
130+
tool.execute = async (
131+
inputParams?: JsonObject | string,
132+
executeOptions?: ExecuteOptions,
133+
): Promise<JsonObject> => {
134+
const raw = typeof inputParams === 'string' ? JSON.parse(inputParams) : inputParams || {};
135+
const parsed = executeInputSchema.parse(raw);
136+
137+
const allTools = await toolset.fetchTools({ accountIds: options.accountIds });
138+
const target = allTools.getTool(parsed.tool_name);
139+
140+
if (!target) {
141+
return {
142+
error: `Tool "${parsed.tool_name}" not found. Use tool_search to find available tools.`,
143+
};
144+
}
145+
146+
try {
147+
return await target.execute(parsed.parameters as JsonObject, executeOptions);
148+
} catch (error) {
149+
// Return API errors to the LLM so it can adjust parameters and retry
150+
if (error instanceof StackOneAPIError) {
151+
return {
152+
error: error.message,
153+
status_code: error.statusCode,
154+
tool_name: parsed.tool_name,
155+
};
156+
}
157+
throw error;
158+
}
159+
};
160+
161+
return tool;
162+
}

src/toolsets.ts

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ import { defu } from 'defu';
22
import type { MergeExclusive, SimplifyDeep } from 'type-fest';
33
import { DEFAULT_BASE_URL } from './consts';
44
import { createFeedbackTool } from './feedback';
5+
import { type MetaToolsOptions, createExecuteTool, createSearchTool } from './meta-tools';
56
import { type StackOneHeaders, normalizeHeaders, stackOneHeadersSchema } from './headers';
67
import { ToolIndex } from './local-search';
78
import { createMCPClient } from './mcp-client';
@@ -432,6 +433,40 @@ export class StackOneToolSet {
432433
return new SearchTool(this, config);
433434
}
434435

436+
/**
437+
* Get LLM-callable meta tools (tool_search + tool_execute) for agent-driven workflows.
438+
*
439+
* Returns a Tools collection that can be passed directly to any LLM framework.
440+
* The LLM uses tool_search to discover available tools, then tool_execute to run them.
441+
*
442+
* @param options - Options to scope search and execution (account IDs, search mode, etc.)
443+
* @returns Tools collection containing tool_search and tool_execute
444+
*
445+
* @example
446+
* ```typescript
447+
* const toolset = new StackOneToolSet({ accountIds: ['acc-123'] });
448+
* const metaTools = toolset.getMetaTools();
449+
*
450+
* // Pass to any framework
451+
* const result = await generateText({
452+
* model: openai('gpt-4o'),
453+
* tools: await metaTools.toAISDK(),
454+
* prompt: 'Create an employee in BambooHR',
455+
* });
456+
* ```
457+
*/
458+
getMetaTools(options?: MetaToolsOptions): Tools {
459+
if (this.searchConfig === null) {
460+
throw new ToolSetConfigError(
461+
'Search is disabled. Initialize StackOneToolSet with a search config to enable.',
462+
);
463+
}
464+
465+
const searchTool = createSearchTool(this, options);
466+
const executeTool = createExecuteTool(this, options);
467+
return new Tools([searchTool, executeTool]);
468+
}
469+
435470
/**
436471
* Search for and fetch tools using semantic or local search.
437472
*

0 commit comments

Comments
 (0)