Skip to content

Commit ded5dde

Browse files
add LLM-driven tool_search and tool_execute
1 parent 78ac0cb commit ded5dde

File tree

3 files changed

+519
-0
lines changed

3 files changed

+519
-0
lines changed

examples/meta_tools_example.py

Lines changed: 187 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,187 @@
1+
"""Meta tools example: LLM-driven tool discovery and execution.
2+
3+
Instead of loading all tools upfront, the LLM autonomously searches for
4+
relevant tools and executes them — keeping token usage minimal.
5+
6+
Prerequisites:
7+
- STACKONE_API_KEY environment variable
8+
- STACKONE_ACCOUNT_ID environment variable (comma-separated for multiple)
9+
- OPENAI_API_KEY or GOOGLE_API_KEY environment variable
10+
11+
Run with:
12+
uv run python examples/meta_tools_example.py
13+
"""
14+
15+
from __future__ import annotations

import json
import os

# Load variables from a local .env file when python-dotenv is available;
# silently fall back to plain process environment variables otherwise,
# so the example runs without the optional dependency.
try:
    from dotenv import load_dotenv

    load_dotenv()
except ModuleNotFoundError:
    pass

from stackone_ai import StackOneToolSet

# Account ids parsed from STACKONE_ACCOUNT_ID: comma-separated, whitespace
# trimmed, blank entries dropped. Empty list when the variable is unset,
# which callers below translate to `None` (no account filter).
_account_ids = [
    aid.strip()
    for aid in os.getenv("STACKONE_ACCOUNT_ID", "").split(",")
    if aid.strip()
]
34+
35+
36+
def example_openai_meta_tools() -> None:
    """Meta tools with OpenAI Chat Completions.

    The LLM receives only tool_search and tool_execute — two small tool
    definitions regardless of how many tools exist. It searches for what
    it needs and executes.
    """
    print("=" * 60)
    print("Example 1: Meta tools with OpenAI")
    print("=" * 60)
    print()

    # The OpenAI SDK is an optional dependency for this example.
    try:
        from openai import OpenAI
    except ImportError:
        print("Skipped: OpenAI library not installed. Install with: pip install openai")
        print()
        return

    openai_key = os.getenv("OPENAI_API_KEY")
    google_key = os.getenv("GOOGLE_API_KEY")

    # Prefer OpenAI; otherwise fall back to Gemini via its
    # OpenAI-compatible endpoint (same client class, different base_url).
    if openai_key:
        client = OpenAI()
        model = "gpt-5.1"
        provider = "OpenAI"
    elif google_key:
        client = OpenAI(
            api_key=google_key,
            base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
        )
        model = "gemini-3-pro-preview"
        provider = "Gemini"
    else:
        print("Skipped: Set OPENAI_API_KEY or GOOGLE_API_KEY to run this example.")
        print()
        return

    print(f"Using {provider} ({model})")
    print()

    toolset = StackOneToolSet(search={"method": "semantic", "top_k": 3})

    # Get meta tools — returns a Tools collection with tool_search + tool_execute
    meta_tools = toolset.get_meta_tools(account_ids=_account_ids or None)
    openai_tools = meta_tools.to_openai()

    print(f"Meta tools: {[t.name for t in meta_tools]}")
    print()

    messages: list[dict] = [
        {
            "role": "system",
            "content": (
                "You are a helpful scheduling assistant. "
                "Use tool_search to find relevant tools, then tool_execute to run them. "
                "If a tool execution fails, try different parameters or a different tool. "
                "Do not repeat the same failed call."
            ),
        },
        {
            "role": "user",
            "content": "List my upcoming Calendly events for the next week.",
        },
    ]

    # Agent loop — let the LLM drive search and execution
    # NOTE(review): if all max_iterations rounds produce tool calls, the loop
    # ends without printing a final response or any exhaustion notice.
    max_iterations = 10
    for iteration in range(max_iterations):
        print(f"--- Iteration {iteration + 1} ---")

        response = client.chat.completions.create(
            model=model,
            messages=messages,
            tools=openai_tools,
            tool_choice="auto",
        )

        choice = response.choices[0]

        # No tool calls means the model has produced its final answer.
        if not choice.message.tool_calls:
            print(f"\n{provider} final response: {choice.message.content}")
            break

        # Add assistant message with tool calls
        # Use model_dump with exclude_none to avoid null values that Gemini rejects
        messages.append(choice.message.model_dump(exclude_none=True))

        # Execute each tool call
        for tool_call in choice.message.tool_calls:
            print(f"LLM called: {tool_call.function.name}({tool_call.function.arguments})")

            tool = meta_tools.get_tool(tool_call.function.name)
            if tool is None:
                # Feed an error payload back so the model can self-correct
                # instead of crashing the loop.
                result = {"error": f"Unknown tool: {tool_call.function.name}"}
            else:
                # NOTE(review): assumes `execute` accepts the raw JSON
                # argument string and returns a json.dumps-serializable
                # value — confirm against the stackone_ai Tools API.
                result = tool.execute(tool_call.function.arguments)

            # Echo the result back as a "tool" role message keyed by the
            # call id, per the Chat Completions tool-calling protocol.
            messages.append(
                {
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": json.dumps(result),
                }
            )

    print()
144+
145+
def example_langchain_meta_tools() -> None:
    """Meta tools with LangChain.

    The meta tools convert to LangChain format just like any other Tools collection.
    """
    banner = "=" * 60
    print(banner)
    print("Example 2: Meta tools with LangChain")
    print(banner)
    print()

    # LangChain is optional for this example; skip gracefully when absent.
    try:
        from langchain_core.tools import BaseTool  # noqa: F401
    except ImportError:
        print("Skipped: LangChain not installed. Install with: pip install langchain-core")
        print()
        return

    stackone_toolset = StackOneToolSet(search={"method": "semantic", "top_k": 3})
    discovered = stackone_toolset.get_meta_tools(account_ids=_account_ids or None)

    # Same Tools collection, adapted to LangChain's BaseTool interface.
    converted = discovered.to_langchain()

    print(f"Created {len(converted)} LangChain tools:")
    for lc_tool in converted:
        print(f" - {lc_tool.name}: {lc_tool.description}")
    print()
    print("These tools are ready to use with LangChain agents (AgentExecutor, create_react_agent, etc.)")
    print()
173+
174+
175+
def main() -> None:
    """Run all meta tools examples."""
    # The StackOne key is mandatory for every example; bail out early
    # with a hint when it is missing.
    if not os.getenv("STACKONE_API_KEY"):
        print("Set STACKONE_API_KEY to run these examples.")
        return

    for example in (example_openai_meta_tools, example_langchain_meta_tools):
        example()
184+
185+
186+
# Allow running this example file directly as a script.
if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)