"""Search and execute example: LLM-driven tool discovery and execution.

There are two ways to give tools to an LLM:

1. ``toolset.openai()`` — fetches ALL tools and converts them to OpenAI format.
   Token cost scales with the number of tools in your catalog.

2. ``toolset.openai(mode="search_and_execute")`` — returns just 2 tools
   (tool_search + tool_execute). The LLM discovers and runs tools on-demand,
   keeping token usage constant regardless of catalog size.

This example demonstrates approach 2 with two patterns:
- Raw client (Gemini): manual agent loop with ``toolset.execute()``
- LangChain: framework handles tool execution automatically

Prerequisites:
    - STACKONE_API_KEY environment variable
    - STACKONE_ACCOUNT_ID environment variable
    - GOOGLE_API_KEY environment variable (for Gemini/LangChain)

Run with:
    uv run python examples/meta_tools_example.py
3638
3739
3840def example_gemini () -> None :
39- """Complete Gemini integration with meta tools via OpenAI-compatible API.
41+ """Raw client: Gemini via OpenAI-compatible API.
4042
41- Shows: init toolset -> get OpenAI tools -> agent loop -> final answer.
42- Uses gemini-3-pro-preview which handles tool schemas and dates well.
43+ Shows: init toolset -> get OpenAI tools -> manual agent loop with toolset.execute().
4344 """
4445 print ("=" * 60 )
45- print ("Example 1: Gemini client with meta tools " )
46+ print ("Example 1: Raw client (Gemini) — manual execution " )
4647 print ("=" * 60 )
4748 print ()
4849
@@ -94,7 +95,7 @@ def example_gemini() -> None:
9495 print (f"Answer: { choice .message .content } " )
9596 break
9697
97- # 5. Execute tool calls and feed results back
98+ # 5. Execute tool calls manually and feed results back
9899 messages .append (choice .message .model_dump (exclude_none = True ))
99100 for tool_call in choice .message .tool_calls :
100101 print (f" -> { tool_call .function .name } ({ tool_call .function .arguments } )" )
@@ -110,14 +111,74 @@ def example_gemini() -> None:
110111 print ()
111112
112113
def example_langchain() -> None:
    """LangChain integration: the framework executes tools for you.

    Flow: build the toolset -> fetch tools in LangChain format -> bind them
    to a chat model -> run a bounded agent loop in which each requested tool
    is run via ``tool.invoke()``. No manual ``toolset.execute()`` call is
    needed — the framework drives each tool's ``_run()`` itself.
    """
    print("=" * 60)
    print("Example 2: LangChain — framework handles execution")
    print("=" * 60)
    print()

    # Optional dependency: bail out gracefully when langchain isn't installed.
    try:
        from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
        from langchain_google_genai import ChatGoogleGenerativeAI
    except ImportError:
        print("Skipped: pip install langchain-google-genai")
        print()
        return

    if not os.getenv("GOOGLE_API_KEY"):
        print("Skipped: Set GOOGLE_API_KEY to run this example.")
        print()
        return

    # Build the toolset: semantic search over the catalog, execution scoped
    # to the configured account when one is present.
    acct = os.getenv("STACKONE_ACCOUNT_ID")
    toolset = StackOneToolSet(
        account_id=acct,
        search={"method": "semantic", "top_k": 3},
        execute={"account_ids": [acct]} if acct else None,
    )

    # Convert to LangChain tools, index them by name, and bind to the model.
    lc_tools = toolset.langchain(mode="search_and_execute")
    tool_index = {t.name: t for t in lc_tools}
    llm = ChatGoogleGenerativeAI(model="gemini-3-pro-preview").bind_tools(lc_tools)

    messages = [HumanMessage(content="List my upcoming Calendly events for the next week.")]

    # Bounded agent loop: stop as soon as the model answers without tools.
    for _turn in range(10):
        reply: AIMessage = llm.invoke(messages)

        if not reply.tool_calls:
            print(f"Answer: {reply.content}")
            break

        # Framework-style execution: invoke each requested LangChain tool
        # directly and feed its result back as a ToolMessage.
        messages.append(reply)
        for call in reply.tool_calls:
            print(f"  -> {call['name']}({json.dumps(call['args'])})")
            output = tool_index[call["name"]].invoke(call["args"])
            messages.append(ToolMessage(content=json.dumps(output), tool_call_id=call["id"]))

    print()
172+
def main() -> None:
    """Run every example, skipping all of them when credentials are absent."""
    # Guard clause: without an API key none of the examples can talk to StackOne.
    if not os.getenv("STACKONE_API_KEY"):
        print("Set STACKONE_API_KEY to run these examples.")
        return

    example_gemini()
    example_langchain()
121182
122183
# Script entry point. NOTE(review): the original guard body was lost to
# page-scrape residue ("0 commit comments"); restored the conventional
# dispatch to main() — confirm against the upstream file.
if __name__ == "__main__":
    main()