diff --git a/genai-function-calling/openai-agents/Dockerfile b/genai-function-calling/openai-agents/Dockerfile
index 71a7388..0b0f1dc 100644
--- a/genai-function-calling/openai-agents/Dockerfile
+++ b/genai-function-calling/openai-agents/Dockerfile
@@ -1,5 +1,4 @@
-# Use glibc-based image with pre-compiled wheels for psutil
-FROM python:3.13-slim
+FROM python:3.13-alpine
 
 RUN --mount=type=cache,target=/root/.cache/pip python -m pip install --upgrade pip
 
diff --git a/genai-function-calling/openai-agents/README.md b/genai-function-calling/openai-agents/README.md
index f8fd658..b315805 100644
--- a/genai-function-calling/openai-agents/README.md
+++ b/genai-function-calling/openai-agents/README.md
@@ -87,7 +87,7 @@ dotenv -f .env run -- pytest
 ## Notes
 
 The LLM should generate something like "The latest stable version of
-Elasticsearch is 9.1.5", unless it hallucinates. Just run it again, if you
+Elasticsearch is 9.3.1", unless it hallucinates. Just run it again, if you
 see something else.
 
 OpenAI Agents SDK's OpenTelemetry instrumentation is via
diff --git a/genai-function-calling/openai-agents/env.example b/genai-function-calling/openai-agents/env.example
index 4266f64..57ca70a 100644
--- a/genai-function-calling/openai-agents/env.example
+++ b/genai-function-calling/openai-agents/env.example
@@ -4,12 +4,12 @@ OPENAI_API_KEY=
 # Uncomment to use Ollama instead of OpenAI
 # OPENAI_BASE_URL=http://localhost:11434/v1
 # OPENAI_API_KEY=unused
-# CHAT_MODEL=qwen3:0.6b
+# CHAT_MODEL=qwen3:1.7b
 
 # Uncomment to use RamaLama instead of OpenAI
 # OPENAI_BASE_URL=http://localhost:8080/v1
 # OPENAI_API_KEY=unused
-# CHAT_MODEL=qwen3:0.6b
+# CHAT_MODEL=qwen3:1.7b
 
 # Uncomment and complete if you want to use Azure OpenAI Service
 ## "Azure OpenAI Endpoint" in https://oai.azure.com/resource/overview
diff --git a/genai-function-calling/openai-agents/main.py b/genai-function-calling/openai-agents/main.py
index 53e2671..ad4b4bd 100644
--- a/genai-function-calling/openai-agents/main.py
+++ b/genai-function-calling/openai-agents/main.py
@@ -22,7 +22,7 @@ async def get_latest_elasticsearch_version(major_version: int = 0) -> str:
     Args:
         major_version: Major version to filter by (e.g. 7, 8). Defaults to latest
     """
-    async with AsyncClient() as client:
+    async with AsyncClient(follow_redirects=True) as client:
         response = await client.get("https://ela.st/past-stack-releases")
         response.raise_for_status()
         releases = response.json()["releases"]
diff --git a/genai-function-calling/openai-agents/requirements.txt b/genai-function-calling/openai-agents/requirements.txt
index 3baae58..9d684d1 100644
--- a/genai-function-calling/openai-agents/requirements.txt
+++ b/genai-function-calling/openai-agents/requirements.txt
@@ -1,10 +1,10 @@
-openai-agents~=0.3.3
+openai-agents~=0.11.1
 httpx~=0.28.1
-mcp~=1.16.0
+mcp~=1.26.0
 
-elastic-opentelemetry~=1.9.0
+elastic-opentelemetry~=1.11.0
 elastic-opentelemetry-instrumentation-openai~=1.2.0
 
 # Use openai-agents and MCP instrumentation from OpenInference
-openinference-instrumentation-openai-agents~=1.3.0
-openinference-instrumentation-mcp~=1.3.1
+openinference-instrumentation-openai-agents~=1.4.0
+openinference-instrumentation-mcp~=1.3.3