Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 11 additions & 2 deletions lib/req_llm/provider/defaults.ex
Original file line number Diff line number Diff line change
Expand Up @@ -819,13 +819,18 @@ defmodule ReqLLM.Provider.Defaults do
# Encodes a list of content parts into OpenAI-format message content.
# Each part is encoded individually (unsupported parts become nil), then the
# result is normalized: nils rejected, a lone text block flattened to a bare
# string, and an empty list collapsed to "" for strict OpenAI-compatible APIs.
defp encode_openai_content(content) when is_list(content) do
  content
  |> Enum.map(&encode_openai_content_part/1)
  |> normalize_encoded_content()
end

defp maybe_flatten_single_text(content) do
# Normalize encoded content parts: reject nils, flatten single text blocks,
# and collapse empty arrays to "" (vLLM/strict OpenAI rejects "content": []).
defp normalize_encoded_content(content) do
filtered = Enum.reject(content, &is_nil/1)

case filtered do
[] ->
""

[%{type: "text", text: text} = block] ->
if map_size(block) == 2, do: text, else: [block]

Expand Down Expand Up @@ -899,6 +904,10 @@ defmodule ReqLLM.Provider.Defaults do
}
end

# Reasoning model artifacts (e.g. chain-of-thought) — strip from OpenAI encoding
# since the format has no standard representation for thinking content.
defp encode_openai_content_part(%ReqLLM.Message.ContentPart{type: :thinking}), do: nil

# Catch-all: any other unrecognized part type is also dropped. The nil is
# rejected later during content normalization, so it never reaches the wire.
defp encode_openai_content_part(_), do: nil

@passthrough_metadata_keys [:cache_control, "cache_control"]
Expand Down
54 changes: 53 additions & 1 deletion test/req_llm/provider/defaults_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ defmodule ReqLLM.Provider.DefaultsTest do
messages: [
%{
role: "assistant",
content: [],
content: "",
tool_calls: [
%{
id: "call_123",
Expand Down Expand Up @@ -296,6 +296,58 @@ defmodule ReqLLM.Provider.DefaultsTest do

refute Map.has_key?(encoded_message, :reasoning_details)
end

test "strips :thinking content parts from encoding" do
  msg = %Message{
    role: :assistant,
    content: [
      %ContentPart{type: :thinking, text: "Let me reason about this..."},
      %ContentPart{type: :text, text: "Here is the answer."}
    ]
  }

  result = Defaults.encode_context_to_openai_format(%Context{messages: [msg]}, "gpt-4")
  [encoded] = result.messages

  # The :thinking part is dropped; the remaining lone text part is
  # flattened to a plain string rather than a one-element list.
  assert encoded.content == "Here is the answer."
end

test "collapses to empty string when all content parts are :thinking" do
  tool_call = %{
    id: "call_abc",
    type: "function",
    function: %{name: "get_weather", arguments: ~s({"city":"NYC"})}
  }

  msg = %Message{
    role: :assistant,
    content: [%ContentPart{type: :thinking, text: "Internal chain-of-thought"}],
    tool_calls: [tool_call]
  }

  result = Defaults.encode_context_to_openai_format(%Context{messages: [msg]}, "gpt-4")
  [encoded] = result.messages

  # Every part was :thinking, so all are filtered and [] collapses to "".
  assert encoded.content == ""
  # Stripping content must not disturb the message's tool calls.
  assert length(encoded.tool_calls) == 1
end

test "collapses empty content list to empty string" do
  ctx = %Context{messages: [%Message{role: :assistant, content: []}]}

  result = Defaults.encode_context_to_openai_format(ctx, "gpt-4")

  # An empty part list encodes as "" rather than the [] some APIs reject.
  [encoded] = result.messages
  assert encoded.content == ""
end
end

describe "decode_response_body_openai_format/2" do
Expand Down
Loading