openai[patch]: Fix langchain-openai unknown parameter error with gpt-4-turbo (#20271)

**Description:** 

I fixed an "unknown parameter" error in langchain-openai that occurs with gpt-4-turbo.

It seems that the behavior of the Chat Completions API changed implicitly with the latest gpt-4-turbo model: unlike previous models, it now appears to reject parameters that are not listed in the [API Reference](https://platform.openai.com/docs/api-reference/chat/create). I tracked down the resulting errors and fixed them.

**Issue:** #20264

**Dependencies:** none

**Twitter handle:** https://twitter.com/oshima_123
os1ma authored and hinthornw committed Apr 26, 2024
1 parent 056a9db commit 60c7e26
Showing 2 changed files with 21 additions and 5 deletions.
11 changes: 10 additions & 1 deletion libs/partners/openai/langchain_openai/chat_models/base.py
@@ -177,6 +177,12 @@ def _convert_message_to_dict(message: BaseMessage) -> dict:
             # If tool calls only, content is None not empty string
             if message_dict["content"] == "":
                 message_dict["content"] = None
+
+            tool_call_supported_props = {"id", "type", "function"}
+            message_dict["tool_calls"] = [
+                {k: v for k, v in tool_call.items() if k in tool_call_supported_props}
+                for tool_call in message_dict["tool_calls"]
+            ]
     elif isinstance(message, SystemMessage):
         message_dict["role"] = "system"
     elif isinstance(message, FunctionMessage):
@@ -808,7 +814,10 @@ def bind_tools(
                         "function": {"name": tool_choice},
                     }
             elif isinstance(tool_choice, bool):
-                tool_choice = formatted_tools[0]
+                tool_choice = {
+                    "type": "function",
+                    "function": {"name": formatted_tools[0]["function"]["name"]},
+                }
             elif isinstance(tool_choice, dict):
                 if (
                     formatted_tools[0]["function"]["name"]
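
For context, a rough usage sketch of the `tool_choice=True` path touched above; the `GenerateUsername` schema is modeled on the integration test below, and the comments describe the request shape this change produces, not the library's full internals.

```python
# Sketch: binding a single tool with tool_choice=True now sends the object
# form documented by the API instead of the whole formatted tool definition.
from langchain_core.pydantic_v1 import BaseModel
from langchain_openai import ChatOpenAI


class GenerateUsername(BaseModel):
    """Generate a username from a person's name and hair color."""

    name: str
    hair_color: str


llm = ChatOpenAI(model="gpt-4-turbo", temperature=0)
llm_with_tool = llm.bind_tools(tools=[GenerateUsername], tool_choice=True)
# The bound request now carries:
#   tool_choice={"type": "function", "function": {"name": "GenerateUsername"}}
```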
15 changes: 11 additions & 4 deletions libs/partners/openai/tests/integration_tests/chat_models/test_base.py
@@ -479,7 +479,7 @@ class GenerateUsername(BaseModel):
 
 
 def test_tool_use() -> None:
-    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
+    llm = ChatOpenAI(model="gpt-4-turbo", temperature=0)
     llm_with_tool = llm.bind_tools(tools=[GenerateUsername], tool_choice=True)
     msgs: List = [HumanMessage("Sally has green hair, what would her username be?")]
     ai_msg = llm_with_tool.invoke(msgs)
@@ -490,6 +490,12 @@ def test_tool_use() -> None:
     tool_call = ai_msg.tool_calls[0]
     assert "args" in tool_call
 
+    tool_msg = ToolMessage(
+        "sally_green_hair", tool_call_id=ai_msg.additional_kwargs["tool_calls"][0]["id"]
+    )
+    msgs.extend([ai_msg, tool_msg])
+    llm_with_tool.invoke(msgs)
+
     # Test streaming
     ai_messages = llm_with_tool.stream(msgs)
     first = True
@@ -505,10 +511,11 @@ def test_tool_use() -> None:
     tool_call_chunk = gathered.tool_call_chunks[0]
     assert "args" in tool_call_chunk
 
-    tool_msg = ToolMessage(
-        "sally_green_hair", tool_call_id=ai_msg.additional_kwargs["tool_calls"][0]["id"]
+    streaming_tool_msg = ToolMessage(
+        "sally_green_hair",
+        tool_call_id=gathered.additional_kwargs["tool_calls"][0]["id"],
     )
-    msgs.extend([ai_msg, tool_msg])
+    msgs.extend([gathered, streaming_tool_msg])
     llm_with_tool.invoke(msgs)
 
 
