mistralai: update tool calling (langchain-ai#19451)
```python
from langchain.agents import tool
from langchain_mistralai import ChatMistralAI


llm = ChatMistralAI(model="mistral-large-latest", temperature=0)

@tool
def get_word_length(word: str) -> int:
    """Returns the length of a word."""
    return len(word)


tools = [get_word_length]
llm_with_tools = llm.bind_tools(tools)

llm_with_tools.invoke("how long is the word chrysanthemum")
```
currently raises
```
AttributeError: 'dict' object has no attribute 'model_dump'
```
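
The error points at the old conversion code, which called `.model_dump()` on each tool call even though the parsed API response evidently supplies them as plain dicts (see the diff below). A minimal sketch of the failure mode; the payload shape here is a hypothetical stand-in, not the actual Mistral response:
```python
# Hypothetical tool-call payload as a plain dict, standing in for what the
# parsed API response provides.
tool_calls = [
    {
        "id": "abc123",
        "function": {
            "name": "get_word_length",
            "arguments": '{"word": "chrysanthemum"}',
        },
    }
]

try:
    # The old conversion code effectively did this, which only works on
    # Pydantic objects, not dicts.
    [tc.model_dump() for tc in tool_calls]
except AttributeError as err:
    print(err)  # 'dict' object has no attribute 'model_dump'
```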

Same with `.with_structured_output`
```python
from langchain_mistralai import ChatMistralAI
from langchain_core.pydantic_v1 import BaseModel

class AnswerWithJustification(BaseModel):
    """An answer to the user question along with justification for the answer."""
    answer: str
    justification: str

llm = ChatMistralAI(model="mistral-large-latest", temperature=0)
structured_llm = llm.with_structured_output(AnswerWithJustification)

structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers")
```

This change appears to fix both cases.
ccurme authored and chrispy-snps committed Mar 30, 2024
1 parent 9cc6352 commit ca948a5
Showing 2 changed files with 22 additions and 1 deletion.
libs/partners/mistralai/langchain_mistralai/chat_models.py (1 addition, 1 deletion):
```diff
@@ -83,7 +83,7 @@ def _convert_mistral_chat_message_to_message(
 
     additional_kwargs: Dict = {}
     if tool_calls := _message.get("tool_calls"):
-        additional_kwargs["tool_calls"] = [tc.model_dump() for tc in tool_calls]
+        additional_kwargs["tool_calls"] = tool_calls
     return AIMessage(content=content, additional_kwargs=additional_kwargs)
```
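
With the dicts stored as-is, the tool call can be read straight off the returned `AIMessage`. A rough sketch, assuming the OpenAI-style `function`/`name`/`arguments` keys used for Mistral tool calls:
```python
msg = llm_with_tools.invoke("how long is the word chrysanthemum")

# additional_kwargs["tool_calls"] now holds the raw dicts from the API.
for tc in msg.additional_kwargs.get("tool_calls", []):
    # Exact keys come from the Mistral API response; this assumes the
    # OpenAI-style layout.
    print(tc["function"]["name"], tc["function"]["arguments"])
```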
The second changed file (21 additions) extends the tests:
```diff
@@ -61,3 +61,24 @@ def test_invoke() -> None:
 
     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
+
+
+def test_structred_output() -> None:
+    llm = ChatMistralAI(model="mistral-large-latest", temperature=0)
+    schema = {
+        "title": "AnswerWithJustification",
+        "description": (
+            "An answer to the user question along with justification for the answer."
+        ),
+        "type": "object",
+        "properties": {
+            "answer": {"title": "Answer", "type": "string"},
+            "justification": {"title": "Justification", "type": "string"},
+        },
+        "required": ["answer", "justification"],
+    }
+    structured_llm = llm.with_structured_output(schema)
+    result = structured_llm.invoke(
+        "What weighs more a pound of bricks or a pound of feathers"
+    )
+    assert isinstance(result, dict)
```

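As in the commit message above, the same call can be made with the Pydantic class instead of a dict schema; in that case `with_structured_output` returns a model instance rather than a dict. A sketch reusing `AnswerWithJustification` from the example above:
```python
structured_llm = llm.with_structured_output(AnswerWithJustification)
result = structured_llm.invoke(
    "What weighs more a pound of bricks or a pound of feathers"
)
# With a Pydantic schema, the parsed result is an AnswerWithJustification
# instance instead of a plain dict.
assert isinstance(result, AnswerWithJustification)
print(result.answer, result.justification)
```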