4 changes: 4 additions & 0 deletions libs/partners/groq/langchain_groq/chat_models.py
@@ -841,6 +841,10 @@ def bind_tools(
`langchain.runnable.Runnable` constructor.

"""
strict_parameter = kwargs.pop("strict", None)
if strict_parameter is not None:
    warnings.warn("strict parameter is not supported by Groq", stacklevel=1)

formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
if tool_choice is not None and tool_choice:
if tool_choice == "any":
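For reference, a minimal sketch of how the new Groq behavior surfaces to callers: `strict` is popped from kwargs before binding, so the call succeeds and only a warning is emitted. The tool and model name below are illustrative, and a configured GROQ_API_KEY is assumed so the client constructs.

import warnings

from langchain_core.tools import tool
from langchain_groq import ChatGroq


@tool
def get_weather(city: str) -> str:
    """Return a canned weather report for a city."""
    return f"Sunny in {city}"


llm = ChatGroq(model="llama-3.1-8b-instant")  # illustrative model name

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # `strict` is dropped from kwargs by bind_tools; a warning is raised instead
    llm.bind_tools([get_weather], strict=True)

print(caught[0].message)  # strict parameter is not supported by Groq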
10 changes: 10 additions & 0 deletions libs/partners/ollama/langchain_ollama/chat_models.py
@@ -44,6 +44,7 @@
import ast
import json
import logging
import warnings
from collections.abc import AsyncIterator, Callable, Iterator, Mapping, Sequence
from operator import itemgetter
from typing import Any, Literal, cast
@@ -1242,6 +1243,15 @@ def bind_tools(
kwargs: Any additional parameters are passed directly to
`self.bind(**kwargs)`.
"""
strict_parameter = kwargs.pop("strict", None)
response_format_parameter = kwargs.pop("response_format", None)
if strict_parameter is not None:
    warnings.warn("strict parameter is not supported by Ollama", stacklevel=1)
if response_format_parameter is not None:
    warnings.warn(
        "response_format parameter is not supported by Ollama", stacklevel=1
    )

formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
return super().bind(tools=formatted_tools, **kwargs)

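And a matching sketch for the Ollama side: both unsupported kwargs are popped before super().bind(), producing one warning per parameter. The tool and model name are illustrative; only langchain-ollama needs to be installed, since binding tools does not contact a running Ollama server.

import warnings

from langchain_core.tools import tool
from langchain_ollama import ChatOllama


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


llm = ChatOllama(model="llama3.1")  # illustrative model name

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Both kwargs are removed by bind_tools, each reported via its own warning
    llm.bind_tools([add], strict=True, response_format="json")

for w in caught:
    print(w.message)
# strict parameter is not supported by Ollama
# response_format parameter is not supported by Ollama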