diff --git a/libs/community/langchain_community/chat_models/anyscale.py b/libs/community/langchain_community/chat_models/anyscale.py
index 9eae0a3bc7577..e6f70b359f1eb 100644
--- a/libs/community/langchain_community/chat_models/anyscale.py
+++ b/libs/community/langchain_community/chat_models/anyscale.py
@@ -220,7 +220,9 @@ def get_num_tokens_from_messages(
         Official documentation: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb
         """
         if tools is not None:
-            warnings.warn("Counting tokens in tool schemas is not yet supported.")
+            warnings.warn(
+                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
+            )
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
diff --git a/libs/community/langchain_community/chat_models/everlyai.py b/libs/community/langchain_community/chat_models/everlyai.py
index ea739f904a0ea..52dd4f6c5991d 100644
--- a/libs/community/langchain_community/chat_models/everlyai.py
+++ b/libs/community/langchain_community/chat_models/everlyai.py
@@ -162,7 +162,9 @@ def get_num_tokens_from_messages(
         Official documentation: https://github.com/openai/openai-cookbook/blob/
         main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
         if tools is not None:
-            warnings.warn("Counting tokens in tool schemas is not yet supported.")
+            warnings.warn(
+                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
+            )
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
diff --git a/libs/community/langchain_community/chat_models/openai.py b/libs/community/langchain_community/chat_models/openai.py
index 475e509b553c4..4d1666b626869 100644
--- a/libs/community/langchain_community/chat_models/openai.py
+++ b/libs/community/langchain_community/chat_models/openai.py
@@ -657,7 +657,9 @@ def get_num_tokens_from_messages(
         Official documentation: https://github.com/openai/openai-cookbook/blob/
         main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
         if tools is not None:
-            warnings.warn("Counting tokens in tool schemas is not yet supported.")
+            warnings.warn(
+                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
+            )
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py
index 43374c9e8f8ea..e9956b9f6b554 100644
--- a/libs/core/langchain_core/language_models/base.py
+++ b/libs/core/langchain_core/language_models/base.py
@@ -12,6 +12,7 @@
     TypeVar,
     Union,
 )
+import warnings
 
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 from typing_extensions import TypeAlias, TypedDict, override
@@ -384,6 +385,10 @@ def get_num_tokens_from_messages(
         Returns:
             The sum of the number of tokens across the messages.
         """
+        if tools is not None:
+            warnings.warn(
+                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
+            )
         return sum([self.get_num_tokens(get_buffer_string([m])) for m in messages])
 
     @classmethod
diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index f7f483ecff8ba..7d69b27fb1bc8 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -911,7 +911,9 @@ def get_num_tokens_from_messages(
         """
         # TODO: Count bound tools as part of input.
         if tools is not None:
-            warnings.warn("Counting tokens in tool schemas is not yet supported. Ignoring tools.")
+            warnings.warn(
+                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
+            )
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
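
Not part of the patch: a minimal sketch of how the new behavior surfaces once this change is installed, assuming langchain-openai is available. The model name, dummy API key, and tool schema below are illustrative only; token counting happens locally via tiktoken, so no API call is made, and any non-None tools value triggers the same warning while being ignored in the count.

# Illustrative only; assumes langchain-openai with this change applied.
import warnings

from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

# Dummy key is fine: get_num_tokens_from_messages counts tokens locally.
llm = ChatOpenAI(model="gpt-4o", api_key="sk-placeholder")
messages = [HumanMessage(content="What is the weather in Paris?")]

# Hypothetical tool schema for illustration; the tools argument is ignored
# for counting purposes and only triggers the warning.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Look up the weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }
]

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    n_tokens = llm.get_num_tokens_from_messages(messages, tools=tools)

print(n_tokens)           # token count for the messages only
print(caught[0].message)  # "Counting tokens in tool schemas is not yet supported. Ignoring tools."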