diff --git a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py index 48bc37700e4be..61c993705851f 100644 --- a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py +++ b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py @@ -346,7 +346,9 @@ class Joke(BaseModel): client: Any #: :meta private: - qianfan_ak: SecretStr = Field(alias="api_key") + # It could be empty due to the use of Console API + # And they're not listed here + qianfan_ak: Optional[SecretStr] = Field(default=None, alias="api_key") """Qianfan API KEY""" qianfan_sk: Optional[SecretStr] = Field(default=None, alias="secret_key") """Qianfan SECRET KEY""" @@ -365,13 +367,13 @@ class Joke(BaseModel): In the case of other model, passing these params will not affect the result. """ - model: str = "ERNIE-Lite-8K" + model: Optional[str] = Field(default=None) """Model name. you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu preset models are mapping to an endpoint. `model` will be ignored if `endpoint` is set. - Default is ERNIE-Lite-8K. 
+ Default is set by `qianfan` SDK, not here """ endpoint: Optional[str] = None @@ -386,16 +388,12 @@ class Config: def validate_environment(cls, values: Dict) -> Dict: values["qianfan_ak"] = convert_to_secret_str( get_from_dict_or_env( - values, - ["qianfan_ak", "api_key"], - "QIANFAN_AK", + values, ["qianfan_ak", "api_key"], "QIANFAN_AK", default="" ) ) values["qianfan_sk"] = convert_to_secret_str( get_from_dict_or_env( - values, - ["qianfan_sk", "secret_key"], - "QIANFAN_SK", + values, ["qianfan_sk", "secret_key"], "QIANFAN_SK", default="" ) ) diff --git a/libs/community/langchain_community/embeddings/baidu_qianfan_endpoint.py b/libs/community/langchain_community/embeddings/baidu_qianfan_endpoint.py index 44bfde0bf949a..6aa9df92364a5 100644 --- a/libs/community/langchain_community/embeddings/baidu_qianfan_endpoint.py +++ b/libs/community/langchain_community/embeddings/baidu_qianfan_endpoint.py @@ -55,7 +55,7 @@ class QianfanEmbeddingsEndpoint(BaseModel, Embeddings): chunk_size: int = 16 """Chunk size when multiple texts are input""" - model: str = "Embedding-V1" + model: Optional[str] = Field(default=None) """Model name you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu diff --git a/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py b/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py index 34b5e00ea79d1..303d192f368d4 100644 --- a/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py +++ b/libs/community/langchain_community/llms/baidu_qianfan_endpoint.py @@ -55,12 +55,14 @@ class QianfanLLMEndpoint(LLM): streaming: Optional[bool] = False """Whether to stream the results or not.""" - model: str = "ERNIE-Bot-turbo" + model: Optional[str] = Field(default=None) """Model name. you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu preset models are mapping to an endpoint. 
`model` will be ignored if `endpoint` is set + + Default is set by `qianfan` SDK, not here """ endpoint: Optional[str] = None diff --git a/libs/community/tests/integration_tests/chat_models/test_qianfan_endpoint.py b/libs/community/tests/integration_tests/chat_models/test_qianfan_endpoint.py index 91a7fb9d23b40..e34bee531b6bc 100644 --- a/libs/community/tests/integration_tests/chat_models/test_qianfan_endpoint.py +++ b/libs/community/tests/integration_tests/chat_models/test_qianfan_endpoint.py @@ -306,7 +306,10 @@ def test_functions_call() -> None: def test_rate_limit() -> None: chat = QianfanChatEndpoint(model="ERNIE-Bot", init_kwargs={"query_per_second": 2}) # type: ignore[call-arg] - assert chat.client._client._rate_limiter._sync_limiter._query_per_second == 2 + assert ( + chat.client._client._rate_limiter._internal_qps_rate_limiter._sync_limiter._query_per_second + == 1.8 + ) responses = chat.batch( [ [HumanMessage(content="Hello")],