From 91460467a6e41e5a7b97452c3913ca1b624ff5c4 Mon Sep 17 00:00:00 2001
From: verlocks <45618243+verlocks@users.noreply.github.com>
Date: Wed, 18 Oct 2023 10:21:21 +0800
Subject: [PATCH 1/3] Update tongyi.py to be compatible with DashScope API

Add the input parameter "prompt" and rename the credentials key "dashscope_api_key" to "api_key".

---
 libs/langchain/langchain/chat_models/tongyi.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/tongyi.py b/libs/langchain/langchain/chat_models/tongyi.py
index 42326b32df0223..49456ed10d69ba 100644
--- a/libs/langchain/langchain/chat_models/tongyi.py
+++ b/libs/langchain/langchain/chat_models/tongyi.py
@@ -318,10 +318,19 @@ def _generate(
             )
             return _generate_from_stream(stream_iter)
 
+        if not messages:
+            raise ValueError("No messages provided.")
+
         message_dicts, params = self._create_message_dicts(messages, stop)
+
+        if message_dicts[-1]['role'] != 'user':
+            raise ValueError("Last message should be user message.")
+
         params = {**params, **kwargs}
+        prompt = message_dicts[-1]['content']
+        message_dicts = message_dicts[:-1]
         response = self.completion_with_retry(
-            messages=message_dicts, run_manager=run_manager, **params
+            messages=message_dicts, prompt=prompt, run_manager=run_manager, **params
         )
         return self._create_chat_result(response)
 
@@ -374,7 +383,7 @@ def _create_message_dicts(
     def _client_params(self) -> Dict[str, Any]:
         """Get the parameters used for the openai client."""
         creds: Dict[str, Any] = {
-            "dashscope_api_key": self.dashscope_api_key,
+            "api_key": self.dashscope_api_key,
         }
         return {**self._default_params, **creds}
 

From 9c4c22ab8b72382bed5849b9d352e1b682265445 Mon Sep 17 00:00:00 2001
From: verlocks <45618243+verlocks@users.noreply.github.com>
Date: Thu, 19 Oct 2023 17:06:20 +0800
Subject: [PATCH 2/3] Roll back the prompt parameter change in tongyi.py

---
 libs/langchain/langchain/chat_models/tongyi.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/tongyi.py b/libs/langchain/langchain/chat_models/tongyi.py
index 49456ed10d69ba..a8b64d2662488e 100644
--- a/libs/langchain/langchain/chat_models/tongyi.py
+++ b/libs/langchain/langchain/chat_models/tongyi.py
@@ -327,10 +327,8 @@ def _generate(
             raise ValueError("Last message should be user message.")
 
         params = {**params, **kwargs}
-        prompt = message_dicts[-1]['content']
-        message_dicts = message_dicts[:-1]
         response = self.completion_with_retry(
-            messages=message_dicts, prompt=prompt, run_manager=run_manager, **params
+            messages=message_dicts, run_manager=run_manager, **params
         )
         return self._create_chat_result(response)
 

From a439410d7af3b344d964b0052a6d791f33457e72 Mon Sep 17 00:00:00 2001
From: verlocks <45618243+verlocks@users.noreply.github.com>
Date: Fri, 20 Oct 2023 13:47:16 +0800
Subject: [PATCH 3/3] Format tongyi.py

---
 libs/langchain/langchain/chat_models/tongyi.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/tongyi.py b/libs/langchain/langchain/chat_models/tongyi.py
index a8b64d2662488e..6d657483ccc945 100644
--- a/libs/langchain/langchain/chat_models/tongyi.py
+++ b/libs/langchain/langchain/chat_models/tongyi.py
@@ -320,12 +320,12 @@ def _generate(
 
         if not messages:
             raise ValueError("No messages provided.")
-        
+
         message_dicts, params = self._create_message_dicts(messages, stop)
 
-        if message_dicts[-1]['role'] != 'user':
+        if message_dicts[-1]["role"] != "user":
             raise ValueError("Last message should be user message.")
-        
+
         params = {**params, **kwargs}
         response = self.completion_with_retry(
             messages=message_dicts, run_manager=run_manager, **params
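
Note: for reference, a minimal, self-contained sketch of the logic these patches touch. The helper names below (split_prompt, build_client_params) are hypothetical and are not part of ChatTongyi; only the checks, the message-dict layout, and the "api_key" key mirror the diffs above.

    def split_prompt(message_dicts):
        """Validate chat messages and split off the last user turn as the prompt.

        PATCH 1/3 adds both the validation and the prompt extraction;
        PATCH 2/3 keeps the validation but rolls back the extraction.
        """
        if not message_dicts:
            raise ValueError("No messages provided.")
        if message_dicts[-1]["role"] != "user":
            raise ValueError("Last message should be user message.")
        # The last user message becomes the prompt; earlier messages stay as history.
        return message_dicts[:-1], message_dicts[-1]["content"]

    def build_client_params(default_params, dashscope_api_key):
        """Merge default params with credentials under the key name the DashScope
        client expects per this series ("api_key" rather than "dashscope_api_key").
        """
        creds = {"api_key": dashscope_api_key}
        return {**default_params, **creds}

    # Example:
    history, prompt = split_prompt(
        [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Hello"},
        ]
    )
    # history == [{"role": "system", ...}]; prompt == "Hello"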