From ec96426c00dbdf71664df83660ad2bd619a3f65f Mon Sep 17 00:00:00 2001 From: Kevin Hu Date: Mon, 24 Feb 2025 14:04:25 +0800 Subject: [PATCH] Tongyi adapts deepseek. (#5285) ### What problem does this PR solve? ### Type of change - [x] New Feature (non-breaking change which adds functionality) --- rag/llm/chat_model.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py index 08a4f3e5..af98e264 100644 --- a/rag/llm/chat_model.py +++ b/rag/llm/chat_model.py @@ -258,8 +258,13 @@ class QWenChat(Base): import dashscope dashscope.api_key = key self.model_name = model_name + if model_name.lower().find("deepseek") >= 0: + super().__init__(key, model_name) def chat(self, system, history, gen_conf): + if self.model_name.lower().find("deepseek") >= 0: + return super().chat(system, history, gen_conf) + stream_flag = str(os.environ.get('QWEN_CHAT_BY_STREAM', 'true')).lower() == 'true' if not stream_flag: from http import HTTPStatus @@ -327,6 +332,9 @@ class QWenChat(Base): yield tk_count def chat_streamly(self, system, history, gen_conf): + if self.model_name.lower().find("deepseek") >= 0: + return super().chat_streamly(system, history, gen_conf) + return self._chat_streamly(system, history, gen_conf)