From 17479a23608a36a7d67f22f3d9f25d1046f24d3f Mon Sep 17 00:00:00 2001 From: better629 Date: Wed, 10 Jan 2024 11:26:23 +0800 Subject: [PATCH] fix system_prompt param not supported by some LLMs (issue 725) --- metagpt/provider/base_llm.py | 4 +++- tests/mock/mock_llm.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/metagpt/provider/base_llm.py b/metagpt/provider/base_llm.py index 52dd96b1af..d23d162c86 100644 --- a/metagpt/provider/base_llm.py +++ b/metagpt/provider/base_llm.py @@ -43,7 +43,9 @@ async def aask( if system_msgs: message = self._system_msgs(system_msgs) else: - message = [self._default_system_msg()] if self.use_system_prompt else [] + message = [self._default_system_msg()] + if not self.use_system_prompt: + message = [] if format_msgs: message.extend(format_msgs) message.append(self._user_msg(msg)) diff --git a/tests/mock/mock_llm.py b/tests/mock/mock_llm.py index 6e7a1cdd5a..35e0e9ee9f 100644 --- a/tests/mock/mock_llm.py +++ b/tests/mock/mock_llm.py @@ -41,7 +41,9 @@ async def original_aask( if system_msgs: message = self._system_msgs(system_msgs) else: - message = [self._default_system_msg()] if self.use_system_prompt else [] + message = [self._default_system_msg()] + if not self.use_system_prompt: + message = [] if format_msgs: message.extend(format_msgs) message.append(self._user_msg(msg))