 import os
 import re
 import uuid
-from typing import Optional
 
 import pytest
 
 from metagpt.config import CONFIG, Config
 from metagpt.const import DEFAULT_WORKSPACE_ROOT, TEST_DATA_PATH
 from metagpt.llm import LLM
 from metagpt.logs import logger
-from metagpt.provider.openai_api import OpenAILLM
 from metagpt.utils.git_repository import GitRepository
+from tests.mock.mock_llm import MockLLM
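+# NOTE: MockLLM now lives in tests/mock/mock_llm.py; the inline class removed
+# below is superseded by that shared implementation.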
 
-
-class MockLLM(OpenAILLM):
-    rsp_cache: dict = {}
-
-    async def original_aask(
-        self,
-        msg: str,
-        system_msgs: Optional[list[str]] = None,
-        format_msgs: Optional[list[dict[str, str]]] = None,
-        timeout=3,
-        stream=True,
-    ):
-        """A copy of metagpt.provider.base_llm.BaseLLM.aask, we can't use super().aask because it will be mocked"""
-        if system_msgs:
-            message = self._system_msgs(system_msgs)
-        else:
-            message = [self._default_system_msg()] if self.use_system_prompt else []
-        if format_msgs:
-            message.extend(format_msgs)
-        message.append(self._user_msg(msg))
-        rsp = await self.acompletion_text(message, stream=stream, timeout=timeout)
-        return rsp
-
-    async def aask(
-        self,
-        msg: str,
-        system_msgs: Optional[list[str]] = None,
-        format_msgs: Optional[list[dict[str, str]]] = None,
-        timeout=3,
-        stream=True,
-    ) -> str:
-        if msg not in self.rsp_cache:
-            # Call the original unmocked method
-            rsp = await self.original_aask(msg, system_msgs, format_msgs, timeout, stream)
-            logger.info(f"Added '{rsp[:20]}' ... to response cache")
-            self.rsp_cache[msg] = rsp
-            return rsp
-        else:
-            logger.info("Use response cache")
-            return self.rsp_cache[msg]
+RSP_CACHE_NEW = {}  # global store for only the new, useful response-cache entries produced this run
+ALLOW_OPENAI_API_CALL = os.environ.get(
+    "ALLOW_OPENAI_API_CALL", True
+)  # NOTE: the default should change to False once the mock is complete
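+# NOTE: os.environ.get returns a string when the variable is set, so any
+# non-empty value (even "false") is truthy; only the unset default is a bool.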
 
 
 @pytest.fixture(scope="session")
@@ -76,16 +39,37 @@ def rsp_cache():
     else:
         rsp_cache_json = {}
     yield rsp_cache_json
-    with open(new_rsp_cache_file_path, "w") as f2:
+    with open(rsp_cache_file_path, "w") as f2:
         json.dump(rsp_cache_json, f2, indent=4, ensure_ascii=False)
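+    # Also persist just the entries added during this session to a separate
+    # file, so newly cached responses are easy to review.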
+    with open(new_rsp_cache_file_path, "w") as f2:
+        json.dump(RSP_CACHE_NEW, f2, indent=4, ensure_ascii=False)
 
 
-@pytest.fixture(scope="function")
-def llm_mock(rsp_cache, mocker):
-    llm = MockLLM()
+# Hook to capture the test result
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_makereport(item, call):
+    outcome = yield
+    rep = outcome.get_result()
+    if rep.when == "call":
+        item.test_outcome = rep
+
+
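+# The hook above stores the "call"-phase report on the test item, so this
+# fixture's teardown can check request.node.test_outcome.passed before caching.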
+@pytest.fixture(scope="function", autouse=True)
+def llm_mock(rsp_cache, mocker, request):
+    llm = MockLLM(allow_open_api_call=ALLOW_OPENAI_API_CALL)
     llm.rsp_cache = rsp_cache
     mocker.patch("metagpt.provider.base_llm.BaseLLM.aask", llm.aask)
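+    # Patch the batch entry point too, so aask_batch calls also go through the mock.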
+    mocker.patch("metagpt.provider.base_llm.BaseLLM.aask_batch", llm.aask_batch)
     yield mocker
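+    # Teardown: persist prompt/response candidates only when the test passed,
+    # so responses from failing runs never enter the cache.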
+    if hasattr(request.node, "test_outcome") and request.node.test_outcome.passed:
+        if llm.rsp_candidates:
+            for rsp_candidate in llm.rsp_candidates:
+                cand_key = list(rsp_candidate.keys())[0]
+                cand_value = list(rsp_candidate.values())[0]
+                if cand_key not in llm.rsp_cache:
+                    logger.info(f"Added '{cand_key[:100]} ... -> {cand_value[:20]} ...' to response cache")
+                llm.rsp_cache.update(rsp_candidate)
+                RSP_CACHE_NEW.update(rsp_candidate)
 
 
 class Context:
@@ -173,6 +157,13 @@ def init_config():
     Config()
 
 
+@pytest.fixture(scope="function")
+def new_filename(mocker):
+    # NOTE: Mock the new filename so llm aask prompts are reproducible; revisit after requirement segmentation is implemented
+    mocker.patch("metagpt.utils.file_repository.FileRepository.new_filename", lambda: "20240101")
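+    # A fixed name keeps prompts byte-identical across runs, so cache keys match.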
+    yield mocker
+
+
 @pytest.fixture
 def aiohttp_mocker(mocker):
     class MockAioResponse: