Skip to content

Commit 9dcb437

Browse files
test(openai-agents): Replace mocks with library types for streamed responses
1 parent 4f16a9e commit 9dcb437

File tree

4 files changed

+210
-159
lines changed

4 files changed

+210
-159
lines changed

tests/conftest.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1004,6 +1004,15 @@ async def parse_stream():
10041004
return inner
10051005

10061006

1007+
@pytest.fixture()
1008+
def async_iterator():
1009+
async def inner(values):
1010+
for value in values:
1011+
yield value
1012+
1013+
return inner
1014+
1015+
10071016
class MockServerRequestHandler(BaseHTTPRequestHandler):
10081017
def do_GET(self): # noqa: N802
10091018
# Process an HTTP GET request and return a response.

tests/integrations/anthropic/test_anthropic.py

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -67,11 +67,6 @@ async def __call__(self, *args, **kwargs):
6767
)
6868

6969

70-
async def async_iterator(values):
71-
for value in values:
72-
yield value
73-
74-
7570
@pytest.mark.parametrize(
7671
"send_default_pii, include_prompts",
7772
[
@@ -324,7 +319,7 @@ def test_streaming_create_message(
324319
],
325320
)
326321
async def test_streaming_create_message_async(
327-
sentry_init, capture_events, send_default_pii, include_prompts
322+
sentry_init, capture_events, send_default_pii, include_prompts, async_iterator
328323
):
329324
client = AsyncAnthropic(api_key="z")
330325
returned_stream = AsyncStream(cast_to=None, response=None, client=client)
@@ -567,7 +562,7 @@ def test_streaming_create_message_with_input_json_delta(
567562
],
568563
)
569564
async def test_streaming_create_message_with_input_json_delta_async(
570-
sentry_init, capture_events, send_default_pii, include_prompts
565+
sentry_init, capture_events, send_default_pii, include_prompts, async_iterator
571566
):
572567
client = AsyncAnthropic(api_key="z")
573568
returned_stream = AsyncStream(cast_to=None, response=None, client=client)
@@ -1361,7 +1356,7 @@ def test_streaming_create_message_with_system_prompt(
13611356
],
13621357
)
13631358
async def test_streaming_create_message_with_system_prompt_async(
1364-
sentry_init, capture_events, send_default_pii, include_prompts
1359+
sentry_init, capture_events, send_default_pii, include_prompts, async_iterator
13651360
):
13661361
"""Test that system prompts are properly captured in streaming mode (async)."""
13671362
client = AsyncAnthropic(api_key="z")

tests/integrations/openai/test_openai.py

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -124,11 +124,6 @@ async def __call__(self, *args, **kwargs):
124124
)
125125

126126

127-
async def async_iterator(values):
128-
for value in values:
129-
yield value
130-
131-
132127
@pytest.mark.parametrize(
133128
"send_default_pii, include_prompts",
134129
[
@@ -689,7 +684,7 @@ def test_streaming_chat_completion(sentry_init, capture_events, messages, reques
689684
],
690685
)
691686
async def test_streaming_chat_completion_async_no_prompts(
692-
sentry_init, capture_events, send_default_pii, include_prompts
687+
sentry_init, capture_events, send_default_pii, include_prompts, async_iterator
693688
):
694689
sentry_init(
695690
integrations=[
@@ -829,7 +824,7 @@ async def test_streaming_chat_completion_async_no_prompts(
829824
],
830825
)
831826
async def test_streaming_chat_completion_async(
832-
sentry_init, capture_events, messages, request
827+
sentry_init, capture_events, messages, request, async_iterator
833828
):
834829
sentry_init(
835830
integrations=[
@@ -1463,7 +1458,9 @@ def test_span_origin_streaming_chat(sentry_init, capture_events):
14631458

14641459

14651460
@pytest.mark.asyncio
1466-
async def test_span_origin_streaming_chat_async(sentry_init, capture_events):
1461+
async def test_span_origin_streaming_chat_async(
1462+
sentry_init, capture_events, async_iterator
1463+
):
14671464
sentry_init(
14681465
integrations=[OpenAIIntegration()],
14691466
traces_sample_rate=1.0,
@@ -2420,7 +2417,7 @@ async def test_ai_client_span_responses_async_api(
24202417
)
24212418
@pytest.mark.skipif(SKIP_RESPONSES_TESTS, reason="Responses API not available")
24222419
async def test_ai_client_span_streaming_responses_async_api(
2423-
sentry_init, capture_events, instructions, input, request
2420+
sentry_init, capture_events, instructions, input, request, async_iterator
24242421
):
24252422
sentry_init(
24262423
integrations=[OpenAIIntegration(include_prompts=True)],
@@ -2799,7 +2796,7 @@ def test_streaming_responses_api(
27992796
)
28002797
@pytest.mark.skipif(SKIP_RESPONSES_TESTS, reason="Responses API not available")
28012798
async def test_streaming_responses_api_async(
2802-
sentry_init, capture_events, send_default_pii, include_prompts
2799+
sentry_init, capture_events, send_default_pii, include_prompts, async_iterator
28032800
):
28042801
sentry_init(
28052802
integrations=[
@@ -3037,7 +3034,9 @@ def test_streaming_chat_completion_ttft(sentry_init, capture_events):
30373034

30383035
# noinspection PyTypeChecker
30393036
@pytest.mark.asyncio
3040-
async def test_streaming_chat_completion_ttft_async(sentry_init, capture_events):
3037+
async def test_streaming_chat_completion_ttft_async(
3038+
sentry_init, capture_events, async_iterator
3039+
):
30413040
"""
30423041
Test that async streaming chat completions capture time-to-first-token (TTFT).
30433042
"""
@@ -3142,7 +3141,9 @@ def test_streaming_responses_api_ttft(sentry_init, capture_events):
31423141
# noinspection PyTypeChecker
31433142
@pytest.mark.asyncio
31443143
@pytest.mark.skipif(SKIP_RESPONSES_TESTS, reason="Responses API not available")
3145-
async def test_streaming_responses_api_ttft_async(sentry_init, capture_events):
3144+
async def test_streaming_responses_api_ttft_async(
3145+
sentry_init, capture_events, async_iterator
3146+
):
31463147
"""
31473148
Test that async streaming responses API captures time-to-first-token (TTFT).
31483149
"""

0 commit comments

Comments (0)