
Commit d97b7c3

fix ugly rendering of 'omit' in UI due to incorrect strip (#2057)
## Problem

The LangSmith UI was showing ugly serialized objects like:

    "seed": "<openai.Omit object at 0x105d91940>",
    "stop": "<openai.Omit object at 0x105d91940>",

![before fix](https://github.com/user-attachments/assets/68ae5b0b-fd5e-4686-b3f6-64a11a7b840e)

This happened because OpenAI SDK v1.106+ uses both `NotGiven` and `Omit` sentinel types to represent optional parameters that weren't provided, but the code only checked for `NotGiven`.

After the fix:

![after fix](https://github.com/user-attachments/assets/26cb7201-af47-4293-af11-d8a16e3c7cd6)
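A minimal sketch of the failure mode, assuming openai >= 1.106 where both sentinels are importable from `openai._types` (the kwargs dict below is illustrative, not the wrapper's real call path):

```python
# Illustrative sketch: how Omit slips past a NotGiven-only filter.
# Assumes openai >= 1.106, where both sentinel classes exist.
from openai._types import NotGiven, Omit

# Hypothetical kwargs as the tracing wrapper might see them.
kwargs = {
    "model": "gpt-4o-mini",
    "temperature": 0,
    "seed": Omit(),      # newer sentinel -- leaked into traces
    "stop": NotGiven(),  # older sentinel -- already stripped
}

# Old behavior: only NotGiven is filtered, so Omit() survives and later
# serializes as "<openai.Omit object at 0x...>" in the LangSmith UI.
old = {k: v for k, v in kwargs.items() if not isinstance(v, NotGiven)}

# Fixed behavior: filter against both sentinel types.
new = {k: v for k, v in kwargs.items() if not isinstance(v, (NotGiven, Omit))}

print(sorted(old))  # ['model', 'seed', 'temperature']
print(sorted(new))  # ['model', 'temperature']
```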
1 parent d0026aa commit d97b7c3

3 files changed (+16, -11 lines)
python/langsmith/wrappers/_openai.py

Lines changed: 12 additions & 7 deletions
@@ -35,24 +35,29 @@
 
 
 @functools.lru_cache
-def _get_not_given() -> Optional[type]:
+def _get_omit_types() -> tuple[type, ...]:
+    """Get NotGiven/Omit sentinel types used by OpenAI SDK."""
+    types = []
     try:
-        from openai._types import NotGiven
+        from openai._types import NotGiven, Omit
 
-        return NotGiven
+        types.append(NotGiven)
+        types.append(Omit)
     except ImportError:
-        return None
+        pass
+
+    return tuple(types)
 
 
 def _strip_not_given(d: dict) -> dict:
     try:
-        not_given = _get_not_given()
-        if not_given is None:
+        omit_types = _get_omit_types()
+        if not omit_types:
             return d
         return {
             k: v
             for k, v in d.items()
-            if not (isinstance(v, not_given) or (k.startswith("extra_") and v is None))
+            if not (isinstance(v, omit_types) or (k.startswith("extra_") and v is None))
         }
     except Exception as e:
         logger.error(f"Error stripping NotGiven: {e}")

python/tests/integration_tests/wrappers/test_azure_openai.py

Lines changed: 2 additions & 2 deletions
@@ -34,14 +34,14 @@ def test_chat_sync_api(stream: bool):
     )
     messages = [{"role": "user", "content": "Say 'foo'"}]
     original = original_client.chat.completions.create(
-        messages=messages,  # noqa: [arg-type]
+        messages=messages,  # noqa: arg-type
         stream=stream,
         temperature=0,
         seed=42,
         model="gpt-4o-mini",
     )
     patched = patched_client.chat.completions.create(
-        messages=messages,  # noqa: [arg-type]
+        messages=messages,  # noqa: arg-type
         stream=stream,
         temperature=0,
         seed=42,
python/tests/integration_tests/wrappers/test_openai.py

Lines changed: 2 additions & 2 deletions
@@ -23,14 +23,14 @@ def test_chat_sync_api(stream: bool):
     patched_client = wrap_openai(openai.Client(), tracing_extra={"client": client})
     messages = [{"role": "user", "content": "Say 'foo'"}]
     original = original_client.chat.completions.create(
-        messages=messages,  # noqa: [arg-type]
+        messages=messages,  # noqa: arg-type
         stream=stream,
         temperature=0,
         seed=42,
         model="gpt-3.5-turbo",
     )
     patched = patched_client.chat.completions.create(
-        messages=messages,  # noqa: [arg-type]
+        messages=messages,  # noqa: arg-type
         stream=stream,
         temperature=0,
         seed=42,