Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
180 changes: 180 additions & 0 deletions examples/hooks/combination_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
"""
Example demonstrating how to combine hooks in Instructor.

This example shows three different ways to combine hooks:
1. Using the + operator to create a new combined hooks instance
2. Using the += operator to add hooks to an existing instance
3. Using the Hooks.combine() class method to combine multiple instances
"""

import instructor
import openai
import pydantic
from instructor.core.hooks import Hooks


class User(pydantic.BaseModel):
    """A simple user model extracted from free-form text by the LLM."""

    name: str  # the person's name as mentioned in the prompt
    age: int  # the person's age in years


def create_logging_hooks() -> Hooks:
    """Build a ``Hooks`` instance whose handlers emit log-style messages.

    Registers handlers for the request kwargs, successful responses, and
    both completion and parse errors.
    """
    logging_hooks = Hooks()

    def on_kwargs(**kwargs):
        model = kwargs.get("model", "unknown")
        print(f"🔍 [LOGGING] Request: model={model}")

    def on_response(response):
        _ = response  # Acknowledge we received the response
        print("✅ [LOGGING] Response received successfully")

    def on_error(error):
        print(f"❌ [LOGGING] Error: {type(error).__name__}: {str(error)}")

    # Same error handler serves both error events.
    for event, handler in (
        ("completion:kwargs", on_kwargs),
        ("completion:response", on_response),
        ("completion:error", on_error),
        ("parse:error", on_error),
    ):
        logging_hooks.on(event, handler)

    return logging_hooks


def create_metrics_hooks() -> Hooks:
    """Build a ``Hooks`` instance that accumulates simple call metrics.

    Tracks request, response, and error counts plus total token usage in a
    dict shared by all handlers via closure.
    """
    metrics_hooks = Hooks()

    # Counters live for as long as the returned hooks instance is used.
    counters = {"requests": 0, "responses": 0, "errors": 0, "tokens": 0}

    def on_request(*_args, **_kwargs):
        counters["requests"] += 1
        print(f"📊 [METRICS] Total requests: {counters['requests']}")

    def on_response(response):
        counters["responses"] += 1
        usage = getattr(response, "usage", None)
        if usage:
            tokens = usage.total_tokens
            counters["tokens"] += tokens
            print(f"📊 [METRICS] Tokens used: {tokens}, Total: {counters['tokens']}")
        print(f"📊 [METRICS] Total responses: {counters['responses']}")

    def on_error(_error):
        counters["errors"] += 1
        print(f"📊 [METRICS] Total errors: {counters['errors']}")

    metrics_hooks.on("completion:kwargs", on_request)
    metrics_hooks.on("completion:response", on_response)
    metrics_hooks.on("completion:error", on_error)
    metrics_hooks.on("parse:error", on_error)

    return metrics_hooks


def create_debug_hooks() -> Hooks:
    """Build a ``Hooks`` instance that prints low-level debugging details."""
    debug_hooks = Hooks()

    def show_request(*_args, **kwargs):
        message_count = len(kwargs.get("messages", []))
        print(f"🐛 [DEBUG] Messages: {message_count} messages")

    def show_response(response):
        response_id = getattr(response, "id", "unknown")
        print(f"🐛 [DEBUG] Response ID: {response_id}")

    def show_error(error):
        print(f"🐛 [DEBUG] Error details: {error}")

    # Register the same error handler for completion and parse errors.
    for event, handler in (
        ("completion:kwargs", show_request),
        ("completion:response", show_response),
        ("completion:error", show_error),
        ("parse:error", show_error),
    ):
        debug_hooks.on(event, handler)

    return debug_hooks


def main():
    """Demonstrate different ways to combine hooks.

    Runs four live demos against the OpenAI API (requires a valid
    OPENAI_API_KEY in the environment): combining with the ``+`` operator,
    the ``+=`` operator, the ``Hooks.combine()`` class method, and copying
    an instance with ``Hooks.copy()``. Each API call is wrapped in a broad
    try/except so a network or auth failure doesn't abort the remaining
    examples.
    """

    # Create individual hook instances
    logging_hooks = create_logging_hooks()
    metrics_hooks = create_metrics_hooks()
    debug_hooks = create_debug_hooks()

    print("=== Example 1: Using + operator ===")

    # Combine using + operator (creates new instance; operands are unchanged)
    combined_hooks = logging_hooks + metrics_hooks

    client = instructor.from_openai(openai.OpenAI(), hooks=combined_hooks)

    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Alice is 25 years old"}],
            response_model=User,
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 2: Using += operator ===")

    # Start with a fresh logging hooks instance and add metrics hooks in place
    combined_hooks_2 = create_logging_hooks()
    combined_hooks_2 += metrics_hooks

    client2 = instructor.from_openai(openai.OpenAI(), hooks=combined_hooks_2)

    try:
        user = client2.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Bob is 30 years old"}],
            response_model=User,
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 3: Using Hooks.combine() class method ===")

    # Combine all three at once using the class method
    all_combined = Hooks.combine(logging_hooks, metrics_hooks, debug_hooks)

    client3 = instructor.from_openai(openai.OpenAI(), hooks=all_combined)

    try:
        user = client3.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Charlie is 35 years old"}],
            response_model=User,
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 4: Creating a copy and modifying ===")

    # Create a copy and add additional hooks; the original logging_hooks
    # instance is left untouched by modifications to the copy
    copied_hooks = logging_hooks.copy()
    copied_hooks.on(
        "completion:kwargs",
        lambda *_args, **_kwargs: print("🔄 [COPY] Additional hook"),
    )

    client4 = instructor.from_openai(openai.OpenAI(), hooks=copied_hooks)

    try:
        user = client4.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Diana is 28 years old"}],
            response_model=User,
        )
        print(f"Result: {user}")
    except Exception as e:
        print(f"Exception: {e}")


if __name__ == "__main__":
    main()
180 changes: 180 additions & 0 deletions examples/hooks/per_call_hooks_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
"""
Example demonstrating per-call hooks in Instructor.

This example shows how to use hooks at the individual call level,
combining them with client-level hooks for flexible event handling.
"""

import instructor
import openai
import pydantic
from instructor.core.hooks import Hooks


class User(pydantic.BaseModel):
    """A simple user model extracted from free-form text by the LLM."""

    name: str  # the person's name as mentioned in the prompt
    age: int  # the person's age in years


def create_client_hooks() -> Hooks:
    """Build hooks meant to be installed on the client itself.

    These handlers fire for every call made through the client, regardless
    of any per-call hooks supplied later.
    """
    client_hooks = Hooks()

    def on_any_request(*_args, **kwargs):
        model = kwargs.get("model", "unknown")
        print(f"🌐 [CLIENT] All requests go through here: model={model}")

    def on_any_response(response):
        _ = response  # Acknowledge we received the response
        print("🌐 [CLIENT] All responses logged here")

    def on_any_error(error):
        print(f"🌐 [CLIENT] All errors logged: {type(error).__name__}")

    # One error handler covers both the completion and parse error events.
    for event, handler in (
        ("completion:kwargs", on_any_request),
        ("completion:response", on_any_response),
        ("completion:error", on_any_error),
        ("parse:error", on_any_error),
    ):
        client_hooks.on(event, handler)

    return client_hooks


def create_debug_hooks() -> Hooks:
    """Build hooks that dump request/response details for a single call."""
    debug_hooks = Hooks()

    def dump_request(*_args, **kwargs):
        print("🐛 [DEBUG] Debugging this specific call:")
        print(f"🐛 [DEBUG] - Message count: {len(kwargs.get('messages', []))}")
        print(f"🐛 [DEBUG] - Temperature: {kwargs.get('temperature', 'default')}")

    def dump_response(response):
        print("🐛 [DEBUG] Response details:")
        print(f"🐛 [DEBUG] - Model used: {getattr(response, 'model', 'unknown')}")
        usage = getattr(response, "usage", None)
        if usage:
            print(f"🐛 [DEBUG] - Tokens: {usage.total_tokens}")
        _ = response  # Acknowledge we received the response

    debug_hooks.on("completion:kwargs", dump_request)
    debug_hooks.on("completion:response", dump_response)

    return debug_hooks


def create_performance_hooks() -> Hooks:
    """Create hooks for performance monitoring specific calls.

    A start timestamp is captured on ``completion:kwargs`` and consumed on
    ``completion:response``. The timestamp is cleared after each
    measurement so a response arriving without a matching request (or a
    duplicate response event) can never report a stale, inflated duration.
    """
    hooks = Hooks()

    import time

    start_time = None  # None means "no measurement in progress"

    def perf_start(**_kwargs):
        nonlocal start_time
        # perf_counter is monotonic, so the measured duration is immune to
        # wall-clock adjustments (time.time() is not).
        start_time = time.perf_counter()
        print("⏱️ [PERF] Starting performance measurement")

    def perf_end(_response):
        nonlocal start_time
        # Explicit None check: a timestamp of 0.0 would be falsy but valid.
        if start_time is not None:
            duration = time.perf_counter() - start_time
            start_time = None  # consume the measurement
            print(f"⏱️ [PERF] Call completed in {duration:.2f}s")

    hooks.on("completion:kwargs", perf_start)
    hooks.on("completion:response", perf_end)

    return hooks


def main():
    """Demonstrate per-call hooks combined with client hooks.

    Runs six live demos against the OpenAI API (requires a valid
    OPENAI_API_KEY in the environment): calls with only client-level hooks,
    calls that add per-call debug and/or performance hooks via the
    ``hooks=`` keyword, and a streaming ``create_partial`` call. Each API
    call is wrapped in a broad try/except so a network or auth failure
    doesn't abort the remaining examples.
    """

    # Create client with global hooks
    client_hooks = create_client_hooks()
    client = instructor.from_openai(openai.OpenAI(), hooks=client_hooks)

    print("=== Example 1: Regular call (only client hooks) ===")
    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Alice is 25 years old"}],
            response_model=User,
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 2: Call with debug hooks ===")
    debug_hooks = create_debug_hooks()
    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Bob is 30 years old"}],
            response_model=User,
            temperature=0.7,
            hooks=debug_hooks,  # Add debug hooks for this specific call
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 3: Call with performance monitoring ===")
    perf_hooks = create_performance_hooks()
    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Charlie is 35 years old"}],
            response_model=User,
            hooks=perf_hooks,  # Add performance hooks for this specific call
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 4: Call with combined debug + performance hooks ===")
    # Combine multiple per-call hooks with the + operator (new instance)
    combined_hooks = debug_hooks + perf_hooks
    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Diana is 28 years old"}],
            response_model=User,
            temperature=0.3,
            hooks=combined_hooks,  # Multiple per-call hooks combined
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 5: Another regular call (client hooks still work) ===")
    # Per-call hooks from earlier examples do not persist on the client
    try:
        user = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Eve is 22 years old"}],
            response_model=User,
        )
        print(f"Result: {user}\n")
    except Exception as e:
        print(f"Exception: {e}\n")

    print("=== Example 6: Per-call hooks with create_partial ===")
    try:
        print("Using create_partial with debug hooks:")
        # Streaming variant: yields progressively-populated User objects
        for partial_user in client.chat.completions.create_partial(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Extract: Frank is 40 years old"}],
            response_model=User,
            hooks=debug_hooks,  # Per-call hooks work with create_partial too
        ):
            print(f"Partial result: {partial_user}")
        print()
    except Exception as e:
        print(f"Exception: {e}\n")


if __name__ == "__main__":
    main()
Loading
Loading