@@ -34,7 +34,7 @@
 logger = logging.getLogger(__name__)


-class ErrorEvent(TypedDict):
+class GroupEvent(TypedDict):
     id: str
     title: str
     message: str
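The rename widens the type's role: it now describes both error and feedback events flowing into the summary. A sketch of the full shape, inferred from the construction sites later in this diff (the hunk above is truncated, so the real class may declare more keys):

```python
from typing import TypedDict


class GroupEvent(TypedDict):
    category: str  # "error" or "feedback", per the construction sites below
    id: str
    title: str
    message: str
    timestamp: float  # milliseconds since the epoch
```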
@@ -97,23 +97,24 @@ def get(self, request: Request, project: Project, replay_id: str) -> Response:
             error_events = []
         else:
             error_events = fetch_error_details(project_id=project.id, error_ids=error_ids)

         return self.paginate(
             request=request,
             paginator_cls=GenericOffsetPaginator,
             data_fn=functools.partial(fetch_segments_metadata, project.id, replay_id),
-            on_results=functools.partial(analyze_recording_segments, error_events, replay_id),
+            on_results=functools.partial(
+                analyze_recording_segments, error_events, replay_id, project.id
+            ),
         )


-def fetch_error_details(project_id: int, error_ids: list[str]) -> list[ErrorEvent]:
-    """Fetch error details given error IDs and return a list of ErrorEvent objects."""
+def fetch_error_details(project_id: int, error_ids: list[str]) -> list[GroupEvent]:
+    """Fetch error details given error IDs and return a list of GroupEvent objects."""
     try:
         node_ids = [Event.generate_node_id(project_id, event_id=id) for id in error_ids]
         events = nodestore.backend.get_multi(node_ids)

         return [
-            ErrorEvent(
+            GroupEvent(
                 category="error",
                 id=event_id,
                 title=data.get("title", ""),
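The extra `project.id` is threaded through `functools.partial`, so the paginator still calls `on_results` with only the segment list; the pre-bound arguments arrive first. A minimal sketch of that call order (simplified stand-in function, not the real signature):

```python
import functools


def analyze_recording_segments(error_events, replay_id, project_id, segments):
    # Pre-bound arguments come first; the paginator supplies `segments` last.
    return {"errors": len(error_events), "segments": len(segments)}


on_results = functools.partial(analyze_recording_segments, [], "replay-1", 42)
print(on_results(["seg-0", "seg-1"]))  # {'errors': 0, 'segments': 2}
```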
@@ -128,7 +129,35 @@ def fetch_error_details(project_id: int, error_ids: list[str]) -> list[ErrorEvent]:
         return []


-def generate_error_log_message(error: ErrorEvent) -> str:
+def fetch_feedback_details(feedback_id: str | None, project_id):
+    """
+    Fetch user feedback associated with a specific feedback event ID.
+    """
+    if feedback_id is None:
+        return None
+
+    try:
+        node_id = Event.generate_node_id(project_id, event_id=feedback_id)
+        event = nodestore.backend.get(node_id)
+
+        return (
+            GroupEvent(
+                category="feedback",
+                id=feedback_id,
+                title="User Feedback",
+                timestamp=event.get("timestamp", 0.0) * 1000,  # feedback timestamp is in seconds
+                message=event.get("contexts", {}).get("feedback", {}).get("message", ""),
+            )
+            if event
+            else None
+        )
+
+    except Exception as e:
+        sentry_sdk.capture_exception(e)
+        return None
+
+
+def generate_error_log_message(error: GroupEvent) -> str:
     title = error["title"]
     message = error["message"]
     timestamp = error["timestamp"]
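Note the unit conversion in the new function: nodestore feedback timestamps are in seconds, while replay events use milliseconds, hence the `* 1000`. A hypothetical payload shaped after the keys the function reads (real nodestore events carry many more fields; the message text is invented):

```python
event = {
    "timestamp": 1718000000.5,  # seconds since the epoch
    "contexts": {"feedback": {"message": "The checkout button does nothing."}},
}

# fetch_feedback_details would map this to (assuming feedback_id="abc123"):
# GroupEvent(
#     category="feedback",
#     id="abc123",
#     title="User Feedback",
#     timestamp=1718000000500.0,  # normalized to ms so it sorts with replay events
#     message="The checkout button does nothing.",
# )
```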
@@ -137,15 +166,19 @@ def generate_error_log_message(error: ErrorEvent) -> str:


 def get_request_data(
-    iterator: Iterator[tuple[int, memoryview]], error_events: list[ErrorEvent]
+    iterator: Iterator[tuple[int, memoryview]],
+    error_events: list[GroupEvent],
+    project_id: int,
 ) -> list[str]:
     # Sort error events by timestamp
     error_events.sort(key=lambda x: x["timestamp"])
-    return list(gen_request_data(iterator, error_events))
+    return list(gen_request_data(iterator, error_events, project_id))


 def gen_request_data(
-    iterator: Iterator[tuple[int, memoryview]], error_events: list[ErrorEvent]
+    iterator: Iterator[tuple[int, memoryview]],
+    error_events: list[GroupEvent],
+    project_id,
 ) -> Generator[str]:
     """Generate log messages from events and errors in chronological order."""
     error_idx = 0
@@ -163,7 +196,7 @@ def gen_request_data(
                 error_idx += 1

             # Yield the current event's log message
-            if message := as_log_message(event):
+            if message := as_log_message(event, project_id):
                 yield message

     # Yield any remaining error messages
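`gen_request_data` is a two-pointer merge: error events (pre-sorted by timestamp in `get_request_data`) are interleaved with replay events in chronological order, and any errors falling after the last event are flushed at the end. A self-contained sketch of the same pattern, using simplified stand-in shapes rather than the real event dicts:

```python
def merged(events: list[dict], errors: list[dict]):
    # Both inputs are sorted by "timestamp"; yield messages chronologically.
    i = 0
    for event in events:
        while i < len(errors) and errors[i]["timestamp"] < event["timestamp"]:
            yield errors[i]["msg"]
            i += 1
        yield event["msg"]
    while i < len(errors):  # flush errors that occur after the last event
        yield errors[i]["msg"]
        i += 1


errors = [{"timestamp": 100.0, "msg": "error A"}, {"timestamp": 300.0, "msg": "error B"}]
events = [{"timestamp": 200.0, "msg": "click"}, {"timestamp": 400.0, "msg": "navigate"}]
assert list(merged(events, errors)) == ["error A", "click", "error B", "navigate"]
```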
@@ -175,12 +208,15 @@ def analyze_recording_segments(

 @sentry_sdk.trace
 def analyze_recording_segments(
-    error_events: list[ErrorEvent],
+    error_events: list[GroupEvent],
     replay_id: str,
+    project_id: int,
     segments: list[RecordingSegmentStorageMeta],
 ) -> dict[str, Any]:
     # Combine breadcrumbs and error details
-    request_data = json.dumps({"logs": get_request_data(iter_segment_data(segments), error_events)})
+    request_data = json.dumps(
+        {"logs": get_request_data(iter_segment_data(segments), error_events, project_id)}
+    )

     # Log when the input string is too large. This is potential for timeout.
     if len(request_data) > 100000:
@@ -194,7 +230,7 @@ def analyze_recording_segments(
     return json.loads(make_seer_request(request_data).decode("utf-8"))


-def as_log_message(event: dict[str, Any]) -> str | None:
+def as_log_message(event: dict[str, Any], project_id: int) -> str | None:
     """Return an event as a log message.

     Useful in AI contexts where the event's structure is an impediment to the AI's understanding
@@ -206,6 +242,14 @@ def as_log_message(event: dict[str, Any]) -> str | None:
     timestamp = event.get("timestamp", 0.0)

     match event_type:
+        case EventType.FEEDBACK:
+            feedback_id = event["data"]["payload"].get("data", {}).get("feedback_id", None)
+            feedback = fetch_feedback_details(feedback_id, project_id)
+            if feedback:
+                message = feedback["message"]
+                return f"User submitted feedback: '{message}' at {timestamp}"
+            else:
+                return None
         case EventType.CLICK:
             return f"User clicked on {event["data"]["payload"]["message"]} at {timestamp}"
         case EventType.DEAD_CLICK:
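For the new branch to fire, the replay breadcrumb must carry the feedback ID at a nested path; the actual feedback text lives in nodestore, not in the breadcrumb itself. A hypothetical event shaped after what the code reads (field values invented):

```python
feedback_breadcrumb = {
    "type": 5,
    "timestamp": 1718000000500,
    "data": {
        "tag": "breadcrumb",
        "payload": {
            "category": "sentry.feedback",
            "data": {"feedback_id": "56b08cf7852c42cbb95e4a6998c66ad6"},
        },
    },
}

# as_log_message(feedback_breadcrumb, project_id) would resolve the feedback via
# fetch_feedback_details and render something like:
# "User submitted feedback: 'The checkout button does nothing.' at 1718000000500"
```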
src/sentry/replays/usecases/ingest/event_parser.py (3 changes: 3 additions & 0 deletions)
@@ -76,6 +76,7 @@ class EventType(Enum):
     UNKNOWN = 13
     CANVAS = 14
     OPTIONS = 15
+    FEEDBACK = 16


 def which(event: dict[str, Any]) -> EventType:
@@ -135,6 +136,8 @@ def which(event: dict[str, Any]) -> EventType:
                 return EventType.HYDRATION_ERROR
             elif category == "replay.mutations":
                 return EventType.MUTATIONS
+            elif category == "sentry.feedback":
+                return EventType.FEEDBACK
             else:
                 return EventType.UNKNOWN
         elif event["data"]["tag"] == "performanceSpan":
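With this branch, `which` routes feedback breadcrumbs to the new enum member while anything unrecognized still falls through to `UNKNOWN`. A usage sketch, assuming the surrounding gates not shown in this hunk (the type-5/breadcrumb checks) behave as the visible code suggests:

```python
from sentry.replays.usecases.ingest.event_parser import EventType, which

# Minimal rrweb custom event carrying the new category; real events have more fields.
event = {
    "type": 5,
    "data": {"tag": "breadcrumb", "payload": {"category": "sentry.feedback"}},
}
assert which(event) == EventType.FEEDBACK
```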