Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
110 changes: 75 additions & 35 deletions docs/sdk/main.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -423,17 +423,7 @@ def get_run_context(self) -> RunContext:
"""
if (run := current_run_span.get()) is None:
raise RuntimeError("get_run_context() must be called within a run")

# Capture OpenTelemetry trace context
trace_context: dict[str, str] = {}
propagate.inject(trace_context)

return {
"run_id": run.run_id,
"run_name": run.name,
"project": run.project,
"trace_context": trace_context,
}
return run.get_context()
```


Expand Down Expand Up @@ -501,30 +491,8 @@ def initialize(self) -> None:
f"Failed to connect to the Dreadnode server: {e}",
) from e

headers = {"User-Agent": f"dreadnode/{VERSION}", "X-Api-Key": self.token}
span_processors.append(
BatchSpanProcessor(
RemovePendingSpansExporter( # This will tell Logfire to emit pending spans to us as well
OTLPSpanExporter(
endpoint=urljoin(self.server, "/api/otel/traces"),
headers=headers,
compression=Compression.Gzip,
),
),
),
)
# TODO(nick): Metrics
# https://linear.app/dreadnode/issue/ENG-1310/sdk-add-metrics-exports
# metric_readers.append(
# PeriodicExportingMetricReader(
# OTLPMetricExporter(
# endpoint=urljoin(self.server, "/v1/metrics"),
# headers=headers,
# compression=Compression.Gzip,
# # preferred_temporality
# )
# )
# )
span_processors.append(RoutingSpanProcessor(self.server, self.token))

if self._api is not None:
api = self._api
self._credential_manager = CredentialManager(
Expand Down Expand Up @@ -1750,6 +1718,7 @@ run(
project: str | None = None,
autolog: bool = True,
name_prefix: str | None = None,
api_key: str | None = None,
attributes: AnyDict | None = None,
) -> RunSpan
```
Expand Down Expand Up @@ -1799,6 +1768,16 @@ with dreadnode.run("my_run"):
`True`
)
–Automatically log task inputs, outputs, and execution metrics if otherwise unspecified.
* **`name_prefix`**
(`str | None`, default:
`None`
)
–A prefix to use when generating a random name for the run.
* **`api_key`**
(`str | None`, default:
`None`
)
–An optional API key to use for tracing this run instead of the configured one.
* **`attributes`**
(`AnyDict | None`, default:
`None`
Expand All @@ -1823,6 +1802,7 @@ def run(
project: str | None = None,
autolog: bool = True,
name_prefix: str | None = None,
api_key: str | None = None,
attributes: AnyDict | None = None,
) -> RunSpan:
"""
Expand All @@ -1849,6 +1829,8 @@ def run(
the project passed to `configure()` will be used, or the
run will be associated with a default project.
autolog: Automatically log task inputs, outputs, and execution metrics if otherwise unspecified.
name_prefix: A prefix to use when generating a random name for the run.
api_key: An optional API key to use for tracing this run instead of the configured one.
attributes: Additional attributes to attach to the run span.

Returns:
Expand All @@ -1870,6 +1852,7 @@ def run(
tags=tags,
credential_manager=self._credential_manager, # type: ignore[arg-type]
autolog=autolog,
export_auth_token=api_key,
)
```

Expand Down Expand Up @@ -2649,6 +2632,63 @@ def task_span(
```


</Accordion>

### using\_api\_key

```python
using_api_key(api_key: str) -> t.Iterator[None]
```

Context manager to temporarily override the API key used for exporting spans.

This is useful for multi-user scenarios where you want to log data
on behalf of another user.

Example

```python
with dreadnode.using_api_key("other_user_api_key"):
Copy link

Copilot AI Oct 3, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The example code uses dreadnode.with_api_key() but the actual function name is using_api_key(). This inconsistency will cause the example to fail.

Copilot uses AI. Check for mistakes.
with dreadnode.run("my_run"):
# do some work here
pass
```

**Parameters:**

* **`api_key`**
(`str`)
–The API key to use for exporting spans within the context.

<Accordion title="Source code in dreadnode/main.py" icon="code">
```python
@contextlib.contextmanager
def using_api_key(self, api_key: str) -> t.Iterator[None]:
"""
Context manager to temporarily override the API key used for exporting spans.

This is useful for multi-user scenarios where you want to log data
on behalf of another user.

Example:
~~~
with dreadnode.using_api_key("other_user_api_key"):
Copy link

Copilot AI Oct 3, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The documentation example uses with_api_key() but the actual function name is using_api_key(). This inconsistency will cause the example to fail.

Copilot uses AI. Check for mistakes.
with dreadnode.run("my_run"):
# do some work here
pass
~~~

Args:
api_key: The API key to use for exporting spans within the context.
"""
token_token = current_export_auth_token_context.set(api_key)
try:
yield
finally:
current_export_auth_token_context.reset(token_token)
```


</Accordion>

DreadnodeConfigWarning
Expand Down
1 change: 1 addition & 0 deletions dreadnode/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
task = DEFAULT_INSTANCE.task
task_span = DEFAULT_INSTANCE.task_span
run = DEFAULT_INSTANCE.run
using_api_key = DEFAULT_INSTANCE.using_api_key
task_and_run = DEFAULT_INSTANCE.task_and_run
scorer = DEFAULT_INSTANCE.scorer
score = DEFAULT_INSTANCE.score
Expand Down
75 changes: 35 additions & 40 deletions dreadnode/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,14 @@
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from urllib.parse import urljoin, urlparse, urlunparse
from urllib.parse import urlparse, urlunparse

import coolname # type: ignore [import-untyped]
import logfire
import rich
from fsspec.implementations.local import ( # type: ignore [import-untyped]
LocalFileSystem,
)
from logfire._internal.exporters.remove_pending import RemovePendingSpansExporter
from opentelemetry import propagate
from opentelemetry.exporter.otlp.proto.http import Compression
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from dreadnode.api.client import ApiClient
Expand Down Expand Up @@ -56,11 +52,13 @@
FileMetricReader,
FileSpanExporter,
)
from dreadnode.tracing.processors import RoutingSpanProcessor
from dreadnode.tracing.span import (
RunContext,
RunSpan,
Span,
TaskSpan,
current_export_auth_token_context,
current_run_span,
current_task_span,
)
Expand Down Expand Up @@ -337,30 +335,8 @@ def initialize(self) -> None:
f"Failed to connect to the Dreadnode server: {e}",
) from e

headers = {"User-Agent": f"dreadnode/{VERSION}", "X-Api-Key": self.token}
span_processors.append(
BatchSpanProcessor(
RemovePendingSpansExporter( # This will tell Logfire to emit pending spans to us as well
OTLPSpanExporter(
endpoint=urljoin(self.server, "/api/otel/traces"),
headers=headers,
compression=Compression.Gzip,
),
),
),
)
# TODO(nick): Metrics
# https://linear.app/dreadnode/issue/ENG-1310/sdk-add-metrics-exports
# metric_readers.append(
# PeriodicExportingMetricReader(
# OTLPMetricExporter(
# endpoint=urljoin(self.server, "/v1/metrics"),
# headers=headers,
# compression=Compression.Gzip,
# # preferred_temporality
# )
# )
# )
span_processors.append(RoutingSpanProcessor(self.server, self.token))

if self._api is not None:
api = self._api
self._credential_manager = CredentialManager(
Expand Down Expand Up @@ -777,6 +753,7 @@ def run(
project: str | None = None,
autolog: bool = True,
name_prefix: str | None = None,
api_key: str | None = None,
attributes: AnyDict | None = None,
) -> RunSpan:
"""
Expand All @@ -803,6 +780,8 @@ def run(
the project passed to `configure()` will be used, or the
run will be associated with a default project.
autolog: Automatically log task inputs, outputs, and execution metrics if otherwise unspecified.
name_prefix: A prefix to use when generating a random name for the run.
api_key: An optional API key to use for tracing this run instead of the configured one.
attributes: Additional attributes to attach to the run span.

Returns:
Expand All @@ -824,8 +803,34 @@ def run(
tags=tags,
credential_manager=self._credential_manager, # type: ignore[arg-type]
autolog=autolog,
export_auth_token=api_key,
)

@contextlib.contextmanager
def using_api_key(self, api_key: str) -> t.Iterator[None]:
    """
    Context manager to temporarily override the API key used for exporting spans.

    This is useful for multi-user scenarios where you want to log data
    on behalf of another user.

    Example:
        ```
        with dreadnode.using_api_key("other_user_api_key"):
            with dreadnode.run("my_run"):
                # do some work here
                pass
        ```

    Args:
        api_key: The API key to use for exporting spans within the context.

    Yields:
        None. Spans exported within the `with` body use the override key.
    """
    # Stash the reset token so the previous value (including "no override")
    # is restored exactly, even if the body raises.
    reset_token = current_export_auth_token_context.set(api_key)
    try:
        yield
    finally:
        current_export_auth_token_context.reset(reset_token)

@contextlib.contextmanager
def task_and_run(
self,
Expand Down Expand Up @@ -877,17 +882,7 @@ def get_run_context(self) -> RunContext:
"""
if (run := current_run_span.get()) is None:
raise RuntimeError("get_run_context() must be called within a run")

# Capture OpenTelemetry trace context
trace_context: dict[str, str] = {}
propagate.inject(trace_context)

return {
"run_id": run.run_id,
"run_name": run.name,
"project": run.project,
"trace_context": trace_context,
}
return run.get_context()

def continue_run(self, run_context: RunContext) -> RunSpan:
"""
Expand Down
3 changes: 3 additions & 0 deletions dreadnode/tracing/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,6 @@
EVENT_ATTRIBUTE_ORIGIN_SPAN_ID = f"{SPAN_NAMESPACE}.origin.span_id"

METRIC_ATTRIBUTE_SOURCE_HASH = f"{SPAN_NAMESPACE}.origin.hash"

# Internal use only - used to support multi-user export flows
SPAN_RESOURCE_ATTRIBUTE_TOKEN = "_dreadnode_token" # noqa: S105 # nosec
Loading
Loading