-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsetup.sh
More file actions
executable file
·227 lines (200 loc) · 7.67 KB
/
setup.sh
File metadata and controls
executable file
·227 lines (200 loc) · 7.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
#!/usr/bin/env bash
# =============================================================================
# One-command setup: MLflow + Claude Code on Databricks
# =============================================================================
# After running this, just use Claude Code normally in this directory.
# Every session gets traced to your Databricks workspace.
#
# Required environment variables:
# DATABRICKS_HOST — e.g. https://my-workspace.cloud.databricks.com
# DELTA_TABLE — UC table for trace archival (e.g. catalog.schema.table)
#
# Optional environment variables:
# DATABRICKS_PROFILE — Databricks CLI profile (default: derived from host)
# DB_USER — Databricks user email (auto-detected if not set)
#
# Usage:
# ./setup.sh # auto-generated experiment name
# ./setup.sh my-experiment # → /Users/<you>/my-experiment
# ./setup.sh /Users/user@co/my-exp # full path, used as-is
# =============================================================================
set -euo pipefail

# Resolve the directory this script lives in, regardless of caller's CWD.
DEMO_DIR="$(cd "$(dirname "$0")" && pwd)"

# ---- Validate required env vars ----
# Diagnostics go to stderr so stdout stays clean for piping/capture.
if [ -z "${DATABRICKS_HOST:-}" ]; then
  {
    echo "Error: DATABRICKS_HOST is not set."
    echo ""
    echo " export DATABRICKS_HOST=https://your-workspace.cloud.databricks.com"
    echo ""
  } >&2
  exit 1
fi
if [ -z "${DELTA_TABLE:-}" ]; then
  {
    echo "Error: DELTA_TABLE is not set."
    echo ""
    echo " export DELTA_TABLE=my_catalog.default.claude_code_traces"
    echo ""
  } >&2
  exit 1
fi
# Default the CLI profile to empty; capture the optional experiment
# name/path given as the first positional argument.
DATABRICKS_PROFILE="${DATABRICKS_PROFILE:-}"
EXPERIMENT_ARG="${1:-}"

# Opening banner: show which workspace this run will configure.
cat <<BANNER

╔═══════════════════════════════════════════════════════════════╗
║ MLflow + Claude Code Setup ║
║ Workspace: ${DATABRICKS_HOST}
╚═══════════════════════════════════════════════════════════════╝

BANNER
# ---- Step 1: Python venv ----
# Create the venv and install deps on first run; otherwise just activate.
if [ ! -d "${DEMO_DIR}/.venv" ]; then
  echo "[1/5] Creating Python venv (Python 3.11) and installing dependencies..."
  # Fail early with a clear message instead of a cryptic "command not found".
  if ! command -v uv >/dev/null 2>&1; then
    echo "Error: 'uv' is required but was not found on PATH (see https://docs.astral.sh/uv/)." >&2
    exit 1
  fi
  uv venv "${DEMO_DIR}/.venv" --python 3.11 --quiet
  # shellcheck disable=SC1091 — activate script is created at runtime
  source "${DEMO_DIR}/.venv/bin/activate"
  uv pip install "mlflow[mcp]>=3.5.1" "databricks-agents" --quiet
  echo " Done."
else
  echo "[1/5] Python venv found."
  # shellcheck disable=SC1091 — activate script is created at runtime
  source "${DEMO_DIR}/.venv/bin/activate"
fi
echo ""
# ---- Step 2: Databricks auth ----
# Probe the CLI's auth state for this host; kick off an interactive login
# only when no valid credential is reported.
echo "[2/5] Checking Databricks authentication..."
export DATABRICKS_HOST
if ! databricks auth describe --host "${DATABRICKS_HOST}" 2>&1 | grep -qi 'authenticated\|token'; then
  echo " Not authenticated. Logging in..."
  databricks auth login --host "${DATABRICKS_HOST}"
else
  echo " Authenticated to ${DATABRICKS_HOST}"
fi
echo ""
# ---- Step 3: Resolve experiment name ----
echo "[3/5] Resolving experiment name..."
# Auto-detect DB_USER if not set.
if [ -z "${DB_USER:-}" ]; then
  # Build the optional --profile flag as an array so a profile name
  # containing spaces survives word-splitting intact (a plain string
  # expanded unquoted would split it into multiple arguments).
  profile_args=()
  if [ -n "${DATABRICKS_PROFILE}" ]; then
    profile_args=(--profile "${DATABRICKS_PROFILE}")
  fi
  # Extract the primary email from the current-user JSON; fall back to
  # empty so the explicit check below produces a friendly error message.
  DB_USER=$(databricks current-user me "${profile_args[@]}" 2>/dev/null \
    | python3 -c "import sys,json; print(json.load(sys.stdin)['emails'][0]['value'])" 2>/dev/null \
    || echo "")
fi
if [ -z "${DB_USER}" ]; then
  {
    echo "Error: Could not auto-detect Databricks user. Set DB_USER env var."
    echo ""
    echo " export [email protected]"
    echo ""
  } >&2
  exit 1
fi
# Build the full experiment path from the optional positional argument,
# or generate a random one under the user's workspace folder.
if [ -n "${EXPERIMENT_ARG}" ]; then
  case "${EXPERIMENT_ARG}" in
    /*) EXPERIMENT_NAME="${EXPERIMENT_ARG}" ;;  # absolute path: use as-is
    *) EXPERIMENT_NAME="/Users/${DB_USER}/${EXPERIMENT_ARG}" ;;
  esac
  echo " Using: ${EXPERIMENT_NAME}"
else
  RANDOM_ID=$(python3 -c "import secrets; print(secrets.token_hex(4))")
  EXPERIMENT_NAME="/Users/${DB_USER}/claude-code-traces-${RANDOM_ID}"
  echo " Generated: ${EXPERIMENT_NAME}"
fi
# Create the experiment if it doesn't exist and capture its ID.
# The experiment name is passed via the environment (quoted heredoc
# delimiter) rather than interpolated into the Python source, so quotes
# or backslashes in the name cannot break — or inject into — the code.
# DATABRICKS_HOST was exported in step 2 and is inherited by python3.
export EXPERIMENT_NAME
EXPERIMENT_ID=$(python3 <<'PYEOF'
import mlflow, os
mlflow.set_tracking_uri("databricks")
experiment_name = os.environ["EXPERIMENT_NAME"]
if exp := mlflow.get_experiment_by_name(experiment_name):
    print(exp.experiment_id)
else:
    print(mlflow.create_experiment(name=experiment_name))
PYEOF
)
echo " Experiment ID: ${EXPERIMENT_ID}"
echo ""
# ---- Step 4: Enable Delta Sync ----
echo "[4/5] Enabling Delta Sync to ${DELTA_TABLE}..."
# Table name and experiment ID are passed via the environment (quoted
# heredoc delimiter) so values are never interpolated into Python source.
# DATABRICKS_HOST was exported in step 2 and is inherited by python3.
export DELTA_TABLE EXPERIMENT_ID
python3 <<'PYEOF_DELTA'
import mlflow, os
mlflow.set_tracking_uri("databricks")
from mlflow.tracing.archival import enable_databricks_trace_archival
delta_table = os.environ["DELTA_TABLE"]
try:
    enable_databricks_trace_archival(
        delta_table_fullname=delta_table,
        experiment_id=os.environ["EXPERIMENT_ID"],
    )
    print(f" Delta Sync enabled: {delta_table}")
except Exception as e:
    # "already enabled" is expected on re-runs; anything else is surfaced
    # as a warning instead of aborting the whole setup.
    if "already" in str(e).lower():
        print(" Delta Sync already enabled")
    else:
        print(f" Delta Sync warning: {e}")
PYEOF_DELTA
echo ""
# ---- Step 5: Enable tracing + configure settings ----
echo "[5/5] Enabling Claude Code tracing..."
# Install the Claude Code tracing hooks into this directory, pointing at
# the Databricks tracking URI and the resolved experiment.
mlflow autolog claude "${DEMO_DIR}" -u "databricks" -n "${EXPERIMENT_NAME}"
# Patch settings.json: point the Stop hook at the venv python and register
# the MLflow MCP server. Paths and host are passed via the environment
# (quoted heredoc delimiter) so they are never interpolated into Python
# source — robust to quotes/backslashes in DEMO_DIR or the host URL.
export DEMO_DIR DATABRICKS_HOST
python3 <<'PYEOF3'
import json, os

demo_dir = os.environ["DEMO_DIR"]
settings_path = os.path.join(demo_dir, ".claude", "settings.json")
with open(settings_path) as f:
    s = json.load(f)

# Fix hook to use venv python (mlflow autolog writes bare "python").
venv_python = os.path.join(demo_dir, ".venv", "bin", "python")
for hook_group in s.get("hooks", {}).get("Stop", []):
    for hook in hook_group.get("hooks", []):
        cmd = hook.get("command", "")
        if hook.get("type") == "command" and "python -c" in cmd:
            hook["command"] = cmd.replace("python -c", f"{venv_python} -c", 1)

# Ensure the MLflow MCP server is configured.
s.setdefault("mcpServers", {})["mlflow-mcp"] = {
    "command": "uv",
    "args": ["run", "--with", "mlflow[mcp]>=3.5.1", "mlflow", "mcp", "run"],
    "env": {
        "MLFLOW_TRACKING_URI": "databricks",
        "DATABRICKS_HOST": os.environ["DATABRICKS_HOST"],
    },
}

with open(settings_path, "w") as f:
    json.dump(s, f, indent=2)
PYEOF3
echo ""
# ---- Verify ----
# Show the autolog status so the user can confirm hooks are installed.
echo "--- Tracing Status ---"
mlflow autolog claude --status
echo ""
# ---- Done! ----
# Closing summary: where the traces land and what to run next.
cat <<DONE
╔═══════════════════════════════════════════════════════════════╗
║ Setup complete! ║
╚═══════════════════════════════════════════════════════════════╝

 Experiment: ${EXPERIMENT_NAME}
 Delta Sync: ${DELTA_TABLE}

 Now just:

 1. cd ${DEMO_DIR}
 2. claude # use Claude Code as normal
 3. Open your experiment:

 ${DATABRICKS_HOST}/ml/experiments/${EXPERIMENT_ID}/traces

 4. Query traces with SQL (after Delta Sync, ~15 min):

 SELECT * FROM ${DELTA_TABLE}

 Run evaluations:
 ./eval.sh # full (code + LLM judges)
 ./eval.sh fast # fast (code scorers only)

 Other commands:
 .venv/bin/mlflow autolog claude --status # check status
 .venv/bin/mlflow autolog claude --disable # stop tracing

DONE