Skip to content

Commit d67b7f3

Browse files
cdoern, franciscojavierarceo, claude
authored
feat(cli): wire up ogx-client CLI entrypoint (#341)
## Summary - Register `ogx-client` as a console script in `pyproject.toml` - Rename `lib/cli/llama_stack_client.py` → `ogx_client.py`; rename group, prog_name, and help text - Construct `OgxClient` with `api_key`/`default_headers` (the Stainless-generated client no longer accepts `provider_data`); pack provider keys into the `X-OGX-Provider-Data` header - Fix stale `from llama_stack_client...` import in `configure.py` that crashed every CLI invocation - Move client config dir from `~/.llama/client` to `~/.ogx/client`; rename `LLAMA_STACK_CLIENT_CONFIG_DIR` → `OGX_CLIENT_CONFIG_DIR` - Replace remaining `Llama Stack` / `llama-stack-client` / `llama models` strings in CLI command help and messages with `OGX` / `ogx-client` - Update stray `AsyncLlamaStackClient` reference in `lib/agents/agent.py` docstring Stainless covers the SDK types and resources but doesn't generate the CLI, so this stitches the renamed CLI back together so `ogx-client` actually works. ## Test plan - [x] \`pip install -e .\` succeeds - [x] \`ogx-client --help\` lists subcommands and shows \`OGX\` branding - [x] \`ogx-client --version\` returns the package version - [x] \`ogx-client configure --help\`, \`models --help\`, \`inspect --help\` load without import errors - [ ] End-to-end smoke against a running OGX server --------- Signed-off-by: Charlie Doern <cdoern@redhat.com> Co-authored-by: Francisco Javier Arceo <farceo@redhat.com> Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 7a863d3 commit d67b7f3

8 files changed

Lines changed: 49 additions & 43 deletions

File tree

pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,9 @@ Repository = "https://github.com/ogx-ai/ogx-client-python"
5252
[project.optional-dependencies]
5353
aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.9"]
5454

55+
[project.scripts]
56+
ogx-client = "ogx_client.lib.cli.ogx_client:main"
57+
5558
[tool.uv]
5659
managed = true
5760
required-version = ">=0.9"

src/ogx_client/lib/agents/agent.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,7 @@ def __init__(
301301
"""Construct an async Agent backed by the responses + conversations APIs.
302302
303303
Args:
304-
client: An async OpenAI-compatible client (e.g., openai.AsyncOpenAI() or AsyncLlamaStackClient).
304+
client: An async OpenAI-compatible client (e.g., openai.AsyncOpenAI() or AsyncOgxClient).
305305
The client must support the responses and conversations APIs.
306306
"""
307307
self.client = client

src/ogx_client/lib/cli/configure.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
import click
88
from prompt_toolkit import prompt
99
from prompt_toolkit.validation import Validator
10-
from llama_stack_client.lib.cli.constants import LLAMA_STACK_CLIENT_CONFIG_DIR, get_config_file_path
10+
from ogx_client.lib.cli.constants import OGX_CLIENT_CONFIG_DIR, get_config_file_path
1111

1212

1313
def get_config():
@@ -20,18 +20,18 @@ def get_config():
2020

2121
@click.command()
2222
@click.help_option("-h", "--help")
23-
@click.option("--endpoint", type=str, help="Llama Stack distribution endpoint", default="")
24-
@click.option("--api-key", type=str, help="Llama Stack distribution API key", default="")
23+
@click.option("--endpoint", type=str, help="OGX server endpoint", default="")
24+
@click.option("--api-key", type=str, help="OGX server API key", default="")
2525
def configure(endpoint: str | None, api_key: str | None):
26-
"""Configure Llama Stack Client CLI."""
27-
os.makedirs(LLAMA_STACK_CLIENT_CONFIG_DIR, exist_ok=True)
26+
"""Configure OGX Client CLI."""
27+
os.makedirs(OGX_CLIENT_CONFIG_DIR, exist_ok=True)
2828
config_path = get_config_file_path()
2929

3030
if endpoint != "":
3131
final_endpoint = endpoint
3232
else:
3333
final_endpoint = prompt(
34-
"> Enter the endpoint of the Llama Stack distribution server: ",
34+
"> Enter the endpoint of the OGX server: ",
3535
validator=Validator.from_callable(
3636
lambda x: len(x) > 0 and (parsed := urlparse(x)).scheme and parsed.netloc,
3737
error_message="Endpoint cannot be empty and must be a valid URL, please enter a valid endpoint",
@@ -60,4 +60,4 @@ def configure(endpoint: str | None, api_key: str | None):
6060
)
6161
)
6262

63-
print(f"Done! You can now use the Llama Stack Client CLI with endpoint {final_endpoint}") # noqa: T201
63+
print(f"Done! You can now use the OGX Client CLI with endpoint {final_endpoint}") # noqa: T201
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import os
22
from pathlib import Path
33

4-
LLAMA_STACK_CLIENT_CONFIG_DIR = Path(os.path.expanduser("~/.llama/client"))
4+
OGX_CLIENT_CONFIG_DIR = Path(os.path.expanduser("~/.ogx/client"))
55

66

77
def get_config_file_path():
8-
return LLAMA_STACK_CLIENT_CONFIG_DIR / "config.yaml"
8+
return OGX_CLIENT_CONFIG_DIR / "config.yaml"

src/ogx_client/lib/cli/eval/run_scoring.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
@click.option(
1717
"--dataset-id",
1818
required=False,
19-
help="Pre-registered dataset_id to score (from llama-stack-client datasets list)",
19+
help="Pre-registered dataset_id to score (from ogx-client datasets list)",
2020
)
2121
@click.option(
2222
"--dataset-path",
@@ -75,9 +75,7 @@ def run_scoring(
7575
if dataset_id is not None:
7676
dataset = client.datasets.retrieve(dataset_id=dataset_id)
7777
if not dataset:
78-
click.BadParameter(
79-
f"Dataset {dataset_id} not found. Please register using llama-stack-client datasets register"
80-
)
78+
click.BadParameter(f"Dataset {dataset_id} not found. Please register using ogx-client datasets register")
8179

8280
# TODO: this will eventually be replaced with jobs polling from server via score_batch
8381
# For now, get all datasets rows via datasets API

src/ogx_client/lib/cli/inspect/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
@click.pass_context
1010
@handle_client_errors("inspect version")
1111
def inspect_version(ctx):
12-
"""Show Llama Stack version on distribution endpoint"""
12+
"""Show OGX server version on distribution endpoint"""
1313
client = ctx.obj["client"]
1414
console = Console()
1515
version_response = client.inspect.version()

src/ogx_client/lib/cli/models/models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ def models():
1414
"""Manage GenAI models."""
1515

1616

17-
@click.command(name="list", help="Show available llama models at distribution endpoint")
17+
@click.command(name="list", help="Show available models at distribution endpoint")
1818
@click.help_option("-h", "--help")
1919
@click.pass_context
2020
@handle_client_errors("list models")

src/ogx_client/lib/cli/llama_stack_client.py renamed to src/ogx_client/lib/cli/ogx_client.py

Lines changed: 32 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from __future__ import annotations
22

33
import os
4+
import json
45
from importlib.metadata import version
56

67
import yaml
@@ -24,16 +25,15 @@
2425

2526
@click.group()
2627
@click.help_option("-h", "--help")
27-
@click.version_option(version=version("ogx-client"), prog_name="llama-stack-client")
28-
@click.option("--endpoint", type=str, help="Llama Stack distribution endpoint", default="")
29-
@click.option("--api-key", type=str, help="Llama Stack distribution API key", default="")
28+
@click.version_option(version=version("ogx_client"), prog_name="ogx-client")
29+
@click.option("--endpoint", type=str, help="OGX server endpoint", default="")
30+
@click.option("--api-key", type=str, help="OGX server API key", default="")
3031
@click.option("--config", type=str, help="Path to config file", default=None)
3132
@click.pass_context
32-
def llama_stack_client(ctx, endpoint: str, api_key: str, config: str | None):
33-
"""Welcome to the llama-stack-client CLI - a command-line interface for interacting with Llama Stack"""
33+
def ogx_client(ctx, endpoint: str, api_key: str, config: str | None):
34+
"""Welcome to the ogx-client CLI - a command-line interface for interacting with an OGX server"""
3435
ctx.ensure_object(dict)
3536

36-
# If no config provided, check default location
3737
if config and endpoint:
3838
raise ValueError("Cannot use both config and endpoint")
3939

@@ -55,40 +55,45 @@ def llama_stack_client(ctx, endpoint: str, api_key: str, config: str | None):
5555
if endpoint == "":
5656
endpoint = "http://localhost:8321"
5757

58-
default_headers = {}
58+
default_headers: dict[str, str] = {}
5959
if api_key != "":
60-
default_headers = {
61-
"Authorization": f"Bearer {api_key}",
62-
}
60+
default_headers["Authorization"] = f"Bearer {api_key}"
6361

64-
client = OgxClient(
65-
base_url=endpoint,
66-
provider_data={
62+
provider_data = {
63+
k: v
64+
for k, v in {
6765
"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY", ""),
6866
"together_api_key": os.environ.get("TOGETHER_API_KEY", ""),
6967
"openai_api_key": os.environ.get("OPENAI_API_KEY", ""),
70-
},
68+
}.items()
69+
if v
70+
}
71+
if provider_data:
72+
default_headers["X-OGX-Provider-Data"] = json.dumps(provider_data)
73+
74+
client = OgxClient(
75+
base_url=endpoint,
76+
api_key=api_key or None,
7177
default_headers=default_headers,
7278
)
7379
ctx.obj = {"client": client}
7480

7581

76-
# Register all subcommands
77-
llama_stack_client.add_command(models, "models")
78-
llama_stack_client.add_command(vector_stores, "vector_stores")
79-
llama_stack_client.add_command(shields, "shields")
80-
llama_stack_client.add_command(eval_tasks, "eval_tasks")
81-
llama_stack_client.add_command(providers, "providers")
82-
llama_stack_client.add_command(datasets, "datasets")
83-
llama_stack_client.add_command(configure, "configure")
84-
llama_stack_client.add_command(scoring_functions, "scoring_functions")
85-
llama_stack_client.add_command(eval, "eval")
86-
llama_stack_client.add_command(inference, "inference")
87-
llama_stack_client.add_command(inspect, "inspect")
82+
ogx_client.add_command(models, "models")
83+
ogx_client.add_command(vector_stores, "vector_stores")
84+
ogx_client.add_command(shields, "shields")
85+
ogx_client.add_command(eval_tasks, "eval_tasks")
86+
ogx_client.add_command(providers, "providers")
87+
ogx_client.add_command(datasets, "datasets")
88+
ogx_client.add_command(configure, "configure")
89+
ogx_client.add_command(scoring_functions, "scoring_functions")
90+
ogx_client.add_command(eval, "eval")
91+
ogx_client.add_command(inference, "inference")
92+
ogx_client.add_command(inspect, "inspect")
8893

8994

9095
def main():
91-
llama_stack_client()
96+
ogx_client()
9297

9398

9499
if __name__ == "__main__":

0 commit comments

Comments
 (0)