Skip to content

Commit fc2f6d4

Browse files
committed
Adding API key support
Summary: Adds API key support: `LlamaStackClient` accepts an `api_key` argument (sent as a `Bearer` Authorization header), and the `configure` CLI prompts for / accepts an API key and an HTTPS choice, persisting them to the config file. Test Plan: Try to config with no args ``` llama-stack-client configure > Enter the host name of the Llama Stack distribution server: api.hostname.com > Enter the port number of the Llama Stack distribution server: 80 > Enter the API key (leave empty if no key is needed): _API_KEY_ > Is Llama Stack distribution server using HTTPS? (y/n): n Done! You can now use the Llama Stack Client CLI with endpoint http://api.hostname.com:80 ``` Config with args ``` llama-stack-client configure --endpoint=https://api.hostname.com --api-key='_API_KEY_' Done! You can now use the Llama Stack Client CLI with endpoint https://api.hostname.com ``` Try listing the models ``` llama-stack-client models list ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━┓ ┃ identifier ┃ provider_id ┃ provider_resource_id ┃ metadata ┃ model_type ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━┩ │ llama3.1-8b-instruct │ meta-llama │ llama3.1-8b-instruct │ {} │ llm │ │ llama3.3-70b-instruct │ meta-llama │ llama3.3-70b-instruct │ {} │ llm │ │ llama3.4-17b-instruct │ meta-llama │ llama3.4-17b-instruct │ {} │ llm │ └───────────────────────┴─────────────┴───────────────────────┴──────────┴────────────┘ ```
1 parent 6b6be35 commit fc2f6d4

File tree

3 files changed

+42
-6
lines changed

3 files changed

+42
-6
lines changed

src/llama_stack_client/_client.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,7 @@ def __init__(
103103
self,
104104
*,
105105
base_url: str | httpx.URL | None = None,
106+
api_key: str | None = None,
106107
timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
107108
max_retries: int = DEFAULT_MAX_RETRIES,
108109
default_headers: Mapping[str, str] | None = None,
@@ -132,6 +133,8 @@ def __init__(
132133
custom_headers["X-LlamaStack-Client-Version"] = __version__
133134
if provider_data is not None:
134135
custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
136+
if api_key is not None:
137+
custom_headers["Authorization"] = f"Bearer {api_key}"
135138

136139
super().__init__(
137140
version=__version__,
@@ -300,6 +303,7 @@ def __init__(
300303
self,
301304
*,
302305
base_url: str | httpx.URL | None = None,
306+
api_key: str | None = None,
303307
timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
304308
max_retries: int = DEFAULT_MAX_RETRIES,
305309
default_headers: Mapping[str, str] | None = None,
@@ -329,6 +333,8 @@ def __init__(
329333
custom_headers["X-LlamaStack-Client-Version"] = __version__
330334
if provider_data is not None:
331335
custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
336+
if api_key is not None:
337+
custom_headers["Authorization"] = f"Bearer {api_key}"
332338

333339
super().__init__(
334340
version=__version__,

src/llama_stack_client/lib/cli/configure.py

Lines changed: 30 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,10 @@ def get_config():
2626
@click.command()
2727
@click.option("--host", type=str, help="Llama Stack distribution host")
2828
@click.option("--port", type=str, help="Llama Stack distribution port number")
29+
@click.option("--use-https", type=bool, default=False, help="Llama Stack distribution uses HTTPS")
2930
@click.option("--endpoint", type=str, help="Llama Stack distribution endpoint")
30-
def configure(host: str | None, port: str | None, endpoint: str | None):
31+
@click.option("--api-key", type=str, help="Llama Stack distribution API key")
32+
def configure(host: str | None, port: str | None, use_https: bool | None, endpoint: str | None, api_key: str | None):
3133
"""Configure Llama Stack Client CLI"""
3234
os.makedirs(LLAMA_STACK_CLIENT_CONFIG_DIR, exist_ok=True)
3335
config_path = get_config_file_path()
@@ -36,7 +38,10 @@ def configure(host: str | None, port: str | None, endpoint: str | None):
3638
final_endpoint = endpoint
3739
else:
3840
if host and port:
39-
final_endpoint = f"http://{host}:{port}"
41+
if use_https:
42+
final_endpoint = f"https://{host}:{port}"
43+
else:
44+
final_endpoint = f"http://{host}:{port}"
4045
else:
4146
host = prompt(
4247
"> Enter the host name of the Llama Stack distribution server: ",
@@ -52,14 +57,33 @@ def configure(host: str | None, port: str | None, endpoint: str | None):
5257
error_message="Please enter a valid port number",
5358
),
5459
)
55-
final_endpoint = f"http://{host}:{port}"
60+
api_key = prompt(
61+
"> Enter the API key (leave empty if no key is needed): ",
62+
)
63+
64+
is_https = prompt(
65+
"> Is Llama Stack distribution server using HTTPS? (y/n): ",
66+
validator=Validator.from_callable(
67+
lambda x: x.lower() in ["y", "n", "yes", "no"],
68+
error_message="Please enter a valid response for HTTPS, yes or no",
69+
),
70+
)
71+
if is_https == "y":
72+
final_endpoint = f"https://{host}:{port}"
73+
else:
74+
final_endpoint = f"http://{host}:{port}"
75+
76+
# Prepare config dict before writing it
77+
config_dict = {
78+
"endpoint": final_endpoint,
79+
}
80+
if api_key:
81+
config_dict["api_key"] = api_key
5682

5783
with open(config_path, "w") as f:
5884
f.write(
5985
yaml.dump(
60-
{
61-
"endpoint": final_endpoint,
62-
},
86+
config_dict,
6387
sort_keys=True,
6488
)
6589
)

src/llama_stack_client/lib/cli/llama_stack_client.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,15 +55,21 @@ def cli(ctx, endpoint: str, config: str | None):
5555
with open(config, "r") as f:
5656
config_dict = yaml.safe_load(f)
5757
endpoint = config_dict.get("endpoint", endpoint)
58+
api_key = config_dict.get("api_key", None)
5859
except Exception as e:
5960
click.echo(f"Error loading config from {config}: {str(e)}", err=True)
6061
click.echo("Falling back to HTTP client with endpoint", err=True)
6162

6263
if endpoint == "":
6364
endpoint = "http://localhost:8321"
6465

66+
print(f"[DEBUG] Using base url: {endpoint}")
67+
if api_key:
68+
print(f"[DEBUG] Using API key: {api_key}")
69+
6570
client = LlamaStackClient(
6671
base_url=endpoint,
72+
api_key=api_key,
6773
provider_data={
6874
"fireworks_api_key": os.environ.get("FIREWORKS_API_KEY", ""),
6975
"together_api_key": os.environ.get("TOGETHER_API_KEY", ""),

0 commit comments

Comments
 (0)