From 055770a7915a733a44645d0d498ae4f285a27194 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Tue, 17 Sep 2024 22:40:51 -0700
Subject: [PATCH] Stop asking for "apis to serve" as part of configure

---
 llama_stack/distribution/configure.py               | 13 +------------
 llama_stack/distribution/utils/prompt_for_config.py |  2 +-
 2 files changed, 2 insertions(+), 13 deletions(-)

diff --git a/llama_stack/distribution/configure.py b/llama_stack/distribution/configure.py
index 8286f5195..c54bb27b6 100644
--- a/llama_stack/distribution/configure.py
+++ b/llama_stack/distribution/configure.py
@@ -17,12 +17,6 @@
 from llama_stack.distribution.utils.prompt_for_config import prompt_for_config
 
 
-# These are hacks so we can re-use the `prompt_for_config` utility
-# This needs a bunch of work to be made very user friendly.
-class ReqApis(BaseModel):
-    apis_to_serve: List[str]
-
-
 def make_routing_entry_type(config_class: Any):
     class BaseModelWithConfig(BaseModel):
         routing_key: str
@@ -40,12 +34,7 @@ def configure_api_providers(
     print("Enter comma-separated list of APIs to serve:")
 
     apis = config.apis_to_serve or list(spec.providers.keys())
-    apis = [a for a in apis if a != "telemetry"]
-    req_apis = ReqApis(
-        apis_to_serve=apis,
-    )
-    req_apis = prompt_for_config(ReqApis, req_apis)
-    config.apis_to_serve = req_apis.apis_to_serve
+    config.apis_to_serve = [a for a in apis if a != "telemetry"]
     print("")
 
     apis = [v.value for v in stack_apis()]
diff --git a/llama_stack/distribution/utils/prompt_for_config.py b/llama_stack/distribution/utils/prompt_for_config.py
index d9d778540..63ee64fb0 100644
--- a/llama_stack/distribution/utils/prompt_for_config.py
+++ b/llama_stack/distribution/utils/prompt_for_config.py
@@ -258,7 +258,7 @@ def prompt_for_config(
             except json.JSONDecodeError:
                 print(
-                    "Invalid JSON. Please enter a valid JSON-encoded list."
+                    'Invalid JSON. Please enter a valid JSON-encoded list e.g., ["foo","bar"]'
                 )
                 continue
             except ValueError as e:
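
Note (not part of the patch): the second hunk only changes the wording of the error message users see when a JSON-encoded list fails to parse. A minimal sketch of that interactive loop is below, assuming a standalone helper; the name `read_json_list` and the prompt text are illustrative, not identifiers from llama_stack, and the real `prompt_for_config` handles many more field types.

import json
from typing import List


def read_json_list(prompt: str) -> List[str]:
    # Keep asking until the user supplies a JSON array, mirroring the
    # re-prompt-on-error behavior and the message updated in the patch.
    while True:
        user_input = input(f"{prompt}: ")
        try:
            value = json.loads(user_input)
            if not isinstance(value, list):
                raise ValueError("Input must be a JSON-encoded list")
            return value
        # JSONDecodeError is a subclass of ValueError, so it must be caught first.
        except json.JSONDecodeError:
            print('Invalid JSON. Please enter a valid JSON-encoded list e.g., ["foo","bar"]')
        except ValueError as e:
            print(str(e))


# Example: read_json_list("apis_to_serve") accepts ["inference", "safety"]
# and re-prompts on inputs like `inference, safety` (not valid JSON).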