2 changes: 1 addition & 1 deletion strix/agents/base_agent.py
@@ -147,7 +147,7 @@ def cancel_current_execution(self) -> None:
self._current_task.cancel()
self._current_task = None

async def agent_loop(self, task: str) -> dict[str, Any]: # noqa: PLR0912, PLR0915
async def agent_loop(self, task: str) -> dict[str, Any]:
await self._initialize_sandbox_and_state(task)

from strix.telemetry.tracer import get_global_tracer
114 changes: 100 additions & 14 deletions strix/interface/main.py
@@ -10,6 +10,7 @@
import shutil
import sys
from pathlib import Path
from typing import List, Optional

import litellm
from docker.errors import DockerException
@@ -39,7 +40,7 @@
logging.getLogger().setLevel(logging.ERROR)


def validate_environment() -> None: # noqa: PLR0912, PLR0915
def validate_environment() -> None:
console = Console()
missing_required_vars = []
missing_optional_vars = []
@@ -185,14 +186,24 @@ def check_docker_installed() -> None:


async def warm_up_llm() -> None:
"""
Try to warm up the LLM connection.
If multiple API keys are provided in LLM_API_KEYS (comma-separated),
try them in order until one succeeds.
"""
console = Console()

try:
model_name = os.getenv("STRIX_LLM", "openai/gpt-5")
api_key = os.getenv("LLM_API_KEY")

if api_key:
litellm.api_key = api_key
env_keys = os.getenv("LLM_API_KEYS")
keys_to_try: List[str] = []
if env_keys:
keys_to_try = [k.strip() for k in env_keys.split(",") if k.strip()]
else:
single_key = os.getenv("LLM_API_KEY")
if single_key:
keys_to_try = [single_key]

api_base = (
os.getenv("LLM_API_BASE")
@@ -203,19 +214,40 @@ async def warm_up_llm() -> None:
if api_base:
litellm.api_base = api_base

test_messages = [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "Reply with just 'OK'."},
]
if not keys_to_try:
keys_to_try = []

response = litellm.completion(
model=model_name,
messages=test_messages,
)
success = False
last_exception: Optional[Exception] = None

for api_key in keys_to_try:
try:
litellm.api_key = api_key

test_messages = [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "Reply with just 'OK'."},
]

response = litellm.completion(
model=model_name,
messages=test_messages,
)

validate_llm_response(response)
validate_llm_response(response)

except Exception as e: # noqa: BLE001
success = True
os.environ["LLM_API_KEY"] = api_key
break

except Exception as e:
last_exception = e
continue

if not success:
raise last_exception or RuntimeError("No API key provided or all keys failed.")

except Exception as e:
error_text = Text()
error_text.append("❌ ", style="bold red")
error_text.append("LLM CONNECTION FAILED", style="bold red")
@@ -440,11 +472,65 @@ def pull_docker_image() -> None:
console.print()


def collect_multiple_api_keys_interactive() -> None:
"""
If running interactively and there is no LLM_API_KEY set, prompt the user
to enter N API keys. Store them in LLM_API_KEYS (comma-separated) and set
LLM_API_KEY to the first one for backward compatibility.
"""
console = Console()

if os.getenv("LLM_API_KEY"):
return

if not sys.stdin.isatty():
return

try:
console.print()
console.print("[bold cyan]No LLM API key found in environment.[/]")
console.print(
"[dim]You can provide multiple API keys to try them in sequence if one fails.[/]"
)
console.print()

while True:
count_str = console.input("How many API keys would you like to provide? (0 to skip) ")
try:
count = int(count_str.strip())
if count < 0:
console.print("[red]Please enter 0 or a positive integer.[/]")
continue
break
except ValueError:
console.print("[red]Please enter a valid integer.[/]")

if count == 0:
console.print("[dim]Skipping interactive API key input.[/]")
console.print()
return

keys: List[str] = []
for i in range(1, count + 1):
key = console.input(f"Enter API key #{i}: ").strip()
if key:
keys.append(key)

if keys:
os.environ["LLM_API_KEYS"] = ",".join(keys)
os.environ["LLM_API_KEY"] = keys[0]
console.print(f"[green]Stored {len(keys)} API key(s) for use.[/]")
console.print()
except Exception:
return
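
For reference, a short sketch of the environment state this helper leaves behind and how warm_up_llm then consumes it; the key values are placeholders, not real credentials.

import os

# Hypothetical values a user might type at the prompts above.
entered = ["sk-first-key", "sk-backup-key"]

os.environ["LLM_API_KEYS"] = ",".join(entered)  # "sk-first-key,sk-backup-key"
os.environ["LLM_API_KEY"] = entered[0]          # kept for backward compatibility

# warm_up_llm() later splits LLM_API_KEYS on commas, tries each key in order,
# and rewrites LLM_API_KEY to whichever key succeeded.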


def main() -> None:
if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

args = parse_arguments()
collect_multiple_api_keys_interactive()

check_docker_installed()
pull_docker_image()