
Commit 6e1f3a1

Refactor API key handling in LlamaEdgeClient to enforce requirement for non-local endpoints
1 parent 00b0ec5 commit 6e1f3a1

File tree

1 file changed: +11 −6 lines changed


app/llm_client.py

Lines changed: 11 additions & 6 deletions
@@ -16,18 +16,23 @@ def __init__(self, api_key=None, api_base=None, model=None, embed_model=None):
             model: Model name (overrides LLM_MODEL env var)
             embed_model: Embedding model name (overrides LLM_EMBED_MODEL env var)
         """
-        self.api_key = api_key or os.getenv("LLM_API_KEY")
-        if not self.api_key:
-            raise ValueError("API key is required")
-
+        self.api_key = api_key or os.getenv("LLM_API_KEY", "")
+
         # Use provided parameters with fallback to environment variables
         self.base_url = api_base or os.getenv("LLM_API_BASE", "http://localhost:8080/v1")
         self.llm_model = model or os.getenv("LLM_MODEL", "Qwen2.5-Coder-3B-Instruct")
-        self.llm_embed_model = embed_model or os.getenv("LLM_EMBED_MODEL", "gte-Qwen2-1.5B-instruct")  # Fixed variable name
+        self.llm_embed_model = embed_model or os.getenv("LLM_EMBED_MODEL", "gte-Qwen2-1.5B-instruct")

+        # Only require API key if not using a local endpoint
+        is_local_endpoint = self.base_url.startswith("http://localhost") or self.base_url.startswith("http://host.docker.internal")
+        if not self.api_key and not is_local_endpoint:
+            raise ValueError("API key is required for non-local endpoints")
+
         # Initialize OpenAI client with custom base URL
+        # Use dummy API key for local endpoints if not provided
+        api_key_to_use = self.api_key if self.api_key else "dummy_api_key_for_local_setup"
         self.client = OpenAI(
-            api_key=self.api_key,
+            api_key=api_key_to_use,
             base_url=self.base_url
         )

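For context, a minimal sketch of how the refactored constructor is expected to behave from the caller's side. This is illustrative only: it assumes LlamaEdgeClient is importable from app.llm_client (the class name comes from the commit message and the file path from the diff), and the remote base URL shown is a hypothetical placeholder.

# Sketch of the new API-key behavior in LlamaEdgeClient.
from app.llm_client import LlamaEdgeClient

# Local LlamaEdge endpoint: no API key needed; the client passes a dummy key
# to the underlying OpenAI client internally.
local_client = LlamaEdgeClient(api_base="http://localhost:8080/v1")

# Non-local endpoint without a key: now raises ValueError at construction time.
try:
    LlamaEdgeClient(api_base="https://llm.example.com/v1")  # hypothetical remote URL
except ValueError as err:
    print(err)  # "API key is required for non-local endpoints"

# Non-local endpoint with an explicit key (or LLM_API_KEY set in the
# environment) works as before.
remote_client = LlamaEdgeClient(api_key="sk-example", api_base="https://llm.example.com/v1")

Note that the local-endpoint check is a simple prefix match on http://localhost and http://host.docker.internal, so a local server addressed as http://127.0.0.1 would still be treated as non-local and require a key.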