Commit 96644ae

Refactor LlamaEdgeClient initialization to simplify API key requirement message and remove redundant documentation
1 parent 6e1f3a1 commit 96644ae

File tree

1 file changed: +2 −9 lines

app/llm_client.py

Lines changed: 2 additions & 9 deletions
```diff
@@ -8,14 +8,7 @@ class LlamaEdgeClient:
     """Client for interacting with LlamaEdge OpenAI-compatible API"""
 
     def __init__(self, api_key=None, api_base=None, model=None, embed_model=None):
-        """Initialize LlamaEdgeClient with API credentials
-
-        Args:
-            api_key: API key for LLM service
-            api_base: Base URL for API (overrides LLM_API_BASE env var)
-            model: Model name (overrides LLM_MODEL env var)
-            embed_model: Embedding model name (overrides LLM_EMBED_MODEL env var)
-        """
+        """Initialize LlamaEdgeClient with API credentials"""
         self.api_key = api_key or os.getenv("LLM_API_KEY", "")
 
         # Use provided parameters with fallback to environment variables
@@ -26,7 +19,7 @@ def __init__(self, api_key=None, api_base=None, model=None, embed_model=None):
         # Only require API key if not using a local endpoint
         is_local_endpoint = self.base_url.startswith("http://localhost") or self.base_url.startswith("http://host.docker.internal")
         if not self.api_key and not is_local_endpoint:
-            raise ValueError("API key is required for non-local endpoints")
+            raise ValueError("API key is required")
 
         # Initialize OpenAI client with custom base URL
         # Use dummy API key for local endpoints if not provided
```
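For reference, here is a minimal, standalone sketch of the check this commit touches. It is not the project's code: the `require_api_key` helper, the example URLs, and the `"dummy-key"` fallback value are hypothetical illustrations of the logic that lives inside `LlamaEdgeClient.__init__` in the diff above.

```python
import os


def require_api_key(api_key, base_url):
    """Return a usable API key, mirroring the check in LlamaEdgeClient.__init__ above."""
    api_key = api_key or os.getenv("LLM_API_KEY", "")
    # Local endpoints (localhost or the Docker host) are exempt from the key requirement.
    is_local_endpoint = base_url.startswith("http://localhost") or base_url.startswith(
        "http://host.docker.internal"
    )
    if not api_key and not is_local_endpoint:
        # After this commit the message is simply "API key is required".
        raise ValueError("API key is required")
    # Hypothetical dummy key for local endpoints; the real value is not shown in the diff.
    return api_key or "dummy-key"


# A local endpoint works without a key; a remote one raises ValueError
# when LLM_API_KEY is also unset in the environment.
print(require_api_key(None, "http://localhost:8080/v1"))
try:
    require_api_key(None, "https://api.example.com/v1")
except ValueError as err:
    print(err)
```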
