
Commit f347c43

feat: update LlamaEdgeClient initialization with additional parameters and refactor QdrantStore setup
Parent: 86448c7

2 files changed, +12 -6 lines


app/main.py

Lines changed: 2 additions & 4 deletions
@@ -1,14 +1,15 @@
 import os
 import uuid
 import shutil
+import json  # Move from within functions to top level
 from typing import Dict, List, Optional
 from dotenv import load_dotenv

 # Load environment variables from .env file
 load_dotenv()

 from fastapi import FastAPI, BackgroundTasks, HTTPException
-from fastapi.responses import JSONResponse
+from fastapi.responses import JSONResponse, PlainTextResponse  # Move from within function to top level
 from pydantic import BaseModel

 from app.prompt_generator import PromptGenerator
@@ -96,7 +97,6 @@ async def get_project_status(project_id: str):
         raise HTTPException(status_code=404, detail="Project not found")

     # Read status file
-    import json
     with open(status_file, 'r') as f:
         status = json.load(f)

@@ -132,7 +132,6 @@ async def mcp_compile_and_fix_rust(request: dict):
             output_text += f"[filename: {filename}]\n{content}\n\n"

         # Return as plain text
-        from fastapi.responses import PlainTextResponse
         return PlainTextResponse(content=output_text.strip())
     else:
         # For errors, we can still return JSON
@@ -298,7 +297,6 @@ async def handle_project_generation(

 def save_status(project_dir: str, status: Dict):
     """Save project status to file"""
-    import json
     with open(f"{project_dir}/status.json", 'w') as f:
         json.dump(status, f)
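Note on the import hoisting: function-local imports are safe but redundant in Python, since the interpreter caches every loaded module in sys.modules, so a local `import json` only re-binds a name on each call. A small self-contained sketch (illustrative, not code from this repo) showing why the hoisted version behaves identically:

import sys
import json  # module-level import: executed once, cached in sys.modules

def local_import():
    # A function-local import does not reload the module from disk;
    # it fetches the cached entry from sys.modules and binds a local name.
    import json
    return json

# Both paths resolve to the same cached module object.
assert local_import() is sys.modules["json"] is json

Hoisting the imports therefore changes no behavior; it just makes the file's dependencies visible at the top and removes the repeated lookup from each request handler.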

examples/run_mcp_server.py

Lines changed: 10 additions & 2 deletions
@@ -17,13 +17,21 @@ def main():
     # Get LLM API base URL
     llm_api_base = os.getenv("LLM_API_BASE", "https://coder.gaia.domains/v1")
     llm_model = os.getenv("LLM_MODEL", "Qwen2.5-Coder-32B-Instruct-Q5_K_M")
+    llm_embed_model = os.getenv("LLM_EMBED_MODEL", "nomic-embed")

     # Get Qdrant connection details
     qdrant_host = os.getenv("QDRANT_HOST", "localhost")
     qdrant_port = int(os.getenv("QDRANT_PORT", "6333"))

-    llm_client = LlamaEdgeClient(api_key=api_key, api_base=llm_api_base, model=llm_model)
-    vector_store = QdrantStore(embedding_size=llm_embed_size, host=qdrant_host, port=qdrant_port)
+    # Set environment variables for QdrantStore
+    os.environ["QDRANT_HOST"] = qdrant_host
+    os.environ["QDRANT_PORT"] = str(qdrant_port)
+
+    # Initialize LlamaEdgeClient with all required parameters
+    llm_client = LlamaEdgeClient(api_key=api_key, api_base=llm_api_base, model=llm_model, embed_model=llm_embed_model)
+
+    # Initialize QdrantStore with just the embedding_size parameter
+    vector_store = QdrantStore(embedding_size=llm_embed_size)
     vector_store.create_collection("project_examples")
     vector_store.create_collection("error_examples")
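The refactor implies that QdrantStore now reads its connection details from QDRANT_HOST/QDRANT_PORT instead of taking host/port constructor arguments, which is why run_mcp_server.py exports them before construction. A minimal sketch of what such a constructor could look like, assuming the qdrant-client library underneath; the class body, the env-var defaults, and the Distance.COSINE choice are assumptions, not code from this commit:

import os
from qdrant_client import QdrantClient
from qdrant_client.models import Distance, VectorParams

class QdrantStore:
    """Sketch: vector store that picks up its Qdrant connection from the environment."""

    def __init__(self, embedding_size: int):
        # Connection details come from the environment, as set by run_mcp_server.py.
        host = os.getenv("QDRANT_HOST", "localhost")
        port = int(os.getenv("QDRANT_PORT", "6333"))
        self.embedding_size = embedding_size
        self.client = QdrantClient(host=host, port=port)

    def create_collection(self, name: str) -> None:
        # Create the collection only if it does not exist yet; cosine
        # distance is a common default for text embeddings (assumed here).
        existing = {c.name for c in self.client.get_collections().collections}
        if name not in existing:
            self.client.create_collection(
                collection_name=name,
                vectors_config=VectorParams(size=self.embedding_size, distance=Distance.COSINE),
            )

Passing connection details through the environment keeps the constructor signature stable across callers, at the cost of making the dependency implicit: callers such as run_mcp_server.py must remember to export the variables before constructing the store.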
