5 files changed, +37 -21 lines changed
@@ -44,17 +44,30 @@ jobs:
   echo "Checking Docker container status..."
   docker ps

-  # Check if API container exists before getting logs
-  if docker ps | grep -q api; then
-    echo "API container is running, checking logs:"
-    docker logs $(docker ps -q --filter name=api)
-  else
+  # Check if all containers are running
+  if ! docker ps | grep -q api; then
     echo "ERROR: API container is not running!"
-    echo "Checking docker-compose logs for errors:"
-    docker compose logs
+    echo "Building container logs:"
+    docker-compose logs --tail=100 api
+    exit 1
+  fi
+
+  if ! docker ps | grep -q mcp-server; then
+    echo "ERROR: MCP server container is not running!"
+    docker-compose logs --tail=100 mcp-server
     exit 1
   fi

+  if ! docker ps | grep -q mcp-proxy; then
+    echo "ERROR: MCP proxy container is not running!"
+    docker-compose logs --tail=100 mcp-proxy
+    exit 1
+  fi
+
+  echo "All required containers are running."
+  echo "API container logs:"
+  docker logs $(docker ps -q --filter name=api)
+
   echo "Waiting 30 more seconds for services to stabilize..."
   sleep 30

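Since the three per-container checks repeat the same pattern, an equivalent loop keeps the step shorter if more services are added later. A minimal sketch, assuming the names api, mcp-server, and mcp-proxy match the running container names and that the docker-compose binary is available on the runner:

    # Fail fast if any required container is missing, dumping its recent logs first.
    for name in api mcp-server mcp-proxy; do
      if ! docker ps --format '{{.Names}}' | grep -q "$name"; then
        echo "ERROR: $name container is not running!"
        docker-compose logs --tail=100 "$name"
        exit 1
      fi
    done
    echo "All required containers are running."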
@@ -12,13 +12,14 @@ ENV PATH="/root/.cargo/bin:${PATH}"
 # Set working directory
 WORKDIR /app

-# Copy requirements and install dependencies
+# Install pip dependencies first for better caching
 COPY requirements.txt .
-# Remove the "|| echo" part to fail if packages aren't installed
-RUN pip install --no-cache-dir -r requirements.txt

-# Install MCP packages explicitly
-RUN pip install mcp-python mcp-proxy fastapi uvicorn
+# Install packages step by step to debug any issues
+RUN pip install --no-cache-dir -U pip && \
+    pip install --no-cache-dir openai && \
+    pip install --no-cache-dir cmcp mcp-python mcp-proxy && \
+    pip install --no-cache-dir -r requirements.txt

 # Copy application code
 COPY . .
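As a local sanity check of the split install steps, the image can be built and the explicitly installed packages inspected before CI runs. A sketch, assuming the Dockerfile sits in the current directory; the mcp-api image tag is only illustrative:

    # Build the image, then list the packages the new RUN step installs.
    docker build -t mcp-api .
    docker run --rm mcp-api pip show openai cmcp mcp-python mcp-proxy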
@@ -7,10 +7,10 @@ services:
       - "8000:8000"
     environment:
       - LLM_API_KEY=${LLM_API_KEY}
-      - LLM_API_BASE=${LLM_API_BASE:-http://localhost:8080/v1}
-      - LLM_MODEL=${LLM_MODEL:-Qwen2.5-Coder-3B-Instruct}
-      - LLM_EMBED_MODEL=${LLM_EMBED_MODEL:-gte-Qwen2-1.5B-instruct}
-      - LLM_EMBED_SIZE=${LLM_EMBED_SIZE:-1536}
+      - LLM_API_BASE=${LLM_API_BASE:-https://coder.gaia.domains/v1}
+      - LLM_MODEL=${LLM_MODEL:-Qwen2.5-Coder-32B-Instruct-Q5_K_M}
+      - LLM_EMBED_MODEL=${LLM_EMBED_MODEL:-nomic-embed}
+      - LLM_EMBED_SIZE=${LLM_EMBED_SIZE:-768}
       - QDRANT_HOST=qdrant
       - QDRANT_PORT=6333
     depends_on:
@@ -20,10 +20,10 @@ services:
     build: .
     environment:
       - LLM_API_KEY=${LLM_API_KEY}
-      - LLM_API_BASE=${LLM_API_BASE:-http://localhost:8080/v1}
-      - LLM_MODEL=${LLM_MODEL:-Qwen2.5-Coder-3B-Instruct}
-      - LLM_EMBED_MODEL=${LLM_EMBED_MODEL:-gte-Qwen2-1.5B-instruct}
-      - LLM_EMBED_SIZE=${LLM_EMBED_SIZE:-1536}
+      - LLM_API_BASE=${LLM_API_BASE:-https://coder.gaia.domains/v1}
+      - LLM_MODEL=${LLM_MODEL:-Qwen2.5-Coder-32B-Instruct-Q5_K_M}
+      - LLM_EMBED_MODEL=${LLM_EMBED_MODEL:-nomic-embed}
+      - LLM_EMBED_SIZE=${LLM_EMBED_SIZE:-768}
       - QDRANT_HOST=qdrant
       - QDRANT_PORT=6333
     depends_on:
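Because every entry uses the ${VAR:-default} form, the new Gaia endpoint and model names apply only when the variables are unset, so a local backend is still reachable by exporting overrides. A sketch of verifying the substitution before starting the stack, assuming the Compose v2 plugin (docker compose) is installed:

    # Point back at a local backend, confirm the resolved values, then start the stack.
    export LLM_API_BASE="http://localhost:8080/v1"
    export LLM_MODEL="Qwen2.5-Coder-3B-Instruct"
    docker compose config | grep LLM_
    docker compose up -d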
File renamed without changes.
@@ -5,4 +5,6 @@ python-dotenv>=1.0.0
 requests>=2.28.2
 qdrant-client>=1.3.0
 mcp-python>=0.1.0
-mcp-proxy>=0.1.0
+mcp-proxy>=0.1.0
+cmcp>=0.1.0
+openai>=1.0.0
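To confirm the new pins resolve together before rebuilding the image, the file can be installed into a throwaway virtual environment; a sketch, assuming a local Python 3 with venv available:

    # Install the updated requirements in isolation and check dependency consistency.
    python -m venv .venv && . .venv/bin/activate
    pip install --no-cache-dir -r requirements.txt
    pip check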