@@ -155,18 +155,27 @@ async def handle_project_generation(
     save_status(project_dir, status)
 
     try:
-        # Step 1: Check if similar project exists in vector DB
-        query_embedding = llm_client.get_embeddings([description])[0]
-        similar_projects = vector_store.search("project_examples", query_embedding, limit=1)
+        # Skip vector search if environment variable is set
+        skip_vector_search = os.getenv("SKIP_VECTOR_SEARCH", "").lower() == "true"
 
-        # If we found a similar project, use it as reference
         example_text = ""
-        if similar_projects:
-            example_text = f"\nHere's a similar project you can use as reference:\n{similar_projects[0]['example']}"
-            if requirements:
-                requirements += example_text
-            else:
-                requirements = example_text
+        if not skip_vector_search:
+            try:
+                # Step 1: Check if similar project exists in vector DB
+                query_embedding = llm_client.get_embeddings([description])[0]
+                similar_projects = vector_store.search("project_examples", query_embedding, limit=1)
+
+                # If we found a similar project, use it as reference
+                if similar_projects:
+                    example_text = f"\nHere's a similar project you can use as reference:\n{similar_projects[0]['example']}"
+            except Exception as e:
+                print(f"Vector search error (non-critical): {e}")
+                # Continue without vector search results
+
+        if example_text and requirements:
+            requirements += example_text
+        elif example_text:
+            requirements = example_text
 
         # Step 2: Generate prompt and get response from LLM
         prompt = prompt_gen.generate_prompt(description, requirements)
@@ -232,10 +241,19 @@ async def handle_project_generation(
             # Extract error context
             error_context = compiler.extract_error_context(output)
 
-            # Find similar errors in vector DB
-            error_embedding = llm_client.get_embeddings([error_context["full_error"]])[0]
-            similar_errors = vector_store.search("error_examples", error_embedding, limit=3)
+            # Skip vector search if environment variable is set
+            skip_vector_search = os.getenv("SKIP_VECTOR_SEARCH", "").lower() == "true"
+            similar_errors = []
 
+            if not skip_vector_search:
+                try:
+                    # Find similar errors in vector DB
+                    error_embedding = llm_client.get_embeddings([error_context["full_error"]])[0]
+                    similar_errors = vector_store.search("error_examples", error_embedding, limit=3)
+                except Exception as e:
+                    print(f"Vector search error (non-critical): {e}")
+                    # Continue without vector search results
+
             # Generate fix prompt
             fix_examples = ""
             if similar_errors:
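For reference, a minimal sketch of the guarded lookup this patch repeats in both hunks, pulled out as a standalone function. The `fetch_similar_examples` helper is hypothetical and not part of the patch; `llm_client.get_embeddings` and `vector_store.search` are assumed to behave exactly as in the diff above.

import os

def fetch_similar_examples(collection, text, llm_client, vector_store, limit=1):
    # Hypothetical helper mirroring the pattern added in both hunks above.
    # Setting SKIP_VECTOR_SEARCH=true disables the lookup entirely.
    if os.getenv("SKIP_VECTOR_SEARCH", "").lower() == "true":
        return []
    try:
        embedding = llm_client.get_embeddings([text])[0]
        return vector_store.search(collection, embedding, limit=limit)
    except Exception as e:
        # Vector search is treated as best-effort; failures are logged and ignored.
        print(f"Vector search error (non-critical): {e}")
        return []

Under that assumption, the two call sites would reduce to fetch_similar_examples("project_examples", description, llm_client, vector_store) and fetch_similar_examples("error_examples", error_context["full_error"], llm_client, vector_store, limit=3).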