53 changes: 37 additions & 16 deletions src/bin/semcode-mcp.rs
@@ -2090,6 +2090,7 @@ impl McpServer {
"initialize" => self.handle_initialize(params).await,
"tools/list" => self.handle_list_tools().await,
"tools/call" => self.handle_tool_call(params).await,
"ping" => json!({}),
_ => json!({
"error": {
"code": -32601,
@@ -5086,20 +5087,38 @@ async fn run_stdio_server(server: Arc<McpServer>) -> Result<()> {

            match serde_json::from_str::<Value>(&line) {
                Ok(request) => {
                    let response = server.handle_request(request).await;
                    if let Ok(response_str) = serde_json::to_string(&response) {
                        let mut stdout_guard = stdout.lock().await;
                        if let Err(e) = stdout_guard.write_all(response_str.as_bytes()).await {
                            eprintln!("Failed to write response: {e}");
                            break;
                        }
                        if let Err(e) = stdout_guard.write_all(b"\n").await {
                            eprintln!("Failed to write newline: {e}");
                            break;
                        }
                        if let Err(e) = stdout_guard.flush().await {
                            eprintln!("Failed to flush stdout: {e}");
                            break;
                        }
                    }
                    // Check if this is a notification (no id) or a request (has id)
                    if request.get("id").is_some() {
                        // It's a request, send a response
                        let response = server.handle_request(request).await;
                        if let Ok(response_str) = serde_json::to_string(&response) {
                            let mut stdout_guard = stdout.lock().await;
                            if let Err(e) = stdout_guard.write_all(response_str.as_bytes()).await {
                                eprintln!("Failed to write response: {e}");
                                break;
                            }
                            if let Err(e) = stdout_guard.write_all(b"\n").await {
                                eprintln!("Failed to write newline: {e}");
                                break;
                            }
                            if let Err(e) = stdout_guard.flush().await {
                                eprintln!("Failed to flush stdout: {e}");
                                break;
                            }
                        }
                    } else {
                        // It's a notification, handle without response
                        let method = request["method"].as_str().unwrap_or("");
                        match method {
                            "notifications/initialized" => {
                                eprintln!("Client initialized");
                            }
                            "notifications/cancelled" => {
                                eprintln!("Request cancelled (notification received)");
                            }
                            _ => {
                                eprintln!("Received notification: {}", method);
                            }
                        }
                    }
                }
@@ -5159,16 +5178,18 @@ async fn main() -> Result<()> {
// Create MCP server
let server = Arc::new(McpServer::new(&database_path, &args.git_repo, args.model_path).await?);

// Ensure tables exist
server.db.create_tables().await?;

// Spawn background task to index current commit if needed
eprintln!("[Background] Spawning background indexing task");
let db_for_indexing = server.db.clone();
let git_repo_for_indexing = args.git_repo.clone();
let indexing_state_for_bg = server.indexing_state.clone();
let notification_tx_for_bg = server.notification_tx.clone();
let _indexing_handle = tokio::spawn(async move {
// Ensure tables exist before indexing
if let Err(e) = db_for_indexing.create_tables().await {
eprintln!("[Background] Error creating/verifying tables: {}", e);
}

index_current_commit_background(
db_for_indexing,
git_repo_for_indexing,
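The stdio loop above now answers only messages that carry an `id` and stays silent for notifications, and the dispatcher returns an empty object for `ping`. A minimal, self-contained sketch of that behavior, assuming only `serde_json` (the `dispatch` function and the sample messages are illustrative stand-ins for `McpServer::handle_request` and real client traffic):

```rust
use serde_json::{json, Value};

/// Illustrative stand-in for the server's dispatch: requests (messages with an
/// "id") get exactly one JSON response; notifications (no "id") get none.
fn dispatch(message: &Value) -> Option<Value> {
    let method = message["method"].as_str().unwrap_or("");

    // Notifications must not be answered; log them to stderr only.
    let Some(id) = message.get("id") else {
        eprintln!("Received notification: {method}");
        return None;
    };

    // "ping" gets an empty result; unknown methods get a JSON-RPC error.
    Some(match method {
        "ping" => json!({ "jsonrpc": "2.0", "id": id, "result": {} }),
        _ => json!({
            "jsonrpc": "2.0",
            "id": id,
            "error": { "code": -32601, "message": format!("Method not found: {method}") }
        }),
    })
}

fn main() {
    let ping: Value =
        serde_json::from_str(r#"{"jsonrpc":"2.0","id":1,"method":"ping"}"#).unwrap();
    let note: Value =
        serde_json::from_str(r#"{"jsonrpc":"2.0","method":"notifications/initialized"}"#).unwrap();

    // Only the request produces a line on stdout; the notification stays silent there.
    if let Some(response) = dispatch(&ping) {
        println!("{response}");
    }
    assert!(dispatch(&note).is_none());
}
```

How the real handler wraps its results is not shown in this diff; the sketch only illustrates the id-based split and the empty `ping` result.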
16 changes: 8 additions & 8 deletions src/database/connection.rs
@@ -1966,30 +1966,30 @@ impl DatabaseManager {
pub async fn optimize_database(&self) -> Result<()> {
use colored::Colorize;

println!(
tracing::info!(
"\n{}",
"═══ DATABASE OPTIMIZATION STARTED ═══".yellow().bold()
);
let start_time = std::time::Instant::now();
tracing::info!("Running database optimization...");

// Rebuild all scalar indices to ensure they're optimal
println!("{}", " → Rebuilding scalar indices...".cyan());
tracing::info!("{}", " → Rebuilding scalar indices...".cyan());
self.rebuild_indices().await?;
println!("{}", " ✓ Scalar indices rebuilt".green());
tracing::info!("{}", " ✓ Scalar indices rebuilt".green());

// Run table optimization
println!("{}", " → Optimizing tables...".cyan());
tracing::info!("{}", " → Optimizing tables...".cyan());
self.optimize_tables().await?;
println!("{}", " ✓ Tables optimized".green());
tracing::info!("{}", " ✓ Tables optimized".green());

// Compact and cleanup (triggers compression)
println!("{}", " → Compacting and pruning old versions...".cyan());
tracing::info!("{}", " → Compacting and pruning old versions...".cyan());
self.compact_and_cleanup().await?;
println!("{}", " ✓ Compaction complete".green());
tracing::info!("{}", " ✓ Compaction complete".green());

let elapsed = start_time.elapsed();
println!(
tracing::info!(
"{}",
format!(
"═══ DATABASE OPTIMIZATION COMPLETE ({:.1}s) ═══\n",
26 changes: 13 additions & 13 deletions src/git_range.rs
@@ -749,7 +749,7 @@ pub async fn process_git_range(
let commit_count = commit_shas.len();

if !commit_shas.is_empty() {
println!(
info!(
"Checking for {} commits already in database...",
commit_count
);
@@ -768,13 +768,13 @@ pub async fn process_git_range(

let already_indexed = commit_count - new_commit_shas.len();
if already_indexed > 0 {
println!(
info!(
"{} commits already indexed, processing {} new commits",
already_indexed,
new_commit_shas.len()
);
} else {
println!("Processing all {} new commits", new_commit_shas.len());
info!("Processing all {} new commits", new_commit_shas.len());
}

if !new_commit_shas.is_empty() {
@@ -799,7 +799,7 @@ pub async fn process_git_range(
commit_extraction_start.elapsed().as_secs_f64()
);
} else {
println!("All commits in range are already indexed!");
info!("All commits in range are already indexed!");
}
} else {
info!("No commits found in range");
@@ -843,24 +843,24 @@ pub async fn process_git_range(

let total_time = start_time.elapsed();

println!("\n=== Git Range Pipeline Complete ===");
println!("Total time: {:.1}s", total_time.as_secs_f64());
println!("Commits indexed: {commit_count}");
println!("Files processed: {}", stats.files_processed);
println!("Functions indexed: {}", stats.functions_count);
println!("Types indexed: {}", stats.types_count);
info!("\n=== Git Range Pipeline Complete ===");
info!("Total time: {:.1}s", total_time.as_secs_f64());
info!("Commits indexed: {commit_count}");
info!("Files processed: {}", stats.files_processed);
info!("Functions indexed: {}", stats.functions_count);
info!("Types indexed: {}", stats.types_count);

// Check if optimization is needed after git range indexing
match db_manager.check_optimization_health().await {
Ok((needs_optimization, message)) => {
if needs_optimization {
println!("\n{}", message);
info!("\n{}", message);
match db_manager.optimize_database().await {
Ok(_) => println!("Database optimization completed successfully"),
Ok(_) => info!("Database optimization completed successfully"),
Err(e) => error!("Failed to optimize database: {}", e),
}
} else {
println!("\n{}", message);
info!("\n{}", message);
}
}
Err(e) => {
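The check-then-optimize flow at the end of this hunk also appears in `check_and_optimize_if_needed` in src/indexer.rs below. A hedged sketch of that shared pattern, using only the two `DatabaseManager` methods visible in this diff; the trait, the dummy implementation, and the helper name are hypothetical and exist only so the snippet compiles and runs on its own:

```rust
use anyhow::Result;

// Hypothetical trait capturing just the two methods this diff relies on; the real
// type is semcode's DatabaseManager in src/database/connection.rs.
trait OptimizableDb {
    async fn check_optimization_health(&self) -> Result<(bool, String)>;
    async fn optimize_database(&self) -> Result<()>;
}

/// Sketch of the shared flow: report health, and only run the (expensive)
/// optimization when the fragment threshold has been exceeded.
async fn optimize_if_needed<D: OptimizableDb>(db: &D) {
    match db.check_optimization_health().await {
        Ok((needs_optimization, message)) => {
            tracing::info!("{}", message);
            if needs_optimization {
                match db.optimize_database().await {
                    Ok(_) => tracing::info!("Database optimization completed successfully"),
                    Err(e) => tracing::error!("Failed to optimize database: {}", e),
                }
            }
        }
        Err(e) => tracing::warn!("Could not check optimization health: {}", e),
    }
}

// Dummy implementation so the sketch runs without a real database.
struct DummyDb;

impl OptimizableDb for DummyDb {
    async fn check_optimization_health(&self) -> Result<(bool, String)> {
        Ok((false, "Fragmentation within limits, no optimization needed".to_string()))
    }
    async fn optimize_database(&self) -> Result<()> {
        Ok(())
    }
}

#[tokio::main]
async fn main() {
    // Log to stderr, matching the subscriber configured in src/lib.rs.
    tracing_subscriber::fmt().with_writer(std::io::stderr).init();
    optimize_if_needed(&DummyDb).await;
}
```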
22 changes: 11 additions & 11 deletions src/indexer.rs
@@ -53,14 +53,14 @@ pub async fn check_and_optimize_if_needed(
match db_manager.check_optimization_health().await {
Ok((needs_optimization, message)) => {
if needs_optimization {
println!("{}", message);
println!(
info!("{}", message);
info!(
"\n{} Fragment threshold exceeded during indexing, running optimization...",
"⚠️".yellow()
);
match db_manager.optimize_database().await {
Ok(_) => {
println!("{} In-progress optimization completed", "✓".green());
info!("{} In-progress optimization completed", "✓".green());
}
Err(e) => {
warn!("In-progress optimization failed: {}", e);
@@ -1027,11 +1027,11 @@ pub async fn index_git_commits(
let commit_count = commit_shas.len();

if commit_shas.is_empty() {
println!("No commits found in range: {}", git_range);
info!("No commits found in range: {}", git_range);
return Ok(0);
}

println!(
info!(
"Checking for {} commits already in database...",
commit_count
);
@@ -1050,17 +1050,17 @@ pub async fn index_git_commits(

let already_indexed = commit_count - new_commit_shas.len();
if already_indexed > 0 {
println!(
info!(
"{} commits already indexed, processing {} new commits",
already_indexed,
new_commit_shas.len()
);
} else {
println!("Processing all {} new commits", new_commit_shas.len());
info!("Processing all {} new commits", new_commit_shas.len());
}

if new_commit_shas.is_empty() {
println!("All commits in range are already indexed!");
info!("All commits in range are already indexed!");
return Ok(commit_count);
}

@@ -1083,9 +1083,9 @@ pub async fn index_git_commits(

let total_time = start_time.elapsed();

println!("\n=== Commit Indexing Complete ===");
println!("Total time: {:.1}s", total_time.as_secs_f64());
println!("Commits indexed: {}", commit_count);
info!("\n=== Commit Indexing Complete ===");
info!("Total time: {:.1}s", total_time.as_secs_f64());
info!("Commits indexed: {}", commit_count);

Ok(commit_count)
}
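Both this function and `process_git_range` log the same "already indexed vs. new" split before doing any work. A small, self-contained sketch of that filtering step, with the database lookup replaced by a plain `HashSet` (`partition_commits` and the sample SHAs are hypothetical; only the counting logic mirrors the diff):

```rust
use std::collections::HashSet;

/// Keep only the commits that are not yet in the database and report the split,
/// mirroring the log lines in index_git_commits / process_git_range.
fn partition_commits(commit_shas: Vec<String>, already_in_db: &HashSet<String>) -> Vec<String> {
    let commit_count = commit_shas.len();
    let new_commit_shas: Vec<String> = commit_shas
        .into_iter()
        .filter(|sha| !already_in_db.contains(sha))
        .collect();

    let already_indexed = commit_count - new_commit_shas.len();
    if already_indexed > 0 {
        tracing::info!(
            "{} commits already indexed, processing {} new commits",
            already_indexed,
            new_commit_shas.len()
        );
    } else {
        tracing::info!("Processing all {} new commits", new_commit_shas.len());
    }
    new_commit_shas
}

fn main() {
    tracing_subscriber::fmt().with_writer(std::io::stderr).init();

    let indexed: HashSet<String> = HashSet::from(["abc123".to_string()]);
    let range = vec!["abc123".to_string(), "def456".to_string()];

    assert_eq!(partition_commits(range, &indexed), vec!["def456".to_string()]);
}
```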
5 changes: 4 additions & 1 deletion src/lib.rs
@@ -81,6 +81,9 @@ pub mod logging {
.add_directive("DatasetRecordBatchStream=error".parse().unwrap());
}

tracing_subscriber::fmt().with_env_filter(env_filter).init();
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.with_writer(std::io::stderr)
.init();
}
}
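The subscriber change above is what makes the `println!` → `tracing::info!` conversions throughout this PR safe for the MCP stdio transport: stdout has to carry nothing but JSON-RPC messages, so diagnostics go to stderr. A minimal, standalone illustration of that split (the message content is made up):

```rust
use serde_json::json;

fn main() {
    // Send all tracing output to stderr so stdout stays a clean JSON-RPC channel.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::new("info"))
        .with_writer(std::io::stderr)
        .init();

    // Diagnostics: stderr only, never interleaved with protocol traffic.
    tracing::info!("Running database optimization...");

    // Protocol traffic: stdout only, one JSON message per line.
    let response = json!({ "jsonrpc": "2.0", "id": 1, "result": {} });
    println!("{response}");
}
```

Running it with stderr redirected (e.g. `cargo run 2>/dev/null`) should print only the JSON object, which is exactly what an MCP client on the other end of the pipe expects.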