Skip to content

Commit e74c4c5

Browse files
committed
🐛 Fix processing time format error and add diff parsing debug logging

🔧 CRITICAL FIXES:
- Fix TypeError when processing_time is None in result logging
- Handle None processing_time gracefully with fallback to 0.0

🔍 DEBUG IMPROVEMENTS:
- Add comprehensive debug logging to diff parsing
- Log unidiff PatchSet creation and file processing details
- Track file skipping reasons (binary files, invalid paths, no hunks)
- Add detailed logging for troubleshooting 0-file parsing issue

📊 ENHANCED LOGGING:
- Better visibility into diff parsing process
- Debug logs for each file conversion step
- Warning indicators for skipped files
- Preview of diff content when parsing fails

This addresses the immediate runtime error and provides tools to diagnose why diff parsing is returning 0 files despite having content.
1 parent 55784bc commit e74c4c5

File tree

2 files changed

+15
-5
lines changed

2 files changed

+15
-5
lines changed

gemini_reviewer/diff_parser.py

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -47,8 +47,11 @@ def parse_diff(self, diff_content: str) -> List[DiffFile]:
4747
if diff_files:
4848
logger.info(f"Successfully parsed {len(diff_files)} files using unidiff")
4949
return diff_files
50+
else:
51+
logger.warning("Unidiff parsing returned 0 files, trying manual parsing")
5052
except Exception as e:
5153
logger.warning(f"Unidiff parsing failed: {str(e)}, trying manual parsing")
54+
logger.debug(f"Diff content preview: {diff_content[:500]}...")
5255

5356
try:
5457
# Fallback to manual parsing
@@ -63,20 +66,26 @@ def _parse_with_unidiff(self, diff_content: str) -> List[DiffFile]:
6366
"""Parse diff using the unidiff library."""
6467
try:
6568
patch_set = PatchSet(diff_content)
69+
logger.debug(f"Unidiff PatchSet created with {len(patch_set)} files")
6670
diff_files = []
6771

68-
for patched_file in patch_set:
72+
for i, patched_file in enumerate(patch_set):
73+
logger.debug(f"Processing patched file {i+1}: {patched_file.source_file} -> {patched_file.target_file}")
6974
diff_file = self._convert_patched_file(patched_file)
7075
if diff_file:
7176
diff_files.append(diff_file)
7277
self._parsed_files_count += 1
78+
logger.debug(f"✅ Successfully converted file: {diff_file.file_info.path}")
7379
else:
7480
self._skipped_files_count += 1
81+
logger.debug(f"⚠️ Skipped file: {patched_file.source_file} -> {patched_file.target_file}")
7582

83+
logger.info(f"Unidiff parsing completed: {len(diff_files)} files processed, {self._skipped_files_count} skipped")
7684
return diff_files
7785

7886
except Exception as e:
79-
logger.debug(f"Unidiff parsing error: {str(e)}")
87+
logger.warning(f"Unidiff parsing error: {str(e)}")
88+
logger.debug(f"Diff content preview: {diff_content[:1000]}...")
8089
raise
8190

8291
def _convert_patched_file(self, patched_file: PatchedFile) -> Optional[DiffFile]:
@@ -113,7 +122,7 @@ def _convert_patched_file(self, patched_file: PatchedFile) -> Optional[DiffFile]
113122

114123
# Skip binary files
115124
if file_info.is_binary:
116-
logger.debug(f"Skipping binary file: {file_path}")
125+
logger.debug(f"⚠️ Skipping binary file: {file_path}")
117126
return None
118127

119128
# Convert hunks
@@ -124,7 +133,7 @@ def _convert_patched_file(self, patched_file: PatchedFile) -> Optional[DiffFile]
124133
hunks.append(hunk_info)
125134

126135
if not hunks:
127-
logger.debug(f"No valid hunks found for file: {file_path}")
136+
logger.debug(f"⚠️ No valid hunks found for file: {file_path}")
128137
return None
129138

130139
diff_file = DiffFile(file_info=file_info, hunks=hunks)

review_code_gemini.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,7 +161,8 @@ async def _log_review_results(result: ReviewResult, reviewer: CodeReviewer):
161161
logger.info(f"PR: #{result.pr_details.pull_number} - {result.pr_details.title}")
162162
logger.info(f"Files processed: {result.processed_files}")
163163
logger.info(f"Comments generated: {result.total_comments}")
164-
logger.info(f"Processing time: {result.processing_time:.2f}s")
164+
processing_time = result.processing_time or 0.0
165+
logger.info(f"Processing time: {processing_time:.2f}s")
165166

166167
# Comment breakdown by priority
167168
if result.comments:

0 commit comments

Comments (0)