
Commit 3187182

Add auto-assign feature in GitHub Actions for PRs

- Add logic to check existing reviewers before assignment
- Skip auto-assignment if reviewers already exist (unless --force-assign)
- Improve logging and error handling with status messages
- Add force-assign option for manual override
- Switch back to pull_request trigger for proper access
- Fix workflow parameter handling for force_assign option

Signed-off-by: Venky Ganesh <[email protected]>
1 parent 6ee94c7 commit 3187182

File tree

5 files changed: +285 -2 lines changed


.github/module-paths.json

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
{
    "cpp/": "Generic Runtime",
    "triton_backend/": "Triton Backend",
    "tensorrt_llm/_torch/peft/": "Lora/P-tuning",
    "tensorrt_llm/": "LLM API/Workflow",
    "benchmarks/": "Performance",
    "examples/disaggregated/": "Disaggregated Serving",
    "docs/": "Documentation",
    "docker/": "Installation",
    ".github/": "CI/CD",
    "jenkins/": "CI/CD",
    "tensorrt_llm/_torch/": "Torch Framework",
    "tensorrt_llm/_torch/attention_backend/": "Torch Attention Backend",
    "tensorrt_llm/_torch/auto_deploy/": "Torch AutoDeploy",
    "tensorrt_llm/_torch/compilation/": "Torch Compilation",
    "tensorrt_llm/_torch/custom_ops/": "Torch Custom Ops",
    "tensorrt_llm/_torch/distributed/": "Torch Distributed",
    "tensorrt_llm/_torch/pyexecutor/": "Torch PyExecutor",
    "tensorrt_llm/_torch/speculative/": "Torch Speculative",
    "tensorrt_llm/autotuner.py": "Autotuner",
    "tensorrt_llm/pipeline_interface.py": "Pipeline Interface",
    "tensorrt_llm/_torch/models/": "Torch Models",
    "tensorrt_llm/_torch/models/modeling_deepseekv3.py": "Torch Models DeepSeekV3",
    "tensorrt_llm/_torch/models/modeling_llama.py": "Torch Models Llama",
    "tensorrt_llm/_torch/modules/": "Torch Modules",
    "tensorrt_llm/_torch/modules/attention.py": "Torch Modules Attention",
    "tensorrt_llm/_torch/modules/fused_moe.py": "Torch Modules Fused MOE",
    "tests/unittest/_torch/": "Torch Tests",
    "examples/pytorch/": "PyTorch Examples"
}
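
For orientation (not part of the commit): the assignment script below consumes this map by testing each changed file against these prefixes with `str.startswith`, and the first matching prefix wins, so ordering in this file matters. A minimal standalone sketch of that lookup, assuming it is run from the repository root:

```python
import json
from pathlib import Path

# Load the prefix -> module map added above (run from the repository root).
module_paths = json.loads(Path(".github/module-paths.json").read_text())


def module_for(changed_file: str) -> str | None:
    # Mirrors map_modules() in the assignment script: first match wins.
    for prefix, module in module_paths.items():
        if changed_file.startswith(prefix):
            return module
    return None


print(module_for("tensorrt_llm/_torch/peft/lora.py"))  # -> Lora/P-tuning
print(module_for("docs/source/index.rst"))  # -> Documentation
```
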
.github/scripts/assign_reviewers.py

Lines changed: 186 additions & 0 deletions
@@ -0,0 +1,186 @@
import argparse
import json
import os
import random
import subprocess
import sys
from pathlib import Path


def get_pr_changed_files(pr_number: str) -> list[str]:
    """Get files changed in PR using GitHub CLI (more reliable than git diff)"""
    result = subprocess.run(
        [
            "gh", "pr", "view", pr_number, "--json", "files", "--jq",
            ".files[].path"
        ],
        capture_output=True,
        text=True,
        check=True,
    )
    return [line.strip() for line in result.stdout.splitlines() if line.strip()]


def get_existing_reviewers(pr_number: str) -> tuple[set[str], set[str]]:
    """Get currently assigned reviewers (users and teams) for a PR"""
    try:
        # Get user reviewers
        user_result = subprocess.run(
            [
                "gh", "pr", "view", pr_number, "--json", "reviewRequests",
                "--jq",
                "(.reviewRequests // []) | .[] | select(.login) | .login"
            ],
            capture_output=True,
            text=True,
            check=True,
        )
        user_reviewers = {
            line.strip()
            for line in user_result.stdout.splitlines() if line.strip()
        }

        # Get team reviewers
        team_result = subprocess.run(
            [
                "gh", "pr", "view", pr_number, "--json", "reviewRequests",
                "--jq", "(.reviewRequests // []) | .[] | select(.name) | .name"
            ],
            capture_output=True,
            text=True,
            check=True,
        )
        team_reviewers = {
            line.strip()
            for line in team_result.stdout.splitlines() if line.strip()
        }

        return user_reviewers, team_reviewers
    except subprocess.CalledProcessError as e:
        print(f"Warning: Could not fetch existing reviewers: {e}")
        return set(), set()


def load_json(path: str):
    with open(path, "r", encoding="utf-8") as f:
        return json.load(f)


def map_modules(changed_files: list[str],
                module_paths: dict[str, str]) -> set[str]:
    modules: set[str] = set()
    for file in changed_files:
        for prefix, module in module_paths.items():
            if file.startswith(prefix):
                modules.add(module)
                break
    return modules


def gather_reviewers(modules: set[str],
                     module_owners: dict[str, list[str]],
                     *,
                     pr_author: str | None = None,
                     existing_reviewers: set[str] | None = None) -> list[str]:
    reviewers: set[str] = set()
    for module in modules:
        reviewers.update(module_owners.get(module, []))

    if pr_author:
        reviewers.discard(pr_author)

    # Remove existing reviewers to avoid duplicate assignments
    if existing_reviewers:
        reviewers -= existing_reviewers

    return sorted(reviewers)


def main() -> None:
    parser = argparse.ArgumentParser(
        description="Assign reviewers based on changed modules")
    parser.add_argument("--dry-run",
                        action="store_true",
                        help="Print the gh command instead of executing")
    parser.add_argument(
        "--force-assign",
        action="store_true",
        help=
        "Assign reviewers even if some already exist (default: only assign if no reviewers)"
    )
    args = parser.parse_args()

    pr_number = os.environ["PR_NUMBER"]
    reviewer_limit = int(os.environ.get("REVIEWER_LIMIT", "0"))
    pr_author = os.environ.get("PR_AUTHOR")

    print(f"Testing PR #{pr_number} with author: {pr_author}")

    # Check existing reviewers
    existing_user_reviewers, existing_team_reviewers = get_existing_reviewers(
        pr_number)
    total_existing = len(existing_user_reviewers) + len(existing_team_reviewers)

    print(f"Existing user reviewers: {sorted(existing_user_reviewers)}")
    print(f"Existing team reviewers: {sorted(existing_team_reviewers)}")

    # Skip assignment if reviewers already exist (unless forced)
    if total_existing > 0 and not args.force_assign:
        print(
            f"✅ PR already has {total_existing} reviewer(s) assigned. Skipping auto-assignment."
        )
        print("   Use --force-assign to assign additional reviewers.")
        return

    try:
        changed_files = get_pr_changed_files(pr_number)
        print(f"Changed files: {changed_files}")

        module_paths = load_json(Path(".github") / "module-paths.json")
        module_owners = load_json(
            Path(".github/workflows") / "module-owners.json")

        modules = map_modules(changed_files, module_paths)
        reviewers = gather_reviewers(
            modules,
            module_owners,
            pr_author=pr_author,
            existing_reviewers=
            existing_user_reviewers  # Avoid re-assigning existing users
        )

        if reviewer_limit and len(reviewers) > reviewer_limit:
            reviewers = random.sample(reviewers, reviewer_limit)

        print(f"Changed modules: {sorted(modules)}")
        print(f"Potential reviewers: {reviewers}")

        if reviewers:
            cmd = ["gh", "pr", "edit", pr_number]
            for reviewer in reviewers:
                cmd.extend(["--add-reviewer", reviewer])

            if args.dry_run:
                print(f"🔍 DRY RUN: {' '.join(cmd)}")
            else:
                try:
                    subprocess.run(cmd, check=True)
                    print(
                        f"✅ Successfully assigned {len(reviewers)} new reviewer(s)"
                    )
                except subprocess.CalledProcessError as e:
                    print(f"❌ Failed to add reviewers: {e}", file=sys.stderr)
                    print(
                        "   This might be due to permissions or invalid usernames"
                    )
                    sys.exit(1)
        else:
            print("✅ No new reviewers to assign")

    except subprocess.CalledProcessError as e:
        print(f"❌ Error processing PR: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
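
The selection logic above (prefix matching, author filtering, removal of already-requested reviewers) never touches the network, so it can be sanity-checked without a token. A minimal sketch, assuming the script lives at `.github/scripts/assign_reviewers.py` (the path referenced by the workflow and CONTRIBUTING.md) and using made-up owner data:

```python
# sanity_check_assign.py -- hypothetical helper, run from the repository root.
import importlib.util

spec = importlib.util.spec_from_file_location(
    "assign_reviewers", ".github/scripts/assign_reviewers.py")
assign = importlib.util.module_from_spec(spec)
spec.loader.exec_module(assign)

# Each changed file maps to the module of the first matching prefix.
paths = {"docs/": "Documentation", ".github/": "CI/CD"}
assert assign.map_modules(["docs/index.md", ".github/ci.yml"],
                          paths) == {"Documentation", "CI/CD"}

# The PR author ("bob") and already-requested reviewers ("carol") are
# filtered out, so only "alice" is proposed.
owners = {"Documentation": ["alice", "bob"], "CI/CD": ["bob", "carol"]}
assert assign.gather_reviewers({"Documentation", "CI/CD"},
                               owners,
                               pr_author="bob",
                               existing_reviewers={"carol"}) == ["alice"]
print("Selection logic OK")
```

The parts that call `gh` (`get_pr_changed_files`, `get_existing_reviewers`, the final `gh pr edit`) are better exercised with the `--dry-run` invocation documented in CONTRIBUTING.md below.
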
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
name: Auto assign reviewers
on:
  pull_request:
    types: [opened, synchronize, reopened]
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to test assignment on'
        required: true
        type: string
      dry_run:
        description: 'Run in dry-run mode (just print commands)'
        required: false
        type: boolean
        default: false
      force_assign:
        description: 'Force assign even if reviewers already exist'
        required: false
        type: boolean
        default: false
jobs:
  assign:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: read
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Assign reviewers
        env:
          PR_NUMBER: ${{ github.event.inputs.pr_number || github.event.pull_request.number }}
          PR_AUTHOR: ${{ github.event.pull_request.user.login || github.event.inputs.pr_author || '' }}
          GH_TOKEN: ${{ secrets.REVIEW_ASSIGNING_TOKEN }}
          REVIEWER_LIMIT: '3'
        run: |
          python3 .github/scripts/assign_reviewers.py \
            ${{ github.event.inputs.dry_run == 'true' && '--dry-run' || '' }} \
            ${{ github.event.inputs.force_assign == 'true' && '--force-assign' || '' }}

.github/workflows/module-owners.json

Lines changed: 21 additions & 2 deletions
@@ -7,10 +7,29 @@
     "Speculative Decoding":["yweng0828", "nekorobov", "lfr-0531"],
     "Customized Kernels":["lowsfer", "PerkzZheng", "jdemouth-nvidia"],
     "Performance": ["kaiyux", "jiahanc", "hypdeb"],
-    "Lora/P-tuning":["byshiue", "Naveassaf"],
+    "Lora/P-tuning":["byshiue", "shaharmor98"],
     "Disaggregated Serving":["Shixiaowei02", "joyang-nv", "chuangz0", "schetlur-nv"],
     "Documentation":["nv-guomingz"],
     "Sampling": ["dcampora", "lfr-0531", "Naveassaf", "syuoni", "yweng0828"],
     "Memory": ["litaotju", "peaceh-nv"],
-    "Installation": ["hchings", "Superjomn", "nv-guomingz", "QiJune"]
+    "Installation": ["hchings", "Superjomn", "nv-guomingz", "QiJune"],
+    "CI/CD": ["chzblych", "syuoni"],
+    "Torch Framework": ["QiJune", "hlu1"],
+    "Torch Attention Backend": ["yuxianq", "hlu1"],
+    "Torch AutoDeploy": ["lucaslie", "suyoggupta"],
+    "Torch Compilation": ["litaotju", "yizhang-nv", "liji-nv"],
+    "Torch Custom Ops": ["yizhang-nv"],
+    "Torch Distributed": ["yilin-void", "yuxianq", "hyukn", "yizhang-nv", "hlu1"],
+    "Torch PyExecutor": ["dongxuy04", "funatiq", "dcampora", "HuiGao-NV"],
+    "Torch Speculative": ["lfr-0531", "mikeiovine"],
+    "Autotuner": ["hyukn", "litaotju"],
+    "Pipeline Interface": ["amukkara", "chang-l"],
+    "Torch Models": ["QiJune", "hlu1"],
+    "Torch Models DeepSeekV3": ["hlu1", "zongfeijing"],
+    "Torch Models Llama": ["chang-l", "mikeiovine"],
+    "Torch Modules": ["QiJune", "hlu1"],
+    "Torch Modules Attention": ["yuxianq", "hlu1"],
+    "Torch Modules Fused MOE": ["hlu1", "dongxuy04", "zongfeijing", "HuiGao-NV"],
+    "Torch Tests": ["QiJune", "hlu1"],
+    "PyTorch Examples": ["QiJune", "hlu1"]
 }

CONTRIBUTING.md

Lines changed: 7 additions & 0 deletions
@@ -93,6 +93,13 @@ Developer workflow for code contributions is as follows:
 3. Once the code changes are staged on the fork and ready for review, a [Pull Request](https://help.github.com/en/articles/about-pull-requests) (PR) can be [requested](https://help.github.com/en/articles/creating-a-pull-request) to merge the changes from a branch of the fork into a selected branch of upstream. PRs should typically target the `main` branch.
     * Creation of a PR creation kicks off the code review process.
     * At least one TensorRT-LLM engineer will be assigned for the review. When the PR is under review, the label `Pending Review` will be added to the PR.
+    * Reviewers are automatically requested based on the modules affected in the PR. Module paths are defined in `.github/module-paths.json` and ownership in `.github/workflows/module-owners.json`.
+    * You can test the assignment script locally with the `--dry-run` flag:
+      ```bash
+      GH_TOKEN=<token> BASE_SHA=<base> HEAD_SHA=<head> PR_NUMBER=<pr> \
+      PR_AUTHOR=<username> \
+      python3 .github/scripts/assign_reviewers.py --dry-run
+      ```
     * If changes are requested, then the reviewer will add the label `Changes Requested` to the PR.
     * Once changes are approved, CI will be launched to validate the change. When CI passes, the reviewer will merge the PR.
     * If CI reports any failures, it's up to the requester to fix any CI failures before requesting another review.
