Commit 92cd27f

Feat: enhance commit message generation with AI models
1 parent 8a37930 commit 92cd27f

File tree

2 files changed: +76 -52 lines changed


gitswift.exe (4.54 MB)

Binary file not shown.

server/src/routes/commit_message.rs

Lines changed: 76 additions & 52 deletions
@@ -22,21 +22,39 @@ async fn hello_controller() -> Result<Json<Value>> {
 }
 
 async fn generate_commit_message(payload: Json<RequestPayload>) -> Result<Json<Value>> {
-    let api_key = env::var("GROQ_API_KEY").expect("API key not found.");
+    let api_key = env::var("GROQ_API_KEY").map_err(|e| {
+        eprintln!("Failed to get GROQ_API_KEY: {}", e);
+        crate::errors::Error::UnableToGenerateCommitMessage
+    })?;
+
     let commit_messages = generate_commit_messages(&payload.diff, &api_key).await;
     match commit_messages {
         Ok(messages) => Ok(Json(json!({ "messages": messages }))),
-        Err(_) => Err(crate::errors::Error::UnableToGenerateCommitMessage),
+        Err(e) => {
+            eprintln!("Failed to generate commit messages: {:?}", e);
+            Err(e)
+        }
     }
 }
 
 pub async fn generate_commit_messages(diff: &str, api_key: &str) -> Result<Vec<String>> {
     let client = reqwest::Client::new();
+    let models = vec![
+        "gemma2-9b-it",
+        "llama-3.3-70b-versatile",
+        "llama-3.1-8b-instant"
+    ];
+
+    let mut all_messages = Vec::new();
 
     let mut headers = HeaderMap::new();
     headers.insert(
         AUTHORIZATION,
-        HeaderValue::from_str(&format!("Bearer {}", api_key)).unwrap(),
+        HeaderValue::from_str(&format!("Bearer {}", api_key))
+            .map_err(|e| {
+                eprintln!("Header error: {}", e);
+                crate::errors::Error::UnableToGenerateCommitMessage
+            })?
     );
     headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
 
@@ -47,60 +65,66 @@ pub async fn generate_commit_messages(diff: &str, api_key: &str) -> Result<Vec<String>> {
         ",
         diff
     );
-    let system_prompt = "You are an AI tool that is deployed to analyze the git diff given as prompt by the user and generate 3 different commit messages,
-    each adhering to conventional commit message standards (e.g., 50-character summary line, followed by a detailed body if necessary).
-
-    Each message should have a different focus/perspective but don't include any headers or labels for the options.
-    Just provide the commit messages directly, separated by '---' on a new line.
-
-    Use prefixes like Fix: Refreactor: Feat: Chore: Docs: etc. as needed.
+    let system_prompt = "You are an AI tool that analyzes git diffs and generates commit messages.
+    Generate a single commit message that follows conventional commit message standards (e.g., 50-character summary line, followed by a detailed body if necessary).
+
+    Use prefixes like Fix: Refactor: Feat: Chore: Docs: etc. as needed.
 
     IMPORTANT:
-    Make sure each message follows the conventional/standard commit format and is ready to be used directly.
+    Make sure the message follows the conventional/standard commit format and is ready to be used directly.
+    Don't give any explanation or meta-commentary about the message.
     ";
 
+    for model in models {
+        println!("Trying model: {}", model); // Debug log
+
+        let request_body = GroqRequest {
+            model: model.to_string(),
+            messages: vec![
+                Message {
+                    role: "system".to_string(),
+                    content: system_prompt.to_string(),
+                },
+                Message {
+                    role: "user".to_string(),
+                    content: prompt.clone(),
+                }
+            ],
+            temperature: 0.5,
+        };
+
+        let response = client
+            .post("https://api.groq.com/openai/v1/chat/completions")
+            .headers(headers.clone())
+            .json(&request_body)
+            .send()
+            .await
+            .map_err(|e| {
+                eprintln!("API request error for model {}: {}", model, e);
+                crate::errors::Error::UnableToGenerateCommitMessage
+            })?;
+
+        let response_text = response
+            .text()
+            .await
+            .map_err(|e| {
+                eprintln!("Failed to get response text for model {}: {}", model, e);
+                crate::errors::Error::UnableToGenerateCommitMessage
+            })?;
+
+        println!("Response from {}: {}", model, response_text); // Debug log
+
+        let groq_response: GroqResponse = serde_json::from_str(&response_text)
+            .map_err(|e| {
+                eprintln!("Failed to parse JSON for model {}: {}", model, e);
+                eprintln!("Response text: {}", response_text);
+                crate::errors::Error::UnableToGenerateCommitMessage
+            })?;
+
+        all_messages.push(groq_response.choices[0].message.content.trim().to_string());
+    }
 
-    let request_body = GroqRequest {
-        model: "gemma2-9b-it".to_string(),
-        messages: vec![
-            Message {
-                role: "system".to_string(),
-                content: system_prompt.to_string(),
-            },
-            Message {
-                role: "user".to_string(),
-                content: prompt,
-            }
-        ],
-        temperature: 0.5,
-    };
-
-    let response = client
-        .post("https://api.groq.com/openai/v1/chat/completions")
-        .headers(headers)
-        .json(&request_body)
-        .send()
-        .await
-        .map_err(|_| crate::errors::Error::UnableToGenerateCommitMessage)?;
-
-    let response_text = response
-        .text()
-        .await
-        .map_err(|_| crate::errors::Error::UnableToGenerateCommitMessage)?;
-
-
-    let groq_response: GroqResponse = serde_json::from_str(&response_text)
-        .map_err(|_| crate::errors::Error::UnableToGenerateCommitMessage)?;
-
-    let messages: Vec<String> = groq_response.choices[0]
-        .message
-        .content
-        .split("---")
-        .map(|msg| msg.trim().to_string())
-        .filter(|msg| !msg.is_empty())
-        .collect();
-
-    Ok(messages)
+    Ok(all_messages)
 }
 
 #[derive(Debug, Deserialize)]
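
The diff cuts off at the first #[derive(Debug, Deserialize)], so the request and response types the route relies on (GroqRequest, Message, GroqResponse) are not shown. Below is a minimal sketch of what they would need to look like for the code above to compile, assuming the standard OpenAI-compatible shape of Groq's /chat/completions payloads; the actual definitions lower in the file may differ.

// Hypothetical supporting types (assumed, not taken from this commit).
// The request mirrors the OpenAI-style chat completions body the code serializes
// with .json(&request_body); the response only needs the fields the route reads:
// choices[0].message.content.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize)]
struct GroqRequest {
    model: String,
    messages: Vec<Message>,
    temperature: f32,
}

#[derive(Debug, Serialize)]
struct Message {
    role: String,
    content: String,
}

#[derive(Debug, Deserialize)]
struct GroqResponse {
    choices: Vec<Choice>,
}

#[derive(Debug, Deserialize)]
struct Choice {
    message: ChoiceMessage,
}

#[derive(Debug, Deserialize)]
struct ChoiceMessage {
    content: String,
}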
