Skip to content

Commit 89f7e41

Browse files
committed
✨ feat(version): update project version to 0.0.9
- update version in package.json and package-lock.json

♻️ refactor(generator): improve commit message generation
- format function parameters for readability
- enhance error handling and progress reporting
- improve code structure and readability
- update error messages for clarity

💄 style(utils): adjust progress notification title
- capitalize extension name in progress notification
1 parent fce0d6d commit 89f7e41

4 files changed

Lines changed: 75 additions & 72 deletions

File tree

package-lock.json

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
"name": "ai-commit",
33
"displayName": "AI Commit",
44
"description": "Use Azure/OpenAI API to review Git changes, generate conventional commit messages that meet the conventions, simplify the commit process, and keep the commit conventions consistent.",
5-
"version": "0.0.8",
5+
"version": "0.0.9",
66
"engines": {
77
"node": ">=16",
88
"vscode": "^1.77.0"

src/generate-commit-msg.ts

Lines changed: 72 additions & 69 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,10 @@ import { ProgressHandler } from './utils';
1414
* @param {string} additionalContext - Additional context for the changes.
1515
* @returns {Promise<Array<{ role: string, content: string }>>} - A promise that resolves to an array of messages for the chat completion.
1616
*/
17-
const generateCommitMessageChatCompletionPrompt = async (diff: string, additionalContext?: string) => {
17+
const generateCommitMessageChatCompletionPrompt = async (
18+
diff: string,
19+
additionalContext?: string
20+
) => {
1821
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
1922
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
2023

@@ -64,82 +67,82 @@ export async function getRepo(arg) {
6467
* @returns {Promise<void>} - A promise that resolves when the commit message has been generated and set in the SCM input box.
6568
*/
6669
export async function generateCommitMsg(arg) {
67-
return ProgressHandler.withProgress(
68-
'Generating commit message...',
69-
async (progress) => {
70-
try {
71-
const configManager = ConfigurationManager.getInstance();
72-
const repo = await getRepo(arg);
73-
const apiKey = configManager.getConfig<string>(ConfigKeys.OPENAI_API_KEY);
70+
return ProgressHandler.withProgress('', async (progress) => {
71+
try {
72+
const configManager = ConfigurationManager.getInstance();
73+
const repo = await getRepo(arg);
74+
const apiKey = configManager.getConfig<string>(ConfigKeys.OPENAI_API_KEY);
75+
76+
if (!apiKey) {
77+
throw new Error('OpenAI API Key not configured');
78+
}
7479

75-
if (!apiKey) {
76-
throw new Error('OpenAI API Key not configured');
77-
}
80+
progress.report({ message: 'Getting staged changes...' });
81+
const { diff, error } = await getDiffStaged(repo);
7882

79-
progress.report({ message: 'Getting staged changes...' });
80-
const { diff, error } = await getDiffStaged(repo);
83+
if (error) {
84+
throw new Error(`Failed to get staged changes: ${error}`);
85+
}
8186

82-
if (error) {
83-
throw new Error(`Failed to get staged changes: ${error}`);
84-
}
87+
if (!diff || diff === 'No changes staged.') {
88+
throw new Error('No changes staged for commit');
89+
}
8590

86-
if (!diff || diff === 'No changes staged.') {
87-
throw new Error('No changes staged for commit');
88-
}
91+
const scmInputBox = repo.inputBox;
92+
if (!scmInputBox) {
93+
throw new Error('Unable to find the SCM input box');
94+
}
8995

90-
const scmInputBox = repo.inputBox;
91-
if (!scmInputBox) {
92-
throw new Error('Unable to find the SCM input box');
96+
const additionalContext = scmInputBox.value.trim();
97+
98+
progress.report({
99+
message: additionalContext
100+
? 'Analyzing changes with additional context...'
101+
: 'Analyzing changes...'
102+
});
103+
const messages = await generateCommitMessageChatCompletionPrompt(
104+
diff,
105+
additionalContext
106+
);
107+
108+
progress.report({
109+
message: additionalContext
110+
? 'Generating commit message with additional context...'
111+
: 'Generating commit message...'
112+
});
113+
try {
114+
const commitMessage = await ChatGPTAPI(
115+
messages as ChatCompletionMessageParam[]
116+
);
117+
if (commitMessage) {
118+
scmInputBox.value = commitMessage;
119+
} else {
120+
throw new Error('Failed to generate commit message');
93121
}
94-
95-
const additionalContext = scmInputBox.value.trim();
96-
97-
progress.report({
98-
message: additionalContext
99-
? 'Analyzing changes with additional context...'
100-
: 'Analyzing changes...'
101-
});
102-
const messages = await generateCommitMessageChatCompletionPrompt(diff, additionalContext);
103-
104-
progress.report({
105-
message: additionalContext
106-
? 'Generating commit message with additional context...'
107-
: 'Generating commit message...'
108-
});
109-
try {
110-
const commitMessage = await ChatGPTAPI(
111-
messages as ChatCompletionMessageParam[]
112-
);
113-
if (commitMessage) {
114-
scmInputBox.value = commitMessage;
115-
} else {
116-
throw new Error('Failed to generate commit message');
117-
}
118-
} catch (err) {
119-
let errorMessage = 'An unexpected error occurred';
120-
121-
if (err.response?.status) {
122-
switch (err.response.status) {
123-
case 401:
124-
errorMessage = 'Invalid API key or unauthorized access';
125-
break;
126-
case 429:
127-
errorMessage = 'Rate limit exceeded. Please try again later';
128-
break;
129-
case 500:
130-
errorMessage = 'OpenAI server error. Please try again later';
131-
break;
132-
case 503:
133-
errorMessage = 'OpenAI service is temporarily unavailable';
134-
break;
135-
}
122+
} catch (err) {
123+
let errorMessage = 'An unexpected error occurred';
124+
125+
if (err.response?.status) {
126+
switch (err.response.status) {
127+
case 401:
128+
errorMessage = 'Invalid API key or unauthorized access';
129+
break;
130+
case 429:
131+
errorMessage = 'Rate limit exceeded. Please try again later';
132+
break;
133+
case 500:
134+
errorMessage = 'OpenAI server error. Please try again later';
135+
break;
136+
case 503:
137+
errorMessage = 'OpenAI service is temporarily unavailable';
138+
break;
136139
}
137-
138-
throw new Error(errorMessage);
139140
}
140-
} catch (error) {
141-
throw error;
141+
142+
throw new Error(errorMessage);
142143
}
144+
} catch (error) {
145+
throw error;
143146
}
144-
);
147+
});
145148
}

src/utils.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ export class ProgressHandler {
1313
return vscode.window.withProgress(
1414
{
1515
location: vscode.ProgressLocation.Notification,
16-
title: `[ai-commit] ${title}`,
16+
title: `[AI Commit] ${title}`,
1717
cancellable: true
1818
},
1919
task

0 commit comments

Comments (0)