diff --git a/README.md b/README.md
index 6287b8da..8ae1d06e 100644
--- a/README.md
+++ b/README.md
@@ -180,6 +180,12 @@ To clear the proxy option, you can use the command (note the empty value after t
 aicommits config set proxy=
 ```
 
+#### model
+
+Default: `gpt-3.5-turbo`
+
+The Chat Completions (`/v1/chat/completions`) model to use. Consult the list of models available in the [OpenAI Documentation](https://platform.openai.com/docs/models/model-endpoint-compatibility).
+
 ## How it works
 
 This CLI tool runs `git diff` to grab all your latest code changes, sends them to OpenAI's GPT-3, then returns the AI generated commit message.
diff --git a/src/commands/aicommits.ts b/src/commands/aicommits.ts
index eb457a7b..06bda6db 100644
--- a/src/commands/aicommits.ts
+++ b/src/commands/aicommits.ts
@@ -54,6 +54,7 @@ export default async (
 	try {
 		messages = await generateCommitMessage(
 			config.OPENAI_KEY,
+			config.model,
 			config.locale,
 			staged.diff,
 			config.generate,
diff --git a/src/commands/prepare-commit-msg-hook.ts b/src/commands/prepare-commit-msg-hook.ts
index 51799021..8444d510 100644
--- a/src/commands/prepare-commit-msg-hook.ts
+++ b/src/commands/prepare-commit-msg-hook.ts
@@ -41,6 +41,7 @@ export default () => (async () => {
 	try {
 		messages = await generateCommitMessage(
 			config.OPENAI_KEY,
+			config.model,
 			config.locale,
 			staged!.diff,
 			config.generate,
diff --git a/src/utils/config.ts b/src/utils/config.ts
index d3e0befe..94806dcc 100644
--- a/src/utils/config.ts
+++ b/src/utils/config.ts
@@ -2,6 +2,7 @@ import fs from 'fs/promises';
 import path from 'path';
 import os from 'os';
 import ini from 'ini';
+import type { TiktokenModel } from '@dqbd/tiktoken';
 import { fileExists } from './fs.js';
 import { KnownError } from './error.js';
 
@@ -60,6 +61,13 @@
 		return url;
 	},
+	model(model?: string) {
+		if (!model || model.length === 0) {
+			return 'gpt-3.5-turbo';
+		}
+
+		return model as TiktokenModel;
+	},
 } as const;
 
 type ConfigKeys = keyof typeof configParsers;
 
diff --git a/src/utils/openai.ts b/src/utils/openai.ts
index 59682c0c..e7dcf303 100644
--- a/src/utils/openai.ts
+++ b/src/utils/openai.ts
@@ -1,7 +1,7 @@
 import https from 'https';
 import type { ClientRequest, IncomingMessage } from 'http';
 import type { CreateChatCompletionRequest, CreateChatCompletionResponse } from 'openai';
-import { encoding_for_model as encodingForModel } from '@dqbd/tiktoken';
+import { type TiktokenModel, encoding_for_model as encodingForModel } from '@dqbd/tiktoken';
 import createHttpsProxyAgent from 'https-proxy-agent';
 import { KnownError } from './error.js';
 
@@ -99,10 +99,9 @@ const deduplicateMessages = (array: string[]) => Array.from(new Set(array));
 
 const getPrompt = (locale: string, diff: string) => `Write an insightful but concise Git commit message in a complete sentence in present tense for the following diff without prefacing it with anything, the response must be in the language ${locale}:\n${diff}`;
 
-const model = 'gpt-3.5-turbo';
-
 export const generateCommitMessage = async (
 	apiKey: string,
+	model: TiktokenModel,
 	locale: string,
 	diff: string,
 	completions: number,
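
A rough usage sketch for the new option, assuming the same `aicommits config set KEY=VALUE` syntax the README already shows for `proxy`; `gpt-4` is only an illustrative model name, not something this diff prescribes:

```sh
# Hypothetical: point aicommits at a different chat model
# (any model listed in the linked OpenAI docs should be accepted)
aicommits config set model=gpt-4

# An empty value falls back to the default `gpt-3.5-turbo`,
# per the model() parser added in src/utils/config.ts above
aicommits config set model=
```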