Add configurable model option to AI chat
- Added 'model' field to chatOptions struct
- Replaced hardcoded model with aiChat.options.model
- Added command line flag for model selection
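
For example, a user could then pick the model per invocation, falling back to gpt-3.5-turbo when the flag is omitted (the binary name aichat is assumed here from the file name, and gpt-4 is just an illustrative value):

    aichat --model gpt-4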
tkawachi committed Mar 19, 2023
1 parent 7af30bd commit 054c1d6
Showing 1 changed file with 8 additions and 4 deletions.
12 changes: 8 additions & 4 deletions aichat.go
@@ -16,6 +16,7 @@ import (
 )
 
 type chatOptions struct {
+    model        string
     temperature  float32
     maxTokens    int
     nonStreaming bool
@@ -87,7 +88,7 @@ func (aiChat *AIChat) stdChatLoop() error {
     })
     fmt.Print("assistant: ")
     request := gogpt.ChatCompletionRequest{
-        Model:       gogpt.GPT3Dot5Turbo,
+        Model:       aiChat.options.model,
         Messages:    messages,
         Temperature: aiChat.options.temperature,
         MaxTokens:   aiChat.options.maxTokens,
@@ -143,7 +144,7 @@ func (aiChat *AIChat) fold(prompt *Prompt, input string) error {
     firstInput := aiChat.encoder.Decode(firstEncoded)
     temperature := firstNonZeroFloat32(aiChat.options.temperature, prompt.Temperature)
     firstRequest := gogpt.ChatCompletionRequest{
-        Model:       gogpt.GPT3Dot5Turbo,
+        Model:       aiChat.options.model,
         Messages:    prompt.CreateMessages(firstInput),
         Temperature: temperature,
     }
@@ -183,7 +184,7 @@ func (aiChat *AIChat) fold(prompt *Prompt, input string) error {
     }
     input := aiChat.encoder.Decode(encoded[idx:nextIdx])
     request := gogpt.ChatCompletionRequest{
-        Model:       gogpt.GPT3Dot5Turbo,
+        Model:       aiChat.options.model,
         Messages:    prompt.CreateSubsequentMessages(output, input),
         Temperature: temperature,
     }
@@ -214,12 +215,14 @@ func main() {
     var listPrompts = false
     var nonStreaming = false
     var split = false
+    var model = gogpt.GPT3Dot5Turbo
     getopt.FlagLong(&temperature, "temperature", 't', "temperature")
     getopt.FlagLong(&maxTokens, "max-tokens", 'm', "max tokens, 0 to use default")
     getopt.FlagLong(&verbose, "verbose", 'v', "verbose output")
     getopt.FlagLong(&listPrompts, "list-prompts", 'l', "list prompts")
     getopt.FlagLong(&nonStreaming, "non-streaming", 0, "non streaming mode")
     getopt.FlagLong(&split, "split", 0, "split input")
+    getopt.FlagLong(&model, "model", 0, "model")
     getopt.Parse()
 
     if listPrompts {
@@ -234,6 +237,7 @@ func main() {
         log.Fatal(err)
     }
     options := chatOptions{
+        model:        model,
         temperature:  temperature,
         maxTokens:    maxTokens,
         nonStreaming: nonStreaming,
@@ -303,7 +307,7 @@ func main() {
     for _, messages := range messagesSlice {
 
         request := gogpt.ChatCompletionRequest{
-            Model:       gogpt.GPT3Dot5Turbo,
+            Model:       model,
             Messages:    messages,
             Temperature: firstNonZeroFloat32(prompt.Temperature, aiChat.options.temperature),
             MaxTokens:   maxTokens,
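
Taken together, the change follows a common pattern: declare a flag variable whose default is the previously hardcoded constant, register it with getopt, and thread it through the options struct into every ChatCompletionRequest. Below is a minimal, self-contained sketch of that pattern, not the actual aichat.go; it assumes the libraries the diff appears to use (github.com/pborman/getopt/v2 and the go-gpt3 client imported as gogpt) and an OPENAI_API_KEY environment variable:

package main

import (
    "context"
    "fmt"
    "log"
    "os"

    "github.com/pborman/getopt/v2"
    gogpt "github.com/sashabaranov/go-gpt3"
)

func main() {
    // Default to the constant that was previously hardcoded, so behavior
    // is unchanged when --model is not given on the command line.
    model := gogpt.GPT3Dot5Turbo
    getopt.FlagLong(&model, "model", 0, "model")
    getopt.Parse()

    client := gogpt.NewClient(os.Getenv("OPENAI_API_KEY"))
    resp, err := client.CreateChatCompletion(context.Background(),
        gogpt.ChatCompletionRequest{
            // The flag value flows into the request in place of the
            // hardcoded gogpt.GPT3Dot5Turbo.
            Model: model,
            Messages: []gogpt.ChatCompletionMessage{
                {Role: "user", Content: "Hello"},
            },
        })
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(resp.Choices[0].Message.Content)
}

Defaulting the flag to the old constant keeps existing invocations behaving exactly as before.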
