const maxDelay = 60 * time.Second
type ChatCompletion struct {
- Opts
- messages []openai.ChatCompletionMessage
-}
-
-type Opts struct {
model string
maxTokens int
stream bool
+ messages []openai.ChatCompletionMessage
}
-type OptFunc func(*Opts)
+type OptFunc func(*ChatCompletion)
+
+// WithMaxTokens returns an option that sets the maximum number of
+// completion tokens requested from the API.
+func WithMaxTokens(n int) OptFunc {
+ return func(c *ChatCompletion) {
+ c.maxTokens = n
+ }
+}
-func withMaxTokens(n int) OptFunc {
- return func(opts *Opts) {
- opts.maxTokens = n
+// WithChatCompletionMessages returns an option that seeds the completion
+// with an existing message history instead of starting empty.
+func WithChatCompletionMessages(m []openai.ChatCompletionMessage) OptFunc {
+ return func(c *ChatCompletion) {
+ c.messages = m
}
}
-func defaultOpts() Opts {
- return Opts{
+func NewChatCompletion(opts ...OptFunc) ChatCompletion {
+ c := ChatCompletion{
model: openai.GPT3Dot5Turbo,
maxTokens: 2048,
stream: true,
+ messages: []openai.ChatCompletionMessage{},
}
-}
-func NewChatCompletion(opts ...OptFunc) ChatCompletion {
- o := defaultOpts()
for _, fn := range opts {
- fn(&o)
+ fn(&c)
}
- return ChatCompletion{Opts: o}
+ return c
}
func (c *ChatCompletion) Message(role, content string) error {
func (c ChatCompletion) Request() openai.ChatCompletionRequest {
return openai.ChatCompletionRequest{
- Model: c.Opts.model,
- MaxTokens: c.Opts.maxTokens,
- Stream: c.Opts.stream,
+ Model: c.model,
+ MaxTokens: c.maxTokens,
+ Stream: c.stream,
Messages: c.messages,
}
+}
+// Messages returns the conversation history held by the completion.
+func (c ChatCompletion) Messages() []openai.ChatCompletionMessage {
+ return c.messages
}
func getEBO(retries int) time.Duration {
}
} else if len(args) == 0 {
i := ""
+
+ msgs := []openai.ChatCompletionMessage{}
for {
- m := model.NewModel(i)
+
+ modelOpts := []model.OptFunc{}
+ if i != "" {
+ modelOpts = append(modelOpts, model.WithInitialValue(i))
+ }
+
+ if len(msgs) >= 1 {
+ modelOpts = append(modelOpts, model.WithChatCompletionMessages(msgs))
+ }
+
+ m := model.NewModel(modelOpts...)
if _, err := tea.NewProgram(m).Run(); err != nil {
return err
}
}
if m.Ready() {
- chatCompletion := chatcompletion.NewChatCompletion()
+ chatCompletionOpts := []chatcompletion.OptFunc{}
+ if len(m.Messages()) >= 1 {
+ chatCompletionOpts = append(chatCompletionOpts, chatcompletion.WithChatCompletionMessages(m.Messages()))
+ }
+ chatCompletion := chatcompletion.NewChatCompletion(chatCompletionOpts...)
if err := chatCompletion.Message("user", m.UserInput.FilteredInput()); err != nil {
return err
}
+
_, err := chatcompletion.StreamChatCompletion(cmd.Context(), c, chatCompletion.Request())
if err != nil {
return err
}
+ msgs = chatCompletion.Messages()
fmt.Println()
m.Reset()
} else {
m.UserInput.Update(content)
return nil
}
+
+// PrintMessagesCommand dumps the model's message history to stdout,
+// for inspecting conversation state interactively.
+type PrintMessagesCommand struct{}
+
+// NewPrintMessagesCommand returns a PrintMessagesCommand as a Command.
+func NewPrintMessagesCommand() Command {
+ return PrintMessagesCommand{}
+}
+
+// Run prints the messages in Go-syntax form followed by a blank line.
+func (c PrintMessagesCommand) Run(m *Model) error {
+ fmt.Printf("%#+v\n\n", m.Messages())
+ return nil
+}
"github.com/charmbracelet/bubbles/textarea"
tea "github.com/charmbracelet/bubbletea"
+ "github.com/sashabaranov/go-openai"
)
type CommandMsg struct {
type Model struct {
textarea textarea.Model
UserInput UserInput
- ready bool
commandMsgs []CommandMsg
err error
+ Opts
+}
+
+type Opts struct {
+ ready bool
+ messages []openai.ChatCompletionMessage
+ initialValue string
+}
+
+type OptFunc func(*Opts)
+
+// WithChatCompletionMessages returns an option that carries a prior
+// message history into the model.
+func WithChatCompletionMessages(m []openai.ChatCompletionMessage) OptFunc {
+ return func(opts *Opts) {
+ opts.messages = m
+ }
+}
+
+// WithInitialValue returns an option that pre-fills the textarea with s.
+func WithInitialValue(s string) OptFunc {
+ return func(opts *Opts) {
+ opts.initialValue = s
+ }
+}
+
+// WithReady returns an option that marks the model ready at construction.
+func WithReady() OptFunc {
+ return func(opts *Opts) {
+ opts.ready = true
+ }
+}
+
+// defaultOpts returns the baseline options: not ready, no messages, and
+// an empty initial textarea value. Callers layer OptFuncs on top.
+func defaultOpts() Opts {
+ return Opts{
+ ready: false,
+ messages: nil,
+ initialValue: "",
+ }
}
var initialTextAreaHeight = 3
-func NewModel(s string) *Model {
+func NewModel(opts ...OptFunc) *Model {
+ o := defaultOpts()
+ for _, fn := range opts {
+ fn(&o)
+ }
+
t := textarea.New()
t.Placeholder = "press [tab] to submit prompt"
t.SetWidth(72)
t.SetHeight(initialTextAreaHeight)
t.ShowLineNumbers = false
t.Focus()
- if s != "" {
- t.SetValue(s)
+
+ if o.initialValue != "" {
+ t.SetValue(o.initialValue)
}
return &Model{
textarea: t,
+ Opts: o,
}
}
+// SetReady records whether the model's input is ready for submission.
+func (m *Model) SetReady(r bool) {
+ m.Opts.ready = r
+}
+
func (m *Model) Ready() bool {
- return m.ready
+ return m.Opts.ready
}
func (m *Model) Init() tea.Cmd {
return m, tea.Quit
case tea.KeyTab:
m.UserInput = NewUserInput(m.textarea.Value())
- m.ready = true
+ m.SetReady(true)
for _, command := range m.UserInput.Commands() {
m.commandMsgs = append(m.commandMsgs, NewCommandMsg(command))
}
if len(m.commandMsgs) >= 1 {
- m.ready = false
+ m.SetReady(false)
return m.Update(m.commandMsgs[0])
}
func (m *Model) Reset() {
m.textarea.SetValue("")
}
+
+// Messages returns the chat history stored in the model's options.
+func (m *Model) Messages() []openai.ChatCompletionMessage {
+ return m.Opts.messages
+}
u.commands = append(u.commands, NewEchoCommand())
case "editor":
u.commands = append(u.commands, NewEditorCommand())
+ case "printmessages":
+ u.commands = append(u.commands, NewPrintMessagesCommand())
default:
}
}