Documentation
Index
- Constants
- Variables
- type OpenAI
- func (s *OpenAI) ChatCompletion(conversation llm.BotConversation, opts ...llm.LanguageModelOption) (*llm.TextStreamResult, error)
- func (s *OpenAI) ChatCompletionNoStream(conversation llm.BotConversation, opts ...llm.LanguageModelOption) (string, error)
- func (s *OpenAI) CountTokens(text string) int
- func (s *OpenAI) GenerateImage(prompt string) (image.Image, error)
- func (s *OpenAI) GetDefaultConfig() llm.LanguageModelConfig
- func (s *OpenAI) InputTokenLimit() int
- func (s *OpenAI) Transcribe(file io.Reader) (*subtitles.Subtitles, error)
- type ToolBufferElement
Constants
View Source
const MaxFunctionCalls = 10
View Source
const OpenAIMaxImageSize = 20 * 1024 * 1024 // 20 MB
View Source
const StreamingTimeoutDefault = 10 * time.Second
Variables
View Source
var ErrStreamingTimeout = errors.New("timeout streaming")
Functions
This section is empty.
Types
type OpenAI
type OpenAI struct {
// contains filtered or unexported fields
}
func NewCompatible
func (*OpenAI) ChatCompletion
func (s *OpenAI) ChatCompletion(conversation llm.BotConversation, opts ...llm.LanguageModelOption) (*llm.TextStreamResult, error)
func (*OpenAI) ChatCompletionNoStream
func (s *OpenAI) ChatCompletionNoStream(conversation llm.BotConversation, opts ...llm.LanguageModelOption) (string, error)
func (*OpenAI) CountTokens
func (*OpenAI) GetDefaultConfig
func (s *OpenAI) GetDefaultConfig() llm.LanguageModelConfig
func (*OpenAI) InputTokenLimit
type ToolBufferElement
type ToolBufferElement struct {
// contains filtered or unexported fields
}
Click to show internal directories.
Click to hide internal directories.