Documentation ¶
Overview ¶
Package openai provides a wrapper around the OpenAI API.
Index ¶
- Constants
- Variables
- type Function
- type FunctionParameterOption
- type Legacy
- func (o *Legacy) BatchCompletion(ctx context.Context, prompts []string) ([]string, error)
- func (o *Legacy) BatchCompletionStream(ctx context.Context, callbackFn []StreamCallback, prompts []string) error
- func (o *Legacy) BindFunction(fn interface{}, name string, description string, ...) error
- func (o *Legacy) CalledFunctionName() *string
- func (o *Legacy) Chat(ctx context.Context, prompt *chat.Chat) (string, error)
- func (o *Legacy) ChatStream(ctx context.Context, callbackFn StreamCallback, prompt *chat.Chat) error
- func (o *Legacy) Completion(ctx context.Context, prompt string) (string, error)
- func (o *Legacy) CompletionStream(ctx context.Context, callbackFn StreamCallback, prompt string) error
- func (o *Legacy) FinishReason() string
- func (o *Legacy) SetStop(stop []string)
- func (o *Legacy) WithCallback(callback UsageCallback) *Legacy
- func (o *Legacy) WithClient(client *openai.Client) *Legacy
- func (o *Legacy) WithCompletionCache(cache *cache.Cache) *Legacy
- func (o *Legacy) WithMaxTokens(maxTokens int) *Legacy
- func (o *Legacy) WithModel(model Model) *Legacy
- func (o *Legacy) WithStop(stop []string) *Legacy
- func (o *Legacy) WithTemperature(temperature float32) *Legacy
- func (o *Legacy) WithVerbose(verbose bool) *Legacy
- type Model
- type OpenAI
- func (o *OpenAI) BindFunction(fn interface{}, name string, description string, ...) error
- func (o *OpenAI) Generate(ctx context.Context, t *thread.Thread) error
- func (o *OpenAI) SetStop(stop []string)
- func (o *OpenAI) WithCache(cache *cache.Cache) *OpenAI
- func (o *OpenAI) WithClient(client *openai.Client) *OpenAI
- func (o *OpenAI) WithMaxTokens(maxTokens int) *OpenAI
- func (o *OpenAI) WithModel(model Model) *OpenAI
- func (o *OpenAI) WithResponseFormat(responseFormat ResponseFormat) *OpenAI
- func (o *OpenAI) WithStop(stop []string) *OpenAI
- func (o *OpenAI) WithStream(enable bool, callbackFn StreamCallback) *OpenAI
- func (o *OpenAI) WithTemperature(temperature float32) *OpenAI
- func (o *OpenAI) WithToolChoice(toolChoice *string) *OpenAI
- func (o *OpenAI) WithTools(tools ...Tool) *OpenAI
- func (o *OpenAI) WithUsageCallback(callback UsageCallback) *OpenAI
- type ResponseFormat
- type StreamCallback
- type Tool
- type UsageCallback
Constants ¶
const (
    DefaultOpenAIMaxTokens   = 256
    DefaultOpenAITemperature = 0.7
    DefaultOpenAINumResults  = 1
    DefaultOpenAITopP        = 1.0
    DefaultMaxIterations     = 3
)
const (
    EOS = "\x00"
)
Variables ¶
var (
    ErrOpenAICompletion = fmt.Errorf("openai completion error")
    ErrOpenAIChat       = fmt.Errorf("openai chat error")
)
Functions ¶
This section is empty.
Types ¶
type FunctionParameterOption ¶ added in v0.0.8
type Legacy ¶ added in v0.1.0
type Legacy struct {
    // contains filtered or unexported fields
}
func NewCompletion ¶ added in v0.0.5
func NewCompletion() *Legacy
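A minimal sketch of configuring a Legacy client and requesting a single completion. Only NewCompletion, the With* builders, and Completion come from this page; the import paths and the API key wiring through the underlying go-openai client are assumptions for illustration.

package main

import (
    "context"
    "fmt"

    goopenai "github.com/sashabaranov/go-openai" // underlying client library (import path assumed)
    "github.com/henomis/lingoose/llm/openai"     // this package (import path assumed)
)

func main() {
    // Configure the legacy completion client with the fluent With* options.
    llm := openai.NewCompletion().
        WithModel(openai.GPT3Dot5TurboInstruct).
        WithMaxTokens(128).
        WithTemperature(0.2).
        WithClient(goopenai.NewClient("YOUR_API_KEY")) // placeholder key

    out, err := llm.Completion(context.Background(), "Say hello in one short sentence.")
    if err != nil {
        panic(err)
    }
    fmt.Println(out)
}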
func (*Legacy) BatchCompletion ¶ added in v0.1.0
func (o *Legacy) BatchCompletion(ctx context.Context, prompts []string) ([]string, error)
BatchCompletion returns multiple completions for the given prompts.
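A hedged sketch of BatchCompletion, assuming a Legacy client built as in the NewCompletion example above; the result is presumably one completion per prompt, in order.

func batchExample(ctx context.Context, llm *openai.Legacy) error {
    prompts := []string{"Define CPU in one sentence.", "Define GPU in one sentence."}
    outputs, err := llm.BatchCompletion(ctx, prompts)
    if err != nil {
        return err
    }
    for i, out := range outputs {
        fmt.Printf("prompt %d: %s\n", i, out)
    }
    return nil
}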
func (*Legacy) BatchCompletionStream ¶ added in v0.1.0
func (o *Legacy) BatchCompletionStream(ctx context.Context, callbackFn []StreamCallback, prompts []string) error
BatchCompletionStream returns multiple completion streams for the given prompts.
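A sketch of the streaming variant; the slice of callbacks is assumed to pair positionally with the prompts, one StreamCallback per prompt.

func batchStreamExample(ctx context.Context, llm *openai.Legacy) error {
    prompts := []string{"Count to three.", "Name two colors."}
    callbacks := []openai.StreamCallback{
        func(chunk string) { fmt.Print("[0] ", chunk) },
        func(chunk string) { fmt.Print("[1] ", chunk) },
    }
    return llm.BatchCompletionStream(ctx, callbacks, prompts)
}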
func (*Legacy) BindFunction ¶ added in v0.1.0
func (o *Legacy) BindFunction(
    fn interface{},
    name string,
    description string,
    functionParameterOptions ...FunctionParameterOption,
) error
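A loosely sketched use of BindFunction. The shape of the bound function (a struct argument described by json tags, a string result) and the schema derivation are assumptions; only the BindFunction signature itself comes from this page.

type weatherQuery struct {
    City string `json:"city"`
}

func getWeather(q weatherQuery) string {
    return "sunny in " + q.City // stub implementation
}

func bindExample(llm *openai.Legacy) error {
    return llm.BindFunction(
        getWeather,
        "getWeather",
        "Get the current weather for a city",
    )
}

After a chat request, CalledFunctionName (below) reports which bound function, if any, the model chose to invoke.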
func (*Legacy) CalledFunctionName ¶ added in v0.1.0
func (o *Legacy) CalledFunctionName() *string
CalledFunctionName returns the name of the function that was called.
func (*Legacy) ChatStream ¶ added in v0.1.0
func (o *Legacy) ChatStream(ctx context.Context, callbackFn StreamCallback, prompt *chat.Chat) error
ChatStream returns a single chat stream for the given prompt.
func (*Legacy) Completion ¶ added in v0.1.0
func (o *Legacy) Completion(ctx context.Context, prompt string) (string, error)
Completion returns a single completion for the given prompt.
func (*Legacy) CompletionStream ¶ added in v0.1.0
func (o *Legacy) CompletionStream(ctx context.Context, callbackFn StreamCallback, prompt string) error
CompletionStream returns a single completion stream for the given prompt.
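A short sketch of streaming a single completion, assuming a Legacy client built as in the NewCompletion example above; chunks are printed as they arrive through the StreamCallback.

func streamExample(ctx context.Context, llm *openai.Legacy) error {
    return llm.CompletionStream(
        ctx,
        func(chunk string) { fmt.Print(chunk) },
        "Write a haiku about the sea.",
    )
}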
func (*Legacy) FinishReason ¶ added in v0.1.0
func (o *Legacy) FinishReason() string
FinishReason returns the LLM finish reason.
func (*Legacy) WithCallback ¶ added in v0.1.0
func (o *Legacy) WithCallback(callback UsageCallback) *Legacy
WithCallback sets the usage callback to use for the OpenAI instance.
func (*Legacy) WithClient ¶ added in v0.1.0
func (o *Legacy) WithClient(client *openai.Client) *Legacy
WithClient sets the client to use for the OpenAI instance.
func (*Legacy) WithCompletionCache ¶ added in v0.1.0
func (o *Legacy) WithCompletionCache(cache *cache.Cache) *Legacy
WithCompletionCache sets the completion cache to use for the OpenAI instance.
func (*Legacy) WithMaxTokens ¶ added in v0.1.0
func (o *Legacy) WithMaxTokens(maxTokens int) *Legacy
WithMaxTokens sets the max tokens to use for the OpenAI instance.
func (*Legacy) WithStop ¶ added in v0.1.0
func (o *Legacy) WithStop(stop []string) *Legacy
WithStop sets the stop sequences to use for the OpenAI instance.
func (*Legacy) WithTemperature ¶ added in v0.1.0
func (o *Legacy) WithTemperature(temperature float32) *Legacy
WithTemperature sets the temperature to use for the OpenAI instance.
func (*Legacy) WithVerbose ¶ added in v0.1.0
func (o *Legacy) WithVerbose(verbose bool) *Legacy
WithVerbose sets the verbose flag to use for the OpenAI instance.
type Model ¶
type Model string
const (
    GPT432K0613 Model = openai.GPT432K0613
    GPT432K0314 Model = openai.GPT432K0314
    GPT432K Model = openai.GPT432K
    GPT40613 Model = openai.GPT40613
    GPT40314 Model = openai.GPT40314
    GPT4o Model = openai.GPT4o
    GPT4o20240513 Model = openai.GPT4o20240513
    GPT4Turbo Model = openai.GPT4Turbo
    GPT4Turbo20240409 Model = openai.GPT4Turbo20240409
    GPT4Turbo0125 Model = openai.GPT4Turbo0125
    GPT4Turbo1106 Model = openai.GPT4Turbo1106
    GPT4TurboPreview Model = openai.GPT4TurboPreview
    GPT4VisionPreview Model = openai.GPT4VisionPreview
    GPT4 Model = openai.GPT4
    GPT3Dot5Turbo0125 Model = openai.GPT3Dot5Turbo0125
    GPT3Dot5Turbo1106 Model = openai.GPT3Dot5Turbo1106
    GPT3Dot5Turbo0613 Model = openai.GPT3Dot5Turbo0613
    GPT3Dot5Turbo0301 Model = openai.GPT3Dot5Turbo0301
    GPT3Dot5Turbo16K Model = openai.GPT3Dot5Turbo16K
    GPT3Dot5Turbo16K0613 Model = openai.GPT3Dot5Turbo16K0613
    GPT3Dot5Turbo Model = openai.GPT3Dot5Turbo
    GPT3Dot5TurboInstruct Model = openai.GPT3Dot5TurboInstruct
    GPT3Davinci Model = openai.GPT3Davinci
    GPT3Davinci002 Model = openai.GPT3Davinci002
    GPT3Curie Model = openai.GPT3Curie
    GPT3Curie002 Model = openai.GPT3Curie002
    GPT3Ada Model = openai.GPT3Ada
    GPT3Ada002 Model = openai.GPT3Ada002
    GPT3Babbage Model = openai.GPT3Babbage
    GPT3Babbage002 Model = openai.GPT3Babbage002
)
type OpenAI ¶
type OpenAI struct {
    Name string
    // contains filtered or unexported fields
}
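A hedged sketch of the thread-based API. Generate's signature comes from the index above; the New constructor, the companion thread package helpers, and the behavior of appending the assistant reply to the thread are assumptions.

package main

import (
    "context"
    "fmt"

    "github.com/henomis/lingoose/llm/openai" // this package (import path assumed)
    "github.com/henomis/lingoose/thread"     // companion thread package (assumed)
)

func main() {
    // openai.New is assumed; it is not listed on this page.
    llm := openai.New().
        WithModel(openai.GPT4o).
        WithTemperature(0.2)

    t := thread.New().AddMessage(
        thread.NewUserMessage().AddContent(
            thread.NewTextContent("Summarize Go's error handling in one sentence."),
        ),
    )

    // Generate presumably appends the assistant's reply to the thread in place.
    if err := llm.Generate(context.Background(), t); err != nil {
        panic(err)
    }
    fmt.Println(t)
}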
func (*OpenAI) BindFunction ¶ added in v0.0.9
func (o *OpenAI) BindFunction(
    fn interface{},
    name string,
    description string,
    functionParameterOptions ...FunctionParameterOption,
) error
func (*OpenAI) WithClient ¶ added in v0.0.9
func (o *OpenAI) WithClient(client *openai.Client) *OpenAI
WithClient sets the client to use for the OpenAI instance.
func (*OpenAI) WithMaxTokens ¶ added in v0.0.9
func (o *OpenAI) WithMaxTokens(maxTokens int) *OpenAI
WithMaxTokens sets the max tokens to use for the OpenAI instance.
func (*OpenAI) WithResponseFormat ¶ added in v0.2.0
func (o *OpenAI) WithResponseFormat(responseFormat ResponseFormat) *OpenAI
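A small sketch of enabling JSON-object responses using the ResponseFormat constants documented below; openai.New is an assumption not listed on this page, and the prompt itself must still ask for JSON, as the underlying OpenAI API requires.

func newJSONClient() *openai.OpenAI {
    // openai.New is assumed; it is not listed on this page.
    return openai.New().
        WithModel(openai.GPT4o).
        WithResponseFormat(openai.ResponseFormatJSONObject)
}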
func (*OpenAI) WithStop ¶ added in v0.0.9
func (o *OpenAI) WithStop(stop []string) *OpenAI
WithStop sets the stop sequences to use for the OpenAI instance.
func (*OpenAI) WithStream ¶ added in v0.1.0
func (o *OpenAI) WithStream(enable bool, callbackFn StreamCallback) *OpenAI
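A sketch of turning on streaming for the thread-based client; chunks reach the StreamCallback as they arrive, and Generate presumably still records the complete reply on the thread. openai.New is an assumption not listed on this page.

func newStreamingClient() *openai.OpenAI {
    // openai.New is assumed; it is not listed on this page.
    return openai.New().
        WithModel(openai.GPT4o).
        WithStream(true, func(chunk string) {
            fmt.Print(chunk) // print each streamed chunk as it arrives
        })
}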
func (*OpenAI) WithTemperature ¶ added in v0.0.9
func (o *OpenAI) WithTemperature(temperature float32) *OpenAI
WithTemperature sets the temperature to use for the OpenAI instance.
func (*OpenAI) WithToolChoice ¶ added in v0.1.0
func (*OpenAI) WithUsageCallback ¶ added in v0.1.0
func (o *OpenAI) WithUsageCallback(callback UsageCallback) *OpenAI
WithUsageCallback sets the usage callback to use for the OpenAI instance.
type ResponseFormat ¶ added in v0.2.0
type ResponseFormat = openai.ChatCompletionResponseFormatType
const (
    ResponseFormatJSONObject ResponseFormat = openai.ChatCompletionResponseFormatTypeJSONObject
    ResponseFormatText       ResponseFormat = openai.ChatCompletionResponseFormatTypeText
)
type StreamCallback ¶ added in v0.0.11
type StreamCallback func(string)
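A small sketch of a StreamCallback that treats the EOS constant above as an end-of-stream marker (presumably how the package signals that a stream has finished).

var printChunks openai.StreamCallback = func(chunk string) {
    if chunk == openai.EOS {
        fmt.Println() // end of stream, presumably signaled with EOS
        return
    }
    fmt.Print(chunk)
}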