Documentation ¶
Index ¶
- Constants
- func GetCacheKey(provider string, sEnc string) string
- func NumTokensFromMessages(messages []openai.ChatCompletionMessage, model string) (num_tokens int, err error)
- func NumTokensFromPrompt(prompt string, model string) (num_tokens int, err error)
- type ILLM
- type LLMConfig
- type OpenAIClient
- func (c *OpenAIClient) Configure(config LLMConfig) error
- func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)
- func (a *OpenAIClient) GetName() string
- func (a *OpenAIClient) Parse(ctx context.Context, prompt string, cache cache.ICache, options ...ParamOption) (string, error)
- type ParamOption
- type ParamOptions
Constants ¶
View Source
const ( DefaultOpenAIModel = openai.GPT3Dot5Turbo DefaultOpenAIModelTokenLimit = "4096" )
Variables ¶
This section is empty.
Functions ¶
func GetCacheKey ¶
func NumTokensFromMessages ¶
Types ¶
type ILLM ¶
type LLMConfig ¶
type LLMConfig struct { Name string Model string Token string BaseURL string Proxy string APIType string }
func (*LLMConfig) GetAPIType ¶
func (*LLMConfig) GetBaseURL ¶
type OpenAIClient ¶
type OpenAIClient struct {
// contains filtered or unexported fields
}
func (*OpenAIClient) Configure ¶
func (c *OpenAIClient) Configure(config LLMConfig) error
func (*OpenAIClient) GetCompletion ¶
func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)
TODO: add ability to supply multiple messages.
func (*OpenAIClient) GetName ¶
func (a *OpenAIClient) GetName() string
type ParamOption ¶
type ParamOption func(*ParamOptions)
ParamOption is a function that configures a ParamOptions.
func WithLogitBias ¶
func WithLogitBias(logitBias map[string]int) ParamOption
func WithMaxTokens ¶
func WithMaxTokens(maxTokens int) ParamOption
func WithModel ¶
func WithModel(model string) ParamOption
func WithOptions ¶
func WithOptions(options ParamOptions) ParamOption
func WithStopWords ¶
func WithStopWords(stopWords []string) ParamOption
func WithTemperature ¶
func WithTemperature(temperature float32) ParamOption
type ParamOptions ¶
type ParamOptions struct { // Model is the model to use. Model string `json:"model"` // MaxTokens is the maximum number of tokens to generate. MaxTokens int `json:"max_tokens"` // Temperature is the temperature for sampling, between 0 and 1. Temperature float32 `json:"temperature"` // StopWords is a list of words to stop on. StopWords []string `json:"stop_words"` LogitBias map[string]int `json:"logit_bias"` }
ParamOptions is a set of options.
func ValidOptions ¶
func ValidOptions(options ParamOptions) ParamOptions
Click to show internal directories.
Click to hide internal directories.