Documentation ¶
Index ¶
- Constants
- Variables
- func EmbedTexts(ctx context.Context, appState *models.AppState, model *models.EmbeddingModel, ...) ([][]float32, error)
- func Float64ToFloat32Matrix(in [][]float64) [][]float32
- func GetEmbeddingModel(appState *models.AppState, documentType string) (*models.EmbeddingModel, error)
- func GetLLMModelName(cfg *config.Config) (string, error)
- func NewAnthropicLLM(ctx context.Context, cfg *config.Config) (models.ZepLLM, error)
- func NewLLMClient(ctx context.Context, cfg *config.Config) (models.ZepLLM, error)
- func NewOpenAILLM(ctx context.Context, cfg *config.Config) (models.ZepLLM, error)
- func NewRetryableHTTPClient(retryMax int, timeout time.Duration) *http.Client
- type LLMError
- type ZepAnthropicLLM
- func (zllm *ZepAnthropicLLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)
- func (zllm *ZepAnthropicLLM) EmbedTexts(_ context.Context, _ []string) ([][]float32, error)
- func (zllm *ZepAnthropicLLM) GetTokenCount(_ string) (int, error)
- func (zllm *ZepAnthropicLLM) Init(_ context.Context, cfg *config.Config) error
- type ZepLLM
- func (zllm *ZepLLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)
- func (zllm *ZepLLM) EmbedTexts(ctx context.Context, texts []string) ([][]float32, error)
- func (zllm *ZepLLM) GetTokenCount(text string) (int, error)
- func (zllm *ZepLLM) Init(ctx context.Context, cfg *config.Config) error
- type ZepOpenAILLM
- func (zllm *ZepOpenAILLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)
- func (zllm *ZepOpenAILLM) EmbedTexts(ctx context.Context, texts []string) ([][]float32, error)
- func (zllm *ZepOpenAILLM) GetTokenCount(text string) (int, error)
- func (zllm *ZepOpenAILLM) Init(_ context.Context, cfg *config.Config) error
Constants ¶
View Source
const AnthropicAPIKeyNotSetError = "ZEP_ANTHROPIC_API_KEY is not set" //nolint:gosec
View Source
const AnthropicAPITimeout = 30 * time.Second
View Source
const DefaultTemperature = 0.0
View Source
const InvalidLLMModelError = "llm model is not set or is invalid"
View Source
const LocalEmbedderTimeout = 60 * time.Second
View Source
const MaxLocalEmbedderRetryAttempts = 5
View Source
const MaxOpenAIAPIRequestAttempts = 5
View Source
const OpenAIAPIKeyNotSetError = "ZEP_OPENAI_API_KEY is not set" //nolint:gosec
View Source
const OpenAIAPITimeout = 20 * time.Second
View Source
const OpenAICallTimeout = 60 * time.Second
View Source
const OtelLLMTracerName = "llm"
Variables ¶
View Source
var MaxLLMTokensMap = map[string]int{
"gpt-3.5-turbo": 4096,
"gpt-3.5-turbo-16k": 16_385,
"gpt-3.5-turbo-1106": 16_385,
"gpt-4": 8192,
"gpt-4-32k": 32_768,
"gpt-4-1106-preview": 128_000,
"gpt-4-turbo": 128_000,
"gpt-4o": 128_000,
"gpt-4o-mini": 128_000,
"claude-instant-1": 100_000,
"claude-2": 100_000,
"claude-3-5-sonnet-20240620": 200_000,
"claude-3-haiku-20240307": 200_000,
"claude-3-opus-20240229": 200_000,
"claude-3-sonnet-20240229": 200_000,
}
View Source
var ValidAnthropicLLMs = map[string]bool{
	"claude-instant-1":           true,
	"claude-2":                   true,
	"claude-3-5-sonnet-20240620": true,
	"claude-3-opus-20240229":     true,
	"claude-3-sonnet-20240229":   true,
	"claude-3-haiku-20240307":    true,
}
View Source
var ValidLLMMap = internal.MergeMaps(ValidOpenAILLMs, ValidAnthropicLLMs)
View Source
var ValidOpenAILLMs = map[string]bool{
	"gpt-3.5-turbo":          true,
	"gpt-4":                  true,
	"gpt-3.5-turbo-16k":      true,
	"gpt-3.5-turbo-1106":     true,
	"gpt-4-32k":              true,
	"gpt-4-1106-preview":     true,
	"gpt-4-turbo":            true,
	"gpt-4o":                 true,
	"gpt-4o-2024-08-06":      true,
	"gpt-4o-mini":            true,
	"gpt-4o-mini-2024-07-18": true,
}
Functions ¶
func EmbedTexts ¶ added in v0.6.5
func Float64ToFloat32Matrix ¶ added in v0.10.0
func GetEmbeddingModel ¶ added in v0.10.0
func NewAnthropicLLM ¶ added in v0.10.0
func NewLLMClient ¶ added in v0.10.0
func NewOpenAILLM ¶ added in v0.10.0
Types ¶
type LLMError ¶
type LLMError struct {
// contains filtered or unexported fields
}
func NewLLMError ¶
type ZepAnthropicLLM ¶ added in v0.10.0
type ZepAnthropicLLM struct {
// contains filtered or unexported fields
}
func (*ZepAnthropicLLM) Call ¶ added in v0.10.0
func (zllm *ZepAnthropicLLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)
func (*ZepAnthropicLLM) EmbedTexts ¶ added in v0.10.0
func (*ZepAnthropicLLM) GetTokenCount ¶ added in v0.10.0
func (zllm *ZepAnthropicLLM) GetTokenCount(_ string) (int, error)
GetTokenCount returns the number of tokens in the text. It currently returns 0, since no token-count function is available for this model.
type ZepLLM ¶ added in v0.19.0
type ZepLLM struct {
// contains filtered or unexported fields
}
ZepLLM is a wrapper around the Zep LLM implementations that implements the ZepLLM interface and adds OpenTelemetry tracing
func (*ZepLLM) EmbedTexts ¶ added in v0.19.0
func (*ZepLLM) GetTokenCount ¶ added in v0.19.0
type ZepOpenAILLM ¶ added in v0.10.0
type ZepOpenAILLM struct {
// contains filtered or unexported fields
}
func (*ZepOpenAILLM) Call ¶ added in v0.10.0
func (zllm *ZepOpenAILLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)
func (*ZepOpenAILLM) EmbedTexts ¶ added in v0.10.0
func (*ZepOpenAILLM) GetTokenCount ¶ added in v0.10.0
func (zllm *ZepOpenAILLM) GetTokenCount(text string) (int, error)
GetTokenCount returns the number of tokens in the text
Click to show internal directories.
Click to hide internal directories.