Documentation ¶
Index ¶
- Constants
- Variables
- func GetProviders() []string
- func RegisterProvider(name string, constructor ProviderConstructor) error
- type AzureLLM
- type BaseLLM
- func (b *BaseLLM) BuildHeaders() map[string]string
- func (b *BaseLLM) BuildURL() string
- func (b *BaseLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (b *BaseLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (b *BaseLLM) GetUsage(data []byte) (string, error)
- func (b *BaseLLM) MakeRequest(ctx context.Context, client *http.Client, provider LLM, message string, ...) (string, error)
- func (b *BaseLLM) ParseResponse(response []byte) (string, error)
- type ChatGLMLLM
- type ClaudeLLM
- func (c *ClaudeLLM) BuildHeaders() map[string]string
- func (c *ClaudeLLM) BuildURL() string
- func (c *ClaudeLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (c *ClaudeLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (c *ClaudeLLM) GetUsage(data []byte) (string, error)
- func (c *ClaudeLLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (c *ClaudeLLM) Name() string
- func (c *ClaudeLLM) ParseResponse(response []byte) (string, error)
- type CohereLLM
- type DeepSeekLLM
- type DefaultLLM
- type GeminiLLM
- func (g *GeminiLLM) BuildHeaders() map[string]string
- func (g *GeminiLLM) BuildURL() string
- func (g *GeminiLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (g *GeminiLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (g *GeminiLLM) GetUsage(data []byte) (string, error)
- func (g *GeminiLLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (g *GeminiLLM) Name() string
- type GroqLLM
- func (g *GroqLLM) BuildHeaders() map[string]string
- func (g *GroqLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (g *GroqLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (g *GroqLLM) GetUsage(data []byte) (string, error)
- func (g *GroqLLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (g *GroqLLM) Name() string
- type KimiLLM
- type LLM
- type MistralLLM
- type OllamaLLM
- func (o *OllamaLLM) BuildHeaders() map[string]string
- func (o *OllamaLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (o *OllamaLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (o *OllamaLLM) GetUsage(data []byte) (string, error)
- func (o *OllamaLLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (o *OllamaLLM) Name() string
- type OpenAILLM
- func (o *OpenAILLM) BuildHeaders() map[string]string
- func (o *OpenAILLM) BuildURL() string
- func (o *OpenAILLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (o *OpenAILLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (o *OpenAILLM) GetUsage(data []byte) (string, error)
- func (o *OpenAILLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (o *OpenAILLM) Name() string
- type OpenRouterLLM
- type ProviderConstructor
- type SambanovaLLM
- type SiliconLLM
- type TongyiLLM
- type VertexLLM
- func (v *VertexLLM) BuildURL() string
- func (v *VertexLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
- func (v *VertexLLM) GetRequiredConfig() map[string]config.ConfigRequirement
- func (v *VertexLLM) GetUsage(data []byte) (string, error)
- func (v *VertexLLM) MakeRequest(ctx context.Context, client *http.Client, message string, ...) (string, error)
- func (v *VertexLLM) Name() string
- type XAILLM
Constants ¶
const (
DefaultGroqModel = "llama-3.3-70b-versatile"
)
Variables ¶
var DefaultOpenrouterModel = "meta-llama/llama-3.1-70b-instruct:free"
Functions ¶
func GetProviders ¶
func GetProviders() []string
GetProviders returns a list of all registered providers
func RegisterProvider ¶
func RegisterProvider(name string, constructor ProviderConstructor) error
RegisterProvider registers a new LLM provider constructor
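For illustration, a custom provider could be registered at startup. This is a minimal sketch, assuming this package is imported as llm; the provider name "mycustom" is hypothetical, and DefaultLLM is reused as the implementation:
err := llm.RegisterProvider("mycustom", func(config *types.ClientConfig) llm.LLM {
	// Reuse the default implementation; a real provider would return its own type.
	return llm.NewDefaultLLM(config)
})
if err != nil {
	log.Fatal(err)
}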
Types ¶
type AzureLLM ¶
type AzureLLM struct {
*OpenAILLM
}
AzureLLM implements the LLM interface for Azure OpenAI
func NewAzureLLM ¶
func NewAzureLLM(config *types.ClientConfig) *AzureLLM
NewAzureLLM creates a new AzureLLM
func (*AzureLLM) BuildHeaders ¶
func (a *AzureLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers for Azure OpenAI
func (*AzureLLM) GetRequiredConfig ¶
func (a *AzureLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type BaseLLM ¶
type BaseLLM struct {
Config *types.ClientConfig
}
BaseLLM provides common functionality for all LLM providers
func NewBaseLLM ¶
func NewBaseLLM(config *types.ClientConfig) *BaseLLM
NewBaseLLM creates a new BaseLLM.
If config is nil, it sets default values for the required configuration options. Otherwise, it uses the values provided in config.
func (*BaseLLM) BuildHeaders ¶
func (b *BaseLLM) BuildHeaders() map[string]string
BuildHeaders provides a default implementation for building headers
func (*BaseLLM) BuildURL ¶
func (b *BaseLLM) BuildURL() string
BuildURL builds the API URL by trimming and joining the API base and completion path.
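For example, an API base of "https://api.openai.com/v1/" joined with a completion path of "/chat/completions" would yield "https://api.openai.com/v1/chat/completions" (illustrative values; the actual fields come from the client configuration).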
func (*BaseLLM) FormatMessages ¶
func (b *BaseLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for the provider's API.
This is a default implementation; providers that need a different message format should override it.
func (*BaseLLM) GetRequiredConfig ¶
func (b *BaseLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
The map keys are the configuration option names, and the values are config.ConfigRequirement structs that define the default value and prompt message for each option.
The default values are only used if the user does not provide a value for the option.
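A minimal inspection sketch, assuming this package is imported as llm (the option keys printed are provider-specific):
provider := llm.NewDefaultLLM(nil)
for name := range provider.GetRequiredConfig() {
	// name is a configuration option key, e.g. an API base or model name
	// (the exact keys are an assumption here).
	fmt.Println("configurable option:", name)
}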
func (*BaseLLM) GetUsage ¶
func (b *BaseLLM) GetUsage(data []byte) (string, error)
GetUsage returns a string describing the token usage of the response. It extracts the usage information from the response data using the field names "prompt_tokens", "completion_tokens", and "total_tokens". If no usage information is found, it returns an empty string.
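A hedged call-site sketch, assuming this package is imported as llm; the JSON shape below is illustrative, since the exact response layout is provider-specific:
// Hypothetical raw provider response.
respBody := []byte(`{"usage":{"prompt_tokens":12,"completion_tokens":34,"total_tokens":46}}`)
base := llm.NewBaseLLM(nil)
usage, err := base.GetUsage(respBody)
if err == nil && usage != "" {
	fmt.Println(usage) // a summary built from the token counts above
}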
func (*BaseLLM) MakeRequest ¶
func (b *BaseLLM) MakeRequest(ctx context.Context, client *http.Client, provider LLM, message string, history []types.Message) (string, error)
MakeRequest makes a request to the provider's API, formats the response, and returns the result as a string.
If the request fails or the response is invalid, it returns an error.
The function takes the following parameters:
- ctx: the context for the request
- client: the HTTP client to use for the request
- provider: the provider to make the request to
- message: the message to send to the provider
- history: the message history to send to the provider
The function returns the response from the provider as a string, or an error if the request fails.
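A usage sketch, assuming this package is imported as llm and that cfg is a previously populated *types.ClientConfig:
provider := llm.NewOpenAILLM(cfg)
base := llm.NewBaseLLM(cfg)
answer, err := base.MakeRequest(context.Background(), &http.Client{}, provider, "Hello", nil)
if err != nil {
	log.Fatal(err)
}
fmt.Println(answer)
Passing provider lets the base implementation use any overridden URL, header, formatting, and parsing methods of the concrete provider.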
func (*BaseLLM) ParseResponse ¶
func (b *BaseLLM) ParseResponse(response []byte) (string, error)
ParseResponse parses the API response according to the provider's configuration. It extracts the answer using the answer path specified in the configuration and returns an error if the answer is not found. Otherwise it trims any leading and trailing triple backticks and returns the resulting text.
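For illustration, assuming the configured answer path selects an OpenAI-style choices.0.message.content field (an assumption; the actual path comes from the provider configuration):
base := llm.NewBaseLLM(nil)
raw := []byte("{\"choices\":[{\"message\":{\"content\":\"```fix: typo```\"}}]}")
text, err := base.ParseResponse(raw)
if err == nil {
	fmt.Println(text) // "fix: typo", with the surrounding backticks trimmed
}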
type ChatGLMLLM ¶
type ChatGLMLLM struct {
*OpenAILLM
}
ChatGLMLLM implements the LLM interface for ChatGLM
func NewChatGLMLLM ¶
func NewChatGLMLLM(config *types.ClientConfig) *ChatGLMLLM
NewChatGLMLLM creates a new ChatGLMLLM
func (*ChatGLMLLM) GetRequiredConfig ¶
func (c *ChatGLMLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*ChatGLMLLM) Name ¶
func (c *ChatGLMLLM) Name() string
type ClaudeLLM ¶
type ClaudeLLM struct {
*BaseLLM
}
ClaudeLLM is the Claude LLM provider implementation
func NewClaudeLLM ¶
func NewClaudeLLM(config *types.ClientConfig) *ClaudeLLM
NewClaudeLLM creates a new ClaudeLLM
func (*ClaudeLLM) BuildHeaders ¶
func (c *ClaudeLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers
func (*ClaudeLLM) FormatMessages ¶
func (c *ClaudeLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for Claude API
func (*ClaudeLLM) GetRequiredConfig ¶
func (c *ClaudeLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type CohereLLM ¶
type CohereLLM struct {
*OpenAILLM
}
CohereLLM implements the LLM interface for Cohere
func NewCohereLLM ¶
func NewCohereLLM(config *types.ClientConfig) *CohereLLM
NewCohereLLM creates a new CohereLLM
func (*CohereLLM) FormatMessages ¶
func (c *CohereLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for Cohere API
func (*CohereLLM) GetRequiredConfig ¶
func (c *CohereLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type DeepSeekLLM ¶
type DeepSeekLLM struct {
*OpenAILLM
}
DeepSeekLLM implements the LLM interface for DeepSeek
func NewDeepSeekLLM ¶
func NewDeepSeekLLM(config *types.ClientConfig) *DeepSeekLLM
NewDeepSeekLLM creates a new DeepSeekLLM
func (*DeepSeekLLM) GetRequiredConfig ¶
func (d *DeepSeekLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*DeepSeekLLM) Name ¶
func (d *DeepSeekLLM) Name() string
type DefaultLLM ¶
type DefaultLLM struct {
*BaseLLM
}
DefaultLLM provides default implementation of LLM interface
func NewDefaultLLM ¶
func NewDefaultLLM(config *types.ClientConfig) *DefaultLLM
NewDefaultLLM creates a new DefaultLLM.
If config is nil, it sets default values for the required configuration options. Otherwise, it uses the values provided in config.
func (*DefaultLLM) MakeRequest ¶
func (d *DefaultLLM) MakeRequest(ctx context.Context, client *http.Client, message string, history []types.Message) (string, error)
MakeRequest implements the LLM interface for DefaultLLM.
func (*DefaultLLM) Name ¶
func (d *DefaultLLM) Name() string
Name returns the name of the provider, which is "default" for DefaultLLM.
type GeminiLLM ¶
type GeminiLLM struct {
*BaseLLM
}
GeminiLLM implements the LLM interface for Gemini
func NewGeminiLLM ¶
func NewGeminiLLM(config *types.ClientConfig) *GeminiLLM
NewGeminiLLM creates a new GeminiLLM
func (*GeminiLLM) BuildHeaders ¶
func (g *GeminiLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers
func (*GeminiLLM) FormatMessages ¶
func (g *GeminiLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for Gemini API
func (*GeminiLLM) GetRequiredConfig ¶
func (g *GeminiLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type GroqLLM ¶
type GroqLLM struct {
*OpenAILLM
}
GroqLLM implements the LLM interface for Groq
func NewGroqLLM ¶
func NewGroqLLM(config *types.ClientConfig) *GroqLLM
NewGroqLLM creates a new GroqLLM
func (*GroqLLM) BuildHeaders ¶
func (g *GroqLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers for Groq
func (*GroqLLM) FormatMessages ¶
func (g *GroqLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
func (*GroqLLM) GetRequiredConfig ¶
func (g *GroqLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*GroqLLM) GetUsage ¶
func (g *GroqLLM) GetUsage(data []byte) (string, error)
GetUsage returns usage information for the provider.
The function takes the following parameters:
- data: the response data from the provider
It returns a string describing the token usage, or an error if the usage information is not found.
type KimiLLM ¶
type KimiLLM struct {
*OpenAILLM
}
KimiLLM implements the LLM interface for Kimi
func NewKimiLLM ¶
func NewKimiLLM(config *types.ClientConfig) *KimiLLM
NewKimiLLM creates a new KimiLLM
func (*KimiLLM) GetRequiredConfig ¶
func (k *KimiLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type LLM ¶
type LLM interface {
	// Name returns the name of the provider
	Name() string
	// BuildURL builds the API URL
	BuildURL() string
	// GetRequiredConfig returns provider-specific configuration requirements
	GetRequiredConfig() map[string]config.ConfigRequirement
	// FormatMessages formats messages for the provider's API
	FormatMessages(message string, history []types.Message) (interface{}, error)
	// MakeRequest makes a request to the API
	MakeRequest(ctx context.Context, client *http.Client, message string, history []types.Message) (string, error)
	// GetUsage returns usage information for the provider
	GetUsage(data []byte) (string, error)
	// BuildHeaders builds request headers
	BuildHeaders() map[string]string
	// ParseResponse parses the response from the API
	ParseResponse(response []byte) (string, error)
}
LLM is the interface that all LLM providers must implement
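Concrete providers typically satisfy this interface by embedding BaseLLM (or OpenAILLM) and overriding only what differs, as the type list above shows. A minimal sketch of a hypothetical custom provider, assuming this package is imported as llm:
type MyLLM struct {
	*llm.BaseLLM
}

// Name identifies the hypothetical provider.
func (m *MyLLM) Name() string { return "myllm" }

// MakeRequest adapts BaseLLM.MakeRequest, passing the provider itself so
// that any overridden URL, header, or formatting methods are used.
func (m *MyLLM) MakeRequest(ctx context.Context, client *http.Client, message string, history []types.Message) (string, error) {
	return m.BaseLLM.MakeRequest(ctx, client, m, message, history)
}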
func CreateProvider ¶
func CreateProvider(config *types.ClientConfig) (LLM, error)
CreateProvider creates a new provider instance with given config
func NewProvider ¶
func NewProvider(providerName string, config *types.ClientConfig) (LLM, error)
NewProvider creates a new LLM provider instance based on the given provider name and configuration. It returns an LLM interface and an error if any occurs during the creation process.
Parameters:
- providerName: A string representing the name of the desired LLM provider.
- config: A pointer to types.ClientConfig containing the configuration for the provider.
Returns:
- LLM: An interface representing the created LLM provider instance.
- error: An error if the provider creation fails, or nil if successful.
If the specified provider is not registered, it returns a DefaultLLM instance. If the config parameter is nil, it returns an error.
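An end-to-end sketch, assuming this package is imported as llm and that the configuration fields have been filled in beforehand:
cfg := &types.ClientConfig{} // must be non-nil; provider fields set elsewhere
provider, err := llm.NewProvider("openai", cfg)
if err != nil {
	log.Fatal(err)
}
answer, err := provider.MakeRequest(context.Background(), &http.Client{}, "Write a commit message", nil)
if err != nil {
	log.Fatal(err)
}
fmt.Println(provider.Name(), answer)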
type MistralLLM ¶
type MistralLLM struct {
*OpenAILLM
}
MistralLLM implements the LLM interface for Mistral
func NewMistralLLM ¶
func NewMistralLLM(config *types.ClientConfig) *MistralLLM
NewMistralLLM creates a new MistralLLM
func (*MistralLLM) GetRequiredConfig ¶
func (m *MistralLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*MistralLLM) Name ¶
func (m *MistralLLM) Name() string
type OllamaLLM ¶
type OllamaLLM struct {
*BaseLLM
}
OllamaLLM is the Ollama LLM provider implementation
func NewOllamaLLM ¶
func NewOllamaLLM(config *types.ClientConfig) *OllamaLLM
NewOllamaLLM creates a new OllamaLLM
func (*OllamaLLM) BuildHeaders ¶
func (o *OllamaLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers
func (*OllamaLLM) FormatMessages ¶
func (o *OllamaLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for Ollama API
func (*OllamaLLM) GetRequiredConfig ¶
func (o *OllamaLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type OpenAILLM ¶
type OpenAILLM struct {
*BaseLLM
}
OpenAILLM is the OpenAI LLM provider implementation
func NewOpenAILLM ¶
func NewOpenAILLM(config *types.ClientConfig) *OpenAILLM
NewOpenAILLM creates a new OpenAILLM
func (*OpenAILLM) BuildHeaders ¶
func (o *OpenAILLM) BuildHeaders() map[string]string
BuildHeaders builds request headers
func (*OpenAILLM) FormatMessages ¶
func (o *OpenAILLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for OpenAI API
func (*OpenAILLM) GetRequiredConfig ¶
func (o *OpenAILLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type OpenRouterLLM ¶ added in v0.2.2
type OpenRouterLLM struct {
*OpenAILLM
}
OpenRouterLLM implements the LLM interface for OpenRouter
func NewOpenRouterLLM ¶ added in v0.2.2
func NewOpenRouterLLM(config *types.ClientConfig) *OpenRouterLLM
NewOpenRouterLLM creates a new OpenRouterLLM
func (*OpenRouterLLM) BuildHeaders ¶ added in v0.2.2
func (o *OpenRouterLLM) BuildHeaders() map[string]string
BuildHeaders overrides the parent's BuildHeaders to add OpenRouter specific headers
func (*OpenRouterLLM) GetRequiredConfig ¶ added in v0.2.2
func (o *OpenRouterLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*OpenRouterLLM) Name ¶ added in v0.2.2
func (o *OpenRouterLLM) Name() string
type ProviderConstructor ¶
type ProviderConstructor func(config *types.ClientConfig) LLM
ProviderConstructor is a function that creates a new LLM instance
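Any function with this signature can be passed to RegisterProvider. A minimal sketch, assuming this package is imported as llm and reusing DefaultLLM:
var newMyLLM llm.ProviderConstructor = func(config *types.ClientConfig) llm.LLM {
	return llm.NewDefaultLLM(config)
}
_ = llm.RegisterProvider("myllm", newMyLLM) // error handling elided in this sketch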
type SambanovaLLM ¶
type SambanovaLLM struct {
*OpenAILLM
}
SambanovaLLM implements the LLM interface for SambaNova
func NewSambanovaLLM ¶
func NewSambanovaLLM(config *types.ClientConfig) *SambanovaLLM
NewSambanovaLLM creates a new SambanovaLLM
func (*SambanovaLLM) GetRequiredConfig ¶
func (s *SambanovaLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*SambanovaLLM) Name ¶
func (s *SambanovaLLM) Name() string
type SiliconLLM ¶
type SiliconLLM struct {
*OpenAILLM
}
SiliconLLM implements the LLM interface for Silicon
func NewSiliconLLM ¶
func NewSiliconLLM(config *types.ClientConfig) *SiliconLLM
NewSiliconLLM creates a new SiliconLLM
func (*SiliconLLM) GetRequiredConfig ¶
func (s *SiliconLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
func (*SiliconLLM) Name ¶
func (s *SiliconLLM) Name() string
type TongyiLLM ¶
type TongyiLLM struct {
*OpenAILLM
}
TongyiLLM implements the LLM interface for Tongyi (DashScope)
func NewTongyiLLM ¶
func NewTongyiLLM(config *types.ClientConfig) *TongyiLLM
NewTongyiLLM creates a new TongyiLLM
func (*TongyiLLM) BuildHeaders ¶
func (t *TongyiLLM) BuildHeaders() map[string]string
BuildHeaders builds request headers
func (*TongyiLLM) GetRequiredConfig ¶
func (t *TongyiLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type VertexLLM ¶
type VertexLLM struct {
*BaseLLM
}
VertexLLM implements the LLM interface for Google Cloud Vertex AI
func NewVertexLLM ¶
func NewVertexLLM(config *types.ClientConfig) *VertexLLM
NewVertexLLM creates a new VertexLLM
func (*VertexLLM) FormatMessages ¶
func (v *VertexLLM) FormatMessages(message string, history []types.Message) (interface{}, error)
FormatMessages formats messages for Vertex AI
func (*VertexLLM) GetRequiredConfig ¶
func (v *VertexLLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements
type XAILLM ¶
type XAILLM struct {
*OpenAILLM
}
XAILLM implements the LLM interface for XAI
func (*XAILLM) GetRequiredConfig ¶
func (x *XAILLM) GetRequiredConfig() map[string]config.ConfigRequirement
GetRequiredConfig returns provider-specific configuration requirements