llm

package
v1.0.9 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 14, 2025 License: MIT Imports: 18 Imported by: 15

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type ChatCompletionRequest

// ChatCompletionRequest represents a generic request for chat completion,
// shared across all providers. Optional fields carry `omitempty`, so their
// zero values are dropped from the serialized JSON payload.
type ChatCompletionRequest struct {
	Model            string    `json:"model"`    // provider-specific model identifier
	Messages         []Message `json:"messages"` // ordered conversation history
	Temperature      float32   `json:"temperature,omitempty"`
	TopP             float32   `json:"top_p,omitempty"` // nucleus-sampling cutoff
	N                int       `json:"n,omitempty"`     // number of completions to generate
	Stop             []string  `json:"stop,omitempty"`  // sequences that halt generation
	MaxTokens        int       `json:"max_tokens,omitempty"`
	PresencePenalty  float32   `json:"presence_penalty,omitempty"`
	FrequencyPenalty float32   `json:"frequency_penalty,omitempty"`
	User             string    `json:"user,omitempty"`   // end-user identifier passed through to the provider
	Tools            []Tool    `json:"tools,omitempty"`  // callable tools exposed to the model
	Stream           bool      `json:"stream,omitempty"` // request incremental (streaming) delivery
}

ChatCompletionRequest represents a generic request for chat completion

type ChatCompletionResponse

// ChatCompletionResponse represents a generic response from chat completion,
// normalized across providers.
type ChatCompletionResponse struct {
	ID      string   `json:"id"`      // provider-assigned response identifier
	Choices []Choice `json:"choices"` // one entry per requested completion
	Usage   Usage    `json:"usage"`   // token accounting for the request/response pair
}

ChatCompletionResponse represents a generic response from chat completion

type ChatCompletionStream

// ChatCompletionStream represents a streaming response. Recv returns the next
// chunk of the stream mapped into a ChatCompletionResponse; the error value
// signals stream termination (NOTE(review): presumably io.EOF on normal end —
// confirm against the concrete implementations). Close releases the stream's
// underlying resources and must be called when the caller is done.
type ChatCompletionStream interface {
	Recv() (ChatCompletionResponse, error)
	Close() error
}

ChatCompletionStream represents a streaming response

type Choice

// Choice represents a single completion choice within a response.
type Choice struct {
	Index        int     `json:"index"`         // position of this choice among the returned choices
	Message      Message `json:"message"`       // the assistant message produced for this choice
	FinishReason string  `json:"finish_reason"` // provider-reported reason generation stopped
}

Choice represents a completion choice

type ClaudeLLM added in v1.0.6

// ClaudeLLM implements the LLM interface for Anthropic's Claude.
// Construct it with NewClaudeLLM; the zero value is not usable.
type ClaudeLLM struct {
	// contains filtered or unexported fields
}

ClaudeLLM implements the LLM interface for Anthropic's Claude

func NewClaudeLLM added in v1.0.6

func NewClaudeLLM(apiKey string) *ClaudeLLM

NewClaudeLLM creates a new Claude LLM client

func (*ClaudeLLM) CreateChatCompletion added in v1.0.6

func (c *ClaudeLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)

CreateChatCompletion implements the LLM interface for Claude

func (*ClaudeLLM) CreateChatCompletionStream added in v1.0.6

func (c *ClaudeLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)

CreateChatCompletionStream implements the LLM interface for Claude streaming

type DeepSeekLLM added in v1.0.8

// DeepSeekLLM implements the LLM interface for DeepSeek.
// Construct it with NewDeepSeekLLM; the zero value is not usable.
type DeepSeekLLM struct {
	// contains filtered or unexported fields
}

DeepSeekLLM implements the LLM interface for DeepSeek

func NewDeepSeekLLM added in v1.0.8

func NewDeepSeekLLM(apiKey string) *DeepSeekLLM

NewDeepSeekLLM creates a new DeepSeek LLM client

func (*DeepSeekLLM) CreateChatCompletion added in v1.0.8

func (l *DeepSeekLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)

CreateChatCompletion implements the LLM interface for DeepSeek

func (*DeepSeekLLM) CreateChatCompletionStream added in v1.0.8

func (l *DeepSeekLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)

CreateChatCompletionStream implements the LLM interface for DeepSeek streaming

type Function

// Function represents a function definition exposed to the model as a tool.
type Function struct {
	Name        string                 `json:"name"`
	Description string                 `json:"description"`
	Parameters  map[string]interface{} `json:"parameters"` // parameter spec; typically a JSON-Schema-shaped object — confirm per provider
}

Function represents a function definition

type GeminiLLM

// GeminiLLM implements the LLM interface for Google's Gemini.
// Construct it with NewGeminiLLM; the zero value is not usable.
type GeminiLLM struct {
	// contains filtered or unexported fields
}

GeminiLLM implements the LLM interface for Google's Gemini

func NewGeminiLLM

func NewGeminiLLM(apiKey string, opts ...GeminiOptions) (*GeminiLLM, error)

NewGeminiLLM creates a new Gemini LLM client

func (*GeminiLLM) CreateChatCompletion

func (g *GeminiLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)

CreateChatCompletion implements the LLM interface for Gemini

func (*GeminiLLM) CreateChatCompletionStream

func (g *GeminiLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)

CreateChatCompletionStream implements the LLM interface for Gemini streaming

type GeminiOptions

// GeminiOptions contains configuration options for the Gemini model,
// passed variadically to NewGeminiLLM.
type GeminiOptions struct {
	Model          string                   // Gemini model name; NOTE(review): default when empty is set by NewGeminiLLM — confirm
	HarmThreshold  genai.HarmBlockThreshold // blocking threshold applied to safety categories
	SafetySettings []*genai.SafetySetting   // explicit per-category safety settings; overrides HarmThreshold when set — TODO confirm precedence
}

GeminiOptions contains configuration options for the Gemini model

type LLM

// LLM defines the interface that all LLM providers must implement:
// a single-shot completion call and a streaming variant. Both accept a
// context for cancellation and a provider-agnostic ChatCompletionRequest.
type LLM interface {
	CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)
	CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)
}

LLM defines the interface that all LLM providers must implement

type LLMProvider

type LLMProvider string

LLMProvider represents different LLM providers

// Known provider identifiers.
const (
	OpenAI          LLMProvider = "OPEN_AI"
	Azure           LLMProvider = "AZURE"
	AzureAD         LLMProvider = "AZURE_AD" // Azure with Active Directory authentication
	CloudflareAzure LLMProvider = "CLOUDFLARE_AZURE"
	Gemini          LLMProvider = "GEMINI"
	Claude          LLMProvider = "CLAUDE"
	Ollama          LLMProvider = "OLLAMA"
	DeepSeek        LLMProvider = "DEEPSEEK"
)

type Message

// Message represents a single message in a chat conversation.
type Message struct {
	Role      Role       `json:"role"`                 // who authored the message (system/user/assistant/function/tool)
	Content   string     `json:"content"`              // message text
	Name      string     `json:"name,omitempty"`       // optional author/function name
	ToolCalls []ToolCall `json:"tool_calls,omitempty"` // tool invocations requested by the assistant
}

Message represents a single message in a chat conversation

type OllamaLLM added in v1.0.7

// OllamaLLM implements the LLM interface for Ollama.
// Construct it with NewOllamaLLM (default URL) or NewOllamaLLMWithURL;
// the zero value is not usable.
type OllamaLLM struct {
	// contains filtered or unexported fields
}

OllamaLLM implements the LLM interface for Ollama

func NewOllamaLLM added in v1.0.7

func NewOllamaLLM() (*OllamaLLM, error)

NewOllamaLLM creates a new Ollama LLM client

func NewOllamaLLMWithURL added in v1.0.7

func NewOllamaLLMWithURL(baseURL string) (*OllamaLLM, error)

NewOllamaLLMWithURL creates a new Ollama LLM client with a custom URL

func (*OllamaLLM) CreateChatCompletion added in v1.0.7

func (o *OllamaLLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)

CreateChatCompletion implements the LLM interface for Ollama

func (*OllamaLLM) CreateChatCompletionStream added in v1.0.7

func (o *OllamaLLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)

CreateChatCompletionStream implements the LLM interface for Ollama streaming

type OpenAILLM

// OpenAILLM implements the LLM interface for OpenAI.
// Construct it with NewOpenAILLM; the zero value is not usable.
type OpenAILLM struct {
	// contains filtered or unexported fields
}

OpenAILLM implements the LLM interface for OpenAI

func NewOpenAILLM

func NewOpenAILLM(apiKey string) *OpenAILLM

NewOpenAILLM creates a new OpenAI LLM client

func (*OpenAILLM) CreateChatCompletion

func (o *OpenAILLM) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error)

CreateChatCompletion implements the LLM interface for OpenAI

func (*OpenAILLM) CreateChatCompletionStream

func (o *OpenAILLM) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error)

CreateChatCompletionStream implements the LLM interface for OpenAI streaming

type Role

type Role string

Role represents the role of a message participant

// Conversation roles recognized by the request/response types.
const (
	RoleSystem    Role = "system"    // system instructions
	RoleUser      Role = "user"      // end-user input
	RoleAssistant Role = "assistant" // model output
	RoleFunction  Role = "function"  // function result (legacy function-calling)
	RoleTool      Role = "tool"      // tool result
)

type StreamChoice

// StreamChoice represents a choice in a streaming response; the Delta holds
// only the increment since the previous chunk.
type StreamChoice struct {
	Index        int         `json:"index"`
	Delta        StreamDelta `json:"delta"`
	FinishReason string      `json:"finish_reason"` // empty until the stream's final chunk — TODO confirm per provider
}

StreamChoice represents a choice in a streaming response

type StreamDelta

// StreamDelta represents an incremental delta in a streaming response.
type StreamDelta struct {
	Role       Role       `json:"role"`    // set on the first chunk; empty afterwards — NOTE(review): confirm
	Content    string     `json:"content"` // text fragment to append
	ToolCalls  []ToolCall `json:"tool_calls,omitempty"`
	ToolCallID string     `json:"tool_call_id,omitempty"`
}

StreamDelta represents a delta in a streaming response

type Tool

// Tool represents a function that can be called by the LLM.
type Tool struct {
	Type     string    `json:"type"`               // tool kind; "function" is the only visible variant here
	Function *Function `json:"function,omitempty"` // definition when Type is "function"
}

Tool represents a function that can be called by the LLM

type ToolCall

// ToolCall represents a tool/function call emitted by the LLM.
type ToolCall struct {
	ID       string           `json:"id"`   // correlates the call with its eventual tool-role result
	Type     string           `json:"type"` // mirrors Tool.Type
	Function ToolCallFunction `json:"function"`
}

ToolCall represents a tool/function call from the LLM

type ToolCallFunction

// ToolCallFunction carries the payload of a function-type tool call:
// the function's name and its arguments as a raw JSON-encoded string
// that the caller is expected to decode.
type ToolCallFunction struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // JSON text, not a parsed object
}

type Usage

// Usage represents token usage reported by the provider for one request.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`     // tokens consumed by the input messages
	CompletionTokens int `json:"completion_tokens"` // tokens generated in the response
	TotalTokens      int `json:"total_tokens"`      // prompt + completion
}

Usage represents token usage

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL