gollm

package module
v0.0.0-...-f2e6e4e
Published: Feb 24, 2025 License: Apache-2.0 Imports: 13 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Candidate

type Candidate interface {
	// String returns a string representation of the candidate.
	fmt.Stringer

	// Parts returns the parts of the candidate.
	Parts() []Part
}

Candidate is one of a set of candidate responses from the LLM.

type Chat

type Chat interface {
	SendMessage(ctx context.Context, userParts ...string) (ChatResponse, error)
	SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
	SendFunctionResults(ctx context.Context, functionResults []FunctionCallResult) (ChatResponse, error)
}

Chat is an active conversation with a language model. Messages are sent and received, and added to the conversation history.
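
A minimal sketch of a multi-turn exchange, written with the package's identifiers unqualified (as if in the same package) and with context and fmt imported; the client is assumed to have been built elsewhere, for example with NewGeminiClient below.

func chatExample(ctx context.Context, client Client) error {
	chat := client.StartChat("You are a helpful assistant.")

	resp, err := chat.SendMessage(ctx, "What is a Kubernetes Deployment?")
	if err != nil {
		return err
	}
	for _, candidate := range resp.Candidates() {
		fmt.Println(candidate.String())
	}

	// A follow-up goes through the same Chat, so the model sees the
	// accumulated conversation history.
	_, err = chat.SendMessage(ctx, "How would I scale one?")
	return err
}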

type ChatResponse

type ChatResponse interface {
	UsageMetadata() any

	// Candidates are a set of candidate responses from the LLM.
	// The LLM may return multiple candidates, and we can choose the best one.
	Candidates() []Candidate
}

ChatResponse is a generic chat response from the LLM.
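
A short sketch of inspecting a ChatResponse; the concrete shape of UsageMetadata() is provider-specific, so it is only printed here.

func inspectResponse(resp ChatResponse) {
	fmt.Printf("usage: %+v\n", resp.UsageMetadata())

	// A simple selection strategy: take the first candidate, if any.
	if candidates := resp.Candidates(); len(candidates) > 0 {
		fmt.Println(candidates[0].String())
	}
}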

type Client

type Client interface {
	io.Closer

	// StartChat starts a new multi-turn chat with a language model.
	StartChat(systemPrompt string) Chat

	// GenerateCompletion generates a single completion for a given prompt.
	GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
}

Client is a client for a language model.
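
Because Client is an interface, calling code can stay provider-agnostic. A sketch of selecting a backend at runtime (the provider names are assumptions for illustration); both concrete clients below satisfy Client.

func newClient(ctx context.Context, provider string) (Client, error) {
	switch provider {
	case "vertexai":
		return NewVertexAIClient(ctx)
	default:
		return NewGeminiClient(ctx)
	}
}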

type CompletionRequest

type CompletionRequest struct {
	Prompt string `json:"prompt,omitempty"`
}

CompletionRequest is a request to generate a completion for a given prompt.

type CompletionResponse

type CompletionResponse interface {
	Response() string
	UsageMetadata() any
}

CompletionResponse is a response from the GenerateCompletion method.
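
A sketch of a single-shot completion against the Client interface; error handling is kept minimal.

func complete(ctx context.Context, client Client, prompt string) (string, error) {
	resp, err := client.GenerateCompletion(ctx, &CompletionRequest{Prompt: prompt})
	if err != nil {
		return "", err
	}
	fmt.Printf("usage: %+v\n", resp.UsageMetadata()) // provider-specific shape
	return resp.Response(), nil
}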

type FunctionCall

type FunctionCall struct {
	Name      string         `json:"name,omitempty"`
	Arguments map[string]any `json:"arguments,omitempty"`
}

FunctionCall is a function call requested by the language model. The LLM will reply with a FunctionCall naming a user-defined function, and we will send the results back.

type FunctionCallResult

type FunctionCallResult struct {
	Name   string         `json:"name,omitempty"`
	Result map[string]any `json:"result,omitempty"`
}

FunctionCallResult is the result of a function call. We use this to send the results back to the LLM.

type FunctionDefinition

type FunctionDefinition struct {
	Name        string  `json:"name,omitempty"`
	Description string  `json:"description,omitempty"`
	Parameters  *Schema `json:"parameters,omitempty"`
}

FunctionDefinition is a user-defined function that can be called by the LLM. If the LLM determines the function should be called, it will reply with a FunctionCall object; we will invoke the function and send the results back.
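
A sketch of the full function-calling round trip: register a definition, look for FunctionCall parts in the reply, run the function, and send the results back. The get_pod_count function and its result are assumptions for illustration.

func functionCallExample(ctx context.Context, chat Chat) error {
	err := chat.SetFunctionDefinitions([]*FunctionDefinition{{
		Name:        "get_pod_count",
		Description: "Returns the number of pods in a namespace.",
		Parameters: &Schema{
			Type: TypeObject,
			Properties: map[string]*Schema{
				"namespace": {Type: TypeString, Description: "The namespace to count pods in."},
			},
			Required: []string{"namespace"},
		},
	}})
	if err != nil {
		return err
	}

	resp, err := chat.SendMessage(ctx, "How many pods are running in kube-system?")
	if err != nil {
		return err
	}

	for _, candidate := range resp.Candidates() {
		for _, part := range candidate.Parts() {
			calls, ok := part.AsFunctionCalls()
			if !ok {
				continue
			}
			var results []FunctionCallResult
			for _, call := range calls {
				// Invoke the named function with call.Arguments here;
				// the result below is a stand-in.
				results = append(results, FunctionCallResult{
					Name:   call.Name,
					Result: map[string]any{"count": 12},
				})
			}
			// Send the results back so the model can produce its final answer.
			if _, err := chat.SendFunctionResults(ctx, results); err != nil {
				return err
			}
		}
	}
	return nil
}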

type GeminiCandidate

type GeminiCandidate struct {
	// contains filtered or unexported fields
}

GeminiCandidate is a candidate for the response. It implements the Candidate interface.

func (*GeminiCandidate) Parts

func (r *GeminiCandidate) Parts() []Part

Parts returns the parts of the candidate.

func (*GeminiCandidate) String

func (r *GeminiCandidate) String() string

String returns a string representation of the response.

type GeminiChat

type GeminiChat struct {
	// contains filtered or unexported fields
}

GeminiChat is a chat with the model. It implements the Chat interface.

func (*GeminiChat) SendFunctionResults

func (c *GeminiChat) SendFunctionResults(ctx context.Context, functionResults []FunctionCallResult) (ChatResponse, error)

SendFunctionResults sends the results of a function call to the model. It returns a ChatResponse object containing the response from the model.

func (*GeminiChat) SendMessage

func (c *GeminiChat) SendMessage(ctx context.Context, parts ...string) (ChatResponse, error)

SendMessage sends a message to the model. It returns a ChatResponse object containing the response from the model.

func (*GeminiChat) SetFunctionDefinitions

func (c *GeminiChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error

SetFunctionDefinitions sets the function definitions for the chat. This allows the LLM to call user-defined functions.

type GeminiChatResponse

type GeminiChatResponse struct {
	// contains filtered or unexported fields
}

GeminiChatResponse is a response from the Gemini API. It implements the ChatResponse interface.

func (*GeminiChatResponse) Candidates

func (r *GeminiChatResponse) Candidates() []Candidate

Candidates returns the candidates for the response.

func (*GeminiChatResponse) String

func (r *GeminiChatResponse) String() string

String returns a string representation of the response.

func (*GeminiChatResponse) UsageMetadata

func (r *GeminiChatResponse) UsageMetadata() any

UsageMetadata returns the usage metadata for the response.

type GeminiClient

type GeminiClient struct {
	// contains filtered or unexported fields
}

GeminiClient is a client for the Gemini API. It implements the Client interface.

func NewGeminiClient

func NewGeminiClient(ctx context.Context) (*GeminiClient, error)

NewGeminiClient builds a client for the Gemini API.
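
A minimal construction sketch, written as the body of a main function with context, fmt, and log imported. Credentials are expected to be available in the environment, and the model name is an assumption.

ctx := context.Background()

client, err := NewGeminiClient(ctx)
if err != nil {
	log.Fatal(err)
}
defer client.Close()

// WithModel selects the model and returns a *GeminiClient, so calls can be chained.
gemini := client.WithModel("gemini-2.0-flash")

resp, err := gemini.GenerateCompletion(ctx, &CompletionRequest{Prompt: "Say hello."})
if err != nil {
	log.Fatal(err)
}
fmt.Println(resp.Response())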

func (*GeminiClient) Close

func (c *GeminiClient) Close() error

Close frees the resources used by the client.

func (*GeminiClient) GenerateCompletion

func (c *GeminiClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)

func (*GeminiClient) ListModels

func (c *GeminiClient) ListModels(ctx context.Context) (modelNames []string, err error)

ListModels lists the models available in the Gemini API.
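
Continuing the construction sketch above, listing the models available to the client:

models, err := client.ListModels(ctx)
if err != nil {
	log.Fatal(err)
}
for _, name := range models {
	fmt.Println(name)
}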

func (*GeminiClient) StartChat

func (c *GeminiClient) StartChat(systemPrompt string) Chat

StartChat starts a new chat with the model.

func (*GeminiClient) WithModel

func (c *GeminiClient) WithModel(model string) *GeminiClient

WithModel sets the model to use for the client.

type GeminiCompletionResponse

type GeminiCompletionResponse struct {
	// contains filtered or unexported fields
}

func (*GeminiCompletionResponse) Response

func (r *GeminiCompletionResponse) Response() string

func (*GeminiCompletionResponse) UsageMetadata

func (r *GeminiCompletionResponse) UsageMetadata() any

type GeminiPart

type GeminiPart struct {
	// contains filtered or unexported fields
}

GeminiPart is a part of a candidate. It implements the Part interface.

func (*GeminiPart) AsFunctionCalls

func (p *GeminiPart) AsFunctionCalls() ([]FunctionCall, bool)

AsFunctionCalls returns the function calls of the part.

func (*GeminiPart) AsText

func (p *GeminiPart) AsText() (string, bool)

AsText returns the text of the part.

type Part

type Part interface {
	// AsText returns the text of the part.
	// if the part is not text, it returns ("", false)
	AsText() (string, bool)

	// AsFunctionCalls returns the function calls of the part.
	// if the part is not a function call, it returns (nil, false)
	AsFunctionCalls() ([]FunctionCall, bool)
}

Part is a part of a candidate response from the LLM. It can be a text response or a function call. A response may comprise multiple parts, for example a text part such as "I need to do the necessary" followed by a function call to "do_necessary".
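
A sketch of dispatching on the two kinds of Part within a candidate:

func handleCandidate(candidate Candidate) {
	for _, part := range candidate.Parts() {
		if text, ok := part.AsText(); ok {
			fmt.Println("text:", text)
			continue
		}
		if calls, ok := part.AsFunctionCalls(); ok {
			for _, call := range calls {
				fmt.Printf("function call: %s(%v)\n", call.Name, call.Arguments)
			}
		}
	}
}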

type Schema

type Schema struct {
	Type        SchemaType         `json:"type,omitempty"`
	Properties  map[string]*Schema `json:"properties,omitempty"`
	Description string             `json:"description,omitempty"`
	Required    []string           `json:"required,omitempty"`
}

Schema is a schema for a function definition.

type SchemaType

type SchemaType string

SchemaType is the type of a field in a Schema.

const (
	TypeObject SchemaType = "object"
	TypeString SchemaType = "string"
)
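
A sketch of a nested Schema suitable for a FunctionDefinition's Parameters field, using the two SchemaType constants above; the field names are assumptions for illustration.

params := &Schema{
	Type:        TypeObject,
	Description: "Parameters for creating a resource.",
	Properties: map[string]*Schema{
		"name": {Type: TypeString, Description: "Name of the resource."},
		"metadata": {
			Type: TypeObject,
			Properties: map[string]*Schema{
				"namespace": {Type: TypeString, Description: "Target namespace."},
			},
		},
	},
	Required: []string{"name"},
}

_ = &FunctionDefinition{Name: "create_resource", Description: "Creates a resource.", Parameters: params}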

type VertexAICandidate

type VertexAICandidate struct {
	// contains filtered or unexported fields
}

VertexAICandidate is a candidate for the response. It implements the Candidate interface.

func (*VertexAICandidate) Parts

func (r *VertexAICandidate) Parts() []Part

Parts returns the parts of the candidate.

func (*VertexAICandidate) String

func (r *VertexAICandidate) String() string

String returns a string representation of the response.

type VertexAIChat

type VertexAIChat struct {
	// contains filtered or unexported fields
}

VertexAIChat is a chat with the model. It implements the Chat interface.

func (*VertexAIChat) SendFunctionResults

func (c *VertexAIChat) SendFunctionResults(ctx context.Context, functionResults []FunctionCallResult) (ChatResponse, error)

SendFunctionResults sends the results of a function call to the model. It returns a ChatResponse object containing the response from the model.

func (*VertexAIChat) SendMessage

func (c *VertexAIChat) SendMessage(ctx context.Context, parts ...string) (ChatResponse, error)

SendMessage sends a message to the model. It returns a ChatResponse object containing the response from the model.

func (*VertexAIChat) SetFunctionDefinitions

func (c *VertexAIChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error

SetFunctionDefinitions sets the function definitions for the chat. This allows the LLM to call user-defined functions.

type VertexAIChatResponse

type VertexAIChatResponse struct {
	// contains filtered or unexported fields
}

VertexAIChatResponse is a response from the VertexAI API. It implements the ChatResponse interface.

func (*VertexAIChatResponse) Candidates

func (r *VertexAIChatResponse) Candidates() []Candidate

Candidates returns the candidates for the response.

func (*VertexAIChatResponse) String

func (r *VertexAIChatResponse) String() string

String returns a string representation of the response.

func (*VertexAIChatResponse) UsageMetadata

func (r *VertexAIChatResponse) UsageMetadata() any

UsageMetadata returns the usage metadata for the response.

type VertexAIClient

type VertexAIClient struct {
	// contains filtered or unexported fields
}

VertexAIClient is a client for the VertexAI API. It implements the Client interface.

func NewVertexAIClient

func NewVertexAIClient(ctx context.Context) (*VertexAIClient, error)

NewVertexAIClient builds a client for the VertexAI API.
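
Construction mirrors the Gemini client; Vertex AI project and credential configuration are expected to come from the environment, and the model name is an assumption. Written as the body of a main function with context, fmt, and log imported.

ctx := context.Background()

client, err := NewVertexAIClient(ctx)
if err != nil {
	log.Fatal(err)
}
defer client.Close()

chat := client.WithModel("gemini-2.0-flash").StartChat("You are a helpful assistant.")
resp, err := chat.SendMessage(ctx, "Hello from Vertex AI")
if err != nil {
	log.Fatal(err)
}
for _, candidate := range resp.Candidates() {
	fmt.Println(candidate.String())
}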

func (*VertexAIClient) Close

func (c *VertexAIClient) Close() error

Close frees the resources used by the client.

func (*VertexAIClient) GenerateCompletion

func (c *VertexAIClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)

func (*VertexAIClient) StartChat

func (c *VertexAIClient) StartChat(systemPrompt string) Chat

StartChat starts a new chat with the model.

func (*VertexAIClient) WithModel

func (c *VertexAIClient) WithModel(model string) *VertexAIClient

WithModel sets the model to use for the client.

type VertexAICompletionResponse

type VertexAICompletionResponse struct {
	// contains filtered or unexported fields
}

func (*VertexAICompletionResponse) Response

func (r *VertexAICompletionResponse) Response() string

func (*VertexAICompletionResponse) UsageMetadata

func (r *VertexAICompletionResponse) UsageMetadata() any

type VertexAIPart

type VertexAIPart struct {
	// contains filtered or unexported fields
}

VertexAIPart is a part of a candidate. It implements the Part interface.

func (*VertexAIPart) AsFunctionCalls

func (p *VertexAIPart) AsFunctionCalls() ([]FunctionCall, bool)

AsFunctionCalls returns the function calls of the part.

func (*VertexAIPart) AsText

func (p *VertexAIPart) AsText() (string, bool)

AsText returns the text of the part.
