golc

package module
v0.0.7
Published: Jun 17, 2023 License: MIT Imports: 3 Imported by: 0

README

🦜️🔗 GoLC

⚡ Building applications with LLMs through composability ⚡


GoLC is a project heavily inspired by the LangChain project, aimed at building applications with Large Language Models (LLMs) through composability. It provides a framework for creating and integrating LLM-based applications in Go: components are built as composable modules, which keeps them flexible and extensible. By combining the capabilities of LLMs with this composable design, GoLC brings new opportunities to the Golang ecosystem for developing natural language processing applications.

How to use

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/hupe1980/golc/llm"
)

func main() {
	// Create an OpenAI client, reading the API key from the environment.
	openai, err := llm.NewOpenAI(os.Getenv("OPENAI_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}

	// Ask the model for a single text completion.
	completion, err := openai.Predict(context.Background(), "What is the capital of France?")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(completion)
}

Output:

The capital of France is Paris.

For more example usage, see _examples.

License

MIT

Documentation

Index

Constants

This section is empty.

Variables

var Verbose = false

Functions

func StringifyChatMessages

func StringifyChatMessages(messages []ChatMessage, optFns ...func(o *StringifyChatMessagesOptions)) (string, error)
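
A short usage sketch, assuming the identifiers on this page live in the root package imported as github.com/hupe1980/golc; the prefix values chosen here are arbitrary:

package main

import (
	"fmt"
	"log"

	"github.com/hupe1980/golc"
)

func main() {
	messages := []golc.ChatMessage{
		golc.NewHumanChatMessage("What is the capital of France?"),
		golc.NewAIChatMessage("The capital of France is Paris."),
	}

	// Override the default prefixes via the functional options.
	transcript, err := golc.StringifyChatMessages(messages, func(o *golc.StringifyChatMessagesOptions) {
		o.HumanPrefix = "User"
		o.AIPrefix = "Assistant"
	})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(transcript)
}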

Types

type AIChatMessage

type AIChatMessage struct {
	// contains filtered or unexported fields
}

func NewAIChatMessage

func NewAIChatMessage(text string) *AIChatMessage

func (AIChatMessage) Text

func (m AIChatMessage) Text() string

func (AIChatMessage) Type

func (m AIChatMessage) Type() ChatMessageType

type Agent added in v0.0.3

type Agent interface {
	Plan(ctx context.Context, intermediateSteps []AgentStep, inputs map[string]string) ([]AgentAction, *AgentFinish, error)
	InputKeys() []string
	OutputKeys() []string
}

type AgentAction added in v0.0.4

type AgentAction struct {
	Tool      string
	ToolInput string
	Log       string
}

AgentAction is the agent's action to take.

type AgentFinish added in v0.0.4

type AgentFinish struct {
	ReturnValues map[string]any
	Log          string
}

AgentFinish is the agent's return value.

type AgentStep added in v0.0.4

type AgentStep struct {
	Action      AgentAction
	Observation string
}

AgentStep is a step of the agent.
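
A minimal sketch of an Agent implementation, assuming the root import path github.com/hupe1980/golc; the agent, its "echo" tool name, and its input/output keys are illustrative, not part of the library:

package agentsketch

import (
	"context"

	"github.com/hupe1980/golc"
)

// echoAgent is a toy agent: it calls a (hypothetical) "echo" tool once and
// then finishes with the tool's observation as its output.
type echoAgent struct{}

func (a echoAgent) Plan(ctx context.Context, intermediateSteps []golc.AgentStep, inputs map[string]string) ([]golc.AgentAction, *golc.AgentFinish, error) {
	if len(intermediateSteps) == 0 {
		action := golc.AgentAction{Tool: "echo", ToolInput: inputs["input"], Log: "calling echo tool"}
		return []golc.AgentAction{action}, nil, nil
	}

	finish := &golc.AgentFinish{
		ReturnValues: map[string]any{"output": intermediateSteps[0].Observation},
		Log:          "returning first observation",
	}
	return nil, finish, nil
}

func (a echoAgent) InputKeys() []string  { return []string{"input"} }
func (a echoAgent) OutputKeys() []string { return []string{"output"} }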

type Callback added in v0.0.4

type Callback interface {
	AlwaysVerbose() bool
	RaiseError() bool
	OnLLMStart(llmName string, prompts []string) error
	OnLLMNewToken(token string) error
	OnLLMEnd(result *LLMResult) error
	OnLLMError(llmError error) error
	OnChainStart(chainName string, inputs *ChainValues) error
	OnChainEnd(outputs *ChainValues) error
	OnChainError(chainError error) error
}
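
A sketch of a Callback that simply logs activity, assuming the root import path github.com/hupe1980/golc; the type is illustrative, not part of the library:

package callbacksketch

import (
	"log"

	"github.com/hupe1980/golc"
)

// logCallback logs LLM and chain events; it never raises errors itself.
type logCallback struct{}

func (c logCallback) AlwaysVerbose() bool { return true }
func (c logCallback) RaiseError() bool    { return false }

func (c logCallback) OnLLMStart(llmName string, prompts []string) error {
	log.Printf("LLM %q started with %d prompt(s)", llmName, len(prompts))
	return nil
}

func (c logCallback) OnLLMNewToken(token string) error { return nil }

func (c logCallback) OnLLMEnd(result *golc.LLMResult) error {
	log.Printf("LLM finished with %d generation group(s)", len(result.Generations))
	return nil
}

func (c logCallback) OnLLMError(llmError error) error {
	log.Println("LLM error:", llmError)
	return nil
}

func (c logCallback) OnChainStart(chainName string, inputs *golc.ChainValues) error {
	log.Println("chain started:", chainName)
	return nil
}

func (c logCallback) OnChainEnd(outputs *golc.ChainValues) error {
	log.Println("chain finished")
	return nil
}

func (c logCallback) OnChainError(chainError error) error {
	log.Println("chain error:", chainError)
	return nil
}

Such a callback could then be supplied per call through the Callbacks field of GenerateOptions (see below).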

type Chain added in v0.0.2

type Chain interface {
	Call(ctx context.Context, inputs ChainValues) (ChainValues, error)
	InputKeys() []string
	OutputKeys() []string
}

type ChainValues

type ChainValues map[string]any
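
A minimal sketch of a custom Chain, again assuming the root import path github.com/hupe1980/golc; the "input"/"output" keys are illustrative:

package chainsketch

import (
	"context"
	"strings"

	"github.com/hupe1980/golc"
)

// upperChain is a toy chain that uppercases the "input" value.
type upperChain struct{}

func (c upperChain) Call(ctx context.Context, inputs golc.ChainValues) (golc.ChainValues, error) {
	text, _ := inputs["input"].(string)
	return golc.ChainValues{"output": strings.ToUpper(text)}, nil
}

func (c upperChain) InputKeys() []string  { return []string{"input"} }
func (c upperChain) OutputKeys() []string { return []string{"output"} }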

type ChatMessage

type ChatMessage interface {
	Text() string
	Type() ChatMessageType
}

type ChatMessageHistory added in v0.0.6

type ChatMessageHistory interface {
	// Messages returns the messages stored in the store.
	Messages() ([]ChatMessage, error)
	// Add a user message to the store.
	AddUserMessage(text string) error
	// Add an AI message to the store.
	AddAIMessage(text string) error
	// Add a self-created message to the store.
	AddMessage(message ChatMessage) error
	// Remove all messages from the store.
	Clear() error
}
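
A sketch of an in-memory implementation, using the message constructors documented below and assuming the root import path github.com/hupe1980/golc:

package historysketch

import "github.com/hupe1980/golc"

// inMemoryHistory keeps chat messages in a slice.
type inMemoryHistory struct {
	messages []golc.ChatMessage
}

func (h *inMemoryHistory) Messages() ([]golc.ChatMessage, error) {
	return h.messages, nil
}

func (h *inMemoryHistory) AddUserMessage(text string) error {
	return h.AddMessage(golc.NewHumanChatMessage(text))
}

func (h *inMemoryHistory) AddAIMessage(text string) error {
	return h.AddMessage(golc.NewAIChatMessage(text))
}

func (h *inMemoryHistory) AddMessage(message golc.ChatMessage) error {
	h.messages = append(h.messages, message)
	return nil
}

func (h *inMemoryHistory) Clear() error {
	h.messages = nil
	return nil
}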

type ChatMessageType

type ChatMessageType string

const (
	ChatMessageTypeHuman   ChatMessageType = "human"
	ChatMessageTypeAI      ChatMessageType = "ai"
	ChatMessageTypeSystem  ChatMessageType = "system"
	ChatMessageTypeGeneric ChatMessageType = "generic"
)

type Document

type Document struct {
	PageContent string
	Metadata    map[string]any
}

type Embedder added in v0.0.5

type Embedder interface {
	// EmbedDocuments returns a vector for each text.
	EmbedDocuments(ctx context.Context, texts []string) ([][]float64, error)
	// EmbedQuery embeds a single text.
	EmbedQuery(ctx context.Context, text string) ([]float64, error)
}

Embedder is the interface for creating vector embeddings from texts.
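
A sketch of a stub Embedder, useful as a test double; the fixed dimension and zero vectors are placeholders, not real embeddings:

package embeddersketch

import "context"

// zeroEmbedder returns zero vectors of a fixed dimension.
type zeroEmbedder struct {
	dim int
}

func (e zeroEmbedder) EmbedDocuments(ctx context.Context, texts []string) ([][]float64, error) {
	vectors := make([][]float64, len(texts))
	for i := range texts {
		vectors[i] = make([]float64, e.dim)
	}
	return vectors, nil
}

func (e zeroEmbedder) EmbedQuery(ctx context.Context, text string) ([]float64, error) {
	return make([]float64, e.dim), nil
}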

type GenerateOptions added in v0.0.7

type GenerateOptions struct {
	Stop      []string
	Callbacks []Callback
}
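
These options are passed per call through the variadic functional options. A sketch, assuming the OpenAI client from the README satisfies the LLM interface and that the types on this page live in the root package github.com/hupe1980/golc:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/hupe1980/golc"
	"github.com/hupe1980/golc/llm"
)

func main() {
	openai, err := llm.NewOpenAI(os.Getenv("OPENAI_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}

	// Stop generation at the first blank line.
	completion, err := openai.Predict(context.Background(), "List three countries:",
		func(o *golc.GenerateOptions) {
			o.Stop = []string{"\n\n"}
		},
	)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(completion)
}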

type Generation

type Generation struct {
	Text    string
	Message ChatMessage
	Info    map[string]any
}

type GenericChatMessage

type GenericChatMessage struct {
	// contains filtered or unexported fields
}

func NewGenericChatMessage

func NewGenericChatMessage(text, role string) *GenericChatMessage

func (GenericChatMessage) Role

func (m GenericChatMessage) Role() string

func (GenericChatMessage) Text

func (m GenericChatMessage) Text() string

func (GenericChatMessage) Type

func (m GenericChatMessage) Type() ChatMessageType

type HumanChatMessage

type HumanChatMessage struct {
	// contains filtered or unexported fields
}

func NewHumanChatMessage

func NewHumanChatMessage(text string) *HumanChatMessage

func (HumanChatMessage) Text

func (m HumanChatMessage) Text() string

func (HumanChatMessage) Type

func (m HumanChatMessage) Type() ChatMessageType

type LLM

type LLM interface {
	Tokenizer
	GeneratePrompt(ctx context.Context, promptValues []PromptValue, optFns ...func(o *GenerateOptions)) (*LLMResult, error)
	Predict(ctx context.Context, text string, optFns ...func(o *GenerateOptions)) (string, error)
	PredictMessages(ctx context.Context, messages []ChatMessage, optFns ...func(o *GenerateOptions)) (ChatMessage, error)
}
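
Besides Predict (shown in the README), the interface offers chat-style prediction. A sketch, again assuming the OpenAI client satisfies this interface and the root import path github.com/hupe1980/golc:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/hupe1980/golc"
	"github.com/hupe1980/golc/llm"
)

func main() {
	openai, err := llm.NewOpenAI(os.Getenv("OPENAI_API_KEY"))
	if err != nil {
		log.Fatal(err)
	}

	// PredictMessages takes a chat history instead of a single string.
	reply, err := openai.PredictMessages(context.Background(), []golc.ChatMessage{
		golc.NewSystemChatMessage("You are a concise assistant."),
		golc.NewHumanChatMessage("What is the capital of France?"),
	})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(reply.Text())
}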

type LLMResult

type LLMResult struct {
	Generations [][]*Generation
	LLMOutput   map[string]any
}

type Memory

type Memory interface {
	// Input keys this memory class will load dynamically.
	MemoryVariables() []string
	// Return key-value pairs given the text input to the chain.
	// If inputs is nil, return all memories.
	LoadMemoryVariables(inputs map[string]any) (map[string]any, error)
	// Save the context of this model run to memory.
	SaveContext(inputs map[string]any, outputs map[string]any) error
	// Clear memory contents.
	Clear() error
}
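
A minimal sketch of a Memory implementation; the single "history" variable and its behavior are illustrative only:

package memorysketch

// staticMemory exposes one memory variable, "history", and ignores saves.
type staticMemory struct {
	history string
}

func (m *staticMemory) MemoryVariables() []string {
	return []string{"history"}
}

func (m *staticMemory) LoadMemoryVariables(inputs map[string]any) (map[string]any, error) {
	return map[string]any{"history": m.history}, nil
}

func (m *staticMemory) SaveContext(inputs map[string]any, outputs map[string]any) error {
	// A real implementation would append the latest input/output pair here.
	return nil
}

func (m *staticMemory) Clear() error {
	m.history = ""
	return nil
}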

type OutputParser added in v0.0.2

type OutputParser[T any] interface {
	// Parse parses the output of an LLM call.
	Parse(text string) (T, error)
	// ParseWithPrompt parses the output of an LLM call with the prompt used.
	ParseWithPrompt(text string, prompt PromptValue) (T, error)
	// GetFormatInstructions returns a string describing the format of the output.
	GetFormatInstructions() (string, error)
	// Type returns the string type key uniquely identifying this class of parser
	Type() string
}

OutputParser is an interface for parsing the output of an LLM call.
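
A sketch of an OutputParser[bool] that maps a yes/no answer to a boolean, assuming the root import path github.com/hupe1980/golc; the format instructions and type key are illustrative:

package parsersketch

import (
	"fmt"
	"strings"

	"github.com/hupe1980/golc"
)

// boolParser parses a yes/no answer from an LLM into a bool.
type boolParser struct{}

func (p boolParser) Parse(text string) (bool, error) {
	switch strings.ToLower(strings.TrimSpace(text)) {
	case "yes", "true":
		return true, nil
	case "no", "false":
		return false, nil
	default:
		return false, fmt.Errorf("cannot parse %q as bool", text)
	}
}

func (p boolParser) ParseWithPrompt(text string, prompt golc.PromptValue) (bool, error) {
	return p.Parse(text)
}

func (p boolParser) GetFormatInstructions() (string, error) {
	return "Answer with a single word: yes or no.", nil
}

func (p boolParser) Type() string { return "bool_parser" }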

type PromptValue

type PromptValue interface {
	String() string
	Messages() []ChatMessage
}
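
A sketch of a PromptValue that adapts a plain string, assuming the root import path github.com/hupe1980/golc:

package promptsketch

import "github.com/hupe1980/golc"

// stringPromptValue represents a prompt as both a plain string and a
// single human chat message.
type stringPromptValue string

func (v stringPromptValue) String() string {
	return string(v)
}

func (v stringPromptValue) Messages() []golc.ChatMessage {
	return []golc.ChatMessage{golc.NewHumanChatMessage(string(v))}
}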

type Retriever

type Retriever interface {
	GetRelevantDocuments(ctx context.Context, query string) ([]Document, error)
}
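
A sketch of an in-memory Retriever that does a naive substring match over a slice of Documents, assuming the root import path github.com/hupe1980/golc:

package retrieversketch

import (
	"context"
	"strings"

	"github.com/hupe1980/golc"
)

// sliceRetriever returns every stored document whose content contains the query.
type sliceRetriever struct {
	docs []golc.Document
}

func (r sliceRetriever) GetRelevantDocuments(ctx context.Context, query string) ([]golc.Document, error) {
	var relevant []golc.Document
	for _, doc := range r.docs {
		if strings.Contains(strings.ToLower(doc.PageContent), strings.ToLower(query)) {
			relevant = append(relevant, doc)
		}
	}
	return relevant, nil
}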

type StringifyChatMessagesOptions

type StringifyChatMessagesOptions struct {
	HumanPrefix  string
	AIPrefix     string
	SystemPrefix string
}

type SystemChatMessage

type SystemChatMessage struct {
	// contains filtered or unexported fields
}

func NewSystemChatMessage

func NewSystemChatMessage(text string) *SystemChatMessage

func (SystemChatMessage) Text

func (m SystemChatMessage) Text() string

func (SystemChatMessage) Type

func (m SystemChatMessage) Type() ChatMessageType

type Tokenizer added in v0.0.7

type Tokenizer interface {
	GetTokenIDs(text string) ([]int, error)
	GetNumTokens(text string) (int, error)
	GetNumTokensFromMessage(messages []ChatMessage) (int, error)
}
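
A crude sketch of a Tokenizer that counts whitespace-separated words (real implementations use a model-specific encoding), assuming the root import path github.com/hupe1980/golc:

package tokenizersketch

import (
	"strings"

	"github.com/hupe1980/golc"
)

// wordTokenizer treats every whitespace-separated word as one token.
type wordTokenizer struct{}

func (t wordTokenizer) GetTokenIDs(text string) ([]int, error) {
	ids := make([]int, len(strings.Fields(text)))
	for i := range ids {
		ids[i] = i // placeholder IDs; a real tokenizer maps tokens to a vocabulary
	}
	return ids, nil
}

func (t wordTokenizer) GetNumTokens(text string) (int, error) {
	return len(strings.Fields(text)), nil
}

func (t wordTokenizer) GetNumTokensFromMessage(messages []golc.ChatMessage) (int, error) {
	total := 0
	for _, m := range messages {
		total += len(strings.Fields(m.Text()))
	}
	return total, nil
}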

type Tool added in v0.0.3

type Tool interface {
	Name() string
	Description() string
	Run(context.Context, string) (string, error)
}
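
A minimal sketch of a Tool an agent could call; the tool name and behavior are illustrative only:

package toolsketch

import (
	"context"
	"time"
)

// clockTool reports the current UTC time, ignoring its input.
type clockTool struct{}

func (t clockTool) Name() string {
	return "current_time"
}

func (t clockTool) Description() string {
	return "Returns the current UTC time in RFC 3339 format."
}

func (t clockTool) Run(ctx context.Context, input string) (string, error) {
	return time.Now().UTC().Format(time.RFC3339), nil
}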
