golc

package module
v0.0.3 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jun 13, 2023 License: MIT Imports: 3 Imported by: 0

README

🦜️🔗 GoLC

⚡ Building applications with LLMs through composability ⚡

Build Status Go Reference

GoLC is an innovative project heavily inspired by the LangChain project, aimed at building applications with Large Language Models (LLMs) through the concept of composability. It provides a framework that enables developers to create and integrate LLM-based applications seamlessly. Through the principles of composability, GoLC allows for the modular construction of LLM-based components, offering the flexibility and extensibility needed to develop powerful language processing applications. By combining the capabilities of LLMs with composable building blocks, GoLC brings new opportunities for natural language processing development to the Golang ecosystem.

How to use

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/hupe1980/golc/llm/openai"
)

func main() {
	// Read the OpenAI API key from the environment.
	apiKey := os.Getenv("OPENAI_API_KEY")

	// Create a new OpenAI-backed LLM client.
	model, err := openai.New(apiKey)
	if err != nil {
		log.Fatal(err)
	}

	// Ask the model a question and print its completion.
	answer, err := model.Call(context.Background(), "What is the capital of France?")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(answer)
}

Output:

The capital of France is Paris.

For more example usage, see _examples.

References

License

MIT

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func StringifyChatMessages

func StringifyChatMessages(messages []ChatMessage, optFns ...func(o *StringifyChatMessagesOptions)) (string, error)

Types

type AIChatMessage

type AIChatMessage struct {
	// contains filtered or unexported fields
}

func NewAIChatMessage

func NewAIChatMessage(text string) *AIChatMessage

func (AIChatMessage) Text

func (m AIChatMessage) Text() string

func (AIChatMessage) Type

func (m AIChatMessage) Type() ChatMessageType

type Agent added in v0.0.3

// Agent is the interface implemented by agents that plan actions.
// NOTE(review): Plan has no return values in this version — presumably results
// are delivered through side effects or a later revision; confirm against callers.
type Agent interface {
	// Plan determines the next step for the given context.
	Plan(ctx context.Context)
	// InputKeys returns the keys the agent expects as input.
	InputKeys() []string
	// OutputKeys returns the keys the agent produces as output.
	OutputKeys() []string
}

type Chain added in v0.0.2

// Chain is the interface for a callable unit that maps named input values
// to named output values.
type Chain interface {
	// Call runs the chain with the given input values and returns its outputs.
	Call(ctx context.Context, values ChainValues) (ChainValues, error)
	// InputKeys returns the keys the chain expects as input.
	InputKeys() []string
	// OutputKeys returns the keys the chain produces as output.
	OutputKeys() []string
}

type ChainValues

// ChainValues holds the named input and output values passed through a Chain.
type ChainValues map[string]any

type ChatMessage

// ChatMessage is the interface implemented by all chat message kinds
// (human, AI, system, generic).
type ChatMessage interface {
	// Text returns the message content.
	Text() string
	// Type returns the kind of the message.
	Type() ChatMessageType
}

type ChatMessageType

// ChatMessageType identifies the kind of a ChatMessage.
type ChatMessageType string

// Supported chat message types.
const (
	ChatMessageTypeHuman   ChatMessageType = "human"
	ChatMessageTypeAI      ChatMessageType = "ai"
	ChatMessageTypeSystem  ChatMessageType = "system"
	ChatMessageTypeGeneric ChatMessageType = "generic"
)

type Document

// Document is a piece of text content together with arbitrary metadata.
type Document struct {
	// PageContent is the text of the document.
	PageContent string
	// Metadata carries additional key-value information about the document.
	Metadata map[string]any
}

type Generation

// Generation is a single output produced by an LLM call.
type Generation struct {
	// Text is the generated text.
	Text string
	// Message is the generated output as a chat message.
	Message ChatMessage
	// Info carries provider-specific details about this generation.
	Info map[string]any
}

type GenericChatMessage

type GenericChatMessage struct {
	// contains filtered or unexported fields
}

func NewGenericChatMessage

func NewGenericChatMessage(text, role string) *GenericChatMessage

func (GenericChatMessage) Role

func (m GenericChatMessage) Role() string

func (GenericChatMessage) Text

func (m GenericChatMessage) Text() string

func (GenericChatMessage) Type

type HumanChatMessage

type HumanChatMessage struct {
	// contains filtered or unexported fields
}

func NewHumanChatMessage

func NewHumanChatMessage(text string) *HumanChatMessage

func (HumanChatMessage) Text

func (m HumanChatMessage) Text() string

func (HumanChatMessage) Type

type LLM

// LLM is the interface implemented by large language model backends.
type LLM interface {
	// GeneratePrompt generates results for the given prompt values.
	GeneratePrompt(ctx context.Context, promptValues []PromptValue) (*LLMResult, error)
	// Predict returns the model's completion for a plain-text input.
	Predict(ctx context.Context, text string) (string, error)
	// PredictMessages returns the model's reply to a sequence of chat messages.
	PredictMessages(ctx context.Context, messages []ChatMessage) (ChatMessage, error)
}

type LLMResult

// LLMResult is the outcome of an LLM generation call.
type LLMResult struct {
	// Generations holds one list of candidate generations per input prompt.
	Generations [][]Generation
	// LLMOutput carries provider-specific output metadata.
	LLMOutput map[string]any
}

type Memory

// Memory is the interface for storing and recalling the state of chain runs.
type Memory interface {
	// MemoryVariables returns the input keys this memory class will load dynamically.
	MemoryVariables() []string
	// LoadMemoryVariables returns key-value pairs given the text input to the chain.
	// If inputs is nil, all memories are returned.
	LoadMemoryVariables(inputs map[string]any) (map[string]any, error)
	// SaveContext saves the context of this model run to memory.
	SaveContext(inputs map[string]any, outputs map[string]any) error
	// Clear clears the memory contents.
	Clear() error
}

type OutputParser added in v0.0.2

// OutputParser is an interface for parsing the output of an LLM call
// into a value of type T.
type OutputParser[T any] interface {
	// Parse parses the output of an LLM call.
	Parse(text string) (T, error)
	// ParseWithPrompt parses the output of an LLM call with the prompt used.
	ParseWithPrompt(text string, prompt PromptValue) (T, error)
	// GetFormatInstructions returns a string describing the format of the output.
	GetFormatInstructions() (string, error)
	// Type returns the string type key uniquely identifying this class of parser.
	Type() string
}

OutputParser is an interface for parsing the output of an LLM call.

type PromptValue

// PromptValue is a prompt that can be rendered either as a plain string
// or as a sequence of chat messages.
type PromptValue interface {
	// String renders the prompt as plain text.
	String() string
	// Messages renders the prompt as chat messages.
	Messages() []ChatMessage
}

type Retriever

// Retriever is the interface for fetching documents relevant to a query.
type Retriever interface {
	// GetRelevantDocuments returns the documents relevant to the given query.
	GetRelevantDocuments(ctx context.Context, query string) ([]Document, error)
}

type StringifyChatMessagesOptions

// StringifyChatMessagesOptions configures how StringifyChatMessages labels
// each message role in the resulting string.
type StringifyChatMessagesOptions struct {
	// HumanPrefix labels human messages.
	HumanPrefix string
	// AIPrefix labels AI messages.
	AIPrefix string
	// SystemPrefix labels system messages.
	SystemPrefix string
}

type SystemChatMessage

type SystemChatMessage struct {
	// contains filtered or unexported fields
}

func NewSystemChatMessage

func NewSystemChatMessage(text string) *SystemChatMessage

func (SystemChatMessage) Text

func (m SystemChatMessage) Text() string

func (SystemChatMessage) Type

type Tool added in v0.0.3

// Tool is the interface for an action an agent can invoke.
type Tool interface {
	// Name returns the tool's identifier.
	Name() string
	// Description describes what the tool does.
	Description() string
	// Run executes the tool with a string input and returns its string output.
	Run(context.Context, string) (string, error)
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL