pipeline

package
v0.0.4
Published: May 5, 2023 License: MIT Imports: 7 Imported by: 0

Documentation

Overview

Package pipeline provides a way to chain multiple LLM executions.
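
A minimal sketch of how the pieces fit together, assuming the package lives under a module such as github.com/henomis/lingoose (the import path is not shown on this page) and that the Llm, Decoder and Memory values are configured elsewhere:

package main

import (
	"github.com/henomis/lingoose/pipeline" // assumed import path
)

func main() {
	// Placeholder dependencies: a real program configures the engine, mode,
	// prompt, decoder and memory before building the tubes.
	var llm pipeline.Llm
	var decoder pipeline.Decoder
	var memory pipeline.Memory

	summarize := pipeline.NewTube("summarize", llm, decoder, memory)
	translate := pipeline.NewTube("translate", llm, decoder, memory)

	// New groups the tubes into a single chain; presumably each step's
	// decoded output becomes the next step's input.
	p := pipeline.New(summarize, translate)
	_ = p
}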

Index

Constants

This section is empty.

Variables

var (
	ErrDecoding       = errors.New("decoding input error")
	ErrInvalidLmmMode = errors.New("invalid LLM mode")
	ErrLLMExecution   = errors.New("llm execution error")
)
var (
	ErrSplitFunction = fmt.Errorf("split function error")
)

Functions

func New

func New(pipes ...Pipe) pipeline

func NewSplitter added in v0.0.3

func NewSplitter(
	name string,
	llm Llm,
	outputDecoder Decoder,
	memory Memory,
	splitterFn SplitterFn,
) *splitter
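
A sketch of constructing a splitter with a stub split function; the placeholder dependencies follow the same pattern as the overview example, the type names are hypothetical, and a more realistic SplitterFn appears further down:

package main

import (
	"github.com/henomis/lingoose/pipeline" // assumed import path
	"github.com/henomis/lingoose/types"    // assumed import path
)

func main() {
	var llm pipeline.Llm
	var decoder pipeline.Decoder
	var memory pipeline.Memory

	// splitterFn decides how one input becomes many; this stub keeps the
	// input unchanged as a single-element slice.
	splitterFn := func(input types.M) ([]types.M, error) {
		return []types.M{input}, nil
	}

	s := pipeline.NewSplitter("split", llm, decoder, memory, splitterFn)
	_ = s
}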

Types

type Decoder

type Decoder interface {
	Decode(input string) (types.M, error)
}
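
A minimal, hypothetical Decoder sketch that parses the raw LLM output as JSON, assuming types.M behaves like a map[string]interface{} (its import path is not shown on this page):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/henomis/lingoose/types" // assumed import path
)

// jsonDecoder turns the raw LLM output string into a types.M map.
type jsonDecoder struct{}

func (jsonDecoder) Decode(input string) (types.M, error) {
	var out types.M
	if err := json.Unmarshal([]byte(input), &out); err != nil {
		return nil, err
	}
	return out, nil
}

func main() {
	out, err := jsonDecoder{}.Decode(`{"answer": "42"}`)
	fmt.Println(out, err)
}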

type Llm

type Llm struct {
	LlmEngine LlmEngine
	LlmMode   LlmMode
	Prompt    Prompt
	Chat      *chat.Chat
}

type LlmEngine

type LlmEngine interface {
	Completion(ctx context.Context, prompt string) (string, error)
	Chat(ctx context.Context, chat *chat.Chat) (string, error)
}

type LlmMode

type LlmMode int
const (
	LlmModeChat LlmMode = iota
	LlmModeCompletion
)
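
A sketch wiring these pieces together: a hypothetical stub LlmEngine and an Llm value set to completion mode. The import paths are assumptions, and the Prompt field is left nil here (see the Prompt example below):

package main

import (
	"context"

	"github.com/henomis/lingoose/chat"     // assumed import path
	"github.com/henomis/lingoose/pipeline" // assumed import path
)

// echoEngine is a stub LlmEngine that returns the prompt unchanged,
// useful for wiring a pipeline together without a real model.
type echoEngine struct{}

func (echoEngine) Completion(_ context.Context, prompt string) (string, error) {
	return prompt, nil
}

func (echoEngine) Chat(_ context.Context, _ *chat.Chat) (string, error) {
	return "", nil
}

func main() {
	llm := pipeline.Llm{
		LlmEngine: echoEngine{},
		LlmMode:   pipeline.LlmModeCompletion, // use Completion rather than Chat
		// Prompt is left nil here; see the Prompt example below.
	}
	_ = llm
}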

type Memory

type Memory interface {
	Get(key string) interface{}
	Set(key string, value interface{}) error
	All() types.M
	Delete(key string) error
	Clear() error
}
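
A simple, hypothetical map-backed Memory sketch, again assuming types.M behaves like a map[string]interface{}:

package main

import (
	"fmt"

	"github.com/henomis/lingoose/types" // assumed import path
)

// mapMemory is an in-process Memory backed by a Go map.
// It is not safe for concurrent use.
type mapMemory struct {
	data types.M
}

func newMapMemory() *mapMemory {
	return &mapMemory{data: types.M{}}
}

func (m *mapMemory) Get(key string) interface{}              { return m.data[key] }
func (m *mapMemory) Set(key string, value interface{}) error { m.data[key] = value; return nil }
func (m *mapMemory) All() types.M                            { return m.data }
func (m *mapMemory) Delete(key string) error                 { delete(m.data, key); return nil }
func (m *mapMemory) Clear() error                            { m.data = types.M{}; return nil }

func main() {
	mem := newMapMemory()
	_ = mem.Set("step1", "output of step 1")
	fmt.Println(mem.Get("step1"))
}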

type Pipe added in v0.0.3

type Pipe interface {
	Run(ctx context.Context, input types.M) (types.M, error)
}
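
Tube satisfies Pipe (see Run below), and custom steps can too. A minimal, hypothetical sketch of a Pipe that passes the input through with an extra marker key, assuming types.M is a map type:

package main

import (
	"context"
	"fmt"

	"github.com/henomis/lingoose/types" // assumed import path
)

// stampPipe is a custom Pipe that returns the input unchanged except for
// a marker key, so later steps can see it ran.
type stampPipe struct{}

func (stampPipe) Run(_ context.Context, input types.M) (types.M, error) {
	input["stamped"] = true
	return input, nil
}

func main() {
	out, _ := stampPipe{}.Run(context.Background(), types.M{"query": "hello"})
	fmt.Println(out)
}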

type Prompt

type Prompt interface {
	String() string
	Format(input types.M) error
}
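
A hypothetical Prompt sketch backed by text/template, assuming Format is meant to render the template against the step input and String to return the rendered text (an interpretation based on the Run description below):

package main

import (
	"bytes"
	"fmt"
	"text/template"

	"github.com/henomis/lingoose/types" // assumed import path
)

// templatePrompt renders a text/template against the step input.
type templatePrompt struct {
	tmpl     *template.Template
	rendered string
}

func newTemplatePrompt(text string) *templatePrompt {
	return &templatePrompt{tmpl: template.Must(template.New("prompt").Parse(text))}
}

func (p *templatePrompt) Format(input types.M) error {
	var buf bytes.Buffer
	if err := p.tmpl.Execute(&buf, input); err != nil {
		return err
	}
	p.rendered = buf.String()
	return nil
}

func (p *templatePrompt) String() string { return p.rendered }

func main() {
	p := newTemplatePrompt("Summarize the following text: {{.text}}")
	_ = p.Format(types.M{"text": "Go is a statically typed language."})
	fmt.Println(p.String())
}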

type SplitterFn added in v0.0.3

type SplitterFn func(input types.M) ([]types.M, error)
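
A SplitterFn sketch that fans a slice field out into one input per element; the "documents" and "document" keys are purely illustrative. A function with this signature can be passed as the splitterFn argument of NewSplitter:

package main

import (
	"fmt"

	"github.com/henomis/lingoose/types" // assumed import path
)

// splitByDocument produces one input map per entry in the "documents" slice,
// copying the remaining keys of the original input into each.
func splitByDocument(input types.M) ([]types.M, error) {
	docs, ok := input["documents"].([]string)
	if !ok {
		return nil, fmt.Errorf("split function error: missing documents")
	}
	outputs := make([]types.M, 0, len(docs))
	for _, doc := range docs {
		out := types.M{"document": doc}
		for k, v := range input {
			if k != "documents" {
				out[k] = v
			}
		}
		outputs = append(outputs, out)
	}
	return outputs, nil
}

func main() {
	outs, _ := splitByDocument(types.M{
		"documents": []string{"doc one", "doc two"},
		"lang":      "en",
	})
	fmt.Println(outs)
}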

type Tube added in v0.0.3

type Tube struct {
	// contains filtered or unexported fields
}

func NewTube added in v0.0.3

func NewTube(
	name string,
	llm Llm,
	outputDecoder Decoder,
	memory Memory,
) *Tube

func (*Tube) Run added in v0.0.3

func (s *Tube) Run(ctx context.Context, input types.M) (types.M, error)

Run executes the step and returns the output. The prompt is formatted with the input, and the formatted prompt is used as input for the LLM. If the step has a memory, the output is stored in the memory.
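
A hedged end-to-end sketch of a single Tube.Run call with hypothetical stub implementations of the engine, decoder and prompt. The import paths are assumed, and passing a nil Memory relies on the statement above that memory is optional:

package main

import (
	"context"
	"fmt"

	"github.com/henomis/lingoose/chat"     // assumed import path
	"github.com/henomis/lingoose/pipeline" // assumed import path
	"github.com/henomis/lingoose/types"    // assumed import path
)

// fixedEngine is a stub LlmEngine that always answers with the same string.
type fixedEngine struct{}

func (fixedEngine) Completion(_ context.Context, _ string) (string, error) {
	return "a short summary", nil
}
func (fixedEngine) Chat(_ context.Context, _ *chat.Chat) (string, error) { return "", nil }

// rawDecoder wraps the raw LLM output into a map under the "output" key.
type rawDecoder struct{}

func (rawDecoder) Decode(input string) (types.M, error) {
	return types.M{"output": input}, nil
}

// staticPrompt is a minimal Prompt whose Format is a no-op.
type staticPrompt struct{ text string }

func (p *staticPrompt) Format(_ types.M) error { return nil }
func (p *staticPrompt) String() string         { return p.text }

func main() {
	llm := pipeline.Llm{
		LlmEngine: fixedEngine{},
		LlmMode:   pipeline.LlmModeCompletion,
		Prompt:    &staticPrompt{text: "Summarize the input."},
	}

	// nil memory: the step simply skips storing its output.
	tube := pipeline.NewTube("summarize", llm, rawDecoder{}, nil)

	out, err := tube.Run(context.Background(), types.M{"text": "Go is expressive, concise and efficient."})
	fmt.Println(out, err)
}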
