package graph

v0.0.0-...-952c2e5
Published: Oct 1, 2024 License: MIT Imports: 4 Imported by: 0

Documentation

Constants

const END = "END"

END is a special constant used to represent the end node in the graph.
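
A minimal sketch of terminating a graph at END, assuming a *MessageGraph g and a hypothetical node named "respond"; registering END as a pass-through node mirrors the package example further below:

// Register END as a pass-through node, as the package example does, and
// route "respond" to it so execution stops after that node runs.
g.AddNode(graph.END, func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
	return state, nil
})
g.AddEdge("respond", graph.END)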

Variables

var (
	// ErrEntryPointNotSet is returned when the entry point of the graph is not set.
	ErrEntryPointNotSet = errors.New("entry point not set")

	// ErrNodeNotFound is returned when a node is not found in the graph.
	ErrNodeNotFound = errors.New("node not found")

	// ErrNoOutgoingEdge is returned when no outgoing edge is found for a node.
	ErrNoOutgoingEdge = errors.New("no outgoing edge found for node")
)
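
These sentinel errors can be detected with errors.Is. A minimal sketch, assuming only that Compile reports ErrEntryPointNotSet when no entry point has been set (as documented below); the other sentinels can be matched the same way wherever they surface:

package main

import (
	"errors"
	"fmt"

	"github.com/Delcin1/langgraphgo/graph"
)

func main() {
	g := graph.NewMessageGraph()
	// The entry point is deliberately not set, so Compile is expected to fail.

	if _, err := g.Compile(); err != nil {
		switch {
		case errors.Is(err, graph.ErrEntryPointNotSet):
			fmt.Println("set an entry point with SetEntryPoint before compiling")
		default:
			fmt.Println("compile failed:", err)
		}
	}
}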

Functions

This section is empty.

Types

type Edge

type Edge struct {
	// From is the name of the node from which the edge originates.
	From string

	// To is the name of the node to which the edge points.
	To string
}

Edge represents an edge in the message graph.

type MessageGraph

type MessageGraph struct {
	// contains filtered or unexported fields
}

MessageGraph represents a message graph.

Example
package main

import (
	"context"
	"fmt"

	"github.com/Delcin1/langgraphgo/graph"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	model, err := openai.New()
	if err != nil {
		panic(err)
	}

	g := graph.NewMessageGraph()

	g.AddNode("oracle", func(ctx context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		r, err := model.GenerateContent(ctx, state, llms.WithTemperature(0.0))
		if err != nil {
			return nil, err
		}
		return append(state,
			llms.TextParts(llms.ChatMessageTypeAI, r.Choices[0].Content),
		), nil
	})
	g.AddNode(graph.END, func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		return state, nil
	})

	g.AddEdge("oracle", graph.END)
	g.SetEntryPoint("oracle")

	runnable, err := g.Compile()
	if err != nil {
		panic(err)
	}

	ctx := context.Background()
	// Let's run it!
	res, err := runnable.Invoke(ctx, []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, "What is 1 + 1?"),
	})
	if err != nil {
		panic(err)
	}

	fmt.Println(res)

}
Output:

[{human [{What is 1 + 1?}]} {ai [{1 + 1 equals 2.}]}]

func NewMessageGraph

func NewMessageGraph() *MessageGraph

NewMessageGraph creates a new instance of MessageGraph.

func (*MessageGraph) AddConditionalEdge

func (g *MessageGraph) AddConditionalEdge(from string, condition func(ctx context.Context, state []llms.MessageContent) string)

AddConditionalEdge adds a conditional edge starting at the "from" node. The destination is chosen at runtime by the "condition" function, which returns the name of the next node based on the current state.
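
The package example above uses only static edges. Below is a hedged sketch of conditional routing under that reading of the signature (the condition's return value names the next node); the "classify", "math", and "chat" nodes and the digit-based routing rule are illustrative, not part of the package:

package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/Delcin1/langgraphgo/graph"
	"github.com/tmc/langchaingo/llms"
)

func main() {
	g := graph.NewMessageGraph()

	g.AddNode("classify", func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		// Pass the state through unchanged; the conditional edge below inspects it.
		return state, nil
	})
	g.AddNode("math", func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		return append(state, llms.TextParts(llms.ChatMessageTypeAI, "routed to math")), nil
	})
	g.AddNode("chat", func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		return append(state, llms.TextParts(llms.ChatMessageTypeAI, "routed to chat")), nil
	})
	g.AddNode(graph.END, func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
		return state, nil
	})

	// Illustrative router: messages containing digits go to "math",
	// everything else goes to "chat".
	g.AddConditionalEdge("classify", func(_ context.Context, state []llms.MessageContent) string {
		last := state[len(state)-1]
		if text, ok := last.Parts[0].(llms.TextContent); ok && strings.ContainsAny(text.Text, "0123456789") {
			return "math"
		}
		return "chat"
	})
	g.AddEdge("math", graph.END)
	g.AddEdge("chat", graph.END)
	g.SetEntryPoint("classify")

	runnable, err := g.Compile()
	if err != nil {
		panic(err)
	}

	res, err := runnable.Invoke(context.Background(), []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, "What is 1 + 1?"),
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(res)
}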

func (*MessageGraph) AddEdge

func (g *MessageGraph) AddEdge(from, to string)

AddEdge adds a new edge to the message graph between the "from" and "to" nodes.

func (*MessageGraph) AddNode

func (g *MessageGraph) AddNode(name string, fn func(ctx context.Context, state []llms.MessageContent) ([]llms.MessageContent, error))

AddNode adds a new node to the message graph with the given name and function.
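
Node functions receive the accumulated state and return the next state; they do not have to call a model. A small sketch, assuming a *MessageGraph g; the node name and trimming rule are hypothetical:

// Hypothetical state-transforming node: keep only the last four messages so
// that downstream nodes see a bounded conversation window.
g.AddNode("trim_history", func(_ context.Context, state []llms.MessageContent) ([]llms.MessageContent, error) {
	if len(state) > 4 {
		state = state[len(state)-4:]
	}
	return state, nil
})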

func (*MessageGraph) Compile

func (g *MessageGraph) Compile() (*Runnable, error)

Compile compiles the message graph and returns a Runnable instance. It returns an error if the entry point is not set.

func (*MessageGraph) SetEntryPoint

func (g *MessageGraph) SetEntryPoint(name string)

SetEntryPoint sets the entry point node name for the message graph.

type Node

type Node struct {
	// Name is the unique identifier for the node.
	Name string

	// Function is the function associated with the node.
	// It takes a context and a slice of MessageContent as input and returns a slice of MessageContent and an error.
	Function func(ctx context.Context, state []llms.MessageContent) ([]llms.MessageContent, error)
}

Node represents a node in the message graph.

type Runnable

type Runnable struct {
	// contains filtered or unexported fields
}

Runnable represents a compiled message graph that can be invoked.

func (*Runnable) Invoke

func (r *Runnable) Invoke(ctx context.Context, messages []llms.MessageContent) ([]llms.MessageContent, error)

Invoke executes the compiled message graph with the given input messages. It returns the resulting messages and an error if any occurs during the execution.
