Documentation
¶
Index ¶
- Constants
- func ObjectSchema(kv ...string) *openapi3.Schema
- type AuthResponse
- type Call
- type CallContext
- type CallFrame
- type Document
- type EventType
- type Frame
- type GPTScript
- func (g *GPTScript) Close()
- func (g *GPTScript) Confirm(ctx context.Context, resp AuthResponse) error
- func (g *GPTScript) Evaluate(ctx context.Context, opts Options, tools ...ToolDef) (*Run, error)
- func (g *GPTScript) Fmt(ctx context.Context, nodes []Node) (string, error)
- func (g *GPTScript) ListModels(ctx context.Context) ([]string, error)
- func (g *GPTScript) ListTools(ctx context.Context) (string, error)
- func (g *GPTScript) Parse(ctx context.Context, fileName string) ([]Node, error)
- func (g *GPTScript) ParseTool(ctx context.Context, toolDef string) ([]Node, error)
- func (g *GPTScript) PromptResponse(ctx context.Context, resp PromptResponse) error
- func (g *GPTScript) Run(ctx context.Context, toolPath string, opts Options) (*Run, error)
- func (g *GPTScript) Version(ctx context.Context) (string, error)
- type GlobalOptions
- type InputContext
- type Node
- type Options
- type Output
- type Program
- type PromptFrame
- type PromptResponse
- type Repo
- type Run
- func (r *Run) Bytes() ([]byte, error)
- func (r *Run) Calls() map[string]CallFrame
- func (r *Run) ChatState() string
- func (r *Run) Close() error
- func (r *Run) Err() error
- func (r *Run) ErrorOutput() string
- func (r *Run) Events() <-chan Frame
- func (r *Run) NextChat(ctx context.Context, input string) (*Run, error)
- func (r *Run) ParentCallFrame() (CallFrame, bool)
- func (r *Run) Program() *Program
- func (r *Run) RawOutput() (map[string]any, error)
- func (r *Run) RespondingTool() Tool
- func (r *Run) State() RunState
- func (r *Run) Text() (string, error)
- type RunFrame
- type RunState
- type TextNode
- type Tool
- type ToolCategory
- type ToolDef
- type ToolNode
- type ToolReference
- type ToolSet
- type ToolSource
- type Usage
Constants ¶
const ( ProviderToolCategory ToolCategory = "provider" CredentialToolCategory ToolCategory = "credential" ContextToolCategory ToolCategory = "context" InputToolCategory ToolCategory = "input" OutputToolCategory ToolCategory = "output" NoCategory ToolCategory = "" EventTypeRunStart EventType = "runStart" EventTypeCallStart EventType = "callStart" EventTypeCallContinue EventType = "callContinue" EventTypeCallSubCalls EventType = "callSubCalls" EventTypeCallProgress EventType = "callProgress" EventTypeChat EventType = "callChat" EventTypeCallConfirm EventType = "callConfirm" EventTypeCallFinish EventType = "callFinish" EventTypeRunFinish EventType = "runFinish" EventTypePrompt EventType = "prompt" )
Variables ¶
This section is empty.
Functions ¶
func ObjectSchema ¶
Types ¶
type AuthResponse ¶
type CallContext ¶
type CallContext struct { ID string `json:"id"` Tool Tool `json:"tool"` AgentGroup []ToolReference `json:"agentGroup,omitempty"` CurrentAgent ToolReference `json:"currentAgent,omitempty"` DisplayText string `json:"displayText"` InputContext []InputContext `json:"inputContext"` ToolCategory ToolCategory `json:"toolCategory,omitempty"` ToolName string `json:"toolName,omitempty"` ParentID string `json:"parentID,omitempty"` }
type CallFrame ¶
type CallFrame struct { CallContext `json:",inline"` Type EventType `json:"type"` Start time.Time `json:"start"` End time.Time `json:"end"` Input string `json:"input"` Output []Output `json:"output"` Usage Usage `json:"usage"` LLMRequest any `json:"llmRequest"` LLMResponse any `json:"llmResponse"` }
type Frame ¶
type Frame struct { Run *RunFrame `json:"run,omitempty"` Call *CallFrame `json:"call,omitempty"` Prompt *PromptFrame `json:"prompt,omitempty"` }
type GPTScript ¶
type GPTScript struct {
// contains filtered or unexported fields
}
func NewGPTScript ¶
func NewGPTScript(opts GlobalOptions) (*GPTScript, error)
func (*GPTScript) Confirm ¶
func (g *GPTScript) Confirm(ctx context.Context, resp AuthResponse) error
func (*GPTScript) ListModels ¶
ListModels will list all the available models.
func (*GPTScript) PromptResponse ¶
func (g *GPTScript) PromptResponse(ctx context.Context, resp PromptResponse) error
type GlobalOptions ¶
type GlobalOptions struct { OpenAIAPIKey string `json:"APIKey"` OpenAIBaseURL string `json:"BaseURL"` DefaultModel string `json:"DefaultModel"` Env []string `json:"env"` }
GlobalOptions allows specification of settings that are used for every call made. These options can be overridden by the corresponding Options.
type InputContext ¶
type Options ¶
type Options struct { GlobalOptions `json:",inline"` Confirm bool `json:"confirm"` Input string `json:"input"` DisableCache bool `json:"disableCache"` CacheDir string `json:"cacheDir"` SubTool string `json:"subTool"` Workspace string `json:"workspace"` ChatState string `json:"chatState"` IncludeEvents bool `json:"includeEvents"` Prompt bool `json:"prompt"` CredentialOverrides []string `json:"credentialOverrides"` Location string `json:"location"` ForceSequential bool `json:"forceSequential"` }
Options represents options for the gptscript tool or file.
type PromptFrame ¶
type PromptFrame struct { ID string `json:"id,omitempty"` Type EventType `json:"type,omitempty"` Time time.Time `json:"time,omitempty"` Message string `json:"message,omitempty"` Fields []string `json:"fields,omitempty"` Sensitive bool `json:"sensitive,omitempty"` }
func (*PromptFrame) String ¶
func (p *PromptFrame) String() string
type PromptResponse ¶
type Run ¶
type Run struct {
// contains filtered or unexported fields
}
func (*Run) Bytes ¶
Bytes returns the output of the gptscript in bytes. It blocks until the output is ready.
func (*Run) ErrorOutput ¶
ErrorOutput returns the stderr output of the gptscript. It should only be called after Bytes or Text has returned an error.
func (*Run) Events ¶
Events returns a channel that streams the gptscript events as they occur as Frames.
func (*Run) NextChat ¶
NextChat passes the given input to the chat and creates the next run. The new Run is returned.
func (*Run) ParentCallFrame ¶
ParentCallFrame returns the CallFrame for the top-level or "parent" call. The boolean indicates whether there is a parent CallFrame.
func (*Run) RawOutput ¶
RawOutput returns the raw output of the gptscript. Most users should use Text or Bytes instead.
func (*Run) RespondingTool ¶
RespondingTool returns the tool that produced the output.
type RunFrame ¶
type RunFrame struct { ID string `json:"id"` Program Program `json:"program"` Input string `json:"input"` Output string `json:"output"` Error string `json:"error"` Start time.Time `json:"start"` End time.Time `json:"end"` State RunState `json:"state"` ChatState any `json:"chatState"` Type EventType `json:"type"` }
type Tool ¶
type Tool struct { ToolDef `json:",inline"` ID string `json:"id,omitempty"` Arguments *openapi3.Schema `json:"arguments,omitempty"` ToolMapping map[string][]ToolReference `json:"toolMapping,omitempty"` LocalTools map[string]string `json:"localTools,omitempty"` Source ToolSource `json:"source,omitempty"` WorkingDir string `json:"workingDir,omitempty"` }
type ToolCategory ¶
type ToolCategory string
type ToolDef ¶
type ToolDef struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` MaxTokens int `json:"maxTokens,omitempty"` ModelName string `json:"modelName,omitempty"` ModelProvider bool `json:"modelProvider,omitempty"` JSONResponse bool `json:"jsonResponse,omitempty"` Chat bool `json:"chat,omitempty"` Temperature *float32 `json:"temperature,omitempty"` Cache *bool `json:"cache,omitempty"` InternalPrompt *bool `json:"internalPrompt"` Arguments *openapi3.Schema `json:"arguments,omitempty"` Tools []string `json:"tools,omitempty"` GlobalTools []string `json:"globalTools,omitempty"` GlobalModelName string `json:"globalModelName,omitempty"` Context []string `json:"context,omitempty"` ExportContext []string `json:"exportContext,omitempty"` Export []string `json:"export,omitempty"` Agents []string `json:"agents,omitempty"` Credentials []string `json:"credentials,omitempty"` Instructions string `json:"instructions,omitempty"` }
ToolDef struct represents a tool with various configurations.