Documentation ¶
Index ¶
- Constants
- Variables
- func NewAssistantMessage(messageStr string) lazyMessage
- func NewMessage(role string, messageStr string) lazyMessage
- func NewSystemMessage(messageStr string) lazyMessage
- func NewUserMessage(messageStr string) lazyMessage
- type BatchChatCompletionArgs
- type BatchChatCompletionBuilder
- type ChatCompletion
- type ChatCompletionError
- type ChatCompletionResult
- type FengChao
- func (f *FengChao) BatchChatCompletion(ctx context.Context, bccb *BatchChatCompletionBuilder) (map[*BatchChatCompletionArgs]*ChatCompletionResult, ...)
- func (f *FengChao) ChatCompletion(ctx context.Context, prompt Prompt, ...) (*ChatCompletionResult, error)
- func (f *FengChao) ChatCompletionStream(ctx context.Context, prompt Prompt, ...) (*JsonStreamReader[ChatCompletionResult], error)
- func (f *FengChao) ChatCompletionStreamSimple(ctx context.Context, prompt Prompt, ...) (iter.Seq[ChatCompletionResult], error)
- func (f *FengChao) GetAvailableModels() []Model
- func (f *FengChao) QuickCompletion(ctx context.Context, chatCompletionOption ...Option[ChatCompletion]) (*ChatCompletionResult, error)
- func (f *FengChao) SetDebug(debug bool) *FengChao
- type JsonStreamReader
- type Message
- type Model
- type Option
- func WithDoSample(doSample bool) Option[ChatCompletion]
- func WithIsSensitive(isSensitive bool) Option[ChatCompletion]
- func WithMaxTokens(maxTokens int) Option[ChatCompletion]
- func WithModel(model string) Option[ChatCompletion]
- func WithParams(variables any) Option[ChatCompletion]
- func WithPredefinedPrompts(predefinedPrompts string) Option[ChatCompletion]
- func WithQuery(query string) Option[ChatCompletion]
- func WithRequestID(requestID string) Option[ChatCompletion]
- func WithStop(stop []string) Option[ChatCompletion]
- func WithSystem(system string) Option[ChatCompletion]
- func WithTemperature(temperature float64) Option[ChatCompletion]
- func WithTimeout(timeout int) Option[ChatCompletion]
- func WithTopP(topP float64) Option[ChatCompletion]
- type Prompt
- type PromptTemplate
- type StreamAble
- type StreamReader
Constants ¶
const ( InvokeMode = "invoke" StreamMode = "stream" )
const ( // RoleUser 用户消息 RoleUser = "user" // RoleAssistant 机器人消息 RoleAssistant = "assistant" // RoleSystem 系统消息 RoleSystem = "system" )
消息的角色: user、assistant、system
const ( // StreamStartEvent start event 生成启动的事件 StreamStartEvent = "start" // StreamAddEvent add event 生成数据的事件 StreamAddEvent = "add" // StreamFinishEvent finish event 生成结束的事件 StreamFinishEvent = "stop" // StreamErrorEvent error event 生成错误的事件 StreamErrorEvent = "error" )
const BasicRequestTimeout int = 3
const BatchMaxSize = 5
BatchMaxSize 批量请求最大数量
const ExpiresTime = 1700
Variables ¶
var ( // StartEventPrefix start event 生成启动的事件的字节前缀 StartEventPrefix = []byte("event: start") // AddEventPrefix add event 生成数据的事件的字节前缀 AddEventPrefix = []byte("event: add") // FinishEventPrefix finish event 生成结束的事件的字节前缀 FinishEventPrefix = []byte("event: stop") // ErrorEventPrefix error event 生成错误的事件的字节前缀 ErrorEventPrefix = []byte("event: error") // DataPrefix data event 生成数据的事件的字节前缀 DataPrefix = []byte("data: ") )
event bytes prefix 用于判断事件数据行的类型
var DefaultChatCompletionOption = &ChatCompletion{ Model: "ERNIE-Bot-4", Stop: []string{}, MaxTokens: 2000, Timeout: 60, IsSensitive: false, }
DefaultChatCompletionOption 默认配置, 可以覆盖
Functions ¶
func NewAssistantMessage ¶ added in v0.1.2
func NewAssistantMessage(messageStr string) lazyMessage
NewAssistantMessage 生成机器人消息
func NewMessage ¶
NewMessage 生成消息(这个消息是预渲染的消息)
func NewSystemMessage ¶ added in v0.1.2
func NewSystemMessage(messageStr string) lazyMessage
NewSystemMessage 生成系统消息
func NewUserMessage ¶ added in v0.1.2
func NewUserMessage(messageStr string) lazyMessage
NewUserMessage 生成用户消息
Types ¶
type BatchChatCompletionArgs ¶ added in v0.1.1
type BatchChatCompletionArgs struct { Prompt Prompt Params []Option[ChatCompletion] }
BatchChatCompletionArgs 批量请求参数
type BatchChatCompletionBuilder ¶ added in v0.1.1
type BatchChatCompletionBuilder struct { Args []*BatchChatCompletionArgs // contains filtered or unexported fields }
BatchChatCompletionBuilder 批量请求创建器
func NewBatchChatCompletionBuilder ¶ added in v0.1.1
func NewBatchChatCompletionBuilder() *BatchChatCompletionBuilder
NewBatchChatCompletionBuilder 创建
func (*BatchChatCompletionBuilder) Add ¶ added in v0.1.1
func (bccb *BatchChatCompletionBuilder) Add(prompt Prompt, params ...Option[ChatCompletion]) (*BatchChatCompletionArgs, error)
Add 添加
type ChatCompletion ¶
type ChatCompletion struct { // RequestID 请求ID RequestID string `json:"request_id"` // Model 模型 Model string `json:"model"` // Temperature 模型参数 Temperature float64 `json:"temperature,omitempty"` // TopP 模型参数 TopP float64 `json:"top_p,omitempty"` // DoSample 是否开启采样 DoSample bool `json:"do_sample"` // IsSensitive 是否开启敏感词 IsSensitive bool `json:"is_sensitive"` // MaxTokens 最大长度 MaxTokens int `json:"max_tokens,omitempty"` // Stop 停用词 History []*Message `json:"history,omitempty"` // Query 问题 Query string `json:"query"` // System 系统消息 System string `json:"system"` // Mode 是否流式返回 Mode string `json:"mode,omitempty"` // PredefinedPrompts 预定义的prompt提示工程 PredefinedPrompts string `json:"prompt,omitempty"` // Stop 停用词 Stop []string `json:"-"` // Timeout 超时时间 Timeout int `json:"-"` // contains filtered or unexported fields }
func NewChatCompletion ¶ added in v1.0.0
func NewChatCompletion(helpers ...Option[ChatCompletion]) *ChatCompletion
NewChatCompletion 使用配置创建一个ChatCompletion参数
func (*ChatCompletion) Apply ¶
func (cc *ChatCompletion) Apply(helpers ...Option[ChatCompletion])
Apply 应用配置
func (*ChatCompletion) LoadPromptTemplates ¶
func (cc *ChatCompletion) LoadPromptTemplates(prompt Prompt) ([]*Message, error)
LoadPromptTemplates 渲染消息列表
func (*ChatCompletion) String ¶ added in v0.1.1
func (cc *ChatCompletion) String() string
String 转字符串
type ChatCompletionError ¶ added in v0.1.1
type ChatCompletionError struct {
Detail string `json:"detail"`
}
ChatCompletionError 聊天错误
func (*ChatCompletionError) String ¶ added in v0.1.1
func (cce *ChatCompletionError) String() string
String 聊天错误信息
type ChatCompletionResult ¶
type ChatCompletionResult struct { RequestID string `json:"request_id"` Object string `json:"object"` Created string `json:"created"` Choices []struct { Index int `json:"index"` Role string `json:"role"` FinishReason string `json:"finish_reason"` Message Message `json:"message"` } `json:"choices"` Usage struct { PromptTokens int `json:"prompt_tokens"` CompletionTokens int `json:"completion_tokens"` TotalTokens int `json:"total_tokens"` } `json:"usage"` Msg string `json:"msg"` Status int `json:"status"` History []*Message Error error }
ChatCompletionResult 聊天结果
func (*ChatCompletionResult) GetHistoryPrompts ¶
func (r *ChatCompletionResult) GetHistoryPrompts() *PromptTemplate
GetHistoryPrompts 获取历史消息(Prompt)
func (*ChatCompletionResult) HandleError ¶ added in v1.0.0
func (ccr *ChatCompletionResult) HandleError() error
func (*ChatCompletionResult) String ¶
func (r *ChatCompletionResult) String() string
String 获取结果的正文内容字符串
type FengChao ¶
type FengChao struct { // ApiKey fengchao api key ApiKey string // SecretKey fengchao secret key SecretKey string // BaseUrl api url BaseUrl string sync.Mutex // contains filtered or unexported fields }
FengChao 客户端配置
func (*FengChao) BatchChatCompletion ¶ added in v0.1.1
func (f *FengChao) BatchChatCompletion(ctx context.Context, bccb *BatchChatCompletionBuilder) (map[*BatchChatCompletionArgs]*ChatCompletionResult, map[*BatchChatCompletionArgs]error, bool)
BatchChatCompletion 批量请求
func (*FengChao) ChatCompletion ¶
func (f *FengChao) ChatCompletion(ctx context.Context, prompt Prompt, chatCompletionOption ...Option[ChatCompletion]) (*ChatCompletionResult, error)
ChatCompletion 聊天
func (*FengChao) ChatCompletionStream ¶ added in v0.1.1
func (f *FengChao) ChatCompletionStream(ctx context.Context, prompt Prompt, chatCompletionOption ...Option[ChatCompletion]) (*JsonStreamReader[ChatCompletionResult], error)
ChatCompletionStream 流式聊天
func (*FengChao) ChatCompletionStreamSimple ¶ added in v0.1.1
func (f *FengChao) ChatCompletionStreamSimple(ctx context.Context, prompt Prompt, chatCompletionOption ...Option[ChatCompletion]) (iter.Seq[ChatCompletionResult], error)
ChatCompletionStreamSimple 流式聊天
func (*FengChao) GetAvailableModels ¶
GetAvailableModels 获取可用模型
func (*FengChao) QuickCompletion ¶ added in v0.1.1
func (f *FengChao) QuickCompletion(ctx context.Context, chatCompletionOption ...Option[ChatCompletion]) (*ChatCompletionResult, error)
QuickCompletion 使用预定义prompt, 快速生成文本
type JsonStreamReader ¶ added in v0.1.1
type JsonStreamReader[T StreamAble] struct { // contains filtered or unexported fields }
JsonStreamReader Json流式数据读取器
func (*JsonStreamReader[T]) Close ¶ added in v0.1.1
func (j *JsonStreamReader[T]) Close() error
Close 关闭数据流
func (*JsonStreamReader[T]) Read ¶ added in v0.1.1
func (j *JsonStreamReader[T]) Read() (*T, bool, error)
Read 读取数据直到获得一个完整的数据包, 或者遇到错误或者遇到结束事件(包括EOF), 但一般情况不会遇到EOF 需要自定义处理数据流可以使用这个方法, 一般使用Stream方法, 可以更轻松的处理数据流
func (*JsonStreamReader[T]) Stream ¶ added in v0.1.1
func (j *JsonStreamReader[T]) Stream() iter.Seq[T]
Stream 返回一个生成器函数
type Message ¶
type Message struct { Role string `json:"role"` Content string `json:"content"` // contains filtered or unexported fields }
Message 消息
type Model ¶
type Model struct { ID string `json:"id"` OwnedBy string `json:"owned_by"` MaxInputToken int `json:"max_input_token"` MaxOutputToken int `json:"max_output_token"` InPrice float64 `json:"in_price"` OutPrice float64 `json:"out_price"` Unit string `json:"unit"` Modes []string `json:"mode"` Channel string `json:"channel"` Created string `json:"created"` }
Model 模型
type Option ¶
type Option[T any] func(option *T)
Option 配置函数
func WithIsSensitive ¶
func WithIsSensitive(isSensitive bool) Option[ChatCompletion]
WithIsSensitive 设置是否开启敏感词
func WithPredefinedPrompts ¶ added in v0.1.1
func WithPredefinedPrompts(predefinedPrompts string) Option[ChatCompletion]
WithPredefinedPrompts 设置预定义的prompt提示工程
func WithRequestID ¶
func WithRequestID(requestID string) Option[ChatCompletion]
WithRequestID 设置请求ID
func WithTemperature ¶
func WithTemperature(temperature float64) Option[ChatCompletion]
WithTemperature 设置模型参数
type Prompt ¶
type Prompt interface { // Render 渲染, 这个用来展示 Render(vairables map[string]interface{}) ([]byte, error) // RenderMessages 渲染消息列表, 对应的渲染方法是 Render, 提供给用户自定义使用 RenderMessages(vairables map[string]interface{}) ([]*Message, error) }
Prompt 接口, 暴露一个渲染的能力
type PromptTemplate ¶
PromptTemplate 模板
func NewPromptTemplate ¶
func NewPromptTemplate(p ...Prompt) *PromptTemplate
NewPromptTemplate 创建 PromptTemplate
func (*PromptTemplate) MarshalJSON ¶
func (m *PromptTemplate) MarshalJSON() ([]byte, error)
MarshalJSON 渲染
func (*PromptTemplate) Render ¶
func (m *PromptTemplate) Render(vairables map[string]interface{}) ([]byte, error)
Render 渲染 Prompt
func (*PromptTemplate) RenderMessages ¶
func (m *PromptTemplate) RenderMessages(vairables map[string]interface{}) ([]*Message, error)
RenderMessages 渲染消息列表
type StreamAble ¶ added in v0.1.1
type StreamAble interface { ChatCompletionResult }
StreamAble 可作为流式读取器元素的数据类型约束
type StreamReader ¶ added in v0.1.1
type StreamReader[T StreamAble] interface { Read() (T, error) }
StreamReader 流式数据读取器, 可用用来读取生成内容的数据包