Documentation ¶
Index ¶
- Constants
- Variables
- func NewMessage(role string, messageStr string) lazyMessage
- type ChatCompletion
- type ChatCompletionResult
- type FengChao
- type Message
- type Model
- type Option
- func WithDoSample(doSample bool) Option[ChatCompletion]
- func WithHistory(history []*Message) Option[ChatCompletion]
- func WithIsSensitive(isSensitive bool) Option[ChatCompletion]
- func WithMaxTokens(maxTokens int) Option[ChatCompletion]
- func WithModel(model string) Option[ChatCompletion]
- func WithParams(variables any) Option[ChatCompletion]
- func WithQuery(query string) Option[ChatCompletion]
- func WithRequestID(requestID string) Option[ChatCompletion]
- func WithStop(stop []string) Option[ChatCompletion]
- func WithSystem(system string) Option[ChatCompletion]
- func WithTemperature(temperature float64) Option[ChatCompletion]
- func WithTimeout(timeout int) Option[ChatCompletion]
- func WithTopP(topP float64) Option[ChatCompletion]
- type Prompt
- type PromptTemplate
Constants ¶
const (
    RoleUser      = "user"
    RoleAssistant = "assistant"
    RoleSystem    = "system"
)
const BasicRequestTimeout int = 3
const ExpiresTime = 1700
Variables ¶
var DefaultChatCompletionOption = &ChatCompletion{
    Model:       "ERNIE-Bot-4",
    Stop:        []string{},
    MaxTokens:   2000,
    Timeout:     60,
    IsSensitive: false,
}
DefaultChatCompletionOption is the default configuration; it can be overridden.
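When a call supplies no options, these defaults apply. A minimal sketch of adjusting them process-wide at startup; the import path below is a placeholder, not the package's real module path:

package main

import fengchao "github.com/example/fengchao" // placeholder import path

func main() {
    // Adjust the package-wide defaults once at startup; options passed to a
    // specific ChatCompletion call still override these values.
    fengchao.DefaultChatCompletionOption.Model = "ERNIE-Bot"
    fengchao.DefaultChatCompletionOption.MaxTokens = 1024
}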
Functions ¶
func NewMessage ¶
func NewMessage(role string, messageStr string) lazyMessage
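A short sketch of building prompt messages with NewMessage and the Role* constants. That the returned value satisfies Prompt is inferred from NewPromptTemplate's signature below, not stated on this page, and the import path is a placeholder:

package main

import fengchao "github.com/example/fengchao" // placeholder import path

func main() {
    // NewMessage pairs a role constant with message text.
    system := fengchao.NewMessage(fengchao.RoleSystem, "You are a helpful assistant.")
    user := fengchao.NewMessage(fengchao.RoleUser, "Summarize the project in one sentence.")

    // Combine the messages into a PromptTemplate (see NewPromptTemplate below);
    // passing them here assumes the NewMessage return value implements Prompt.
    prompt := fengchao.NewPromptTemplate(system, user)
    _ = prompt
}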
Types ¶
type ChatCompletion ¶
type ChatCompletion struct {
    // RequestID is the request ID
    RequestID string `json:"request_id"`
    // Model is the model name
    Model string `json:"model"`
    // Temperature is a sampling parameter
    Temperature float64 `json:"temperature,omitempty"`
    // TopP is a sampling parameter
    TopP float64 `json:"top_p,omitempty"`
    // DoSample enables sampling
    DoSample bool `json:"do_sample"`
    // IsSensitive enables sensitive-word filtering
    IsSensitive bool `json:"is_sensitive"`
    // MaxTokens is the maximum length
    MaxTokens int `json:"max_tokens,omitempty"`
    // History is the conversation history
    History []*Message `json:"history"`
    // Query is the user question
    Query string `json:"query"`
    // System is the system message
    System string `json:"system"`
    // Stop is the list of stop words
    Stop []string
    // Timeout is the request timeout
    Timeout int
    // contains filtered or unexported fields
}
func (*ChatCompletion) Apply ¶
func (option *ChatCompletion) Apply(helpers ...Option[ChatCompletion])
Apply applies the given options to the receiver.
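A minimal sketch of Apply: each Option is run in order against the receiver, so later options override earlier ones (placeholder import path):

package main

import (
    "fmt"

    fengchao "github.com/example/fengchao" // placeholder import path
)

func main() {
    opt := &fengchao.ChatCompletion{}
    // Each Option is a func(*ChatCompletion); Apply runs them in order
    // against the receiver, mutating it in place.
    opt.Apply(
        fengchao.WithModel("ERNIE-Bot-4"),
        fengchao.WithTemperature(0.7),
        fengchao.WithStop([]string{"\n\n"}),
    )
    fmt.Println(opt.Model, opt.Temperature, opt.Stop)
}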
func (*ChatCompletion) LoadPromptTemplates ¶
func (option *ChatCompletion) LoadPromptTemplates(prompt Prompt) ([]*Message, error)
LoadPromptTemplates renders the given Prompt into a message list.
type ChatCompletionResult ¶
type ChatCompletionResult struct {
    RequestID string `json:"request_id"`
    Object    string `json:"object"`
    Created   string `json:"created"`
    Choices   []struct {
        Index        int     `json:"index"`
        Role         string  `json:"role"`
        FinishReason string  `json:"finish_reason"`
        Message      Message `json:"message"`
    } `json:"choices"`
    Usage struct {
        PromptTokens     int `json:"prompt_tokens"`
        CompletionTokens int `json:"completion_tokens"`
        TotalTokens      int `json:"total_tokens"`
    } `json:"usage"`
    Msg     string `json:"msg"`
    Status  int    `json:"status"`
    History []*Message
}
func (*ChatCompletionResult) GetHistoryPrompts ¶
func (r *ChatCompletionResult) GetHistoryPrompts() *PromptTemplate
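A hedged sketch of a follow-up turn that reuses a previous result's history as a prompt; composing it with a new user message this way is an assumption made for illustration, not something this page states (placeholder import path):

package fengchaoexample

import (
    "context"

    fengchao "github.com/example/fengchao" // placeholder import path
)

// continueConversation asks a follow-up question on top of a previous turn.
// Feeding GetHistoryPrompts back into the next prompt is an illustrative
// assumption; verify against the package's own examples.
func continueConversation(ctx context.Context, client *fengchao.FengChao, prev *fengchao.ChatCompletionResult) (*fengchao.ChatCompletionResult, error) {
    prompt := fengchao.NewPromptTemplate(
        prev.GetHistoryPrompts(),
        fengchao.NewMessage(fengchao.RoleUser, "Please continue."),
    )
    return client.ChatCompletion(ctx, prompt)
}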
func (*ChatCompletionResult) String ¶
func (r *ChatCompletionResult) String() string
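A sketch of inspecting a result's choices and token usage; only fields shown in the struct above are used, and the import path is a placeholder:

package fengchaoexample

import (
    "fmt"

    fengchao "github.com/example/fengchao" // placeholder import path
)

// PrintResult prints the fields a caller typically inspects on a result.
func PrintResult(res *fengchao.ChatCompletionResult) {
    for _, choice := range res.Choices {
        fmt.Printf("choice %d finished with %q: %+v\n",
            choice.Index, choice.FinishReason, choice.Message)
    }
    fmt.Printf("tokens: prompt=%d completion=%d total=%d\n",
        res.Usage.PromptTokens, res.Usage.CompletionTokens, res.Usage.TotalTokens)
    // String returns a printable form of the whole result.
    fmt.Println(res.String())
}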
type FengChao ¶
type FengChao struct {
    // ApiKey is the fengchao API key
    ApiKey string
    // SecretKey is the fengchao secret key
    SecretKey string
    // BaseUrl is the API base URL
    BaseUrl string

    sync.Mutex
    // contains filtered or unexported fields
}
FengChao is the API client configuration.
func (*FengChao) ChatCompletion ¶
func (f *FengChao) ChatCompletion(ctx context.Context, prompt Prompt, chatCompletionOption ...Option[ChatCompletion]) (*ChatCompletionResult, error)
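A minimal end-to-end sketch. The import path and endpoint are placeholders, the client is built directly from FengChao's exported fields (this page shows no constructor), and treating NewMessage values as Prompts is inferred from NewPromptTemplate's signature:

package main

import (
    "context"
    "fmt"
    "log"
    "time"

    fengchao "github.com/example/fengchao" // placeholder import path
)

func main() {
    // Client configuration via the exported fields; whether the zero values of
    // the unexported fields are usable without further setup is an assumption.
    client := &fengchao.FengChao{
        ApiKey:    "your-api-key",
        SecretKey: "your-secret-key",
        BaseUrl:   "https://example.com", // placeholder endpoint
    }

    prompt := fengchao.NewPromptTemplate(
        fengchao.NewMessage(fengchao.RoleSystem, "You are a helpful assistant."),
        fengchao.NewMessage(fengchao.RoleUser, "Introduce yourself briefly."),
    )

    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()

    res, err := client.ChatCompletion(ctx, prompt,
        fengchao.WithModel("ERNIE-Bot-4"),
        fengchao.WithTemperature(0.7),
        fengchao.WithMaxTokens(1024),
    )
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(res.String())
}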
func (*FengChao) GetAvailableModels ¶
type Message ¶
type Model ¶
type Model struct {
    ID             string   `json:"id"`
    OwnedBy        string   `json:"owned_by"`
    MaxInputToken  int      `json:"max_input_token"`
    MaxOutputToken int      `json:"max_output_token"`
    InPrice        float64  `json:"in_price"`
    OutPrice       float64  `json:"out_price"`
    Unit           string   `json:"unit"`
    Modes          []string `json:"mode"`
    Channel        string   `json:"channel"`
    Created        string   `json:"created"`
}
type Option ¶
type Option[T any] func(option *T)
Option is a functional configuration helper.
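Because Option is a plain function type over the target struct, callers can define their own options alongside the With* helpers; a sketch (placeholder import path):

package main

import (
    "fmt"

    fengchao "github.com/example/fengchao" // placeholder import path
)

// withDeterministic is a custom Option that sets several exported fields at
// once; it has the same shape as the package's With* helpers.
func withDeterministic() fengchao.Option[fengchao.ChatCompletion] {
    return func(option *fengchao.ChatCompletion) {
        option.DoSample = false
        option.Temperature = 0
    }
}

func main() {
    opt := &fengchao.ChatCompletion{}
    opt.Apply(withDeterministic())
    fmt.Println(opt.DoSample, opt.Temperature)
}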
func WithIsSensitive ¶
func WithIsSensitive(isSensitive bool) Option[ChatCompletion]
WithIsSensitive sets whether sensitive-word filtering is enabled.
func WithRequestID ¶
func WithRequestID(requestID string) Option[ChatCompletion]
func WithTemperature ¶
func WithTemperature(temperature float64) Option[ChatCompletion]
WithTemperature sets the sampling temperature.
type Prompt ¶
type Prompt interface {
    // Render renders the prompt; used for display.
    Render(vairables map[string]interface{}) ([]byte, error)
    // RenderMessages renders the message list; its display counterpart is
    // Render. This is exposed for custom use.
    RenderMessages(vairables map[string]interface{}) ([]*Message, error)
}
Prompt is an interface that exposes rendering capability.
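Any type implementing these two methods can be used wherever a Prompt is accepted. A sketch of a decorator that wraps another Prompt; the import path is a placeholder and the parameter name vairables mirrors the interface as published:

package main

import (
    "bytes"
    "fmt"

    fengchao "github.com/example/fengchao" // placeholder import path
)

// upperPrompt decorates another Prompt and upper-cases its rendered bytes,
// while delegating message rendering unchanged.
type upperPrompt struct {
    inner fengchao.Prompt
}

func (u upperPrompt) Render(vairables map[string]interface{}) ([]byte, error) {
    b, err := u.inner.Render(vairables)
    if err != nil {
        return nil, err
    }
    return bytes.ToUpper(b), nil
}

func (u upperPrompt) RenderMessages(vairables map[string]interface{}) ([]*fengchao.Message, error) {
    return u.inner.RenderMessages(vairables)
}

func main() {
    inner := fengchao.NewPromptTemplate(
        fengchao.NewMessage(fengchao.RoleUser, "hello"), // assumes NewMessage values satisfy Prompt
    )
    var p fengchao.Prompt = upperPrompt{inner: inner}
    out, err := p.Render(map[string]interface{}{})
    fmt.Println(string(out), err)
}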
type PromptTemplate ¶
PromptTemplate is a prompt template.
func NewPromptTemplate ¶
func NewPromptTemplate(p ...Prompt) *PromptTemplate
NewPromptTemplate creates a PromptTemplate from the given prompts.
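A sketch of composing several prompts into a template and rendering the message list. How variables are substituted into the message text is not documented on this page, so an empty variables map is passed; the import path is a placeholder:

package main

import (
    "fmt"
    "log"

    fengchao "github.com/example/fengchao" // placeholder import path
)

func main() {
    tpl := fengchao.NewPromptTemplate(
        fengchao.NewMessage(fengchao.RoleSystem, "You are a translation assistant."),
        fengchao.NewMessage(fengchao.RoleUser, "Translate the following text into English."),
    )

    // RenderMessages produces the []*Message form; Render produces a byte
    // representation of the same content for display.
    msgs, err := tpl.RenderMessages(map[string]interface{}{})
    if err != nil {
        log.Fatal(err)
    }
    for _, m := range msgs {
        fmt.Printf("%+v\n", m)
    }
}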
func (*PromptTemplate) MarshalJSON ¶
func (m *PromptTemplate) MarshalJSON() ([]byte, error)
MarshalJSON renders the template to JSON.
func (*PromptTemplate) Render ¶
func (m *PromptTemplate) Render(vairables map[string]interface{}) ([]byte, error)
Render renders the Prompt.
func (*PromptTemplate) RenderMessages ¶
func (m *PromptTemplate) RenderMessages(vairables map[string]interface{}) ([]*Message, error)
RenderMessages renders the message list.