zhipuai

package
v0.1.0
Published: Dec 29, 2023 License: Apache-2.0 Imports: 19 Imported by: 1

Documentation

Overview

NOTE: Token generation references zhipuai's Python SDK: utils/jwt_token.py

Index

Constants

const (
	ZhipuaiModelAPIURL         = "https://open.bigmodel.cn/api/paas/v3/model-api"
	ZhipuaiModelDefaultTimeout = 300 * time.Second
	RetryLimit                 = 3
)
const (
	APITokenTTLSeconds = 3 * 60
	// FIXME: implement a TTL cache
	CacheTTLSeconds = (APITokenTTLSeconds - 30)
)
const (
	CodeConcurrencyHigh = 1302 // Your current concurrency for this API is too high; reduce it or contact support to raise the limit
	CodefrequencyHigh   = 1303 // Your current request frequency for this API is too high; reduce it or contact support to raise the limit
	CodeTimesHigh       = 1305 // Too many requests to this API right now; retry later
)

Variables

var (
	ErrEmptyResponse = errors.New("no response")
	ErrEmptyPrompt   = errors.New("empty prompt")
)

Functions

func BuildAPIURL

func BuildAPIURL(model string, method Method) string

func GenerateToken

func GenerateToken(apikey string, expSeconds int64) (string, error)
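
A minimal sketch of pairing these two helpers. The import path and the model name "chatglm_turbo" are assumptions, and the "id.secret" API key format follows the referenced jwt_token.py:

package main

import (
	"fmt"
	"log"

	// NOTE: assumed import path; adjust to wherever this package lives in your module.
	"github.com/kubeagi/arcadia/pkg/llms/zhipuai"
)

func main() {
	// GenerateToken signs a short-lived JWT from the "id.secret" API key.
	token, err := zhipuai.GenerateToken("your-id.your-secret", zhipuai.APITokenTTLSeconds)
	if err != nil {
		log.Fatal(err)
	}

	// BuildAPIURL joins the base model API URL, the model name, and the invoke method.
	url := zhipuai.BuildAPIURL("chatglm_turbo", zhipuai.ZhiPuAIInvoke)
	fmt.Println(url, token != "")
}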

func Stream

func Stream(apiURL, token string, params ModelParams, timeout time.Duration, handler func(*sse.Event)) error

func ValidateModelParams

func ValidateModelParams(params ModelParams) error

Types

type Choice

type Choice struct {
	Content string `json:"content"`
	Role    string `json:"role"`
}

type Data

type Data struct {
	RequestID  string `json:"request_id,omitempty"`
	TaskID     string `json:"task_id,omitempty"`
	TaskStatus string `json:"task_status,omitempty"`
	Usage      Usage  `json:"usage,omitempty"`

	Choices []Choice `json:"choices,omitempty"`
}

type EmbeddingData

type EmbeddingData struct {
	RequestID  string `json:"request_id,omitempty"`
	TaskID     string `json:"task_id,omitempty"`
	TaskStatus string `json:"task_status,omitempty"`
	Usage      Usage  `json:"usage,omitempty"`

	Embedding []float32 `json:"embedding,omitempty"` // The vectorized text, 1024 dimensions.
}

type EmbeddingResponse

type EmbeddingResponse struct {
	Code    int            `json:"code"`
	Data    *EmbeddingData `json:"data"`
	Msg     string         `json:"msg"`
	Success bool           `json:"success"`
}

func EmbeddingPost

func EmbeddingPost(apiURL, token string, text EmbeddingText, timeout time.Duration) (*EmbeddingResponse, error)

EmbeddingPost posts an embedding request to the model. FIXME: should this be made an interface?
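
A rough sketch of calling it directly, reusing the imports from the sketch above; the embedding model name "text_embedding" is an assumption, and the higher-level ZhiPuAI.Embedding method below is usually more convenient:

func embedOnce(apiKey, text string) (*zhipuai.EmbeddingResponse, error) {
	token, err := zhipuai.GenerateToken(apiKey, zhipuai.APITokenTTLSeconds)
	if err != nil {
		return nil, err
	}
	url := zhipuai.BuildAPIURL("text_embedding", zhipuai.ZhiPuAIInvoke)
	// Post the prompt and wait up to the default timeout for the embedding.
	return zhipuai.EmbeddingPost(url, token, zhipuai.EmbeddingText{Prompt: text}, zhipuai.ZhipuaiModelDefaultTimeout)
}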

func (*EmbeddingResponse) Bytes

func (embeddingResp *EmbeddingResponse) Bytes() []byte

func (*EmbeddingResponse) String

func (embeddingResp *EmbeddingResponse) String() string

func (*EmbeddingResponse) Type

func (embeddingResp *EmbeddingResponse) Type() llms.LLMType

func (*EmbeddingResponse) Unmarshall

func (embeddingResp *EmbeddingResponse) Unmarshall(bytes []byte) error

type EmbeddingText

type EmbeddingText struct {
	Prompt    string `json:"prompt,omitempty"`
	RequestID string `json:"request_id,omitempty"`
}

type Method

type Method string
const (
	// POST
	ZhiPuAIInvoke      Method = "invoke"
	ZhiPuAIAsyncInvoke Method = "async-invoke"
	ZhiPuAISSEInvoke   Method = "sse-invoke"
	// GET
	ZhiPuAIAsyncGet Method = "async-get"
)

type ModelParams

type ModelParams struct {
	// Method used for this prompt call
	Method Method `json:"method,omitempty"`

	// Model used for this prompt call
	Model string `json:"model,omitempty"`

	// Temperature is a float in zhipuai
	Temperature float32 `json:"temperature,omitempty"`
	// TopP is a float in zhipuai
	TopP float32 `json:"top_p,omitempty"`
	// Contents
	Prompt []Prompt `json:"prompt"`

	// TaskID is used for getting result of AsyncInvoke
	TaskID string `json:"task_id,omitempty"`

	// Incremental is only used for SSE invoke
	Incremental bool `json:"incremental,omitempty"`
}

+kubebuilder:object:generate=true ModelParams defines the params of a ZhiPuAI prompt call

func DefaultModelParams

func DefaultModelParams() ModelParams

func MergeParams

func MergeParams(a, b ModelParams) ModelParams

MergeParams merges b into a with this rule: if a.x is empty and b.x is not, then a.x = b.x
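
For example, a caller can set only the fields it cares about and let the defaults backfill the rest (a sketch using the same assumed import as above):

userParams := zhipuai.ModelParams{
	Temperature: 0.2,
	Prompt:      []zhipuai.Prompt{{Role: zhipuai.User, Content: "Hello"}},
}
// Empty fields of userParams (Method, Model, TopP, ...) are filled from the defaults.
merged := zhipuai.MergeParams(userParams, zhipuai.DefaultModelParams())
if err := zhipuai.ValidateModelParams(merged); err != nil {
	log.Fatal(err)
}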

func (*ModelParams) DeepCopy

func (in *ModelParams) DeepCopy() *ModelParams

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ModelParams.

func (*ModelParams) DeepCopyInto

func (in *ModelParams) DeepCopyInto(out *ModelParams)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ModelParams) Marshal

func (params *ModelParams) Marshal() []byte

func (*ModelParams) Unmarshal

func (params *ModelParams) Unmarshal(bytes []byte) error

type Prompt

type Prompt struct {
	Role    Role   `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

+kubebuilder:object:generate=true Prompt defines the content of a ZhiPuAI prompt call

func (*Prompt) DeepCopy

func (in *Prompt) DeepCopy() *Prompt

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Prompt.

func (*Prompt) DeepCopyInto

func (in *Prompt) DeepCopyInto(out *Prompt)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Response

type Response struct {
	Code    int    `json:"code"`
	Data    *Data  `json:"data"`
	Msg     string `json:"msg"`
	Success bool   `json:"success"`
}

func Get

func Get(apiURL, token string, timeout time.Duration) (*Response, error)

func Post

func Post(apiURL, token string, params ModelParams, timeout time.Duration) (*Response, error)

func (*Response) Bytes

func (response *Response) Bytes() []byte

func (*Response) String

func (response *Response) String() string

func (*Response) Type

func (response *Response) Type() llms.LLMType

func (*Response) Unmarshal

func (response *Response) Unmarshal(bytes []byte) error

type Role

type Role string
const (
	User      Role = "user"
	Assistant Role = "assistant"
)

type SSEClient

type SSEClient struct {
	LastEventID    atomic.Value // []byte
	EncodingBase64 bool
	// contains filtered or unexported fields
}

func NewSSEClient

func NewSSEClient() *SSEClient

func (*SSEClient) Events

func (c *SSEClient) Events(resp *http.Response) (<-chan *sse.Event, <-chan error)

type Usage

type Usage struct {
	TotalTokens int `json:"total_tokens,omitempty"`
}

type ZhiPuAI

type ZhiPuAI struct {
	// contains filtered or unexported fields
}

func NewZhiPuAI

func NewZhiPuAI(apiKey string) *ZhiPuAI

func (*ZhiPuAI) AsyncInvoke

func (z *ZhiPuAI) AsyncInvoke(params ModelParams) (*Response, error)

AsyncInvoke only returns a task ID, which can be used to fetch the task's result later
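
A sketch of the async round trip, with the returned task ID copied back into the params for the follow-up Get (polling and backoff omitted):

z := zhipuai.NewZhiPuAI("your-id.your-secret")

params := zhipuai.DefaultModelParams()
params.Prompt = []zhipuai.Prompt{{Role: zhipuai.User, Content: "Summarize Kubernetes in one sentence."}}

// Submit the task; only a task ID comes back.
submitted, err := z.AsyncInvoke(params)
if err != nil {
	log.Fatal(err)
}

// Later, fetch the result by task ID.
params.TaskID = submitted.Data.TaskID
result, err := z.Get(params)
if err != nil {
	log.Fatal(err)
}
fmt.Println(result.Data.TaskStatus)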

func (*ZhiPuAI) Call

func (z *ZhiPuAI) Call(data []byte) (llms.Response, error)

Call wraps a common AI API call

func (*ZhiPuAI) CreateEmbedding

func (z *ZhiPuAI) CreateEmbedding(ctx context.Context, inputTexts []string) ([][]float32, error)

CreateEmbedding does batch embedding, to be compatible with langchaingo/llms
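
A sketch of batch embedding (adds a "context" import to the ones above); each returned vector corresponds to the input text at the same index:

z := zhipuai.NewZhiPuAI("your-id.your-secret")
vectors, err := z.CreateEmbedding(context.Background(), []string{"first text", "second text"})
if err != nil {
	log.Fatal(err)
}
for i, v := range vectors {
	fmt.Printf("text %d -> %d-dimensional vector\n", i, len(v))
}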

func (*ZhiPuAI) Embedding

func (z *ZhiPuAI) Embedding(text EmbeddingText) (*EmbeddingResponse, error)

func (*ZhiPuAI) Get

func (z *ZhiPuAI) Get(params ModelParams) (*Response, error)

Get gets the result of an async-invoke task

func (*ZhiPuAI) Invoke

func (z *ZhiPuAI) Invoke(params ModelParams) (*Response, error)

Invoke calls zhipuai and returns the result immediately
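
A minimal synchronous call might look like this (import path and model defaults assumed as above):

z := zhipuai.NewZhiPuAI("your-id.your-secret")

params := zhipuai.DefaultModelParams()
params.Prompt = []zhipuai.Prompt{{Role: zhipuai.User, Content: "Hello"}}

resp, err := z.Invoke(params)
if err != nil {
	log.Fatal(err)
}
if resp.Data != nil && len(resp.Data.Choices) > 0 {
	fmt.Println(resp.Data.Choices[0].Content)
}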

func (*ZhiPuAI) SSEInvoke

func (z *ZhiPuAI) SSEInvoke(params ModelParams, handler func(*sse.Event)) error
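
A streaming sketch; the *sse.Event type comes from the SSE client library this package builds on (assumed here to be importable as sse and to expose a Data byte slice), and Incremental is set so each event is assumed to carry only the newly generated chunk:

z := zhipuai.NewZhiPuAI("your-id.your-secret")

params := zhipuai.DefaultModelParams()
params.Incremental = true
params.Prompt = []zhipuai.Prompt{{Role: zhipuai.User, Content: "Stream a short poem."}}

if err := z.SSEInvoke(params, func(ev *sse.Event) {
	// ev.Data is assumed to hold the streamed chunk.
	fmt.Print(string(ev.Data))
}); err != nil {
	log.Fatal(err)
}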

func (ZhiPuAI) Type

func (z ZhiPuAI) Type() llms.LLMType

func (*ZhiPuAI) Validate

func (z *ZhiPuAI) Validate(ctx context.Context, options ...langchainllms.CallOption) (llms.Response, error)

Validate validates the zhipuai service against the given CallOptions

type ZhiPuAILLM

type ZhiPuAILLM struct {
	ZhiPuAI
	RetryTimes int
}

func (*ZhiPuAILLM) Call

func (z *ZhiPuAILLM) Call(ctx context.Context, messages []schema.ChatMessage, options ...langchainllm.CallOption) (*schema.AIChatMessage, error)
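
A sketch of driving the langchaingo-compatible wrapper; the schema import path and the Content fields on HumanChatMessage and AIChatMessage are assumptions about the langchaingo version this module builds against:

// assumed: schema is "github.com/tmc/langchaingo/schema"
llm := &zhipuai.ZhiPuAILLM{
	ZhiPuAI:    *zhipuai.NewZhiPuAI("your-id.your-secret"),
	RetryTimes: 3,
}

msg, err := llm.Call(context.Background(), []schema.ChatMessage{
	schema.HumanChatMessage{Content: "Hello"},
})
if err != nil {
	log.Fatal(err)
}
fmt.Println(msg.Content)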

func (*ZhiPuAILLM) Generate

func (z *ZhiPuAILLM) Generate(ctx context.Context, messageSets [][]schema.ChatMessage, options ...langchainllm.CallOption) ([]*langchainllm.Generation, error)

func (*ZhiPuAILLM) GeneratePrompt

func (z *ZhiPuAILLM) GeneratePrompt(ctx context.Context, promptValues []schema.PromptValue, options ...langchainllm.CallOption) (langchainllm.LLMResult, error)

func (*ZhiPuAILLM) GetNumTokens

func (z *ZhiPuAILLM) GetNumTokens(text string) int
