dashscope

package
v0.2.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 12, 2024 License: Apache-2.0 Imports: 16 Imported by: 0

Documentation

Overview

Copyright 2023 KubeAGI.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

Index

Constants

View Source
const (
	DashScopeChatURL          = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"
	DashScopeTextEmbeddingURL = "https://dashscope.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding"
	DashScopeTaskURL          = "https://dashscope.aliyuncs.com/api/v1/tasks/"
)

Variables

This section is empty.

Functions

func DownloadAndExtract

func DownloadAndExtract(url string, dest string) error

func ValidateModelParams

func ValidateModelParams(params ModelParams) error

Types

type Choice

type Choice struct {
	FinishReason FinishReason `json:"finish_reason"`
	Message      Message      `json:"message"`
}

type CommonResponse

type CommonResponse struct {
	// https://help.aliyun.com/zh/dashscope/response-status-codes
	StatusCode int    `json:"status_code,omitempty"`
	Code       string `json:"code,omitempty"`
	Message    string `json:"message,omitempty"`
	RequestID  string `json:"request_id"`
}

type DashScope

type DashScope struct {
	// contains filtered or unexported fields
}

func NewDashScope

func NewDashScope(apiKey string, sse bool) *DashScope

func (*DashScope) Call

func (z *DashScope) Call(data []byte) (llms.Response, error)

Call wraps a common AI API call

func (*DashScope) CreateEmbedding

func (z *DashScope) CreateEmbedding(ctx context.Context, inputTexts []string, query bool) ([]Embeddings, error)

func (*DashScope) CreateEmbeddingAsync

func (z *DashScope) CreateEmbeddingAsync(ctx context.Context, inputURL string, query bool) (taskID string, err error)

func (*DashScope) GetTaskDetail

func (z *DashScope) GetTaskDetail(ctx context.Context, taskID string) (outURL string, err error)

func (*DashScope) StreamCall

func (z *DashScope) StreamCall(ctx context.Context, data []byte, handler func(event *sse.Event, last string) (data string)) error

func (DashScope) Type

func (z DashScope) Type() llms.LLMType

func (*DashScope) Validate

func (z *DashScope) Validate(ctx context.Context, options ...langchainllms.CallOption) (llms.Response, error)

type EmbeddingInput

type EmbeddingInput struct {
	*EmbeddingInputSync
	*EmbeddingInputAsync
}

type EmbeddingInputAsync

type EmbeddingInputAsync struct {
	URL string `json:"url,omitempty"`
}

type EmbeddingInputSync

type EmbeddingInputSync struct {
	Texts []string `json:"texts,omitempty"`
}

type EmbeddingOutput

type EmbeddingOutput struct {
	*EmbeddingOutputSync
	*EmbeddingOutputASync
}

type EmbeddingOutputASync

type EmbeddingOutputASync struct {
	TaskID        string     `json:"task_id"`
	TaskStatus    TaskStatus `json:"task_status"`
	URL           string     `json:"url"`
	SubmitTime    string     `json:"submit_time,omitempty"`
	ScheduledTime string     `json:"scheduled_time,omitempty"`
	EndTime       string     `json:"end_time,omitempty"`
	// when failed
	Code    string `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
}

type EmbeddingOutputSync

type EmbeddingOutputSync struct {
	Embeddings []Embeddings `json:"embeddings"`
}

type EmbeddingParameters

type EmbeddingParameters struct {
	TextType TextType `json:"text_type"`
}

type EmbeddingRequest

type EmbeddingRequest struct {
	Model      Model               `json:"model"`
	Input      EmbeddingInput      `json:"input"`
	Parameters EmbeddingParameters `json:"parameters"`
}

type EmbeddingResponse

type EmbeddingResponse struct {
	CommonResponse
	Output EmbeddingOutput `json:"output"`
	Usage  EmbeddingUsage  `json:"usage"`
}

type EmbeddingUsage

type EmbeddingUsage struct {
	TotalTokens int `json:"total_tokens"`
}

type Embeddings

type Embeddings struct {
	TextIndex int       `json:"text_index"`
	Embedding []float32 `json:"embedding"`
}

type FinishReason

type FinishReason string
const (
	Finish     FinishReason = "stop"
	Generating FinishReason = "null"
	ToLoogin   FinishReason = "length"
)

type Input

type Input struct {
	Messages []Message `json:"messages,omitempty"`
	Prompt   string    `json:"prompt,omitempty"`
	History  *[]string `json:"history,omitempty"`
}

func (*Input) DeepCopy

func (in *Input) DeepCopy() *Input

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Input.

func (*Input) DeepCopyInto

func (in *Input) DeepCopyInto(out *Input)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Message

type Message struct {
	Role    Role   `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

func (*Message) DeepCopy

func (in *Message) DeepCopy() *Message

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Message.

func (*Message) DeepCopyInto

func (in *Message) DeepCopyInto(out *Message)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Model

type Model string
const (
	// 通义千问对外开源的 14B / 7B 规模参数量的经过人类指令对齐的 chat 模型
	QWEN14BChat Model = "qwen-14b-chat"
	QWEN7BChat  Model = "qwen-7b-chat"
	// LLaMa2 系列大语言模型由 Meta 开发并公开发布,其规模从 70 亿到 700 亿参数不等。在灵积上提供的 llama2-7b-chat-v2 和 llama2-13b-chat-v2,分别为 7B 和 13B 规模的 LLaMa2 模型,针对对话场景微调优化后的版本。
	LLAMA27BCHATV2   Model = "llama2-7b-chat-v2"
	LLAMA213BCHATV2  Model = "llama2-13b-chat-v2"
	BAICHUAN7BV1     Model = "baichuan-7b-v1"          // baichuan-7B 是由百川智能开发的一个开源的大规模预训练模型。基于 Transformer 结构,在大约 1.2 万亿 tokens 上训练的 70 亿参数模型,支持中英双语,上下文窗口长度为 4096。在标准的中文和英文权威 benchmark(C-EVAL/MMLU)上均取得同尺寸最好的效果。
	CHATGLM6BV2      Model = "chatglm-6b-v2"           // ChatGLM2 模型是由智谱 AI 出品的大规模语言模型,它在灵积平台上的模型名称为 "chatglm-6b-v2".
	EmbeddingV1      Model = "text-embedding-v1"       // 通用文本向量 同步调用
	EmbeddingAsyncV1 Model = "text-embedding-async-v1" // 通用文本向量 批处理调用
)

type ModelParams

type ModelParams struct {
	Model      Model      `json:"model"`
	Input      Input      `json:"input"`
	Parameters Parameters `json:"parameters,omitempty"`
}

ModelParams ref: https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-qianwen-7b-14b-api-detailes#25745d61fbx49 Do not use 'input.history'; according to the above document, that parameter will be deprecated soon. Instead, set 'parameters.result_format' to 'message' for better compatibility.

func DefaultModelParams

func DefaultModelParams() ModelParams

func DefaultModelParamsSimpleChat

func DefaultModelParamsSimpleChat() ModelParams

func (*ModelParams) DeepCopy

func (in *ModelParams) DeepCopy() *ModelParams

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ModelParams.

func (*ModelParams) DeepCopyInto

func (in *ModelParams) DeepCopyInto(out *ModelParams)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ModelParams) Marshal

func (params *ModelParams) Marshal() []byte

func (*ModelParams) Unmarshal

func (params *ModelParams) Unmarshal(bytes []byte) error

type Output

type Output struct {
	Choices []Choice `json:"choices,omitempty"`
	Text    string   `json:"text,omitempty"`
	History []string `json:"history,omitempty"`
}

type Parameters

type Parameters struct {
	TopP         float32 `json:"top_p,omitempty"`
	TopK         int     `json:"top_k,omitempty"`
	Seed         int     `json:"seed,omitempty"`
	ResultFormat string  `json:"result_format,omitempty"`
}

type Response

type Response struct {
	CommonResponse
	Output Output `json:"output"`
	Usage  Usage  `json:"usage"`
}

func (*Response) Bytes

func (response *Response) Bytes() []byte

func (*Response) String

func (response *Response) String() string

func (*Response) Type

func (response *Response) Type() llms.LLMType

func (*Response) Unmarshal

func (response *Response) Unmarshal(bytes []byte) error

type ResponseChatGLB6B

type ResponseChatGLB6B struct {
	CommonResponse
	Output struct {
		Text struct {
			Response string `json:"response,omitempty"`
		} `json:"text,omitempty"`
		History []string `json:"history,omitempty"`
	} `json:"output"`
	Usage Usage `json:"usage"`
}

func (*ResponseChatGLB6B) Bytes

func (r *ResponseChatGLB6B) Bytes() []byte

func (*ResponseChatGLB6B) String

func (r *ResponseChatGLB6B) String() string

func (*ResponseChatGLB6B) Type

func (r *ResponseChatGLB6B) Type() llms.LLMType

func (*ResponseChatGLB6B) Unmarshal

func (r *ResponseChatGLB6B) Unmarshal(bytes []byte) error

type Role

type Role string
const (
	System    Role = "system"
	User      Role = "user"
	Assistant Role = "assistant"
)

type SSEClient

type SSEClient struct {
	LastEventID    atomic.Value // []byte
	EncodingBase64 bool
	// contains filtered or unexported fields
}

func NewSSEClient

func NewSSEClient() *SSEClient

func (*SSEClient) Events

func (c *SSEClient) Events(resp *http.Response) (<-chan *sse.Event, <-chan error)

type TaskStatus

type TaskStatus string
const (
	TaskStatusPending   TaskStatus = "PENDING"
	TaskStatusRunning   TaskStatus = "RUNNING"
	TaskStatusSucceeded TaskStatus = "SUCCEEDED"
	TaskStatusFailed    TaskStatus = "FAILED"
	TaskStatusUnknown   TaskStatus = "UNKNOWN"
)

type TextType

type TextType string
const (
	TextTypeQuery    TextType = "query"
	TextTypeDocument TextType = "document"
)

type Usage

type Usage struct {
	OutputTokens int `json:"output_tokens"`
	InputTokens  int `json:"input_tokens"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL