model

package
v1.0.176 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 2, 2025 License: Apache-2.0 Imports: 9 Imported by: 10

Documentation

Index

Constants

View Source
// Chat message roles accepted in ChatCompletionMessage.Role.
const (
	ChatMessageRoleSystem    = "system"
	ChatMessageRoleUser      = "user"
	ChatMessageRoleAssistant = "assistant"
	ChatMessageRoleTool      = "tool"
)
View Source
// String forms accepted by the tool_choice request field.
const (
	ToolChoiceStringTypeAuto     = "auto"
	ToolChoiceStringTypeNone     = "none"
	ToolChoiceStringTypeRequired = "required"
)
View Source
// HTTP header names and client-side timeout/retry defaults.
const (
	ClientRequestHeader = "X-Client-Request-Id"
	RetryAfterHeader    = "Retry-After"

	// Credential-refresh timeouts, expressed in seconds.
	DefaultMandatoryRefreshTimeout = 10 * 60          // 10 min
	DefaultAdvisoryRefreshTimeout  = 30 * 60          // 30 min
	DefaultStsTimeout              = 7 * 24 * 60 * 60 // 7 days

	// Retry back-off bounds; unitless floats — presumably seconds, TODO confirm at call site.
	InitialRetryDelay = 0.5
	MaxRetryDelay     = 8.0

	// Retry back-off bounds for error responses, as time.Duration values.
	ErrorRetryBaseDelay = 500 * time.Millisecond
	ErrorRetryMaxDelay  = 8 * time.Second
)

Variables

View Source
// Sentinel errors returned by the client; compare with errors.Is.
var (
	ErrTooManyEmptyStreamMessages       = errors.New("stream has sent too many empty messages")
	ErrChatCompletionInvalidModel       = errors.New("this model is not supported with this method, please use CreateCompletion client method instead") //nolint:lll
	ErrChatCompletionStreamNotSupported = errors.New("streaming is not supported with this method, please use CreateChatCompletionStream")              //nolint:lll
	ErrContentFieldsMisused             = errors.New("can't use both Content and MultiContent properties simultaneously")
	ErrBodyWithoutEndpoint              = errors.New("can't fetch endpoint sts token without endpoint")
	ErrBodyWithoutBot                   = errors.New("can't fetch bot sts token without bot id")
)

Functions

This section is empty.

Types

type APIError

// APIError is the structured error payload returned by the server.
type APIError struct {
	Code    string  `json:"code,omitempty"`
	Message string  `json:"message"`
	Param   *string `json:"param,omitempty"`
	Type    string  `json:"type"`
	// HTTPStatusCode is set client-side from the response; never serialized.
	HTTPStatusCode int    `json:"-"`
	RequestId      string `json:"request_id"`
}

func (*APIError) Error

func (e *APIError) Error() string

type Base64Embedding

// Base64Embedding is a container for base64 encoded embeddings.
type Base64Embedding struct {
	Object    string       `json:"object"`
	Embedding base64String `json:"embedding"` // base64-encoded vector; decode via ToEmbeddingResponse
	Index     int          `json:"index"`
}

Base64Embedding is a container for base64 encoded embeddings.

type BotActionUsage added in v1.0.146

type BotActionUsage struct {
	Name             string `json:"name"`
	PromptTokens     string `json:"prompt_tokens,omitempty"`
	CompletionTokens int    `json:"completion_tokens,omitempty"`
	TotalTokens      int    `json:"total_tokens,omitempty"`
	SearchCount      int    `json:"search_count,omitempty"`
}

type BotChatCompletionRequest added in v1.0.146

// BotChatCompletionRequest is a chat completion request addressed to a bot
// (BotId) rather than a bare model. Zero-valued optional fields are omitted
// from the JSON body.
type BotChatCompletionRequest struct {
	BotId             string                   `json:"bot_id,omitempty"`
	Model             string                   `json:"model"`
	Messages          []*ChatCompletionMessage `json:"messages"`
	MaxTokens         int                      `json:"max_tokens,omitempty"`
	Temperature       float32                  `json:"temperature,omitempty"`
	TopP              float32                  `json:"top_p,omitempty"`
	Stream            bool                     `json:"stream,omitempty"`
	Stop              []string                 `json:"stop,omitempty"`
	FrequencyPenalty  float32                  `json:"frequency_penalty,omitempty"`
	LogitBias         map[string]int           `json:"logit_bias,omitempty"`
	LogProbs          bool                     `json:"logprobs,omitempty"`
	TopLogProbs       int                      `json:"top_logprobs,omitempty"`
	User              string                   `json:"user,omitempty"`
	FunctionCall      interface{}              `json:"function_call,omitempty"` // string or structured value
	Tools             []*Tool                  `json:"tools,omitempty"`
	ToolChoice        interface{}              `json:"tool_choice,omitempty"` // string or ToolChoice
	StreamOptions     *StreamOptions           `json:"stream_options,omitempty"`
	PresencePenalty   float32                  `json:"presence_penalty,omitempty"`
	RepetitionPenalty float32                  `json:"repetition_penalty,omitempty"`
	N                 int                      `json:"n,omitempty"`
	ResponseFormat    *ResponseFormat          `json:"response_format,omitempty"`
	Metadata          map[string]interface{}   `json:"metadata,omitempty"`
}

type BotChatCompletionResponse added in v1.0.146

// BotChatCompletionResponse extends ChatCompletionResponse with bot-specific
// usage accounting and retrieval references.
type BotChatCompletionResponse struct {
	ChatCompletionResponse
	Metadata   map[string]interface{}    `json:"metadata,omitempty"`
	BotUsage   *BotUsage                 `json:"bot_usage,omitempty"`
	References []*BotChatResultReference `json:"references,omitempty"`
}

type BotChatCompletionStreamResponse added in v1.0.146

// BotChatCompletionStreamResponse is the streaming counterpart of
// BotChatCompletionResponse.
type BotChatCompletionStreamResponse struct {
	ChatCompletionStreamResponse
	Metadata   map[string]interface{}    `json:"metadata,omitempty"`
	BotUsage   *BotUsage                 `json:"bot_usage,omitempty"`
	References []*BotChatResultReference `json:"references,omitempty"`
}

type BotChatResultReference added in v1.0.146

// BotChatResultReference describes one source a bot answer was grounded on:
// either a web page (Url/SiteName/...) or a knowledge-base document
// (CollectionName/DocId/ChunkId/...).
type BotChatResultReference struct {
	Url                string                 `json:"url,omitempty"`
	LogoUrl            string                 `json:"logo_url,omitempty"`
	MobileUrl          string                 `json:"mobile_url,omitempty"`
	SiteName           string                 `json:"site_name,omitempty"`
	Title              string                 `json:"title,omitempty"`
	CoverImage         *BotCoverImage         `json:"cover_image,omitempty"`
	Summary            string                 `json:"summary,omitempty"`
	PublishTime        string                 `json:"publish_time,omitempty"`
	CollectionName     string                 `json:"collection_name,omitempty"`
	Project            string                 `json:"project,omitempty"`
	DocId              string                 `json:"doc_id,omitempty"`
	DocName            string                 `json:"doc_name,omitempty"`
	DocType            string                 `json:"doc_type,omitempty"`
	DocTitle           string                 `json:"doc_title,omitempty"`
	ChunkId            string                 `json:"chunk_id,omitempty"`
	ChunkTitle         string                 `json:"chunk_title,omitempty"`
	PageNums           string                 `json:"page_nums,omitempty"`
	OriginTextTokenLen int                    `json:"origin_text_token_len,omitempty"`
	FileName           string                 `json:"file_name,omitempty"`
	Extra              map[string]interface{} `json:"extra,omitempty"`
}

type BotCoverImage added in v1.0.146

// BotCoverImage is the cover image attached to a reference, with pixel dimensions.
type BotCoverImage struct {
	Url    string `json:"url,omitempty"`
	Width  int    `json:"width,omitempty"`
	Height int    `json:"height,omitempty"`
}

type BotModelUsage added in v1.0.146

// BotModelUsage is a Usage record attributed to a named model.
type BotModelUsage struct {
	Usage
	Name string `json:"name"`
}

type BotUsage added in v1.0.146

// BotUsage aggregates per-model and per-action usage for one bot request.
type BotUsage struct {
	ModelUsage  []*BotModelUsage  `json:"model_usage,omitempty"`
	ActionUsage []*BotActionUsage `json:"action_usage,omitempty"`
}

type ChatCompletionChoice

// ChatCompletionChoice is one candidate completion in a non-streaming response.
type ChatCompletionChoice struct {
	Index   int                   `json:"index"`
	Message ChatCompletionMessage `json:"message"`
	// FinishReason
	// stop: API returned complete message,
	// or a message terminated by one of the stop sequences provided via the stop parameter
	// length: Incomplete model output due to max_tokens parameter or token limit
	// function_call: The model decided to call a function
	// content_filter: Omitted content due to a flag from our content filters
	// null: API response still in progress or incomplete
	FinishReason FinishReason `json:"finish_reason"`
	LogProbs     *LogProbs    `json:"logprobs,omitempty"`
}

type ChatCompletionMessage

// ChatCompletionMessage is a single conversation turn. Role takes one of the
// ChatMessageRole* constants; ToolCallID links a tool-role message back to the
// ToolCall it answers.
type ChatCompletionMessage struct {
	Role         string                        `json:"role"`
	Content      *ChatCompletionMessageContent `json:"content"`
	FunctionCall *FunctionCall                 `json:"function_call,omitempty"`
	ToolCalls    []*ToolCall                   `json:"tool_calls,omitempty"`
	ToolCallID   string                        `json:"tool_call_id,omitempty"`
}

type ChatCompletionMessageContent

// ChatCompletionMessageContent holds either a plain string or a list of typed
// parts; its custom Marshal/UnmarshalJSON pick whichever value is set.
type ChatCompletionMessageContent struct {
	StringValue *string
	ListValue   []*ChatCompletionMessageContentPart
}

func (ChatCompletionMessageContent) MarshalJSON

func (j ChatCompletionMessageContent) MarshalJSON() ([]byte, error)

MarshalJSON implements json.Marshaler.

func (*ChatCompletionMessageContent) UnmarshalJSON

func (j *ChatCompletionMessageContent) UnmarshalJSON(b []byte) error

type ChatCompletionMessageContentPart

// ChatCompletionMessageContentPart is one element of a multi-part message:
// text when Type is "text", an image reference when Type is "image_url".
type ChatCompletionMessageContentPart struct {
	Type     ChatCompletionMessageContentPartType `json:"type,omitempty"`
	Text     string                               `json:"text,omitempty"`
	ImageURL *ChatMessageImageURL                 `json:"image_url,omitempty"`
}

type ChatCompletionMessageContentPartType

// ChatCompletionMessageContentPartType discriminates content-part variants.
type ChatCompletionMessageContentPartType string

const (
	ChatCompletionMessageContentPartTypeText     ChatCompletionMessageContentPartType = "text"
	ChatCompletionMessageContentPartTypeImageURL ChatCompletionMessageContentPartType = "image_url"
)

type ChatCompletionRequest

// ChatCompletionRequest is the legacy request form: every optional field is a
// value type with omitempty, so zero values are never sent. Use
// CreateChatCompletionRequest when an explicit zero must reach the server.
type ChatCompletionRequest struct {
	Model             string                   `json:"model"`
	Messages          []*ChatCompletionMessage `json:"messages"`
	MaxTokens         int                      `json:"max_tokens,omitempty"`
	Temperature       float32                  `json:"temperature,omitempty"`
	TopP              float32                  `json:"top_p,omitempty"`
	Stream            bool                     `json:"stream,omitempty"`
	Stop              []string                 `json:"stop,omitempty"`
	FrequencyPenalty  float32                  `json:"frequency_penalty,omitempty"`
	LogitBias         map[string]int           `json:"logit_bias,omitempty"`
	LogProbs          bool                     `json:"logprobs,omitempty"`
	TopLogProbs       int                      `json:"top_logprobs,omitempty"`
	User              string                   `json:"user,omitempty"`
	FunctionCall      interface{}              `json:"function_call,omitempty"` // string or structured value
	Tools             []*Tool                  `json:"tools,omitempty"`
	ToolChoice        interface{}              `json:"tool_choice,omitempty"` // string or ToolChoice
	StreamOptions     *StreamOptions           `json:"stream_options,omitempty"`
	PresencePenalty   float32                  `json:"presence_penalty,omitempty"`
	RepetitionPenalty float32                  `json:"repetition_penalty,omitempty"`
	N                 int                      `json:"n,omitempty"`
	ResponseFormat    *ResponseFormat          `json:"response_format,omitempty"`
}

ChatCompletionRequest [Deprecated] - When making a request using this struct, only non-zero fields take effect. This means that if your field value is 0, an empty string (""), false, or another zero value, it will not be sent to the server, and the server will apply its own defaults for those fields. If you need to send an explicit zero value, use CreateChatCompletionRequest instead.

func (ChatCompletionRequest) GetModel added in v1.0.159

func (r ChatCompletionRequest) GetModel() string

func (ChatCompletionRequest) IsStream added in v1.0.159

func (r ChatCompletionRequest) IsStream() bool

func (ChatCompletionRequest) MarshalJSON added in v1.0.159

func (r ChatCompletionRequest) MarshalJSON() ([]byte, error)

func (ChatCompletionRequest) WithStream added in v1.0.159

func (r ChatCompletionRequest) WithStream(stream bool) ChatRequest

type ChatCompletionResponse

// ChatCompletionResponse represents a response structure for chat completion API.
type ChatCompletionResponse struct {
	ID      string                  `json:"id"`
	Object  string                  `json:"object"`
	Created int64                   `json:"created"` // creation time; presumably a Unix timestamp — confirm with API docs
	Model   string                  `json:"model"`
	Choices []*ChatCompletionChoice `json:"choices"`
	Usage   Usage                   `json:"usage"`

	HttpHeader // response headers, populated client-side
}

ChatCompletionResponse represents a response structure for chat completion API.

type ChatCompletionStreamChoice

// ChatCompletionStreamChoice is one candidate within a streamed chunk; Delta
// carries only the incremental fields for this chunk.
type ChatCompletionStreamChoice struct {
	Index        int                             `json:"index"`
	Delta        ChatCompletionStreamChoiceDelta `json:"delta"`
	LogProbs     *LogProbs                       `json:"logprobs,omitempty"`
	FinishReason FinishReason                    `json:"finish_reason"`
}

type ChatCompletionStreamChoiceDelta

// ChatCompletionStreamChoiceDelta is the incremental payload of a stream chunk.
type ChatCompletionStreamChoiceDelta struct {
	Content      string        `json:"content,omitempty"`
	Role         string        `json:"role,omitempty"`
	FunctionCall *FunctionCall `json:"function_call,omitempty"`
	ToolCalls    []*ToolCall   `json:"tool_calls,omitempty"`
}

type ChatCompletionStreamResponse

// ChatCompletionStreamResponse is one server-sent chunk of a streamed completion.
type ChatCompletionStreamResponse struct {
	ID      string                        `json:"id"`
	Object  string                        `json:"object"`
	Created int64                         `json:"created"`
	Model   string                        `json:"model"`
	Choices []*ChatCompletionStreamChoice `json:"choices"`
	// An optional field that will only be present when you set stream_options: {"include_usage": true} in your request.
	// When present, it contains a null value except for the last chunk which contains the token usage statistics
	// for the entire request.
	Usage *Usage `json:"usage,omitempty"`
}

type ChatMessageImageURL

// ChatMessageImageURL references an image in a message part; Detail selects
// the processing fidelity (high/low/auto).
type ChatMessageImageURL struct {
	URL    string         `json:"url,omitempty"`
	Detail ImageURLDetail `json:"detail,omitempty"`
}

type ChatRequest added in v1.0.159

// ChatRequest abstracts over the request variants (ChatCompletionRequest,
// CreateChatCompletionRequest) so client methods can treat them uniformly.
type ChatRequest interface {
	json.Marshaler
	// WithStream returns a copy of the request with streaming toggled.
	WithStream(stream bool) ChatRequest
	IsStream() bool
	GetModel() string
}

type ContextChatCompletionRequest added in v1.0.173

// ContextChatCompletionRequest is a chat completion request executed against a
// previously created server-side context (see CreateContextRequest).
type ContextChatCompletionRequest struct {
	ContextID        string                   `json:"context_id"`
	Mode             ContextMode              `json:"mode"`
	Model            string                   `json:"model"`
	Messages         []*ChatCompletionMessage `json:"messages"`
	MaxTokens        int                      `json:"max_tokens,omitempty"`
	Temperature      float32                  `json:"temperature,omitempty"`
	TopP             float32                  `json:"top_p,omitempty"`
	Stream           bool                     `json:"stream,omitempty"`
	Stop             []string                 `json:"stop,omitempty"`
	FrequencyPenalty float32                  `json:"frequency_penalty,omitempty"`
	LogitBias        map[string]int           `json:"logit_bias,omitempty"`
	LogProbs         bool                     `json:"logprobs,omitempty"`
	TopLogProbs      int                      `json:"top_logprobs,omitempty"`
	User             string                   `json:"user,omitempty"`
	FunctionCall     interface{}              `json:"function_call,omitempty"`
	Tools            []*Tool                  `json:"tools,omitempty"`
	ToolChoice       interface{}              `json:"tool_choice,omitempty"`
	StreamOptions    *StreamOptions           `json:"stream_options,omitempty"`
	Metadata         map[string]interface{}   `json:"metadata,omitempty"`
}

type ContextMode added in v1.0.173

// ContextMode selects how a server-side context is applied.
type ContextMode string

const (
	ContextModeSession      ContextMode = "session"
	ContextModeCommonPrefix ContextMode = "common_prefix"
)

type CreateChatCompletionRequest added in v1.0.159

// CreateChatCompletionRequest is the pointer-field request form: a nil field
// is omitted, while a set field is always sent — including explicit zero
// values, unlike ChatCompletionRequest.
type CreateChatCompletionRequest struct {
	Model             string                   `json:"model"`
	Messages          []*ChatCompletionMessage `json:"messages"`
	MaxTokens         *int                     `json:"max_tokens,omitempty"`
	Temperature       *float32                 `json:"temperature,omitempty"`
	TopP              *float32                 `json:"top_p,omitempty"`
	Stream            *bool                    `json:"stream,omitempty"`
	Stop              []string                 `json:"stop,omitempty"`
	FrequencyPenalty  *float32                 `json:"frequency_penalty,omitempty"`
	LogitBias         map[string]int           `json:"logit_bias,omitempty"`
	LogProbs          *bool                    `json:"logprobs,omitempty"`
	TopLogProbs       *int                     `json:"top_logprobs,omitempty"`
	User              *string                  `json:"user,omitempty"`
	FunctionCall      interface{}              `json:"function_call,omitempty"`
	Tools             []*Tool                  `json:"tools,omitempty"`
	ToolChoice        interface{}              `json:"tool_choice,omitempty"`
	StreamOptions     *StreamOptions           `json:"stream_options,omitempty"`
	PresencePenalty   *float32                 `json:"presence_penalty,omitempty"`
	RepetitionPenalty *float32                 `json:"repetition_penalty,omitempty"`
	N                 *int                     `json:"n,omitempty"`
	ResponseFormat    *ResponseFormat          `json:"response_format,omitempty"`
}

CreateChatCompletionRequest - When making a request using this struct, if your field value is 0, an empty string (""), false, or other zero values, it will be sent to the server. The server will handle these fields according to the specified values.

func (CreateChatCompletionRequest) GetModel added in v1.0.159

func (r CreateChatCompletionRequest) GetModel() string

func (CreateChatCompletionRequest) IsStream added in v1.0.159

func (r CreateChatCompletionRequest) IsStream() bool

func (CreateChatCompletionRequest) MarshalJSON added in v1.0.159

func (r CreateChatCompletionRequest) MarshalJSON() ([]byte, error)

func (CreateChatCompletionRequest) WithStream added in v1.0.159

func (r CreateChatCompletionRequest) WithStream(stream bool) ChatRequest

type CreateContextRequest added in v1.0.173

// CreateContextRequest creates a server-side conversation context that later
// ContextChatCompletionRequest calls can reference.
type CreateContextRequest struct {
	Model              string                   `json:"model"`
	Mode               ContextMode              `json:"mode"`
	Messages           []*ChatCompletionMessage `json:"messages"`
	TTL                *int                     `json:"ttl,omitempty"` // lifetime; presumably seconds — confirm with API docs
	TruncationStrategy *TruncationStrategy      `json:"truncation_strategy,omitempty"`
}

type CreateContextResponse added in v1.0.173

// CreateContextResponse echoes the created context's settings plus its ID.
type CreateContextResponse struct {
	ID                 string              `json:"id"`
	Mode               ContextMode         `json:"mode"`
	Model              string              `json:"model"`
	TTL                *int                `json:"ttl,omitempty"`
	TruncationStrategy *TruncationStrategy `json:"truncation_strategy,omitempty"`

	Usage Usage `json:"usage"`

	HttpHeader
}

type Embedding

// Embedding is one embedding vector in a response, with its input index.
type Embedding struct {
	Object    string    `json:"object"`
	Embedding []float32 `json:"embedding"`
	Index     int       `json:"index"`
}

Embedding is a special format of data representation that can be easily utilized by machine learning models and algorithms. The embedding is an information dense representation of the semantic meaning of a piece of text. Each embedding is a vector of floating point numbers, such that the distance between two embeddings in the vector space is correlated with semantic similarity between two inputs in the original format. For example, if two texts are similar, then their vector representations should also be similar.

type EmbeddingEncodingFormat

// EmbeddingEncodingFormat is the wire format of embedding data ("float" or "base64").
type EmbeddingEncodingFormat string

EmbeddingEncodingFormat is the format of the embeddings data. Currently, only "float" and "base64" are supported, however, "base64" is not officially documented. If not specified will use "float".

// Supported encoding formats; "float" is the default.
const (
	EmbeddingEncodingFormatFloat  EmbeddingEncodingFormat = "float"
	EmbeddingEncodingFormatBase64 EmbeddingEncodingFormat = "base64"
)

type EmbeddingRequest

// EmbeddingRequest is the normalized embeddings request; Input accepts either
// strings or token slices (see EmbeddingRequestStrings / EmbeddingRequestTokens).
type EmbeddingRequest struct {
	Input          interface{}             `json:"input"`
	Model          string                  `json:"model"`
	User           string                  `json:"user"`
	EncodingFormat EmbeddingEncodingFormat `json:"encoding_format,omitempty"`
	// Dimensions The number of dimensions the resulting output embeddings should have.
	// Only supported in text-embedding-3 and later models.
	Dimensions int `json:"dimensions,omitempty"`
}

func (EmbeddingRequest) Convert

func (r EmbeddingRequest) Convert() EmbeddingRequest

type EmbeddingRequestConverter

// EmbeddingRequestConverter normalizes a typed request into EmbeddingRequest.
type EmbeddingRequestConverter interface {
	// Needs to be of type EmbeddingRequestStrings or EmbeddingRequestTokens
	Convert() EmbeddingRequest
}

type EmbeddingRequestStrings

// EmbeddingRequestStrings is the input to a create embeddings request with a slice of strings.
type EmbeddingRequestStrings struct {
	// Input is a slice of strings for which you want to generate an Embedding vector.
	// Each input must not exceed 8192 tokens in length.
	// OpenAPI suggests replacing newlines (\n) in your input with a single space, as they
	// have observed inferior results when newlines are present.
	// E.g.
	//	"The food was delicious and the waiter..."
	Input []string `json:"input"`
	// ID of the model to use. You can use the List models API to see all of your available models,
	// or see our Model overview for descriptions of them.
	Model string `json:"model"`
	// A unique identifier representing your end-user, which will help to monitor and detect abuse.
	User string `json:"user"`
	// EmbeddingEncodingFormat is the format of the embeddings data.
	// Currently, only "float" and "base64" are supported, however, "base64" is not officially documented.
	// If not specified will use "float".
	EncodingFormat EmbeddingEncodingFormat `json:"encoding_format,omitempty"`
	// Dimensions The number of dimensions the resulting output embeddings should have.
	// Only supported in text-embedding-3 and later models.
	Dimensions int `json:"dimensions,omitempty"`
}

EmbeddingRequestStrings is the input to a create embeddings request with a slice of strings.

func (EmbeddingRequestStrings) Convert

type EmbeddingRequestTokens

// EmbeddingRequestTokens is the input to a create embeddings request with
// pre-tokenized input ([][]int).
type EmbeddingRequestTokens struct {
	// Input is a slice of slices of ints ([][]int) for which you want to generate an Embedding vector.
	// Each input must not exceed 8192 tokens in length.
	// OpenAPI suggests replacing newlines (\n) in your input with a single space, as they
	// have observed inferior results when newlines are present.
	// E.g.
	//	"The food was delicious and the waiter..."
	Input [][]int `json:"input"`
	// ID of the model to use. You can use the List models API to see all of your available models,
	// or see our Model overview for descriptions of them.
	Model string `json:"model"`
	// A unique identifier representing your end-user, which will help to monitor and detect abuse.
	User string `json:"user"`
	// EmbeddingEncodingFormat is the format of the embeddings data.
	// Currently, only "float" and "base64" are supported, however, "base64" is not officially documented.
	// If not specified will use "float".
	EncodingFormat EmbeddingEncodingFormat `json:"encoding_format,omitempty"`
	// Dimensions The number of dimensions the resulting output embeddings should have.
	// Only supported in text-embedding-3 and later models.
	Dimensions int `json:"dimensions,omitempty"`
}

func (EmbeddingRequestTokens) Convert

type EmbeddingResponse

// EmbeddingResponse is the response from a Create embeddings request.
type EmbeddingResponse struct {
	ID      string      `json:"id"`
	Created int         `json:"created"`
	Object  string      `json:"object"`
	Data    []Embedding `json:"data"`
	Model   string      `json:"model"`
	Usage   Usage       `json:"usage"`

	HttpHeader
}

EmbeddingResponse is the response from a Create embeddings request.

type EmbeddingResponseBase64

// EmbeddingResponseBase64 is the response from a Create embeddings request
// with base64 encoding format; decode with ToEmbeddingResponse.
type EmbeddingResponseBase64 struct {
	Object string            `json:"object"`
	Data   []Base64Embedding `json:"data"`
	Model  string            `json:"model"`
	Usage  Usage             `json:"usage"`

	HttpHeader
}

EmbeddingResponseBase64 is the response from a Create embeddings request with base64 encoding format.

func (*EmbeddingResponseBase64) ToEmbeddingResponse

func (r *EmbeddingResponseBase64) ToEmbeddingResponse() (EmbeddingResponse, error)

ToEmbeddingResponse converts an embeddingResponseBase64 to an EmbeddingResponse.

type ErrorResponse

// ErrorResponse is the envelope the server wraps an APIError in.
type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}

type FinishReason

// FinishReason explains why the model stopped generating for a choice.
type FinishReason string

const (
	FinishReasonStop          FinishReason = "stop"
	FinishReasonLength        FinishReason = "length"
	FinishReasonFunctionCall  FinishReason = "function_call"
	FinishReasonToolCalls     FinishReason = "tool_calls"
	FinishReasonContentFilter FinishReason = "content_filter"
	FinishReasonNull          FinishReason = "null" // generation still in progress / incomplete
)

func (FinishReason) MarshalJSON

func (r FinishReason) MarshalJSON() ([]byte, error)

type FunctionCall

// FunctionCall is a model-issued function invocation; Arguments is a raw JSON string.
type FunctionCall struct {
	Name      string `json:"name,omitempty"`
	Arguments string `json:"arguments,omitempty"`
}

type FunctionDefine deprecated

// FunctionDefine is a legacy alias kept for backward compatibility.
//
// Deprecated: use FunctionDefinition instead.
type FunctionDefine = FunctionDefinition

Deprecated: use FunctionDefinition instead.

type FunctionDefinition

// FunctionDefinition declares a callable function that the model may invoke.
type FunctionDefinition struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	// Parameters is an object describing the function.
	// You can pass json.RawMessage to describe the schema,
	// or you can pass in a struct which serializes to the proper JSON schema.
	// The jsonschema package is provided for convenience, but you should
	// consider another specialized library if you require more complex schemas.
	Parameters interface{} `json:"parameters"`
}

type HttpHeader

// HttpHeader wraps http.Header so response types can embed it and expose
// SetHeader/GetHeader without serializing the headers as JSON.
type HttpHeader http.Header

func (*HttpHeader) GetHeader added in v1.0.172

func (h *HttpHeader) GetHeader() http.Header

func (*HttpHeader) Header

func (h *HttpHeader) Header() http.Header

func (*HttpHeader) SetHeader

func (h *HttpHeader) SetHeader(header http.Header)

type ImageURLDetail

// ImageURLDetail selects the fidelity level for image inputs.
type ImageURLDetail string

const (
	ImageURLDetailHigh ImageURLDetail = "high"
	ImageURLDetailLow  ImageURLDetail = "low"
	ImageURLDetailAuto ImageURLDetail = "auto"
)

type LogProb

// LogProb represents the probability information for a token.
type LogProb struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []rune  `json:"bytes,omitempty"` // Omitting the field if it is null
	// TopLogProbs is a list of the most likely tokens and their log probability, at this token position.
	// In rare cases, there may be fewer than the number of requested top_logprobs returned.
	TopLogProbs []*TopLogProbs `json:"top_logprobs"`
}

LogProb represents the probability information for a token.

type LogProbs

// LogProbs is the top-level structure containing the log probability information.
type LogProbs struct {
	// Content is a list of message content tokens with log probability information.
	Content []*LogProb `json:"content"`
}

LogProbs is the top-level structure containing the log probability information.

type PromptTokensDetail added in v1.0.173

// PromptTokensDetail breaks down prompt token usage; CachedTokens counts
// tokens served from the prompt cache.
type PromptTokensDetail struct {
	CachedTokens int `json:"cached_tokens"`
}

type RawResponse

// RawResponse exposes an unparsed response body stream plus its headers.
// Callers own the ReadCloser and must close it.
type RawResponse struct {
	io.ReadCloser

	HttpHeader
}

type RequestError

// RequestError provides information about generic request errors.
type RequestError struct {
	HTTPStatusCode int
	Err            error // underlying cause, surfaced via Unwrap
	RequestId      string `json:"request_id"`
}

RequestError provides information about generic request errors.

func NewRequestError added in v1.0.160

func NewRequestError(httpStatusCode int, rawErr error, requestID string) *RequestError

func (*RequestError) Error

func (e *RequestError) Error() string

func (*RequestError) Unwrap

func (e *RequestError) Unwrap() error

type Response

// Response is implemented by response types (via embedded HttpHeader) so the
// client can attach HTTP headers after decoding.
type Response interface {
	SetHeader(http.Header)
	GetHeader() http.Header
}

type ResponseFormat added in v1.0.151

// ResponseFormat constrains model output ("text" or "json_object"); Schema
// optionally carries a JSON schema for structured output.
type ResponseFormat struct {
	Type   ResponseFormatType `json:"type"`
	Schema interface{}        `json:"schema,omitempty"`
}

type ResponseFormatType added in v1.0.151

// ResponseFormatType enumerates supported response formats.
type ResponseFormatType string

const (
	ResponseFormatJsonObject ResponseFormatType = "json_object"
	ResponseFormatText       ResponseFormatType = "text"
)

type StreamOptions

// StreamOptions tunes streaming behavior for requests with Stream enabled.
type StreamOptions struct {
	// If set, an additional chunk will be streamed before the data: [DONE] message.
	// The usage field on this chunk shows the token usage statistics for the entire request,
	// and the choices field will always be an empty array.
	// All other chunks will also include a usage field, but with a null value.
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type Tokenization added in v1.0.151

// Tokenization is the result for one input text: its token IDs and the
// character offsets each token covers.
type Tokenization struct {
	Index         int     `json:"index"`
	Object        string  `json:"object"`
	TotalTokens   int     `json:"total_tokens"`
	TokenIDs      []int   `json:"token_ids"`
	OffsetMapping [][]int `json:"offset_mapping"`
}

type TokenizationRequest added in v1.0.151

// TokenizationRequest is the normalized tokenization request; Text accepts a
// string or a []string (see the typed request variants).
type TokenizationRequest struct {
	Text  interface{} `json:"text"`
	Model string      `json:"model"`
	User  string      `json:"user"`
}

func (TokenizationRequest) Convert added in v1.0.151

type TokenizationRequestConverter added in v1.0.151

// TokenizationRequestConverter normalizes a typed request into TokenizationRequest.
type TokenizationRequestConverter interface {
	Convert() TokenizationRequest
}

type TokenizationRequestString added in v1.0.151

// TokenizationRequestString is the input to a create tokenization request
// with a single string.
type TokenizationRequestString struct {
	Text  string `json:"text"`
	Model string `json:"model"`
	User  string `json:"user"`
}

TokenizationRequestString is the input to a create tokenization request with a single string.

func (TokenizationRequestString) Convert added in v1.0.151

type TokenizationRequestStrings added in v1.0.151

// TokenizationRequestStrings is the input to a create tokenization request
// with a slice of strings.
type TokenizationRequestStrings struct {
	Text  []string `json:"text"`
	Model string   `json:"model"`
	User  string   `json:"user"`
}

TokenizationRequestStrings is the input to a create tokenization request with a slice of strings.

func (TokenizationRequestStrings) Convert added in v1.0.151

type TokenizationResponse added in v1.0.151

// TokenizationResponse is the response from a Create tokenization request.
type TokenizationResponse struct {
	ID      string          `json:"id"`
	Created int             `json:"created"`
	Model   string          `json:"model"`
	Object  string          `json:"object"`
	Data    []*Tokenization `json:"data"`

	HttpHeader
}

TokenizationResponse is the response from a Create tokenization request.

type Tool

// Tool declares a tool the model may call; currently only function tools exist.
type Tool struct {
	Type     ToolType            `json:"type"`
	Function *FunctionDefinition `json:"function,omitempty"`
}

type ToolCall

// ToolCall is a model-issued tool invocation; its ID is echoed back in the
// tool-role reply message's ToolCallID.
type ToolCall struct {
	ID       string       `json:"id"`
	Type     ToolType     `json:"type"`
	Function FunctionCall `json:"function"`
}

type ToolChoice

// ToolChoice forces the model to call a specific tool.
type ToolChoice struct {
	Type Type ToolType `json:"type"`
	// NOTE(review): omitempty has no effect on a non-pointer struct value in
	// encoding/json, so Function is always serialized.
	Function ToolChoiceFunction `json:"function,omitempty"`
}

type ToolChoiceFunction added in v1.0.151

// ToolChoiceFunction names the function a ToolChoice targets.
type ToolChoiceFunction struct {
	Name string `json:"name"`
}

type ToolType

// ToolType discriminates tool kinds; only "function" is defined.
type ToolType string

const (
	ToolTypeFunction ToolType = "function"
)

type TopLogProbs

// TopLogProbs is one alternative token candidate with its log probability.
type TopLogProbs struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []rune  `json:"bytes,omitempty"`
}

type TruncationStrategy added in v1.0.173

// TruncationStrategy controls how a context's history is trimmed when it
// grows past its limit.
type TruncationStrategy struct {
	Type              TruncationStrategyType `json:"type"`
	LastHistoryTokens *int                   `json:"last_history_tokens,omitempty"`
	RollingTokens     *bool                  `json:"rolling_tokens,omitempty"`
}

type TruncationStrategyType added in v1.0.173

// TruncationStrategyType enumerates supported truncation strategies.
type TruncationStrategyType string

const (
	TruncationStrategyTypeLastHistoryTokens TruncationStrategyType = "last_history_tokens"
	TruncationStrategyTypeRollingTokens     TruncationStrategyType = "rolling_tokens"
)

type Usage

// Usage reports token consumption for a request.
type Usage struct {
	PromptTokens        int                `json:"prompt_tokens"`
	CompletionTokens    int                `json:"completion_tokens"`
	TotalTokens         int                `json:"total_tokens"` // prompt + completion
	PromptTokensDetails PromptTokensDetail `json:"prompt_tokens_details"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL