openai

package module

README

Chat completion client

This is a hard fork of github.com/sashabaranov/go-openai that includes only the APIs needed for GPTScript. Eventually this will be rewritten.

License

This project is licensed under the Apache License - see the LICENSE file for details.

Documentation

Index

Constants

View Source
const (
	ChatMessageRoleSystem    = "system"
	ChatMessageRoleUser      = "user"
	ChatMessageRoleAssistant = "assistant"
	ChatMessageRoleFunction  = "function"
	ChatMessageRoleTool      = "tool"
)

Chat message roles defined by the OpenAI API.
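A minimal sketch of composing a conversation with these role constants and the ChatCompletionMessage type documented below. The import path is an assumption for this fork; substitute the module's actual path.

package examples

import (
	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// buildConversation assembles the message list for a chat completion request.
// Roles mirror the OpenAI chat API: system sets behavior, user asks, assistant answers.
func buildConversation() []openai.ChatCompletionMessage {
	return []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleSystem, Content: "You are a terse assistant."},
		{Role: openai.ChatMessageRoleUser, Content: "What is the capital of France?"},
	}
}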

View Source
const (
	GPT432K0613           = "gpt-4-32k-0613"
	GPT432K0314           = "gpt-4-32k-0314"
	GPT432K               = "gpt-4-32k"
	GPT40613              = "gpt-4-0613"
	GPT40314              = "gpt-4-0314"
	GPT4Turbo             = "gpt-4-turbo"
	GPT4Turbo20240409     = "gpt-4-turbo-2024-04-09"
	GPT4o                 = "gpt-4o"
	GPT4o20240513         = "gpt-4o-2024-05-13"
	GPT4Turbo0125         = "gpt-4-0125-preview"
	GPT4Turbo1106         = "gpt-4-1106-preview"
	GPT4TurboPreview      = "gpt-4-turbo-preview"
	GPT4VisionPreview     = "gpt-4-vision-preview"
	GPT4                  = "gpt-4"
	GPT3Dot5Turbo0125     = "gpt-3.5-turbo-0125"
	GPT3Dot5Turbo1106     = "gpt-3.5-turbo-1106"
	GPT3Dot5Turbo0613     = "gpt-3.5-turbo-0613"
	GPT3Dot5Turbo0301     = "gpt-3.5-turbo-0301"
	GPT3Dot5Turbo16K      = "gpt-3.5-turbo-16k"
	GPT3Dot5Turbo16K0613  = "gpt-3.5-turbo-16k-0613"
	GPT3Dot5Turbo         = "gpt-3.5-turbo"
	GPT3Dot5TurboInstruct = "gpt-3.5-turbo-instruct"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextDavinci003 = "text-davinci-003"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextDavinci002 = "text-davinci-002"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextCurie001 = "text-curie-001"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextBabbage001 = "text-babbage-001"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextAda001 = "text-ada-001"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3TextDavinci001 = "text-davinci-001"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3DavinciInstructBeta = "davinci-instruct-beta"
	GPT3Davinci             = "davinci"
	GPT3Davinci002          = "davinci-002"
	// Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
	GPT3CurieInstructBeta = "curie-instruct-beta"
	GPT3Curie             = "curie"
	GPT3Curie002          = "curie-002"
	GPT3Ada               = "ada"
	GPT3Ada002            = "ada-002"
	GPT3Babbage           = "babbage"
	GPT3Babbage002        = "babbage-002"
)

GPT3 defines the model identifiers provided by OpenAI for use when generating completions. GPT-3 models are designed for text-based tasks; for code-specific tasks, refer to the Codex series of models.

View Source
const AzureAPIKeyHeader = "api-key"

Variables

View Source
var (
	ErrChatCompletionInvalidModel       = errors.New("this model is not supported with this method, please use CreateCompletion client method instead") //nolint:lll
	ErrChatCompletionStreamNotSupported = errors.New("streaming is not supported with this method, please use CreateChatCompletionStream")              //nolint:lll
	ErrContentFieldsMisused             = errors.New("can't use both Content and MultiContent properties simultaneously")
)
View Source
var (
	ErrTooManyEmptyStreamMessages = errors.New("stream has sent too many empty messages")
)

Functions

This section is empty.

Types

type APIError

type APIError struct {
	Code           any         `json:"code,omitempty"`
	Message        string      `json:"message"`
	Param          *string     `json:"param,omitempty"`
	Type           string      `json:"type"`
	HTTPStatusCode int         `json:"-"`
	HTTPStatus     string      `json:"-"`
	InnerError     *InnerError `json:"innererror,omitempty"`
}

APIError provides error information returned by the OpenAI API. The InnerError struct is only valid for Azure OpenAI Service.

func (*APIError) Error

func (e *APIError) Error() string

func (*APIError) UnmarshalJSON

func (e *APIError) UnmarshalJSON(data []byte) (err error)
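A sketch of inspecting errors returned by the client, assuming an *APIError (or a *RequestError) can be unwrapped with errors.As; the import path is an assumption for this fork.

package examples

import (
	"errors"
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// classifyError unwraps an error returned by the client and reports the HTTP
// status and API error details when available.
func classifyError(err error) {
	var apiErr *openai.APIError
	if errors.As(err, &apiErr) {
		fmt.Printf("API error: status=%d type=%s message=%s\n",
			apiErr.HTTPStatusCode, apiErr.Type, apiErr.Message)
		return
	}
	var reqErr *openai.RequestError
	if errors.As(err, &reqErr) {
		fmt.Printf("request error: status=%d: %v\n", reqErr.HTTPStatusCode, reqErr.Err)
		return
	}
	fmt.Println("other error:", err)
}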

type APIType

type APIType string
const (
	APITypeOpenAI  APIType = "OPEN_AI"
	APITypeAzure   APIType = "AZURE"
	APITypeAzureAD APIType = "AZURE_AD"
)

type ChatCompletionChoice

type ChatCompletionChoice struct {
	Index   int                   `json:"index"`
	Message ChatCompletionMessage `json:"message"`
	// FinishReason
	// stop: API returned complete message,
	// or a message terminated by one of the stop sequences provided via the stop parameter
	// length: Incomplete model output due to max_tokens parameter or token limit
	// function_call: The model decided to call a function
	// content_filter: Omitted content due to a flag from our content filters
	// null: API response still in progress or incomplete
	FinishReason FinishReason `json:"finish_reason"`
	LogProbs     *LogProbs    `json:"logprobs,omitempty"`
}

type ChatCompletionMessage

type ChatCompletionMessage struct {
	Role         string `json:"role"`
	Content      string `json:"content"`
	MultiContent []ChatMessagePart

	// This property isn't in the official documentation, but it's in
	// the documentation for the official library for python:
	// - https://github.com/openai/openai-python/blob/main/chatml.md
	// - https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
	Name string `json:"name,omitempty"`

	FunctionCall *FunctionCall `json:"function_call,omitempty"`

	// For Role=assistant prompts this may be set to the tool calls generated by the model, such as function calls.
	ToolCalls []ToolCall `json:"tool_calls,omitempty"`

	// For Role=tool prompts this should be set to the ID given in the assistant's prior request to call a tool.
	ToolCallID string `json:"tool_call_id,omitempty"`
}

func (ChatCompletionMessage) MarshalJSON

func (m ChatCompletionMessage) MarshalJSON() ([]byte, error)

func (*ChatCompletionMessage) UnmarshalJSON

func (m *ChatCompletionMessage) UnmarshalJSON(bs []byte) error

type ChatCompletionRequest

type ChatCompletionRequest struct {
	Model            string                        `json:"model"`
	Messages         []ChatCompletionMessage       `json:"messages"`
	MaxTokens        int                           `json:"max_tokens,omitempty"`
	Temperature      *float32                      `json:"temperature,omitempty"`
	TopP             float32                       `json:"top_p,omitempty"`
	N                int                           `json:"n,omitempty"`
	Stream           bool                          `json:"stream,omitempty"`
	StreamOptions    *StreamOptions                `json:"stream_options,omitempty"`
	Stop             []string                      `json:"stop,omitempty"`
	PresencePenalty  float32                       `json:"presence_penalty,omitempty"`
	ResponseFormat   *ChatCompletionResponseFormat `json:"response_format,omitempty"`
	Seed             *int                          `json:"seed,omitempty"`
	FrequencyPenalty float32                       `json:"frequency_penalty,omitempty"`
	// LogitBias must be keyed by token ID strings (as produced by the tokenizer), not by word strings.
	// incorrect: `"logit_bias":{"You": 6}`, correct: `"logit_bias":{"1639": 6}`
	// refs: https://platform.openai.com/docs/api-reference/chat/create#chat/create-logit_bias
	LogitBias map[string]int `json:"logit_bias,omitempty"`
	// LogProbs indicates whether to return log probabilities of the output tokens or not.
	// If true, returns the log probabilities of each output token returned in the content of message.
	// This option is currently not available on the gpt-4-vision-preview model.
	LogProbs bool `json:"logprobs,omitempty"`
	// TopLogProbs is an integer between 0 and 5 specifying the number of most likely tokens to return at each
	// token position, each with an associated log probability.
	// logprobs must be set to true if this parameter is used.
	TopLogProbs int    `json:"top_logprobs,omitempty"`
	User        string `json:"user,omitempty"`
	// Deprecated: use Tools instead.
	Functions []FunctionDefinition `json:"functions,omitempty"`
	// Deprecated: use ToolChoice instead.
	FunctionCall any    `json:"function_call,omitempty"`
	Tools        []Tool `json:"tools,omitempty"`
	// This can be either a string or a ToolChoice object.
	ToolChoice any `json:"tool_choice,omitempty"`
}

ChatCompletionRequest represents a request structure for chat completion API.
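A minimal sketch of building a request (import path assumed for this fork). Temperature and Seed are pointers so a zero value can be distinguished from "not set"; the response format type is documented further below.

package examples

import (
	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// newRequest builds a chat completion request with a few commonly used fields.
func newRequest(messages []openai.ChatCompletionMessage) openai.ChatCompletionRequest {
	temperature := float32(0.2)
	seed := 42
	return openai.ChatCompletionRequest{
		Model:       openai.GPT4o,
		Messages:    messages,
		Temperature: &temperature,
		Seed:        &seed,
		MaxTokens:   512,
		// Ask for a JSON object response (see ChatCompletionResponseFormatType below).
		ResponseFormat: &openai.ChatCompletionResponseFormat{
			Type: openai.ChatCompletionResponseFormatTypeJSONObject,
		},
	}
}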

type ChatCompletionResponse

type ChatCompletionResponse struct {
	ID                string                 `json:"id"`
	Object            string                 `json:"object"`
	Created           int64                  `json:"created"`
	Model             string                 `json:"model"`
	Choices           []ChatCompletionChoice `json:"choices"`
	Usage             Usage                  `json:"usage"`
	SystemFingerprint string                 `json:"system_fingerprint"`
	// contains filtered or unexported fields
}

ChatCompletionResponse represents a response structure for chat completion API.

func (*ChatCompletionResponse) GetRateLimitHeaders

func (h *ChatCompletionResponse) GetRateLimitHeaders() RateLimitHeaders

func (*ChatCompletionResponse) Header

func (h *ChatCompletionResponse) Header() http.Header

func (*ChatCompletionResponse) SetHeader

func (h *ChatCompletionResponse) SetHeader(header http.Header)
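A short sketch (import path assumed for this fork) of reading the rate limit headers captured on a completed response:

package examples

import (
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// logRateLimits reads the x-ratelimit-* headers recorded on a response.
func logRateLimits(resp openai.ChatCompletionResponse) {
	rl := resp.GetRateLimitHeaders()
	fmt.Printf("requests remaining: %d/%d, tokens remaining: %d/%d\n",
		rl.RemainingRequests, rl.LimitRequests, rl.RemainingTokens, rl.LimitTokens)
	fmt.Println("request quota resets at:", rl.ResetRequests.Time())
}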

type ChatCompletionResponseFormat

type ChatCompletionResponseFormat struct {
	Type ChatCompletionResponseFormatType `json:"type,omitempty"`
}

type ChatCompletionResponseFormatType

type ChatCompletionResponseFormatType string
const (
	ChatCompletionResponseFormatTypeJSONObject ChatCompletionResponseFormatType = "json_object"
	ChatCompletionResponseFormatTypeText       ChatCompletionResponseFormatType = "text"
)

type ChatCompletionStream

type ChatCompletionStream struct {
	// contains filtered or unexported fields
}

ChatCompletionStream is a stream of chat completion responses. Note: perhaps it is more elegant to abstract Stream using generics.

func (ChatCompletionStream) Close

func (stream ChatCompletionStream) Close() error

func (ChatCompletionStream) Recv

func (stream ChatCompletionStream) Recv() (response T, err error)

type ChatCompletionStreamChoice

type ChatCompletionStreamChoice struct {
	Index                int                             `json:"index"`
	Delta                ChatCompletionStreamChoiceDelta `json:"delta"`
	FinishReason         FinishReason                    `json:"finish_reason"`
	ContentFilterResults ContentFilterResults            `json:"content_filter_results,omitempty"`
}

type ChatCompletionStreamChoiceDelta

type ChatCompletionStreamChoiceDelta struct {
	Content      string        `json:"content,omitempty"`
	Role         string        `json:"role,omitempty"`
	FunctionCall *FunctionCall `json:"function_call,omitempty"`
	ToolCalls    []ToolCall    `json:"tool_calls,omitempty"`
}

type ChatCompletionStreamResponse

type ChatCompletionStreamResponse struct {
	ID                string                       `json:"id"`
	Object            string                       `json:"object"`
	Created           int64                        `json:"created"`
	Model             string                       `json:"model"`
	Choices           []ChatCompletionStreamChoice `json:"choices"`
	PromptAnnotations []PromptAnnotation           `json:"prompt_annotations,omitempty"`
	Usage             Usage                        `json:"usage,omitempty"`
}

type ChatMessageImageURL

type ChatMessageImageURL struct {
	URL    string         `json:"url,omitempty"`
	Detail ImageURLDetail `json:"detail,omitempty"`
}

type ChatMessagePart

type ChatMessagePart struct {
	Type     ChatMessagePartType  `json:"type,omitempty"`
	Text     string               `json:"text,omitempty"`
	ImageURL *ChatMessageImageURL `json:"image_url,omitempty"`
}

type ChatMessagePartType

type ChatMessagePartType string
const (
	ChatMessagePartTypeText     ChatMessagePartType = "text"
	ChatMessagePartTypeImageURL ChatMessagePartType = "image_url"
)

type Client

type Client struct {
	// contains filtered or unexported fields
}

Client is the OpenAI API client.

func NewClient

func NewClient(authToken string) *Client

NewClient creates a new OpenAI API client.

func NewClientWithConfig

func NewClientWithConfig(config ClientConfig) *Client

NewClientWithConfig creates a new OpenAI API client using the specified config.

func (*Client) CreateChatCompletion

func (c *Client) CreateChatCompletion(
	ctx context.Context,
	request ChatCompletionRequest,
	headers map[string]string,
	retryOpts ...RetryOptions,
) (response ChatCompletionResponse, err error)

CreateChatCompletion — API call to create a completion for the chat message.
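A sketch of a single non-streaming call (import path assumed for this fork). The headers map is sent with the HTTP request; the extra header name below is hypothetical, and retry behavior is optional and variadic.

package examples

import (
	"context"
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// completeOnce performs a single chat completion with default retry options.
func completeOnce(ctx context.Context, client *openai.Client) error {
	req := openai.ChatCompletionRequest{
		Model: openai.GPT4o,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Say hello."},
		},
	}
	headers := map[string]string{"X-Example-Header": "demo"} // hypothetical extra header
	resp, err := client.CreateChatCompletion(ctx, req, headers, openai.NewDefaultRetryOptions())
	if err != nil {
		return err
	}
	fmt.Println(resp.Choices[0].Message.Content)
	return nil
}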

func (*Client) CreateChatCompletionStream

func (c *Client) CreateChatCompletionStream(
	ctx context.Context,
	request ChatCompletionRequest,
	headers map[string]string,
	retryOpts ...RetryOptions,
) (stream *ChatCompletionStream, err error)

CreateChatCompletionStream — API call to create a chat completion with streaming support. Tokens are sent back as data-only server-sent events as they become available, and the stream is terminated by a data: [DONE] message.
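A sketch of the streaming loop (import path assumed for this fork). Recv is assumed to yield ChatCompletionStreamResponse values and io.EOF once the stream ends.

package examples

import (
	"context"
	"errors"
	"fmt"
	"io"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// streamCompletion prints content deltas as they arrive.
func streamCompletion(ctx context.Context, client *openai.Client) error {
	req := openai.ChatCompletionRequest{
		Model:  openai.GPT4o,
		Stream: true,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Write a haiku about Go."},
		},
	}
	stream, err := client.CreateChatCompletionStream(ctx, req, nil)
	if err != nil {
		return err
	}
	defer stream.Close()

	for {
		chunk, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			return nil // stream finished with the data: [DONE] message
		}
		if err != nil {
			return err
		}
		if len(chunk.Choices) > 0 {
			fmt.Print(chunk.Choices[0].Delta.Content)
		}
	}
}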

func (*Client) CreateMessage

func (c *Client) CreateMessage(ctx context.Context, threadID string, request MessageRequest) (msg Message, err error)

CreateMessage creates a new message.

func (*Client) GetAPIKeyAndBaseURL

func (c *Client) GetAPIKeyAndBaseURL() (string, string)

func (*Client) ListMessage

func (c *Client) ListMessage(ctx context.Context, threadID string,
	limit *int,
	order *string,
	after *string,
	before *string,
) (messages MessagesList, err error)

ListMessage fetches all messages in the thread.
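A sketch of listing messages in a thread (import path assumed for this fork). The pointer parameters are optional; pass nil to use the API defaults.

package examples

import (
	"context"
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// listThreadMessages fetches up to 20 messages in descending order.
func listThreadMessages(ctx context.Context, client *openai.Client, threadID string) error {
	limit := 20
	order := "desc"
	msgs, err := client.ListMessage(ctx, threadID, &limit, &order, nil, nil)
	if err != nil {
		return err
	}
	for _, m := range msgs.Messages {
		fmt.Println(m.Role, m.ID)
	}
	return nil
}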

func (*Client) ListMessageFiles

func (c *Client) ListMessageFiles(
	ctx context.Context,
	threadID, messageID string,
) (files MessageFilesList, err error)

ListMessageFiles fetches all files attached to a message.

func (*Client) ListModels

func (c *Client) ListModels(ctx context.Context) (models ModelsList, err error)

ListModels lists the currently available models and provides basic information about each, such as the model ID and parent.
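A short sketch (import path assumed for this fork) of listing the models visible to the configured API key:

package examples

import (
	"context"
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// printModels prints the ID and owner of every available model.
func printModels(ctx context.Context, client *openai.Client) error {
	models, err := client.ListModels(ctx)
	if err != nil {
		return err
	}
	for _, m := range models.Models {
		fmt.Println(m.ID, "owned by", m.OwnedBy)
	}
	return nil
}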

func (*Client) ModifyMessage

func (c *Client) ModifyMessage(
	ctx context.Context,
	threadID, messageID string,
	metadata map[string]any,
) (msg Message, err error)

ModifyMessage modifies a message.

func (*Client) RetrieveMessage

func (c *Client) RetrieveMessage(
	ctx context.Context,
	threadID, messageID string,
) (msg Message, err error)

RetrieveMessage retrieves a Message.

func (*Client) RetrieveMessageFile

func (c *Client) RetrieveMessageFile(
	ctx context.Context,
	threadID, messageID, fileID string,
) (file MessageFile, err error)

RetrieveMessageFile fetches a message file.

func (*Client) SetAPIKey

func (c *Client) SetAPIKey(apiKey string)

type ClientConfig

type ClientConfig struct {
	BaseURL              string
	OrgID                string
	APIType              APIType
	APIVersion           string                    // required when APIType is APITypeAzure or APITypeAzureAD
	AzureModelMapperFunc func(model string) string // maps a model name to an Azure deployment name
	HTTPClient           *http.Client

	EmptyMessagesLimit uint
	// contains filtered or unexported fields
}

ClientConfig is a configuration of a client.

func DefaultAzureConfig

func DefaultAzureConfig(apiKey, baseURL string) ClientConfig

func DefaultConfig

func DefaultConfig(authToken string) ClientConfig
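A sketch of configuring the client for Azure OpenAI Service (import path assumed for this fork; the resource URL and deployment naming scheme below are placeholders for illustration):

package examples

import (
	"strings"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// newAzureClient builds a client against an Azure OpenAI resource and maps
// OpenAI model names to Azure deployment names (deployments often cannot
// contain dots).
func newAzureClient(apiKey string) *openai.Client {
	cfg := openai.DefaultAzureConfig(apiKey, "https://example-resource.openai.azure.com/") // placeholder resource URL
	cfg.AzureModelMapperFunc = func(model string) string {
		return strings.ReplaceAll(model, ".", "")
	}
	return openai.NewClientWithConfig(cfg)
}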

func (ClientConfig) GetAzureDeploymentByModel

func (c ClientConfig) GetAzureDeploymentByModel(model string) string

func (ClientConfig) String

func (ClientConfig) String() string

type ContentFilterResults

type ContentFilterResults struct {
	Hate     Hate     `json:"hate,omitempty"`
	SelfHarm SelfHarm `json:"self_harm,omitempty"`
	Sexual   Sexual   `json:"sexual,omitempty"`
	Violence Violence `json:"violence,omitempty"`
}

type ErrorResponse

type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}

type FineTuneModelDeleteResponse

type FineTuneModelDeleteResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Deleted bool   `json:"deleted"`
	// contains filtered or unexported fields
}

FineTuneModelDeleteResponse represents the deletion status of a fine-tuned model.

func (*FineTuneModelDeleteResponse) GetRateLimitHeaders

func (h *FineTuneModelDeleteResponse) GetRateLimitHeaders() RateLimitHeaders

func (*FineTuneModelDeleteResponse) Header

func (h *FineTuneModelDeleteResponse) Header() http.Header

func (*FineTuneModelDeleteResponse) SetHeader

func (h *FineTuneModelDeleteResponse) SetHeader(header http.Header)

type FinishReason

type FinishReason string
const (
	FinishReasonStop          FinishReason = "stop"
	FinishReasonLength        FinishReason = "length"
	FinishReasonFunctionCall  FinishReason = "function_call"
	FinishReasonToolCalls     FinishReason = "tool_calls"
	FinishReasonContentFilter FinishReason = "content_filter"
	FinishReasonNull          FinishReason = "null"
)

func (FinishReason) MarshalJSON

func (r FinishReason) MarshalJSON() ([]byte, error)

type FunctionCall

type FunctionCall struct {
	Name string `json:"name,omitempty"`
	// call function with arguments in JSON format
	Arguments string `json:"arguments,omitempty"`
}

type FunctionDefine deprecated

type FunctionDefine = FunctionDefinition

Deprecated: use FunctionDefinition instead.

type FunctionDefinition

type FunctionDefinition struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	// Parameters is an object describing the function.
	// You can pass json.RawMessage to describe the schema,
	// or you can pass in a struct which serializes to the proper JSON schema.
	// The jsonschema package is provided for convenience, but you should
	// consider another specialized library if you require more complex schemas.
	Parameters any `json:"parameters"`
}
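A sketch of defining a callable tool (import path assumed for this fork), passing a raw JSON Schema for Parameters as described above; a struct that serializes to a valid schema would work equally well. The Tool and ToolType types are documented further below.

package examples

import (
	"encoding/json"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// weatherTool declares a hypothetical get_weather function for the Tools field
// of a ChatCompletionRequest.
func weatherTool() openai.Tool {
	schema := json.RawMessage(`{
		"type": "object",
		"properties": {
			"location": {"type": "string", "description": "City name"}
		},
		"required": ["location"]
	}`)
	return openai.Tool{
		Type: openai.ToolTypeFunction,
		Function: &openai.FunctionDefinition{
			Name:        "get_weather",
			Description: "Look up the current weather for a location",
			Parameters:  schema,
		},
	}
}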

type Hate

type Hate struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type ImageFile

type ImageFile struct {
	FileID string `json:"file_id"`
}

type ImageURLDetail

type ImageURLDetail string
const (
	ImageURLDetailHigh ImageURLDetail = "high"
	ImageURLDetailLow  ImageURLDetail = "low"
	ImageURLDetailAuto ImageURLDetail = "auto"
)

type InnerError

type InnerError struct {
	Code                 string               `json:"code,omitempty"`
	ContentFilterResults ContentFilterResults `json:"content_filter_result,omitempty"`
}

InnerError carries Azure content-filtering details. Only valid for Azure OpenAI Service.

type LogProb

type LogProb struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []byte  `json:"bytes,omitempty"` // omitted if the value is null
	// TopLogProbs is a list of the most likely tokens and their log probability, at this token position.
	// In rare cases, there may be fewer than the number of requested top_logprobs returned.
	TopLogProbs []TopLogProbs `json:"top_logprobs"`
}

LogProb represents the probability information for a token.

type LogProbs

type LogProbs struct {
	// Content is a list of message content tokens with log probability information.
	Content []LogProb `json:"content"`
}

LogProbs is the top-level structure containing the log probability information.
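A sketch (import path assumed for this fork) of requesting per-token log probabilities and reading them from the first choice:

package examples

import (
	"context"
	"fmt"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// showLogProbs requests log probabilities with two alternatives per token and
// prints what comes back on the first choice.
func showLogProbs(ctx context.Context, client *openai.Client) error {
	resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
		Model:       openai.GPT4o,
		LogProbs:    true,
		TopLogProbs: 2,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Pick a color."},
		},
	}, nil)
	if err != nil {
		return err
	}
	if lp := resp.Choices[0].LogProbs; lp != nil {
		for _, tok := range lp.Content {
			fmt.Printf("%q %.3f (alternatives: %d)\n", tok.Token, tok.LogProb, len(tok.TopLogProbs))
		}
	}
	return nil
}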

type Message

type Message struct {
	ID          string           `json:"id"`
	Object      string           `json:"object"`
	CreatedAt   int              `json:"created_at"`
	ThreadID    string           `json:"thread_id"`
	Role        string           `json:"role"`
	Content     []MessageContent `json:"content"`
	FileIds     []string         `json:"file_ids"` //nolint:revive //backwards-compatibility
	AssistantID *string          `json:"assistant_id,omitempty"`
	RunID       *string          `json:"run_id,omitempty"`
	Metadata    map[string]any   `json:"metadata"`
	// contains filtered or unexported fields
}

func (*Message) GetRateLimitHeaders

func (h *Message) GetRateLimitHeaders() RateLimitHeaders

func (*Message) Header

func (h *Message) Header() http.Header

func (*Message) SetHeader

func (h *Message) SetHeader(header http.Header)

type MessageContent

type MessageContent struct {
	Type      string       `json:"type"`
	Text      *MessageText `json:"text,omitempty"`
	ImageFile *ImageFile   `json:"image_file,omitempty"`
}

type MessageFile

type MessageFile struct {
	ID        string `json:"id"`
	Object    string `json:"object"`
	CreatedAt int    `json:"created_at"`
	MessageID string `json:"message_id"`
	// contains filtered or unexported fields
}

func (*MessageFile) GetRateLimitHeaders

func (h *MessageFile) GetRateLimitHeaders() RateLimitHeaders

func (*MessageFile) Header

func (h *MessageFile) Header() http.Header

func (*MessageFile) SetHeader

func (h *MessageFile) SetHeader(header http.Header)

type MessageFilesList

type MessageFilesList struct {
	MessageFiles []MessageFile `json:"data"`
	// contains filtered or unexported fields
}

func (*MessageFilesList) GetRateLimitHeaders

func (h *MessageFilesList) GetRateLimitHeaders() RateLimitHeaders

func (*MessageFilesList) Header

func (h *MessageFilesList) Header() http.Header

func (*MessageFilesList) SetHeader

func (h *MessageFilesList) SetHeader(header http.Header)

type MessageRequest

type MessageRequest struct {
	Role     string         `json:"role"`
	Content  string         `json:"content"`
	FileIds  []string       `json:"file_ids,omitempty"` //nolint:revive // backwards-compatibility
	Metadata map[string]any `json:"metadata,omitempty"`
}

type MessageText

type MessageText struct {
	Value       string `json:"value"`
	Annotations []any  `json:"annotations"`
}

type MessagesList

type MessagesList struct {
	Messages []Message `json:"data"`

	Object  string  `json:"object"`
	FirstID *string `json:"first_id"`
	LastID  *string `json:"last_id"`
	HasMore bool    `json:"has_more"`
	// contains filtered or unexported fields
}

func (*MessagesList) GetRateLimitHeaders

func (h *MessagesList) GetRateLimitHeaders() RateLimitHeaders

func (*MessagesList) Header

func (h *MessagesList) Header() http.Header

func (*MessagesList) SetHeader

func (h *MessagesList) SetHeader(header http.Header)

type Model

type Model struct {
	CreatedAt  int64             `json:"created"`
	ID         string            `json:"id"`
	Object     string            `json:"object"`
	OwnedBy    string            `json:"owned_by"`
	Permission []Permission      `json:"permission"`
	Root       string            `json:"root"`
	Parent     string            `json:"parent"`
	Metadata   map[string]string `json:"metadata"`
	// contains filtered or unexported fields
}

Model struct represents an OpenAI model.

func (*Model) GetRateLimitHeaders

func (h *Model) GetRateLimitHeaders() RateLimitHeaders

func (*Model) Header

func (h *Model) Header() http.Header

func (*Model) SetHeader

func (h *Model) SetHeader(header http.Header)

type ModelsList

type ModelsList struct {
	Models []Model `json:"data"`
	// contains filtered or unexported fields
}

ModelsList is a list of models, including those that belong to the user or organization.

func (*ModelsList) GetRateLimitHeaders

func (h *ModelsList) GetRateLimitHeaders() RateLimitHeaders

func (*ModelsList) Header

func (h *ModelsList) Header() http.Header

func (*ModelsList) SetHeader

func (h *ModelsList) SetHeader(header http.Header)

type Permission

type Permission struct {
	CreatedAt          int64       `json:"created"`
	ID                 string      `json:"id"`
	Object             string      `json:"object"`
	AllowCreateEngine  bool        `json:"allow_create_engine"`
	AllowSampling      bool        `json:"allow_sampling"`
	AllowLogprobs      bool        `json:"allow_logprobs"`
	AllowSearchIndices bool        `json:"allow_search_indices"`
	AllowView          bool        `json:"allow_view"`
	AllowFineTuning    bool        `json:"allow_fine_tuning"`
	Organization       string      `json:"organization"`
	Group              interface{} `json:"group"`
	IsBlocking         bool        `json:"is_blocking"`
}

Permission struct represents an OpenAI model permission.

type PromptAnnotation

type PromptAnnotation struct {
	PromptIndex          int                  `json:"prompt_index,omitempty"`
	ContentFilterResults ContentFilterResults `json:"content_filter_results,omitempty"`
}

type RateLimitHeaders

type RateLimitHeaders struct {
	LimitRequests     int       `json:"x-ratelimit-limit-requests"`
	LimitTokens       int       `json:"x-ratelimit-limit-tokens"`
	RemainingRequests int       `json:"x-ratelimit-remaining-requests"`
	RemainingTokens   int       `json:"x-ratelimit-remaining-tokens"`
	ResetRequests     ResetTime `json:"x-ratelimit-reset-requests"`
	ResetTokens       ResetTime `json:"x-ratelimit-reset-tokens"`
}

RateLimitHeaders struct represents OpenAI rate limit headers.

type RequestError

type RequestError struct {
	HTTPStatusCode int
	HTTPStatus     string
	Err            error
}

RequestError provides information about generic request errors.

func (*RequestError) Error

func (e *RequestError) Error() string

func (*RequestError) Unwrap

func (e *RequestError) Unwrap() error

type ResetTime

type ResetTime string

func (ResetTime) String

func (r ResetTime) String() string

func (ResetTime) Time

func (r ResetTime) Time() time.Time

type Response

type Response interface {
	SetHeader(http.Header)
}

type RetryOptions

type RetryOptions struct {
	// Retries is the number of times to retry the request. 0 means no retries.
	Retries int

	// RetryAboveCode is the status code above which the request should be retried.
	RetryAboveCode int
	RetryCodes     []int
}

func NewDefaultRetryOptions

func NewDefaultRetryOptions() RetryOptions
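A sketch (import path assumed for this fork) of passing explicit retry settings instead of the defaults returned by NewDefaultRetryOptions; the particular codes below are illustrative.

package examples

import (
	"context"

	openai "github.com/gptscript-ai/chat-completion-client" // assumed import path
)

// withRetries runs a completion with a custom retry policy.
func withRetries(ctx context.Context, client *openai.Client, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
	opts := openai.RetryOptions{
		Retries:        3,
		RetryAboveCode: 499,             // retry any 5xx response
		RetryCodes:     []int{408, 429}, // also retry timeouts and rate limits
	}
	return client.CreateChatCompletion(ctx, req, nil, opts)
}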

type SelfHarm

type SelfHarm struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type Sexual

type Sexual struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type StreamOptions

type StreamOptions struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type Tool

type Tool struct {
	Type     ToolType            `json:"type"`
	Function *FunctionDefinition `json:"function,omitempty"`
}

type ToolCall

type ToolCall struct {
	// Index is not nil only in chat completion chunk object
	Index    *int         `json:"index,omitempty"`
	ID       string       `json:"id"`
	Type     ToolType     `json:"type"`
	Function FunctionCall `json:"function"`
}

type ToolChoice

type ToolChoice struct {
	Type     ToolType     `json:"type"`
	Function ToolFunction `json:"function,omitempty"`
}

type ToolFunction

type ToolFunction struct {
	Name string `json:"name"`
}

type ToolType

type ToolType string
const (
	ToolTypeFunction ToolType = "function"
)

type TopLogProbs

type TopLogProbs struct {
	Token   string  `json:"token"`
	LogProb float64 `json:"logprob"`
	Bytes   []byte  `json:"bytes,omitempty"`
}

type Usage

type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

Usage represents the total token usage per request to OpenAI.

type Violence

type Violence struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}
