openai

package
v0.0.0-...-df6d72d
Published: Jun 24, 2024 License: Apache-2.0 Imports: 14 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var (
	CompleteReason  = "stop"
	MaxTokensReason = "length"
	FilteredReason  = "content_filter"
)
var StreamDoneMarker = []byte("[DONE]")
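
These values mirror OpenAI's finish_reason strings and the sentinel event that terminates a chat completion SSE stream. A minimal sketch of how they are typically checked while consuming a stream; rawEvent and finishReason are hypothetical variables, and bytes/io are standard-library imports assumed at the call site:

if bytes.Equal(bytes.TrimSpace(rawEvent), StreamDoneMarker) {
	return io.EOF // the stream has sent its final "[DONE]" event
}

switch finishReason {
case CompleteReason: // "stop": the model finished naturally
case MaxTokensReason: // "length": generation hit the max_tokens limit
case FilteredReason: // "content_filter": the output was filtered
}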

Functions

This section is empty.

Types

type ChatCompletion

type ChatCompletion struct {
	ID                string   `json:"id"`
	Object            string   `json:"object"`
	Created           int      `json:"created"`
	ModelName         string   `json:"model"`
	SystemFingerprint string   `json:"system_fingerprint"`
	Choices           []Choice `json:"choices"`
	Usage             Usage    `json:"usage"`
}

ChatCompletion represents an OpenAI chat completion response. Ref: https://platform.openai.com/docs/api-reference/chat/object
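
A minimal decoding sketch, assuming resp is the *http.Response of a successful chat completion call (the surrounding function and the encoding/json usage are illustrative):

var completion ChatCompletion
if err := json.NewDecoder(resp.Body).Decode(&completion); err != nil {
	return nil, err
}
// completion.Choices[0].Message holds the generated message;
// completion.Usage reports prompt/completion/total token counts.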

type ChatCompletionChunk

type ChatCompletionChunk struct {
	ID                string         `json:"id"`
	Object            string         `json:"object"`
	Created           int            `json:"created"`
	ModelName         string         `json:"model"`
	SystemFingerprint string         `json:"system_fingerprint"`
	Choices           []StreamChoice `json:"choices"`
}

ChatCompletionChunk represents a single server-sent event (SSE) into which a chat response is broken up during streaming. Ref: https://platform.openai.com/docs/api-reference/chat/streaming
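
A hedged sketch of decoding one streamed event, assuming payload holds the JSON that follows the "data: " prefix of a single SSE event and is not the [DONE] marker:

var chunk ChatCompletionChunk
if err := json.Unmarshal(payload, &chunk); err != nil {
	return err
}
if len(chunk.Choices) > 0 {
	delta := chunk.Choices[0].Delta // the incremental schemas.ChatMessage for this event
	_ = delta
}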

type ChatRequest

type ChatRequest struct {
	Model            string                `json:"model"`
	Messages         []schemas.ChatMessage `json:"messages"`
	Temperature      float64               `json:"temperature,omitempty"`
	TopP             float64               `json:"top_p,omitempty"`
	MaxTokens        int                   `json:"max_tokens,omitempty"`
	N                int                   `json:"n,omitempty"`
	StopWords        []string              `json:"stop,omitempty"`
	Stream           bool                  `json:"stream,omitempty"`
	FrequencyPenalty int                   `json:"frequency_penalty,omitempty"`
	PresencePenalty  int                   `json:"presence_penalty,omitempty"`
	LogitBias        *map[int]float64      `json:"logit_bias,omitempty"`
	User             *string               `json:"user,omitempty"`
	Seed             *int                  `json:"seed,omitempty"`
	Tools            []string              `json:"tools,omitempty"`
	ToolChoice       interface{}           `json:"tool_choice,omitempty"`
	ResponseFormat   interface{}           `json:"response_format,omitempty"`
}

ChatRequest is an OpenAI-specific request schema

func NewChatRequestFromConfig

func NewChatRequestFromConfig(cfg *Config) *ChatRequest

NewChatRequestFromConfig fills the request struct from the config. Reflection is deliberately avoided because of the performance penalty it incurs.

func (*ChatRequest) ApplyParams

func (r *ChatRequest) ApplyParams(params *schemas.ChatParams)
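
Together these suggest a two-step flow: pre-fill a request with the provider's configured defaults, then overlay the per-call chat parameters. A sketch, assuming cfg (*Config) and params (*schemas.ChatParams) are already available:

req := NewChatRequestFromConfig(cfg) // defaults (model, temperature, etc.) taken from the config
req.ApplyParams(params)              // per-request data such as the message history
body, err := json.Marshal(req)
if err != nil {
	return err
}
_ = body // ready to be sent to the chat endpoint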

type ChatStream

type ChatStream struct {
	// contains filtered or unexported fields
}

ChatStream represents an OpenAI chat stream for a specific request

func NewChatStream

func NewChatStream(
	client *http.Client,
	req *http.Request,
	finishReasonMapper *FinishReasonMapper,
	errMapper *ErrorMapper,
	logger *zap.Logger,
) *ChatStream

func (*ChatStream) Close

func (s *ChatStream) Close() error

func (*ChatStream) Open

func (s *ChatStream) Open() error

func (*ChatStream) Recv

func (s *ChatStream) Recv() (*schemas.ChatStreamChunk, error)
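
The Open/Recv/Close trio implies the usual streaming loop. A sketch, assuming end-of-stream is surfaced as io.EOF (this page does not document the exact sentinel error) and that stream was obtained from NewChatStream or Client.ChatStream:

if err := stream.Open(); err != nil {
	return err
}
defer stream.Close()

for {
	chunk, err := stream.Recv()
	if errors.Is(err, io.EOF) {
		break // assumed end-of-stream signal
	}
	if err != nil {
		return err
	}
	// chunk is a *schemas.ChatStreamChunk ready to be forwarded downstream
	_ = chunk
}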

type Choice

type Choice struct {
	Index        int                 `json:"index"`
	Message      schemas.ChatMessage `json:"message"`
	Logprobs     interface{}         `json:"logprobs"`
	FinishReason string              `json:"finish_reason"`
}

type Client

type Client struct {
	// contains filtered or unexported fields
}

Client is a client for accessing the OpenAI API

func NewClient

func NewClient(providerConfig *Config, clientConfig *clients.ClientConfig, tel *telemetry.Telemetry) (*Client, error)

NewClient creates a new client for the OpenAI API.

func (*Client) Chat

func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error)

Chat sends a chat request to the specified OpenAI model.
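
A sketch of a single synchronous chat call. The cfg, ClientConfig, telemetry, and ChatParams values are placeholders; their fields are not documented on this page:

client, err := NewClient(cfg, &clients.ClientConfig{}, tel) // cfg *Config, tel *telemetry.Telemetry: assumed to be prepared by the caller
if err != nil {
	return err
}

resp, err := client.Chat(ctx, &schemas.ChatParams{ /* messages, per-call overrides */ })
if err != nil {
	return err
}
_ = resp // *schemas.ChatResponse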

func (*Client) ChatStream

func (c *Client) ChatStream(ctx context.Context, params *schemas.ChatParams) (clients.ChatStream, error)

func (*Client) ModelName

func (c *Client) ModelName() string

func (*Client) Provider

func (c *Client) Provider() string

func (*Client) SupportChatStream

func (c *Client) SupportChatStream() bool

type Config

type Config struct {
	BaseURL       string        `yaml:"base_url" json:"base_url" validate:"required"`
	ChatEndpoint  string        `yaml:"chat_endpoint" json:"chat_endpoint" validate:"required"`
	ModelName     string        `yaml:"model" json:"model" validate:"required"`
	APIKey        fields.Secret `yaml:"api_key" json:"-" validate:"required"`
	DefaultParams *Params       `yaml:"default_params,omitempty" json:"default_params"`
}

func DefaultConfig

func DefaultConfig() *Config

DefaultConfig for OpenAI models

func (*Config) UnmarshalYAML

func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error
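
The yaml tags and the UnmarshalYAML signature match gopkg.in/yaml.v2-style decoding, so a configuration block presumably looks like the sketch below (the field values are illustrative and the exact YAML library is an assumption):

rawCfg := []byte(`
base_url: "https://api.openai.com/v1"
chat_endpoint: "/chat/completions"
model: "gpt-4o"
api_key: "sk-..."
default_params:
  temperature: 0.8
  max_tokens: 100
`)

cfg := DefaultConfig()
if err := yaml.Unmarshal(rawCfg, cfg); err != nil {
	return err
}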

type ErrorMapper

type ErrorMapper struct {
	// contains filtered or unexported fields
}

func NewErrorMapper

func NewErrorMapper(tel *telemetry.Telemetry) *ErrorMapper

func (*ErrorMapper) Map

func (m *ErrorMapper) Map(resp *http.Response) error

type FinishReasonMapper

type FinishReasonMapper struct {
	// contains filtered or unexported fields
}

func NewFinishReasonMapper

func NewFinishReasonMapper(tel *telemetry.Telemetry) *FinishReasonMapper

func (*FinishReasonMapper) Map

func (m *FinishReasonMapper) Map(finishReason string) *schemas.FinishReason
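
Presumably this translates OpenAI's finish_reason strings (the package-level CompleteReason, MaxTokensReason, and FilteredReason values above) into the unified schemas.FinishReason. A small sketch; the nil handling for unknown reasons is an assumption:

mapper := NewFinishReasonMapper(tel) // tel *telemetry.Telemetry, assumed available
if reason := mapper.Map(CompleteReason); reason != nil {
	_ = *reason // the schemas.FinishReason equivalent of "stop"
}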

type Params

type Params struct {
	Temperature      float64          `yaml:"temperature,omitempty" json:"temperature"`
	TopP             float64          `yaml:"top_p,omitempty" json:"top_p"`
	MaxTokens        int              `yaml:"max_tokens,omitempty" json:"max_tokens"`
	N                int              `yaml:"n,omitempty" json:"n"`
	StopWords        []string         `yaml:"stop,omitempty" json:"stop"`
	FrequencyPenalty int              `yaml:"frequency_penalty,omitempty" json:"frequency_penalty"`
	PresencePenalty  int              `yaml:"presence_penalty,omitempty" json:"presence_penalty"`
	LogitBias        *map[int]float64 `yaml:"logit_bias,omitempty" json:"logit_bias"`
	User             *string          `yaml:"user,omitempty" json:"user"`
	Seed             *int             `yaml:"seed,omitempty" json:"seed"`
	Tools            []string         `yaml:"tools,omitempty" json:"tools"`
	ToolChoice       interface{}      `yaml:"tool_choice,omitempty" json:"tool_choice"`
	ResponseFormat   interface{}      `yaml:"response_format,omitempty" json:"response_format"` // TODO: should this be a part of the chat request API?
}

Params defines OpenAI-specific model parameters along with validation of their values. TODO: Add validations

func DefaultParams

func DefaultParams() Params

func (*Params) UnmarshalYAML

func (p *Params) UnmarshalYAML(unmarshal func(interface{}) error) error

type StreamChoice

type StreamChoice struct {
	Index        int                 `json:"index"`
	Delta        schemas.ChatMessage `json:"delta"`
	Logprobs     interface{}         `json:"logprobs"`
	FinishReason string              `json:"finish_reason"`
}

type Usage

type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
