chat

package
v0.0.0-...-94216e4
Published: Sep 20, 2023 License: MIT, Apache-2.0 Imports: 10 Imported by: 0

README

A simplified Go client for models that emulate the OpenAI API.

Forked from github.com/sashabaranov/go-openai
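
A minimal quickstart sketch (the import path below is a placeholder for this module's path, and the base URL and model name are assumptions for whichever OpenAI-compatible server you run):

package main

import (
	"context"
	"fmt"
	"log"

	chat "example.com/yourmodule/chat" // placeholder: replace with this module's import path
)

func main() {
	// Any server that emulates the OpenAI API can be targeted by its base URL.
	c := chat.NewClient("http://localhost:8080/v1") // assumed local endpoint

	resp, err := c.CreateChatCompletion(context.Background(), chat.ChatCompletionRequest{
		Model: "gpt-3.5-turbo", // placeholder model name
		Messages: []chat.ChatCompletionMessage{
			{Role: chat.ChatMessageRoleSystem, Content: "You are a helpful assistant."},
			{Role: chat.ChatMessageRoleUser, Content: "Say hello."},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Choices[0].Message.Content)
}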

Documentation

Index

Constants

const (
	ChatMessageRoleSystem    = "system"
	ChatMessageRoleUser      = "user"
	ChatMessageRoleAssistant = "assistant"
	ChatMessageRoleFunction  = "function"
)

Chat message roles defined by the OpenAI API.

Variables

var (
	ErrChatCompletionStreamNotSupported = errors.New("streaming is not supported with this method, please use CreateChatCompletionStream") //nolint:lll
)
var (
	ErrTooManyEmptyStreamMessages = errors.New("stream has sent too many empty messages")
)

Functions

This section is empty.

Types

type APIError

type APIError struct {
	Code           any         `json:"code,omitempty"`
	Message        string      `json:"message"`
	Param          *string     `json:"param,omitempty"`
	Type           string      `json:"type"`
	HTTPStatusCode int         `json:"-"`
	InnerError     *InnerError `json:"innererror,omitempty"`
}

APIError provides error information returned by the OpenAI API. The InnerError field is only valid for Azure OpenAI Service.
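
A sketch of inspecting a failed call, assuming this fork keeps the upstream go-openai behavior of returning *APIError for decoded API error responses and *RequestError for other request failures (the client and request are as in the quickstart above; errors and log are from the standard library):

func callAndReport(ctx context.Context, c *chat.Client, req chat.ChatCompletionRequest) {
	resp, err := c.CreateChatCompletion(ctx, req)
	if err != nil {
		var apiErr *chat.APIError
		if errors.As(err, &apiErr) {
			// The server returned a structured error body.
			log.Printf("API error %d (%v): %s", apiErr.HTTPStatusCode, apiErr.Code, apiErr.Message)
			return
		}
		var reqErr *chat.RequestError
		if errors.As(err, &reqErr) {
			log.Printf("request failed with status %d: %v", reqErr.HTTPStatusCode, reqErr.Err)
			return
		}
		log.Printf("transport error: %v", err)
		return
	}
	log.Println(resp.Choices[0].Message.Content)
}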

func (*APIError) Error

func (e *APIError) Error() string

func (*APIError) UnmarshalJSON

func (e *APIError) UnmarshalJSON(data []byte) (err error)

type ChatCompletionChoice

type ChatCompletionChoice struct {
	Index   int                   `json:"index"`
	Message ChatCompletionMessage `json:"message"`
	// FinishReason
	// stop: API returned complete message,
	// or a message terminated by one of the stop sequences provided via the stop parameter
	// length: Incomplete model output due to max_tokens parameter or token limit
	// function_call: The model decided to call a function
	// content_filter: Omitted content due to a flag from our content filters
	// null: API response still in progress or incomplete
	FinishReason FinishReason `json:"finish_reason"`
}
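
A small sketch of branching on FinishReason, using the constants documented further below:

func printChoice(choice chat.ChatCompletionChoice) {
	switch choice.FinishReason {
	case chat.FinishReasonStop:
		fmt.Println(choice.Message.Content) // complete message
	case chat.FinishReasonLength:
		fmt.Println(choice.Message.Content) // truncated by max_tokens or the token limit
	case chat.FinishReasonFunctionCall:
		fmt.Printf("model wants to call %s(%s)\n",
			choice.Message.FunctionCall.Name, choice.Message.FunctionCall.Arguments)
	case chat.FinishReasonContentFilter:
		fmt.Println("content omitted by the content filter")
	default:
		fmt.Println("response still in progress or incomplete")
	}
}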

type ChatCompletionMessage

type ChatCompletionMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`

	// This property isn't in the official documentation, but it's in
	// the documentation for the official library for python:
	// - https://github.com/openai/openai-python/blob/main/chatml.md
	// - https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
	Name string `json:"name,omitempty"`

	FunctionCall *FunctionCall `json:"function_call,omitempty"`
}

func (*ChatCompletionMessage) TokenLength

func (m *ChatCompletionMessage) TokenLength() int
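
A sketch that sums TokenLength over a request's messages to estimate prompt size before sending it (the exact count depends on the tokenizer the method uses internally):

func estimatePromptTokens(messages []chat.ChatCompletionMessage) int {
	total := 0
	for i := range messages {
		total += messages[i].TokenLength()
	}
	return total
}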

type ChatCompletionRequest

type ChatCompletionRequest struct {
	Model            string                  `json:"model"`
	Messages         []ChatCompletionMessage `json:"messages"`
	MaxTokens        int                     `json:"max_tokens,omitempty"`
	Temperature      float32                 `json:"temperature,omitempty"`
	TopP             float32                 `json:"top_p,omitempty"`
	N                int                     `json:"n,omitempty"`
	Stream           bool                    `json:"stream,omitempty"`
	Stop             []string                `json:"stop,omitempty"`
	PresencePenalty  float32                 `json:"presence_penalty,omitempty"`
	FrequencyPenalty float32                 `json:"frequency_penalty,omitempty"`
	// LogitBias keys must be token IDs (as strings from the tokenizer), not words.
	// incorrect: `"logit_bias":{"You": 6}`, correct: `"logit_bias":{"1639": 6}`
	// refs: https://platform.openai.com/docs/api-reference/chat/create#chat/create-logit_bias
	LogitBias    map[string]int       `json:"logit_bias,omitempty"`
	User         string               `json:"user,omitempty"`
	Functions    []FunctionDefinition `json:"functions,omitempty"`
	FunctionCall any                  `json:"function_call,omitempty"`
}

ChatCompletionRequest represents a request structure for chat completion API.
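
A sketch of building a request. Note the LogitBias key: it is the token ID "1639" from the field comment above, not the word it encodes; the model name is a placeholder for whatever the backing server expects.

func buildRequest(prompt string) chat.ChatCompletionRequest {
	return chat.ChatCompletionRequest{
		Model:       "gpt-3.5-turbo", // placeholder model name
		MaxTokens:   256,
		Temperature: 0.2,
		Stop:        []string{"\n\n"},
		LogitBias:   map[string]int{"1639": 6}, // token ID, not a word
		Messages: []chat.ChatCompletionMessage{
			{Role: chat.ChatMessageRoleUser, Content: prompt},
		},
	}
}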

func (*ChatCompletionRequest) InsertMessagesAt

func (r *ChatCompletionRequest) InsertMessagesAt(i int, more ...ChatCompletionMessage)
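
A sketch that uses InsertMessagesAt to prepend a system prompt to an existing request (assuming index 0 inserts before the first message, i.e. the usual slice-insert semantics):

func prependSystemPrompt(req *chat.ChatCompletionRequest, instructions string) {
	req.InsertMessagesAt(0, chat.ChatCompletionMessage{
		Role:    chat.ChatMessageRoleSystem,
		Content: instructions,
	})
}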

type ChatCompletionResponse

type ChatCompletionResponse struct {
	ID      string                 `json:"id"`
	Object  string                 `json:"object"`
	Created int64                  `json:"created"`
	Model   string                 `json:"model"`
	Choices []ChatCompletionChoice `json:"choices"`
	Usage   Usage                  `json:"usage"`
}

ChatCompletionResponse represents a response structure for chat completion API.

type ChatCompletionStream

type ChatCompletionStream struct {
	// contains filtered or unexported fields
}

ChatCompletionStream streams chat completion responses from the API. Note: it may be more elegant to abstract the stream using generics.

func (ChatCompletionStream) Close

func (stream ChatCompletionStream) Close()

func (ChatCompletionStream) Recv

func (stream ChatCompletionStream) Recv() (response ChatCompletionStreamResponse, err error)

type ChatCompletionStreamChoice

type ChatCompletionStreamChoice struct {
	Index        int                             `json:"index"`
	Delta        ChatCompletionStreamChoiceDelta `json:"delta"`
	FinishReason FinishReason                    `json:"finish_reason"`
}

type ChatCompletionStreamChoiceDelta

type ChatCompletionStreamChoiceDelta struct {
	Content      string        `json:"content,omitempty"`
	Role         string        `json:"role,omitempty"`
	FunctionCall *FunctionCall `json:"function_call,omitempty"`
}

type ChatCompletionStreamResponse

type ChatCompletionStreamResponse struct {
	ID                string                       `json:"id"`
	Object            string                       `json:"object"`
	Created           int64                        `json:"created"`
	Model             string                       `json:"model"`
	Choices           []ChatCompletionStreamChoice `json:"choices"`
	PromptAnnotations []PromptAnnotation           `json:"prompt_annotations,omitempty"`
}

type Client

type Client struct {
	// contains filtered or unexported fields
}

Client is an OpenAI GPT-3 API client.

func NewClient

func NewClient(baseURL string) *Client

NewClient creates a new OpenAI API client.

func NewClientWithConfig

func NewClientWithConfig(config ClientConfig) *Client

NewClientWithConfig creates a new OpenAI API client with the specified config.

func (*Client) CreateChatCompletion

func (c *Client) CreateChatCompletion(
	ctx context.Context,
	request ChatCompletionRequest,
) (response ChatCompletionResponse, err error)

CreateChatCompletion — API call to create a completion for the chat message.

func (*Client) CreateChatCompletionStream

func (c *Client) CreateChatCompletionStream(
	ctx context.Context,
	request ChatCompletionRequest,
) (stream *ChatCompletionStream, err error)

CreateChatCompletionStream — API call to create a chat completion with streaming support. The request is sent with streaming enabled, so partial progress is streamed back: tokens are sent as data-only server-sent events as they become available, and the stream is terminated by a data: [DONE] message.
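
A streaming sketch, assuming this fork keeps the upstream go-openai convention that Recv returns io.EOF once the data: [DONE] message arrives:

func streamReply(ctx context.Context, c *chat.Client, req chat.ChatCompletionRequest) error {
	stream, err := c.CreateChatCompletionStream(ctx, req)
	if err != nil {
		return err
	}
	defer stream.Close()

	for {
		resp, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			return nil // server sent data: [DONE]
		}
		if err != nil {
			return err // e.g. ErrTooManyEmptyStreamMessages
		}
		if len(resp.Choices) > 0 {
			fmt.Print(resp.Choices[0].Delta.Content)
		}
	}
}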

type ClientConfig

type ClientConfig struct {
	BaseURL    string
	HTTPClient *http.Client

	EmptyMessagesLimit uint
}

ClientConfig is a configuration of a client.

func DefaultConfig

func DefaultConfig(baseURL string) ClientConfig

func (ClientConfig) String

func (ClientConfig) String() string
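
A configuration sketch: start from DefaultConfig, override the HTTP client and (presumably) the number of empty stream messages tolerated before ErrTooManyEmptyStreamMessages, then build the client with NewClientWithConfig.

func newTunedClient(baseURL string) *chat.Client {
	cfg := chat.DefaultConfig(baseURL)
	cfg.HTTPClient = &http.Client{Timeout: 60 * time.Second}
	cfg.EmptyMessagesLimit = 300
	return chat.NewClientWithConfig(cfg)
}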

type ErrorResponse

type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}

type FinishReason

type FinishReason string
const (
	FinishReasonStop          FinishReason = "stop"
	FinishReasonLength        FinishReason = "length"
	FinishReasonFunctionCall  FinishReason = "function_call"
	FinishReasonContentFilter FinishReason = "content_filter"
	FinishReasonNull          FinishReason = "null"
)

func (FinishReason) MarshalJSON

func (r FinishReason) MarshalJSON() ([]byte, error)

type FunctionCall

type FunctionCall struct {
	Name string `json:"name,omitempty"`
	// call function with arguments in JSON format
	Arguments string `json:"arguments,omitempty"`
}

type FunctionDefine deprecated

type FunctionDefine = FunctionDefinition

Deprecated: use FunctionDefinition instead.

type FunctionDefinition

type FunctionDefinition struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	// Parameters is an object describing the function.
	// You can pass json.RawMessage to describe the schema,
	// or you can pass in a struct which serializes to the proper JSON schema.
	// The jsonschema package is provided for convenience, but you should
	// consider another specialized library if you require more complex schemas.
	Parameters any `json:"parameters"`
}
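
A function-calling sketch. Parameters is supplied here as a json.RawMessage JSON schema (a struct that serializes to a schema works too, per the comment above); the function name and schema are illustrative, and the model's FunctionCall.Arguments string is decoded with encoding/json.

func weatherRequest(query string) chat.ChatCompletionRequest {
	schema := json.RawMessage(`{
		"type": "object",
		"properties": {
			"location": {"type": "string", "description": "City name"}
		},
		"required": ["location"]
	}`)
	return chat.ChatCompletionRequest{
		Model: "gpt-3.5-turbo", // placeholder model name
		Messages: []chat.ChatCompletionMessage{
			{Role: chat.ChatMessageRoleUser, Content: query},
		},
		Functions: []chat.FunctionDefinition{{
			Name:        "get_weather", // illustrative name
			Description: "Get the current weather for a location",
			Parameters:  schema,
		}},
	}
}

func decodeWeatherArgs(msg chat.ChatCompletionMessage) (string, error) {
	if msg.FunctionCall == nil {
		return "", nil
	}
	var args struct {
		Location string `json:"location"`
	}
	// Arguments is a JSON string produced by the model.
	err := json.Unmarshal([]byte(msg.FunctionCall.Arguments), &args)
	return args.Location, err
}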

type InnerError

type InnerError struct {
	Code string `json:"code,omitempty"`
}

InnerError carries Azure content-filtering details. Only valid for Azure OpenAI Service.

type PromptAnnotation

type PromptAnnotation struct {
	PromptIndex int `json:"prompt_index,omitempty"`
}

type RequestError

type RequestError struct {
	HTTPStatusCode int
	Err            error
}

RequestError provides information about generic request errors.

func (*RequestError) Error

func (e *RequestError) Error() string

func (*RequestError) Unwrap

func (e *RequestError) Unwrap() error

type Usage

type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

Usage represents the total token usage per request to OpenAI.

Directories

Path	Synopsis
jsonschema	Package jsonschema provides very simple functionality for representing a JSON schema as a (nested) struct.
