Documentation ¶
Index ¶
- Constants
- Variables
- func ChatCompletion[Messages any](api OpenAIClient, httpClient HTTPClient, body *CompletionRequest[Messages]) (*CompletionResponse, *OpenAIErr)
- func ChunkText(opts ChunkTextOpts) []string
- func ChunksSummary(client OpenAIClient, httpClient HTTPClient, relevantChunks []string, ...) (string, error)
- func CreateEmbedding[Input string | []string, Encoding []float64 | Base64](api OpenAIClient, httpClient HTTPClient, body *EmbeddingRequest[Input]) (*EmbeddingResponse[Encoding], *OpenAIErr)
- func FindMostRelevantEmbeddings(q, e [][]float64) ([]int, error)
- func ImageUrl(url string) *imageUrl
- func ImagesGenerations(api OpenAIClient, httpClient HTTPClient, body *ImagesGenerationsRequestBody) (*ImagesGenerationsResponse, *OpenAIErr)
- func Moderator[Input string | []string](api OpenAIClient, httpClient HTTPClient, body *ModerationRequest[Input]) (*ModerationResponse, *OpenAIErr)
- func NaturalImageStyle() imageStyle
- func Transcription(api OpenAIClient, httpClient HTTPClient, body *TranscriptionsRequestBody) (*TranscriptionResponse, *OpenAIErr)
- func VividImageStyle() imageStyle
- type Base64
- type Choice
- type ChunkTextOpts
- type Client
- type CompletionRequest
- type CompletionResponse
- type DefaultMessages
- type Embedding
- type EmbeddingRequest
- type EmbeddingResponse
- type Function
- type FunctionParameters
- type FunctionPropertie
- type FunctionProperties
- type HTTPClient
- type ImagesGenerationsRequestBody
- type ImagesGenerationsResponse
- type JSONErr
- type MediaMessage
- type MediaMessages
- type Message
- type ModerationRequest
- type ModerationResponse
- type OpenAIClient
- type OpenAIErr
- type SpeechRequestBody
- type Tool
- type ToolCall
- type TranscriptionResponse
- type TranscriptionsRequestBody
- type Usage
Constants ¶
const DefaultTranscriptionModel = "whisper-1"
Variables ¶
var SpeechVoices = &openaiSpeechVoices{
Alloy: alloy,
Echo: echo,
Fable: fable,
Onyx: onyx,
Nova: nova,
Shimmer: shimmer,
}
Functions ¶
func ChatCompletion ¶
func ChatCompletion[Messages any](api OpenAIClient, httpClient HTTPClient, body *CompletionRequest[Messages]) (*CompletionResponse, *OpenAIErr)
func ChunkText ¶
func ChunkText(opts ChunkTextOpts) []string
ChunkText splits the input text into chunks of the specified size.
func ChunksSummary ¶
func ChunksSummary(client OpenAIClient, httpClient HTTPClient, relevantChunks []string, query string) (string, error)
ChunksSummary returns the summary of a randomly selected relevant chunk
func CreateEmbedding ¶
func CreateEmbedding[Input string | []string, Encoding []float64 | Base64](api OpenAIClient, httpClient HTTPClient, body *EmbeddingRequest[Input]) (*EmbeddingResponse[Encoding], *OpenAIErr)
CreateEmbedding sends a request to create embeddings for the given input.
func FindMostRelevantEmbeddings ¶
func FindMostRelevantEmbeddings(q, e [][]float64) ([]int, error)
FindMostRelevantEmbeddings finds the most relevant embeddings. q is the query embedding, and e is the matrix of embeddings to search in.
func ImagesGenerations ¶
func ImagesGenerations(api OpenAIClient, httpClient HTTPClient, body *ImagesGenerationsRequestBody) (*ImagesGenerationsResponse, *OpenAIErr)
func Moderator ¶
func Moderator[Input string | []string](api OpenAIClient, httpClient HTTPClient, body *ModerationRequest[Input]) (*ModerationResponse, *OpenAIErr)
func NaturalImageStyle ¶
func NaturalImageStyle() imageStyle
NaturalImageStyle causes the model to produce more natural, less hyper-real looking images.
func Transcription ¶
func Transcription(api OpenAIClient, httpClient HTTPClient, body *TranscriptionsRequestBody) (*TranscriptionResponse, *OpenAIErr)
func VividImageStyle ¶
func VividImageStyle() imageStyle
VividImageStyle causes the model to lean towards generating hyper-real and dramatic images.
Types ¶
type Choice ¶
type Choice struct { Index int `json:"index"` Message Message[string] `json:"message"` Logprobs interface{} `json:"logprobs,omitempty"` FinishReason string `json:"finish_reason"` }
Choice represents a response choice in the conversation.
type ChunkTextOpts ¶
type CompletionRequest ¶
type CompletionRequest[T any] struct { Model string `json:"model"` Messages T `json:"messages"` ToolChoice string `json:"tool_choice,omitempty"` Tools []Tool `json:"tools,omitempty"` }
CompletionRequest represents the structure of the request sent to the OpenAI API.
type CompletionResponse ¶
type CompletionResponse struct { ID string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` Choices []Choice `json:"choices"` Usage Usage `json:"usage"` }
CompletionResponse represents the structure of the response received from the OpenAI API.
type DefaultMessages ¶
type EmbeddingRequest ¶
type EmbeddingRequest[Input string | []string] struct { // Input text to embed, encoded as a string or array of tokens. Input Input `json:"input"` // ID of the model to use. Model string `json:"model"` // The format to return the embeddings in. Can be either float or base64. Encoding string `json:"encoding_format,omitempty"` // The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models. Dimensions int `json:"dimensions,omitempty"` // A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. User string `json:"user,omitempty"` }
type EmbeddingResponse ¶
type Function ¶
type Function struct { Name string `json:"name"` Description string `json:"description"` Parameters FunctionParameters `json:"parameters"` }
Function represents a function call that can be used as a tool.
type FunctionParameters ¶
type FunctionParameters struct { Type string `json:"type"` FunctionProperties `json:"properties"` }
FunctionParameters represents the parameters of a function
type FunctionPropertie ¶
type FunctionPropertie struct { Type string `json:"type"` Description string `json:"description"` Enum []string `json:"enum,omitempty"` }
FunctionPropertie represents a property of a function.
type FunctionProperties ¶
type FunctionProperties goxios.GenericJSON[FunctionPropertie]
type HTTPClient ¶
type ImagesGenerationsResponse ¶
type ImagesGenerationsResponse struct { Created int64 `json:"created"` Data []struct { Url string `json:"url"` } `json:"data"` }
func (*ImagesGenerationsResponse) Download ¶
func (igr *ImagesGenerationsResponse) Download(httpClient HTTPClient, filePaths []string) error
type MediaMessage ¶
type MediaMessages ¶
type MediaMessages []Message[[]MediaMessage]
type Message ¶
type Message[T string | []MediaMessage] struct { Role string `json:"role"` Content T `json:"content"` ToolCalls []ToolCall `json:"tool_calls,omitempty"` }
Message represents a message in the conversation.
type ModerationRequest ¶
type ModerationResponse ¶
type ModerationResponse struct { Id string `json:"id"` Model string `json:"model"` Results []struct { Flagged bool `json:"flagged"` Categories goxios.GenericJSON[bool] `json:"categories"` } `json:"results"` CategoryScores goxios.GenericJSON[float64] `json:"category_scores"` }
type OpenAIClient ¶
type OpenAIErr ¶
type OpenAIErr struct { Err JSONErr `json:"error"` // contains filtered or unexported fields }
func TextToSpeech ¶
func TextToSpeech(api OpenAIClient, httpClient HTTPClient, body *SpeechRequestBody) (io.ReadCloser, *OpenAIErr)
type SpeechRequestBody ¶
type SpeechRequestBody struct { Model string `json:"model"` // The model for speech synthesis. Input string `json:"input"` // The input text for synthesis. Voice string `json:"voice"` // The voice to be used for synthesis. }
SpeechRequestBody represents the request body for the speech API.
type TranscriptionResponse ¶
type TranscriptionResponse struct {
Text string `json:"text"`
}
type TranscriptionsRequestBody ¶
type TranscriptionsRequestBody struct {
Model, Filename, AudioFilePath string
}