Documentation ¶
Index ¶
- Variables
- func FilterSlice[I any](slice []I, f func(I) bool) []I
- func MapSlice[I, O any](slice []I, f func(I) O) []O
- func RequestToCurl(req *http.Request) string
- func SliceContains[I comparable](slice []I, value I) bool
- type BackoffRoundTripper
- type Gpt
- func (g *Gpt) CancelBatch(ctx context.Context, batchId string) goerror.TraceableError
- func (g *Gpt) DeleteFile(ctx context.Context, fileId string) goerror.TraceableError
- func (g *Gpt) NewBatchSession() *GptBatchSession
- func (g *Gpt) RetrieveBatch(ctx context.Context, batchId string) (GptBatchResponse, goerror.TraceableError)
- func (g *Gpt) RetrieveBatches(ctx context.Context, stati ...GptBatchStatus) ([]GptBatchResponse, goerror.TraceableError)
- func (g *Gpt) RetrieveFile(ctx context.Context, fileId string) (GptFileResponse, goerror.TraceableError)
- func (g *Gpt) RetrieveFileContent(ctx context.Context, fileId string) ([]byte, goerror.TraceableError)
- func (g *Gpt) RetrieveFiles(ctx context.Context) ([]GptFileResponse, goerror.TraceableError)
- type GptBatchResponse
- type GptBatchSession
- func (s *GptBatchSession) AddToBatch(customRequestId, systemPrompt, userPrompt string, options ...RequestOption) goerror.TraceableError
- func (s *GptBatchSession) CreateBatch(ctx context.Context, batchName string) (string, goerror.TraceableError)
- func (s *GptBatchSession) RetrieveBatchedRequest(ctx context.Context, batchId string, lineIdx int) ([]byte, goerror.TraceableError)
- type GptBatchStatus
- type GptFilePurpose
- type GptFileResponse
- type GptFileStatus
- type GzipRoundTripper
- type HeaderRoundTripper
- type Option
- type RequestOption
Constants ¶
This section is empty.
Variables ¶
var ( ErrRequestBatch = goerror.New("gpt:retrieve_batch", "Failed to retrieve batch") ErrCancelBatch = goerror.New("gpt:cancel_batch", "Failed to cancel batch") ErrCreateBatch = goerror.New("gpt:create_batch", "Failed to create batch") )
var ( ErrBatchFailed = goerror.New("gpt:batch_failed", "Batch failed") ErrBatchNotCompleted = goerror.New("gpt:batch_not_completed", "Batch not completed yet") ErrSerializeBatchRequest = goerror.New("gpt:serialize_batch_request", "Failed to serialize batch request") ErrExceedsFileLimit = goerror.New("gpt:exceeds_file_limit", "Exceeds file limit") ErrParseBatchLine = goerror.New("gpt:parse_batch_line", "failed to parse batch line") )
var ( ErrRequestFile = goerror.New("gpt:request_file", "failed to request files") ErrDeleteFile = goerror.New("gpt:delete_file", "Failed to delete file") ErrCreateFile = goerror.New("gpt:create_file", "Failed to create file") )
var ErrGptAsk = goerror.New("gpt_ask", "Error while asking GPT")
var ErrInvalidContent = goerror.New("invalid_content", "Invalid content")
var ErrTooManyRetries = goerror.New("too_many_retries", "too many retries")
var WithCacheDir = func(cacheDir string) Option { return func(g *Gpt) { g.cacheDir = cacheDir } }
var WithJsonSchema = func(v any) RequestOption { return func(a *appliedRequestOption) error { schema, err := getJsonSchema(v) if err != nil { return goerror.New("gpt:json_schema", "failed to get json schema").WithError(err).WithOrigin() } a.responseFormat.Type = "json_schema" a.responseFormat.JsonSchema = &gptJsonSchema{ Name: getStructName(v), Strict: true, Schema: schema, } return nil } }
WithJsonSchema adds a JSON schema to the request as response format. v must be a struct or pointer to a struct.
var WithModel = func(model string) Option { return func(g *Gpt) { if model != "" { g.model = model } } }
Functions ¶
func FilterSlice ¶
func RequestToCurl ¶
func SliceContains ¶
func SliceContains[I comparable](slice []I, value I) bool
Types ¶
type BackoffRoundTripper ¶
type BackoffRoundTripper struct {
// contains filtered or unexported fields
}
BackoffRoundTripper is a custom http.RoundTripper that limits the number of concurrent requests and applies backoff when receiving a 429 response.
func NewBackoffRoundTripper ¶
func NewBackoffRoundTripper(transport http.RoundTripper) *BackoffRoundTripper
type Gpt ¶
type Gpt struct {
// contains filtered or unexported fields
}
func (*Gpt) CancelBatch ¶
func (*Gpt) DeleteFile ¶
func (*Gpt) NewBatchSession ¶
func (g *Gpt) NewBatchSession() *GptBatchSession
func (*Gpt) RetrieveBatch ¶
func (g *Gpt) RetrieveBatch(ctx context.Context, batchId string) (GptBatchResponse, goerror.TraceableError)
func (*Gpt) RetrieveBatches ¶
func (g *Gpt) RetrieveBatches(ctx context.Context, stati ...GptBatchStatus) ([]GptBatchResponse, goerror.TraceableError)
func (*Gpt) RetrieveFile ¶
func (g *Gpt) RetrieveFile(ctx context.Context, fileId string) (GptFileResponse, goerror.TraceableError)
func (*Gpt) RetrieveFileContent ¶
func (*Gpt) RetrieveFiles ¶
func (g *Gpt) RetrieveFiles(ctx context.Context) ([]GptFileResponse, goerror.TraceableError)
type GptBatchResponse ¶
type GptBatchResponse struct { ID string `json:"id"` Object string `json:"object"` Endpoint string `json:"endpoint"` Errors *struct { Object string `json:"object"` Data []gptBatchError `json:"data"` } `json:"errors"` // Using *string to allow null value InputFileID string `json:"input_file_id"` CompletionWindow string `json:"completion_window"` Status GptBatchStatus `json:"status"` OutputFileID *string `json:"output_file_id"` ErrorFileID *string `json:"error_file_id"` CreatedAt int64 `json:"created_at"` InProgressAt *int64 `json:"in_progress_at"` ExpiresAt *int64 `json:"expires_at"` FinalizingAt *int64 `json:"finalizing_at"` CompletedAt *int64 `json:"completed_at"` FailedAt *int64 `json:"failed_at"` // Using *int64 to allow null value ExpiredAt *int64 `json:"expired_at"` CancellingAt *int64 `json:"cancelling_at"` CancelledAt *int64 `json:"cancelled_at"` RequestCounts struct { Total int `json:"total"` Completed int `json:"completed"` Failed int `json:"failed"` } `json:"request_counts"` Metadata struct { CustomerID string `json:"customer_id"` BatchDescription string `json:"batch_description"` } `json:"metadata"` }
type GptBatchSession ¶
type GptBatchSession struct {
// contains filtered or unexported fields
}
func (*GptBatchSession) AddToBatch ¶
func (s *GptBatchSession) AddToBatch(customRequestId, systemPrompt, userPrompt string, options ...RequestOption) goerror.TraceableError
AddToBatch adds a request to the current batch data. The customRequestId is used to identify the request in the batch. It should have an application-wide prefix to avoid collisions with other applications that batch data. Further, the customRequestId should be unique; a timestamp is a good choice. The systemPrompt should describe the task and the userPrompt should contain the input data. The caller is responsible for storing the lineIdx of the request. If AddToBatch is called for the third time, the lineIdx of the request within the current batch is 2. If the batch data exceeds the 512MB limit, ErrExceedsFileLimit is returned, signaling that s.CreateBatch() should be called to flush the current batch data.
func (*GptBatchSession) CreateBatch ¶
func (s *GptBatchSession) CreateBatch(ctx context.Context, batchName string) (string, goerror.TraceableError)
CreateBatch creates a new batch with the current batch data. The batchName is used to identify the batch. It is the prefix for the file created and the batch created. The batchName should be unique to this application, to differentiate between batches of different applications. CreateBatch will not clear its data. Create a new session to start a new batch.
func (*GptBatchSession) RetrieveBatchedRequest ¶
func (s *GptBatchSession) RetrieveBatchedRequest(ctx context.Context, batchId string, lineIdx int) ([]byte, goerror.TraceableError)
RetrieveBatchedRequest retrieves a single request from a batch. The batchId is the id of the batch to retrieve the request from. The lineIdx is the index of the request in the batch. If the batch is not completed yet, ErrBatchNotCompleted is returned, which is more of a flag indicating that the request should be retried later. If the batch failed, ErrBatchFailed is returned, which contains the error that caused the batch to fail. RetrieveBatchedRequest returns the raw []byte of the answer GPT gave (response.Body.Choices[0].Message.Content), since it is agnostic to the response format (could be JSON, could be plain text). Sometimes GPT messes up, and a JSONL line is malformed. In this case ErrParseBatchLine is returned. If you want to see the file causing that, just add a WithCacheDir to the Gpt instance and the file will be stored in the cache directory.
type GptBatchStatus ¶
type GptBatchStatus string
const ( BatchStatusInProgress GptBatchStatus = "in_progress" BatchStatusComplete GptBatchStatus = "completed" BatchStatusFailed GptBatchStatus = "failed" BatchStatusFinalizing GptBatchStatus = "finalizing" )
type GptFilePurpose ¶
type GptFilePurpose string
const ( Assistants GptFilePurpose = "assistants" Assistants_output GptFilePurpose = "assistants_output" Batch GptFilePurpose = "batch" Batch_output GptFilePurpose = "batch_output" FineTune GptFilePurpose = "fine-tune" FineTuneResults GptFilePurpose = "fine-tune-results" Vision GptFilePurpose = "vision" )
type GptFileResponse ¶
type GptFileResponse struct { ID string `json:"id"` Object string `json:"object"` Bytes int `json:"bytes"` CreatedAt int64 `json:"created_at"` Filename string `json:"filename"` Purpose GptFilePurpose `json:"purpose"` Status *GptFileStatus `json:"status,omitempty"` }
type GptFileStatus ¶
type GptFileStatus string
const ( Uploaded GptFileStatus = "uploaded" Processed GptFileStatus = "processed" Error GptFileStatus = "error" )
type GzipRoundTripper ¶
type GzipRoundTripper struct {
Transport http.RoundTripper
}
GzipRoundTripper wraps an http.RoundTripper, adding gzip compression support.
type HeaderRoundTripper ¶
type HeaderRoundTripper struct { Transport http.RoundTripper // contains filtered or unexported fields }
HeaderRoundTripper wraps an http.RoundTripper, adding specific headers
type RequestOption ¶ added in v1.0.1
type RequestOption func(*appliedRequestOption) error