Documentation ¶
Index ¶
- Constants
- func MaxQueryTokens(model string) int
- func TokenEstimator(msg GoGPTMessage, model string) int
- type EmbeddingData
- type GoGPTChat
- type GoGPTChoice
- type GoGPTEmbeddings
- type GoGPTEmbeddingsRequest
- type GoGPTError
- type GoGPTFunction
- type GoGPTFunctionCall
- type GoGPTMessage
- type GoGPTQuery
- type GoGPTResponse
- type GoGPTUsage
Constants ¶
View Source
const ( API_ENDPOINT = "https://api.openai.com/v1/chat/completions" EMBEDDINGS_ENDPOINT = "https://api.openai.com/v1/embeddings" MODEL_35_TURBO = "gpt-3.5-turbo-1106" MODEL_4_TURBO = "gpt-4-1106-preview" MODEL_4 = "gpt-4" MODEL_EMBEDDING_ADA = "text-embedding-ada-002" ROLE_SYSTEM = "system" ROLE_USER = "user" ROLE_ASSISTANT = "assistant" ROLE_FUNCTION = "function" RETRIES = 3 )
View Source
const (
// Since token estimates are inexact, how much of a buffer should we leave?
BUFF_MARGIN = 48
)
Variables ¶
This section is empty.
Functions ¶
func MaxQueryTokens ¶ added in v0.0.2
func TokenEstimator ¶ added in v0.0.2
func TokenEstimator(msg GoGPTMessage, model string) int
This returns an estimate of the number of tokens in the message for the given model.
Types ¶
type EmbeddingData ¶ added in v0.0.5
type GoGPTChat ¶ added in v0.0.2
type GoGPTChat struct { Query *GoGPTQuery Summary string MessageQueue []GoGPTMessage // contains filtered or unexported fields }
func NewGoGPTChat ¶ added in v0.0.2
func (*GoGPTChat) AddMessage ¶ added in v0.0.2
A convenience function for method chaining.
func (*GoGPTChat) Generate ¶ added in v0.0.2
func (g *GoGPTChat) Generate() (*GoGPTResponse, error)
A function that encapsulates the query generation method and handles summarization.
type GoGPTChoice ¶
type GoGPTChoice struct { Index int `json:"index"` Message GoGPTMessage `json:"message"` FinishReason string `json:"finish_reason"` }
type GoGPTEmbeddings ¶ added in v0.0.5
type GoGPTEmbeddings struct { Model string `json:"model"` Object string `json:"object"` Data []EmbeddingData `json:"data"` Usage GoGPTUsage `json:"usage"` }
func GetEmbedding ¶ added in v0.0.5
func GetEmbedding(input string, key string) (*GoGPTEmbeddings, error)
type GoGPTEmbeddingsRequest ¶ added in v0.0.5
type GoGPTError ¶
type GoGPTFunction ¶
type GoGPTFunction struct { Name string `json:"name"` Description string `json:"description,omitempty"` Parameters *jsonschema.Schema `json:"parameters"` }
type GoGPTFunctionCall ¶
type GoGPTMessage ¶
type GoGPTMessage struct { Role string `json:"role"` Content string `json:"content"` Name string `json:"name,omitempty"` FunctionCall *GoGPTFunctionCall `json:"function_call,omitempty"` }
type GoGPTQuery ¶
type GoGPTQuery struct { Model string `json:"model"` Messages []GoGPTMessage `json:"messages"` Functions []GoGPTFunction `json:"functions,omitempty"` FunctionCall string `json:"function_call,omitempty"` Temperature float32 `json:"temperature,omitempty"` TopP float32 `json:"top_p,omitempty"` N int `json:"n,omitempty"` Stream bool `json:"stream,omitempty"` Stop string `json:"stop,omitempty"` MaxTokens int `json:"max_tokens,omitempty"` PresencePenalty float32 `json:"presence_penalty,omitempty"` LogitBias map[string]float32 `json:"logit_bias,omitempty"` User string `json:"user,omitempty"` Key string `json:"-"` OrgName string `json:"-"` OrgId string `json:"-"` Endpoint string `json:"-"` Timeout time.Duration `json:"-"` }
func NewGoGPTQuery ¶
func NewGoGPTQuery(key string) *GoGPTQuery
func (*GoGPTQuery) AddFunction ¶
func (g *GoGPTQuery) AddFunction(name string, desc string, obj interface{}) (*GoGPTQuery, error)
func (*GoGPTQuery) AddMessage ¶
func (g *GoGPTQuery) AddMessage(role string, name string, content string) *GoGPTQuery
func (*GoGPTQuery) Generate ¶
func (g *GoGPTQuery) Generate() (*GoGPTResponse, error)
type GoGPTResponse ¶
type GoGPTResponse struct { Error *GoGPTError `json:"error,omitempty"` Id string `json:"id"` Object string `json:"object"` Created int32 `json:"created"` Model string `json:"model"` Choices []GoGPTChoice `json:"choices"` Usage GoGPTUsage `json:"usage"` }
type GoGPTUsage ¶
Click to show internal directories.
Click to hide internal directories.