Documentation ¶
Index ¶
Constants ¶
View Source
// Prompt turn markers, the stream terminator, and the model identifiers
// accepted by the Anthropic text-completion API.
const (
	// PromptHuman and PromptAI delimit conversation turns in the prompt
	// format the completion API expects ("\n\nHuman: ... \n\nAssistant:").
	PromptHuman = "\n\nHuman:"
	PromptAI    = "\n\nAssistant:"

	// MessageDone marks the end of a streamed response.
	// NOTE(review): presumably the SSE "[DONE]" sentinel — confirm against
	// the stream-handling code.
	MessageDone = "[DONE]"

	// https://docs.anthropic.com/claude/reference/selecting-a-model
	ModelClaudeLatest        = "claude-2"          // latest model family, manually updated
	ModelClaude2             = "claude-2"          // latest major version
	ModelClaude20            = "claude-2.0"        // latest full version
	ModelClaudeInstantLatest = "claude-instant-1"  // latest instant model family, manually updated
	ModelClaudeInstant1      = "claude-instant-1"  // latest instant major version
	ModelClaudeInstant11     = "claude-instant-1.1" // latest instant full version

	// Old deprecated models, kept for compatibility.
	ModelClaudeV1_100K         = "claude-v1-100k"
	ModelClaudeV10             = "claude-v1.0"
	ModelClaudeV12             = "claude-v1.2"
	ModelClaudeV13             = "claude-v1.3"
	ModelClaudeV13_100K        = "claude-v1.3-100k"
	ModelClaudeInstantV1       = "claude-instant-v1"
	ModelClaudeInstantV1_100K  = "claude-instant-v1-100k"
	ModelClaudeInstantV10      = "claude-instant-v1.0"
	ModelClaudeInstantV11      = "claude-instant-v1.1"
	ModelClaudeInstantV11_100K = "claude-instant-v1.1-100k"
)
Variables ¶
View Source
// Package-level defaults. These are variables (not constants) so callers can
// override them, e.g. to route through a proxy or change sampling limits.
var (
	// BaseURL is the root endpoint for the Anthropic API.
	BaseURL = "https://api.anthropic.com"

	// DefaultMaxTokensToSample is the default completion length limit.
	// NOTE(review): presumably applied by NewSimpleSamplingParameters when
	// building a request — confirm against that constructor.
	DefaultMaxTokensToSample = 1200

	// DefaultStopSequences stops generation at the next human turn marker,
	// preventing the model from writing the user's side of the dialogue.
	DefaultStopSequences = []string{PromptHuman}
)
Functions ¶
func WrapPrompt ¶
Types ¶
type SamplingParameters ¶
// SamplingParameters is the JSON request body for an Anthropic completion
// call. Pointer fields are optional: when nil they are omitted from the
// payload ("omitempty") and the API's server-side defaults apply.
type SamplingParameters struct {
	// Optional sampling controls; nil means "use the API default".
	Temperature *float64 `json:"temperature,omitempty"`
	TopK        *int     `json:"top_k,omitempty"`
	TopP        *float64 `json:"top_p,omitempty"`

	// Tags attaches arbitrary string metadata to the request.
	// NOTE(review): semantics not shown here — confirm against the API docs.
	Tags map[string]string `json:"tags,omitempty"`

	// Prompt is the full prompt text, including the Human/Assistant turn
	// markers (see PromptHuman / PromptAI).
	Prompt string `json:"prompt"`

	// Model names the model to sample from, e.g. ModelClaude2.
	Model string `json:"model"`

	// StopSequences lists strings that end generation when produced
	// (see DefaultStopSequences).
	StopSequences []string `json:"stop_sequences"`

	// MaxTokensToSample caps the number of tokens generated
	// (see DefaultMaxTokensToSample).
	MaxTokensToSample int `json:"max_tokens_to_sample"`

	// Stream requests an incremental (streamed) response when true.
	Stream bool `json:"stream"`
}
func NewSimpleSamplingParameters ¶
func NewSimpleSamplingParameters(prompt string, model string) *SamplingParameters
Click to show internal directories.
Click to hide internal directories.