Documentation ¶
Index ¶
Constants ¶
const MistralURL = "https://api.mistral.ai/v1/chat/completions"
Variables ¶
var MINSTRAL_DEFAULT = Mistral{
	Model:       "mistral-large-latest",
	Temperature: 0.7,
	TopP:        1.0,
	Url:         MistralURL,
	MaxTokens:   100000,
}
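A minimal sketch of starting from the package default and overriding individual fields. Note that Mistral contains unexported fields (see the type below), so the package may still require additional setup, such as an API key, through its own constructors or setters before the value is usable.

// Copy the package default and override selected exported fields.
// Any unexported fields (for example credentials) are not covered here.
cfg := MINSTRAL_DEFAULT
cfg.Temperature = 0.2
cfg.MaxTokens = 4096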
Functions ¶
This section is empty.
Types ¶
type Mistral ¶
type Mistral struct {
	Model       string  `json:"model"`
	Url         string  `json:"url"`
	TopP        float64 `json:"top_p"`
	Temperature float64 `json:"temperature"`
	SafePrompt  bool    `json:"safe_prompt"`
	MaxTokens   int     `json:"max_tokens"`
	RandomSeed  int     `json:"random_seed"`
	// contains filtered or unexported fields
}
func (*Mistral) StreamCompletions ¶
type MistralTool ¶
type MistralTool struct {
	Description string            `json:"description"`
	Name        string            `json:"name"`
	Parameters  tools.InputSchema `json:"parameters"`
}
type MistralToolSuper ¶
type MistralToolSuper struct {
	Function MistralTool `json:"function"`
	Type     string      `json:"type"`
}
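A short sketch of wrapping a tool definition for the Tools field of Request. The "function" value for Type follows the Mistral chat API's tool format but is an assumption here, and schema stands in for a tools.InputSchema value built according to the tools package.

// Wrap a tool definition for use in Request.Tools.
var schema tools.InputSchema // populated elsewhere via the tools package

tool := MistralToolSuper{
	Type: "function", // assumed tool type expected by the Mistral API
	Function: MistralTool{
		Name:        "get_weather",
		Description: "Return the current weather for a given city",
		Parameters:  schema,
	},
}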
type Request ¶
type Request struct {
	MaxTokens   int                `json:"max_tokens,omitempty"`
	Messages    []models.Message   `json:"messages,omitempty"`
	Model       string             `json:"model,omitempty"`
	RandomSeed  int                `json:"random_seed,omitempty"`
	SafePrompt  bool               `json:"safe_prompt,omitempty"`
	Stream      bool               `json:"stream,omitempty"`
	Temperature float64            `json:"temperature,omitempty"`
	ToolChoice  string             `json:"tool_choice,omitempty"`
	Tools       []MistralToolSuper `json:"tools,omitempty"`
	TopP        float64            `json:"top_p,omitempty"`
}
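A minimal sketch of building a Request by hand and posting it to MistralURL with the standard library (bytes, encoding/json, log, net/http, os). The field names of models.Message (Role, Content) and the environment variable holding the API key are assumptions, not taken from this documentation; the package's StreamCompletions method is likely the intended entry point in practice.

// Sketch: marshal a Request and POST it to the chat completions endpoint.
// models.Message is assumed to carry Role/Content fields; check the
// models package for its actual definition.
req := Request{
	Model:       "mistral-large-latest",
	MaxTokens:   1024,
	Temperature: 0.7,
	Messages: []models.Message{
		{Role: "user", Content: "Summarise this repository in one sentence."},
	},
}

body, err := json.Marshal(req)
if err != nil {
	log.Fatal(err)
}

httpReq, err := http.NewRequest(http.MethodPost, MistralURL, bytes.NewReader(body))
if err != nil {
	log.Fatal(err)
}
httpReq.Header.Set("Content-Type", "application/json")
httpReq.Header.Set("Authorization", "Bearer "+os.Getenv("MISTRAL_API_KEY")) // key location is an assumption

resp, err := http.DefaultClient.Do(httpReq)
if err != nil {
	log.Fatal(err)
}
defer resp.Body.Close()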