Documentation ¶
Index ¶
- type Answer
- type Choice
- type Delta
- type Embedding
- type Function
- type FunctionTool
- type GenAnswer
- type GenQuery
- type LLM
- type Message
- type MessageRecord
- type ModelInformation
- type OpenAIAnswer
- type OpenAIEmbeddingResponse
- type OpenAIMessage
- type OpenAIQuery
- type OpenAIQuery4Embedding
- type Options
- type Parameters
- type Property
- type PullResult
- type Query
- type Query4Embedding
- type Tool
- type Usage
- type VectorRecord
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Answer ¶
type Answer struct {
    Model     string    `json:"model"`
    Message   Message   `json:"message"` // For Chat Completion
    Done      bool      `json:"done"`
    CreatedAt time.Time `json:"created_at"`
    TotalDuration      int64 `json:"total_duration"`
    LoadDuration       int   `json:"load_duration"`
    PromptEvalCount    int   `json:"prompt_eval_count"`
    PromptEvalDuration int   `json:"prompt_eval_duration"`
    EvalCount          int   `json:"eval_count"`
    EvalDuration       int64 `json:"eval_duration"`
}
func (*Answer) ToJsonString ¶ added in v0.1.0
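As an illustration only (not part of the package docs): a minimal sketch of how the json tags above map an Ollama chat response onto Answer. The import name llm and its path are assumptions; adjust them to your module.

package main

import (
    "encoding/json"
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path of this package
)

func main() {
    // Hypothetical response body, shaped like Ollama's /api/chat output.
    raw := `{"model":"tinyllama","message":{"role":"assistant","content":"Hello!"},"done":true}`

    var answer llm.Answer
    if err := json.Unmarshal([]byte(raw), &answer); err != nil {
        panic(err)
    }
    fmt.Println(answer.Model, "->", answer.Message.Content, "done:", answer.Done)
}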
type Choice ¶ added in v0.1.7
type Choice struct {
    Index        int           `json:"index,omitempty"`
    Message      OpenAIMessage `json:"message,omitempty"`
    Logprobs     *string       `json:"logprobs,omitempty"` // Assuming logprobs can be null
    FinishReason string        `json:"finish_reason,omitempty"`
    Delta        Delta         `json:"delta,omitempty"`
}
type Function ¶ added in v0.0.6
type Function struct {
    Name        string     `json:"name"`
    Description string     `json:"description"`
    Parameters  Parameters `json:"parameters"`
}
type FunctionTool ¶ added in v0.1.6
type FunctionTool struct {
    Name      string                 `json:"name"`
    Arguments map[string]interface{} `json:"arguments"` // used for the ToolCalls list
}
func (*FunctionTool) ToJSONString ¶ added in v0.1.6
func (ft *FunctionTool) ToJSONString() (string, error)
type GenAnswer ¶ added in v0.1.7
type GenAnswer struct {
    Model    string `json:"model"`
    Done     bool   `json:"done"`
    Response string `json:"response"` // For "Simple" Completion
    Context  []int  `json:"context"`  // For "Simple" Completion
    CreatedAt time.Time `json:"created_at"`
    TotalDuration      int64 `json:"total_duration"`
    LoadDuration       int   `json:"load_duration"`
    PromptEvalCount    int   `json:"prompt_eval_count"`
    PromptEvalDuration int   `json:"prompt_eval_duration"`
    EvalCount          int   `json:"eval_count"`
    EvalDuration       int64 `json:"eval_duration"`
}
func (*GenAnswer) ToJsonString ¶ added in v0.1.7
type GenQuery ¶ added in v0.1.7
type GenQuery struct {
    Model   string  `json:"model"`
    Options Options `json:"options"`
    Stream  bool    `json:"stream"`
    Prompt  string  `json:"prompt"`  // For "Simple" Completion
    Context []int   `json:"context"` // For "Simple" Completion
    //Tools []Tool `json:"tools"`
    Suffix    string `json:"suffix,omitempty"`
    Format    string `json:"format,omitempty"` // https://github.com/ollama/ollama/blob/main/docs/api.md#request-json-mode
    KeepAlive bool   `json:"keep_alive,omitempty"`
    Raw       bool   `json:"raw,omitempty"`
    System    string `json:"system,omitempty"`
    Template  string `json:"template,omitempty"`
    TokenHeaderName  string
    TokenHeaderValue string
}
https://github.com/ollama/ollama/blob/main/docs/api.md#parameters
func (*GenQuery) ToJsonString ¶ added in v0.1.7
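For illustration, a sketch of building a GenQuery for a "simple" completion and inspecting the request body with encoding/json; the import name llm and its path are assumptions, and the resulting payload is what you would POST to Ollama's /api/generate endpoint.

package main

import (
    "encoding/json"
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    query := llm.GenQuery{
        Model:   "tinyllama",
        Prompt:  "Explain what a goroutine is in one sentence.",
        Stream:  false,
        Options: llm.Options{Seed: 42, Temperature: 0.2},
    }

    // This is the body you would POST to Ollama's /api/generate endpoint.
    payload, err := json.Marshal(query)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(payload))
}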
type Message ¶
type Message struct {
    Role      string `json:"role"`
    Content   string `json:"content"`
    ToolCalls []struct {
        Function FunctionTool `json:"function"`
    } `json:"tool_calls"`
}
func (*Message) FirstToolCallToJSONString ¶ added in v0.1.6
func (*Message) ToolCallsToJSONString ¶ added in v0.0.7
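A small, hypothetical sketch of walking a Message's ToolCalls by hand with FunctionTool.ToJSONString (whose signature is documented above); FirstToolCallToJSONString and ToolCallsToJSONString presumably wrap this kind of loop, but their exact signatures are not shown here. The import name llm is an assumption.

package main

import (
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    // Hypothetical assistant message carrying one tool call.
    msg := llm.Message{
        Role: "assistant",
        ToolCalls: []struct {
            Function llm.FunctionTool `json:"function"`
        }{
            {Function: llm.FunctionTool{
                Name:      "hello",
                Arguments: map[string]interface{}{"name": "Bob"},
            }},
        },
    }

    for _, call := range msg.ToolCalls {
        jsonStr, err := call.Function.ToJSONString()
        if err != nil {
            panic(err)
        }
        fmt.Println(call.Function.Name, "->", jsonStr)
    }
}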
type MessageRecord ¶ added in v0.0.3
type ModelInformation ¶ added in v0.0.7
type ModelInformation struct {
    Modelfile  string `json:"modelfile"`
    Parameters string `json:"parameters"`
    Template   string `json:"template"`
    Details    struct {
        Format            string   `json:"format"`
        Family            string   `json:"family"`
        Families          []string `json:"families"`
        ParameterSize     string   `json:"parameter_size"`
        QuantizationLevel string   `json:"quantization_level"`
    } `json:"details"`
}
func ShowModelInformation ¶ added in v0.0.7
func ShowModelInformation(url, model string) (ModelInformation, int, error)
ShowModelInformation retrieves information about a model from the specified URL.
Parameters:
  - url: the base URL of the API.
  - model: the name of the model to retrieve information for.

Returns:
  - ModelInformation: the information about the model.
  - int: the HTTP status code of the response.
  - error: an error if the request fails.
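For example (a sketch assuming a local Ollama listening on http://localhost:11434 and the package imported as llm):

package main

import (
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    info, status, err := llm.ShowModelInformation("http://localhost:11434", "tinyllama")
    if err != nil {
        panic(err)
    }
    fmt.Println("HTTP status:", status)
    fmt.Println("family:", info.Details.Family, "quantization:", info.Details.QuantizationLevel)
}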
type OpenAIAnswer ¶ added in v0.1.7
type OpenAIAnswer struct {
    ID                string   `json:"id"`
    Object            string   `json:"object"`
    Created           int64    `json:"created"`
    Model             string   `json:"model"`
    SystemFingerprint string   `json:"system_fingerprint"`
    Choices           []Choice `json:"choices"`
    Usage             Usage    `json:"usage"`
}
func (*OpenAIAnswer) ToJsonString ¶ added in v0.1.7
func (answer *OpenAIAnswer) ToJsonString() string
type OpenAIEmbeddingResponse ¶ added in v0.1.8
type OpenAIMessage ¶ added in v0.1.7
type OpenAIQuery ¶ added in v0.1.7
type OpenAIQuery struct {
    Model    string    `json:"model"`
    Messages []Message `json:"messages"`
    //Options OpenAIOptions `json:"options"`
    //--------------------------------------------
    Stop             []string               `json:"stop,omitempty"`
    Seed             int                    `json:"seed,omitempty"`
    Temperature      float64                `json:"temperature,omitempty"`
    TopP             float64                `json:"top_p,omitempty"`
    PresencePenalty  float64                `json:"presence_penalty,omitempty"`
    FrequencyPenalty float64                `json:"frequency_penalty,omitempty"`
    LogitBias        map[string]interface{} `json:"logit_bias,omitempty"`      // OpenAI specific
    Logprobs         bool                   `json:"logprobs,omitempty"`        // OpenAI specific
    TopLogprobs      int                    `json:"top_logprobs,omitempty"`    // OpenAI specific
    MaxTokens        int                    `json:"max_tokens,omitempty"`      // OpenAI specific
    N                int                    `json:"n,omitempty"`               // OpenAI specific
    Response_format  map[string]interface{} `json:"response_format,omitempty"` // OpenAI specific
    ServiceTier      string                 `json:"service_tier,omitempty"`    // OpenAI specific
    StreamOptions    map[string]interface{} `json:"stream_options,omitempty"`  // OpenAI specific
    Stream            bool   `json:"stream"`
    Tools             []Tool `json:"tools,omitempty"`               // not used right now
    ToolChoices       string `json:"tool_choices,omitempty"`        // not used right now
    ParallelToolCalls bool   `json:"parallel_tool_calls,omitempty"` // not used right now
    User              string `json:"user,omitempty"`                // not used right now
    //TokenHeaderName string
    //TokenHeaderValue string
    Verbose      bool   `json:"-"`
    OpenAIAPIKey string `json:"-"`
}
func (*OpenAIQuery) ToJsonString ¶ added in v0.1.7
func (query *OpenAIQuery) ToJsonString() string
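A hedged sketch of filling in an OpenAIQuery and inspecting its JSON payload with ToJsonString. Assuming the method marshals with encoding/json, the json:"-" tags on Verbose and OpenAIAPIKey keep those fields out of the serialized body. The import name llm is an assumption.

package main

import (
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    query := llm.OpenAIQuery{
        Model: "gpt-4o-mini",
        Messages: []llm.Message{
            {Role: "system", Content: "You are a concise assistant."},
            {Role: "user", Content: "What is a goroutine?"},
        },
        Temperature:  0.2,
        MaxTokens:    128,
        Stream:       false,
        OpenAIAPIKey: "sk-...", // kept out of the JSON body by the json:"-" tag
    }

    // Print the request body that would be sent to the API.
    fmt.Println(query.ToJsonString())
}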
type OpenAIQuery4Embedding ¶ added in v0.1.8
type OpenAIQuery4Embedding struct {
    Input string `json:"input"`
    Model string `json:"model"`
    OpenAIAPIKey string `json:"-"`
}
https://platform.openai.com/docs/guides/embeddings/what-are-embeddings
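For instance (illustrative only; the llm import name and path are assumptions):

package main

import (
    "encoding/json"
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    query := llm.OpenAIQuery4Embedding{
        Input:        "The quick brown fox jumps over the lazy dog",
        Model:        "text-embedding-3-small",
        OpenAIAPIKey: "sk-...", // excluded from the JSON body by the json:"-" tag
    }
    payload, _ := json.Marshal(query)
    fmt.Println(string(payload))
}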
type Options ¶
type Options struct {
    RepeatLastN      int      `json:"repeat_last_n,omitempty"`
    Temperature      float64  `json:"temperature,omitempty"`
    Seed             int      `json:"seed,omitempty"`
    RepeatPenalty    float64  `json:"repeat_penalty,omitempty"`
    Stop             []string `json:"stop,omitempty"`
    NumKeep          int      `json:"num_keep,omitempty"`
    NumPredict       int      `json:"num_predict,omitempty"`
    TopK             int      `json:"top_k,omitempty"`
    TopP             float64  `json:"top_p,omitempty"`
    TFSZ             float64  `json:"tfs_z,omitempty"`
    TypicalP         float64  `json:"typical_p,omitempty"`
    PresencePenalty  float64  `json:"presence_penalty,omitempty"`
    FrequencyPenalty float64  `json:"frequency_penalty,omitempty"`
    Mirostat         int      `json:"mirostat,omitempty"`
    MirostatTau      float64  `json:"mirostat_tau,omitempty"`
    MirostatEta      float64  `json:"mirostat_eta,omitempty"`
    PenalizeNewline  bool     `json:"penalize_newline,omitempty"`
    Verbose bool
}
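The field names mirror Ollama's runtime parameters (see the link under GenQuery). An illustrative sketch, assuming the llm import name; note that every tagged field carries omitempty, so zero values are dropped from the JSON and Ollama falls back to its defaults.

package main

import (
    "encoding/json"
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    options := llm.Options{
        Temperature:   0.0, // zero value: dropped by omitempty, so Ollama uses its default
        Seed:          42,
        RepeatLastN:   64,
        RepeatPenalty: 1.1,
        NumPredict:    256,
    }
    payload, _ := json.Marshal(options)
    // "temperature" is absent from the output because omitempty skips zero values.
    fmt.Println(string(payload))
}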
type Parameters ¶ added in v0.0.6
type PullResult ¶ added in v0.0.7
type PullResult struct {
    Status string `json:"status"`
}
func PullModel ¶ added in v0.0.7
func PullModel(url, model string) (PullResult, int, error)
PullModel sends a POST request to the specified URL to pull a model with the given name.
Parameters:
  - url: The URL to send the request to.
  - model: The name of the model to pull.

Returns:
  - PullResult: The result of the pull operation.
  - int: The HTTP status code of the response.
  - error: An error if the request fails.
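For example (a sketch assuming a local Ollama and the llm import name):

package main

import (
    "fmt"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    result, status, err := llm.PullModel("http://localhost:11434", "tinyllama")
    if err != nil {
        panic(err)
    }
    fmt.Println("HTTP status:", status, "pull status:", result.Status)
}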
type Query ¶
type Query struct {
    Model    string    `json:"model"`
    Messages []Message `json:"messages"` // For Chat Completion
    Options  Options   `json:"options"`
    Stream   bool      `json:"stream"`
    Tools    []Tool    `json:"tools"`
    Format    string `json:"format,omitempty"` // https://github.com/ollama/ollama/blob/main/docs/api.md#request-json-mode
    KeepAlive bool   `json:"keep_alive,omitempty"`
    Raw       bool   `json:"raw,omitempty"`
    System    string `json:"system,omitempty"`
    Template  string `json:"template,omitempty"`
    TokenHeaderName  string
    TokenHeaderValue string
}
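To close, an end-to-end sketch (illustrative, not the package's own helper): building a chat Query, posting it to Ollama's /api/chat endpoint with net/http, and decoding the reply into the Answer type documented above. The llm import name and the local Ollama URL are assumptions.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"

    "your/module/llm" // assumption: adjust to the real import path
)

func main() {
    query := llm.Query{
        Model: "tinyllama",
        Messages: []llm.Message{
            {Role: "system", Content: "You answer in one short sentence."},
            {Role: "user", Content: "What is Ollama?"},
        },
        Options: llm.Options{Temperature: 0.2},
        Stream:  false,
    }

    payload, err := json.Marshal(query)
    if err != nil {
        panic(err)
    }

    resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewReader(payload))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var answer llm.Answer
    if err := json.NewDecoder(resp.Body).Decode(&answer); err != nil {
        panic(err)
    }
    fmt.Println(answer.Message.Content)
}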