Documentation ¶
Index ¶
Constants ¶
const DefaultAPIURL = "http://localhost:11434/api"
DefaultAPIURL is the default URL for a local Ollama API server
Variables ¶
var ( // ModelCodeLlama represents the codellama model ModelCodeLlama = types.Model{"codellama", 0, types.ModelTypeChat} // ModelDeepseekCoder represents the deepseek-coder model ModelDeepseekCoder = types.Model{"deepseek-coder", 0, types.ModelTypeChat} // ModelWizardCoder represents the wizard-coder model ModelWizardCoder = types.Model{"wizard-coder", 0, types.ModelTypeChat} // ModelPhindCodeLlama represents the phind-codellama model ModelPhindCodeLlama = types.Model{"phind-codellama", 0, types.ModelTypeChat} // ModelCodeUp represents the codeup model ModelCodeUp = types.Model{"codeup", 0, types.ModelTypeChat} // ModelStarCoder represents the starcoder model ModelStarCoder = types.Model{"starcoder", 0, types.ModelTypeChat} // ModelSQLCoder represents the sqlcoder model ModelSQLCoder = types.Model{"sqlcoder", 0, types.ModelTypeChat} // ModelStableCode represents the stablecode model ModelStableCode = types.Model{"stablecode", 0, types.ModelTypeChat} // ModelMagicoder represents the magicoder model ModelMagicoder = types.Model{"magicoder", 0, types.ModelTypeChat} // ModelCodeBooga represents the codebooga model ModelCodeBooga = types.Model{"codebooga", 0, types.ModelTypeChat} // SupportedModels is a list of all language models supported by this // backend implementation. SupportedModels = []types.Model{ ModelCodeLlama, ModelDeepseekCoder, ModelWizardCoder, ModelPhindCodeLlama, ModelCodeUp, ModelStarCoder, ModelSQLCoder, ModelStableCode, ModelMagicoder, ModelCodeBooga, } )
Functions ¶
This section is empty.
Types ¶
type Client ¶
type Client struct {
*requests.HTTPClient
}
Client is a structure used to continuously generate IaC code via Ollama
func NewClient ¶
func NewClient(opts *NewClientOptions) *Client
NewClient creates a new instance of the Client struct, with the provided input options. The Ollama API server is not contacted at this point.
func (*Client) Chat ¶
func (client *Client) Chat(model types.Model) types.Conversation
Chat initiates a conversation with an Ollama chat model. A conversation maintains context, allowing you to send further instructions to modify the output from previous requests, just like using the ChatGPT website.
func (*Client) Complete ¶
func (client *Client) Complete( ctx context.Context, model types.Model, prompt string, ) (res types.Response, err error)
Complete sends a request to Ollama's completions API using the provided model and prompt, and returns the response
func (*Client) DefaultModel ¶
DefaultModel returns the default model used by this backend.
func (*Client) ListModels ¶
ListModels returns a list of all the models supported by this backend implementation.
type Conversation ¶
type Conversation struct {
// contains filtered or unexported fields
}
Conversation is a struct used to converse with an Ollama chat model. It maintains all messages sent/received in order to maintain context just like using ChatGPT.
func (*Conversation) Send ¶
func (conv *Conversation) Send(ctx context.Context, prompt string, msgs ...types.Message) ( res types.Response, err error, )
Send sends the provided message to the API and returns a Response object. To maintain context, all previous messages (whether from you to the API or vice-versa) are sent as well, allowing you to ask the API to modify the code it already generated.
type NewClientOptions ¶
type NewClientOptions struct { // URL is the URL of the API server (including the /api path prefix). Defaults to DefaultAPIURL. URL string }
NewClientOptions is a struct containing all the parameters accepted by the NewClient constructor.