Documentation ¶
Overview ¶
Package githubmodels is the GitHub Models LLM provider; see https://github.com/marketplace/models
Index ¶
- type Provider
- func (p *Provider) GetChatCompletions(ctx context.Context, req openai.ChatCompletionRequest, _ metadata.M) (openai.ChatCompletionResponse, error)
- func (p *Provider) GetChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest, _ metadata.M) (provider.ResponseRecver, error)
- func (p *Provider) Name() string
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Provider ¶
type Provider struct {
	// APIKey is the API key for GitHub Models
	APIKey string
	// Model is the model for GitHub Models, see https://github.com/marketplace/models
	// e.g. "Meta-Llama-3.1-405B-Instruct", "Mistral-large-2407", "gpt-4o"
	Model string
	// contains filtered or unexported fields
}
Provider is the provider for GitHub Models.
func NewProvider ¶
NewProvider creates a new Provider for GitHub Models.
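A minimal construction sketch. It assumes the package import path is github.com/yomorun/yomo/pkg/bridge/ai/provider/githubmodels and that NewProvider takes the API key and model name as string arguments (the constructor signature is not shown on this page); the token and model values are placeholders:

package main

import (
	"fmt"

	"github.com/yomorun/yomo/pkg/bridge/ai/provider/githubmodels" // assumed import path
)

func main() {
	// Assumed signature: NewProvider(apiKey, model string) *Provider.
	// Use a GitHub token with access to GitHub Models; both values are placeholders.
	p := githubmodels.NewProvider("<github-token>", "gpt-4o")
	fmt.Println(p.Name())
}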
func (*Provider) GetChatCompletions ¶
func (p *Provider) GetChatCompletions(ctx context.Context, req openai.ChatCompletionRequest, _ metadata.M) (openai.ChatCompletionResponse, error)
GetChatCompletions implements ai.LLMProvider.
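A hedged example of a blocking completion call, under the same assumptions as the constructor sketch above and assuming the openai types come from github.com/sashabaranov/go-openai. Since the metadata parameter is ignored by this method (it is declared as _ metadata.M), nil is passed:

package main

import (
	"context"
	"fmt"
	"log"

	openai "github.com/sashabaranov/go-openai"                    // assumed source of the openai types
	"github.com/yomorun/yomo/pkg/bridge/ai/provider/githubmodels" // assumed import path
)

func main() {
	p := githubmodels.NewProvider("<github-token>", "gpt-4o") // placeholders

	req := openai.ChatCompletionRequest{
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Say hello in one sentence."},
		},
	}

	// The metadata argument is unused by this provider (_ metadata.M), so nil is passed.
	resp, err := p.GetChatCompletions(context.Background(), req, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Choices[0].Message.Content)
}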
func (*Provider) GetChatCompletionsStream ¶
func (p *Provider) GetChatCompletionsStream(ctx context.Context, req openai.ChatCompletionRequest, _ metadata.M) (provider.ResponseRecver, error)
GetChatCompletionsStream implements ai.LLMProvider.
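A hedged streaming sketch under the same assumptions; it additionally assumes that provider.ResponseRecver exposes a Recv method returning successive openai.ChatCompletionStreamResponse chunks until io.EOF:

package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"log"

	openai "github.com/sashabaranov/go-openai"                    // assumed source of the openai types
	"github.com/yomorun/yomo/pkg/bridge/ai/provider/githubmodels" // assumed import path
)

func main() {
	p := githubmodels.NewProvider("<github-token>", "gpt-4o") // placeholders

	req := openai.ChatCompletionRequest{
		Stream: true,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Stream a short greeting."},
		},
	}

	stream, err := p.GetChatCompletionsStream(context.Background(), req, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Assumed ResponseRecver contract: Recv returns chunks until io.EOF.
	for {
		chunk, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		if len(chunk.Choices) > 0 {
			fmt.Print(chunk.Choices[0].Delta.Content)
		}
	}
}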