Documentation
¶
Index ¶
- Variables
- func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage)
- func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse
- func StreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage)
- type Adaptor
- func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error)
- func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*http.Response, error)
- func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *relaymodel.Usage, err *relaymodel.ErrorWithStatusCode)
- func (a *Adaptor) GetBaseURL() string
- func (a *Adaptor) GetChannelName() string
- func (a *Adaptor) GetModelList() []*model.ModelConfig
- func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error)
- func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, c *gin.Context, req *http.Request) error
- type Content
- type Delta
- type Error
- type ImageSource
- type InputSchema
- type Message
- type Metadata
- type Request
- type Response
- type StreamResponse
- type Tool
- type Usage
Constants ¶
This section is empty.
Variables ¶
View Source
var ModelList = []*model.ModelConfig{
	{
		Model:       "claude-3-haiku-20240307",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.0025,
		OutputPrice: 0.0125,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(4096),
		),
	},
	{
		Model:       "claude-3-opus-20240229",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.015,
		OutputPrice: 0.075,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(4096),
		),
	},
	{
		Model:       "claude-3-5-haiku-20241022",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.0008,
		OutputPrice: 0.004,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(4096),
			model.WithModelConfigToolChoice(true),
		),
	},
	{
		Model:       "claude-3-5-sonnet-20240620",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.003,
		OutputPrice: 0.015,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(8192),
			model.WithModelConfigToolChoice(true),
		),
	},
	{
		Model:       "claude-3-5-sonnet-20241022",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.003,
		OutputPrice: 0.015,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(8192),
			model.WithModelConfigToolChoice(true),
		),
	},
	{
		Model:       "claude-3-5-sonnet-latest",
		Type:        relaymode.ChatCompletions,
		Owner:       model.ModelOwnerAnthropic,
		InputPrice:  0.003,
		OutputPrice: 0.015,
		Config: model.NewModelConfig(
			model.WithModelConfigMaxContextTokens(200000),
			model.WithModelConfigMaxOutputTokens(8192),
			model.WithModelConfigToolChoice(true),
		),
	},
}
Functions ¶
func ResponseClaude2OpenAI ¶
func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse
Types ¶
type Adaptor ¶
type Adaptor struct{}
func (*Adaptor) ConvertRequest ¶
func (*Adaptor) DoResponse ¶
func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *relaymodel.Usage, err *relaymodel.ErrorWithStatusCode)
func (*Adaptor) GetBaseURL ¶
func (*Adaptor) GetChannelName ¶
func (*Adaptor) GetModelList ¶
func (a *Adaptor) GetModelList() []*model.ModelConfig
type Content ¶
type Content struct {
	Type   string       `json:"type"`
	Text   string       `json:"text,omitempty"`
	Source *ImageSource `json:"source,omitempty"`
	// tool_calls
	ID        string `json:"id,omitempty"`
	Name      string `json:"name,omitempty"`
	Input     any    `json:"input,omitempty"`
	Content   string `json:"content,omitempty"`
	ToolUseID string `json:"tool_use_id,omitempty"`
}
type ImageSource ¶
type InputSchema ¶
type Request ¶
type Request struct {
	ToolChoice    any       `json:"tool_choice,omitempty"`
	Temperature   *float64  `json:"temperature,omitempty"`
	TopP          *float64  `json:"top_p,omitempty"`
	Model         string    `json:"model"`
	System        string    `json:"system,omitempty"`
	Messages      []Message `json:"messages"`
	StopSequences []string  `json:"stop_sequences,omitempty"`
	Tools         []Tool    `json:"tools,omitempty"`
	MaxTokens     int       `json:"max_tokens,omitempty"`
	TopK          int       `json:"top_k,omitempty"`
	Stream        bool      `json:"stream,omitempty"`
}
type Response ¶
type Response struct {
	StopReason   *string   `json:"stop_reason"`
	StopSequence *string   `json:"stop_sequence"`
	Error        Error     `json:"error"`
	ID           string    `json:"id"`
	Type         string    `json:"type"`
	Role         string    `json:"role"`
	Model        string    `json:"model"`
	Content      []Content `json:"content"`
	Usage        Usage     `json:"usage"`
}
func StreamResponseClaude2OpenAI ¶
func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response)
https://docs.anthropic.com/claude/reference/messages-streaming
type StreamResponse ¶
type Tool ¶
type Tool struct {
	InputSchema InputSchema `json:"input_schema"`
	Name        string      `json:"name"`
	Description string      `json:"description,omitempty"`
}
Click to show internal directories.
Click to hide internal directories.