Documentation ¶
Index ¶
- Variables
- type ChatCompletionMessage
- type ChatCompletionResponseFormat
- type ChatCompletionResponseFormatType
- type ChatMessageImageURL
- type ChatMessagePart
- type ChatMessagePartType
- type ImageURLDetail
- type InferenceRes
- type InitiateSessionReq
- type MORRPCController
- type OpenAiCompletitionRequest
- type PromptHead
- type PromptReq
- type ProxyController
- type ProxyReceiver
- type ProxyServiceSender
- type ResponderFlusher
- type SendResponse
Constants ¶
This section is empty.
Variables ¶
View Source
var (
	ErrValidation = fmt.Errorf("request validation failed")
	ErrUnmarshal  = fmt.Errorf("failed to unmarshal request")
)
View Source
var (
	ErrMissingPrKey     = fmt.Errorf("missing private key")
	ErrCreateReq        = fmt.Errorf("failed to create request")
	ErrProvider         = fmt.Errorf("provider request failed")
	ErrInvalidSig       = fmt.Errorf("received invalid signature from provider")
	ErrFailedStore      = fmt.Errorf("failed store user")
	ErrInvalidResponse  = fmt.Errorf("invalid response")
	ErrResponseErr      = fmt.Errorf("response error")
	ErrDecrFailed       = fmt.Errorf("failed to decrypt ai response chunk")
	ErrMasrshalFailed   = fmt.Errorf("failed to marshal response")
	ErrDecode           = fmt.Errorf("failed to decode response")
	ErrSessionNotFound  = fmt.Errorf("session not found")
	ErrProviderNotFound = fmt.Errorf("provider not found")
)
View Source
var (
ErrUnknownMethod = fmt.Errorf("unknown method")
)
Functions ¶
This section is empty.
Types ¶
type ChatCompletionMessage ¶
type ChatCompletionMessage struct {
	Role         string `json:"role"`
	Content      string `json:"content"`
	MultiContent []ChatMessagePart

	// This property isn't in the official documentation, but it's in
	// the documentation for the official library for python:
	// - https://github.com/openai/openai-python/blob/main/chatml.md
	// - https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
	Name string `json:"name,omitempty"`

	// For Role=tool prompts this should be set to the ID given in the assistant's prior request to call a tool.
	ToolCallID string `json:"tool_call_id,omitempty"`
}
type ChatCompletionResponseFormat ¶
type ChatCompletionResponseFormat struct {
Type ChatCompletionResponseFormatType `json:"type,omitempty"`
}
type ChatCompletionResponseFormatType ¶
type ChatCompletionResponseFormatType string
type ChatMessageImageURL ¶
type ChatMessageImageURL struct {
	URL    string         `json:"url,omitempty"`
	Detail ImageURLDetail `json:"detail,omitempty"`
}
type ChatMessagePart ¶
type ChatMessagePart struct {
	Type     ChatMessagePartType  `json:"type,omitempty"`
	Text     string               `json:"text,omitempty"`
	ImageURL *ChatMessageImageURL `json:"image_url,omitempty"`
}
type ChatMessagePartType ¶
type ChatMessagePartType string
const (
	ChatMessagePartTypeText     ChatMessagePartType = "text"
	ChatMessagePartTypeImageURL ChatMessagePartType = "image_url"
)
type ImageURLDetail ¶
type ImageURLDetail string
const (
	ImageURLDetailHigh ImageURLDetail = "high"
	ImageURLDetailLow  ImageURLDetail = "low"
	ImageURLDetailAuto ImageURLDetail = "auto"
)
type InferenceRes ¶
type InferenceRes struct {
	Signature lib.HexString   `json:"signature,omitempty" validate:"required,hexadecimal"`
	Message   json.RawMessage `json:"message" validate:"required"`
	Timestamp uint64          `json:"timestamp" validate:"required,timestamp"`
}
type InitiateSessionReq ¶
type InitiateSessionReq struct {
	User        common.Address `json:"user" validate:"required,eth_addr"`
	Provider    common.Address `json:"provider" validate:"required,eth_addr"`
	Spend       lib.BigInt     `json:"spend" validate:"required,number"`
	ProviderUrl string         `json:"providerUrl" validate:"required,hostname_port"`
	BidID       common.Hash    `json:"bidId" validate:"required,hex32"`
}
type MORRPCController ¶
type MORRPCController struct {
// contains filtered or unexported fields
}
func NewMORRPCController ¶
func NewMORRPCController(service *ProxyReceiver, validator *validator.Validate, sessionStorage *storages.SessionStorage) *MORRPCController
func (*MORRPCController) Handle ¶
func (s *MORRPCController) Handle(ctx context.Context, msg m.RPCMessage, sourceLog lib.ILogger, sendResponse SendResponse) error
type OpenAiCompletitionRequest ¶
type OpenAiCompletitionRequest struct {
	Model            string                        `json:"model"`
	Messages         []ChatCompletionMessage       `json:"messages"`
	MaxTokens        int                           `json:"max_tokens,omitempty"`
	Temperature      float32                       `json:"temperature,omitempty"`
	TopP             float32                       `json:"top_p,omitempty"`
	N                int                           `json:"n,omitempty"`
	Stream           bool                          `json:"stream,omitempty"`
	Stop             []string                      `json:"stop,omitempty"`
	PresencePenalty  float32                       `json:"presence_penalty,omitempty"`
	ResponseFormat   *ChatCompletionResponseFormat `json:"response_format,omitempty"`
	Seed             *int                          `json:"seed,omitempty"`
	FrequencyPenalty float32                       `json:"frequency_penalty,omitempty"`

	// LogitBias is must be a token id string (specified by their token ID in the tokenizer), not a word string.
	// incorrect: `"logit_bias":{"You": 6}`, correct: `"logit_bias":{"1639": 6}`
	// refs: https://platform.openai.com/docs/api-reference/chat/create#chat/create-logit_bias
	LogitBias map[string]int `json:"logit_bias,omitempty"`

	// LogProbs indicates whether to return log probabilities of the output tokens or not.
	// If true, returns the log probabilities of each output token returned in the content of message.
	// This option is currently not available on the gpt-4-vision-preview model.
	LogProbs bool `json:"logprobs,omitempty"`

	// TopLogProbs is an integer between 0 and 5 specifying the number of most likely tokens to return at each
	// token position, each with an associated log probability.
	// logprobs must be set to true if this parameter is used.
	TopLogProbs int `json:"top_logprobs,omitempty"`

	User string `json:"user,omitempty"`

	// Deprecated: use ToolChoice instead.
	FunctionCall any `json:"function_call,omitempty"`

	// This can be either a string or an ToolChoice object.
	ToolChoice any `json:"tool_choice,omitempty"`
}
type PromptHead ¶
type PromptReq ¶
type PromptReq struct {
	Signature string          `json:"signature" validate:"required,hexadecimal"`
	Message   json.RawMessage `json:"message" validate:"required"`
	Timestamp string          `json:"timestamp" validate:"required,timestamp"`
}
type ProxyController ¶
type ProxyController struct {
// contains filtered or unexported fields
}
func NewProxyController ¶
func NewProxyController(service *ProxyServiceSender, aiEngine *aiengine.AiEngine) *ProxyController
func (*ProxyController) InitiateSession ¶
func (s *ProxyController) InitiateSession(ctx *gin.Context)
InitiateSession godoc
@Summary Initiate Session with Provider
@Description sends a handshake to the provider
@Tags sessions
@Produce json
@Success 200 {object} interface{}
@Router /proxy/sessions/initiate [post]
func (*ProxyController) Prompt ¶
func (c *ProxyController) Prompt(ctx *gin.Context)
SendPrompt godoc
@Summary Send Local Or Remote Prompt
@Description Send prompt to a local or remote model based on session id in header
@Tags wallet
@Produce json
@Param prompt body proxyapi.OpenAiCompletitionRequest true "Prompt"
@Param session_id header string false "Session ID"
@Success 200 {object} interface{}
@Router /v1/chat/completions [post]
func (*ProxyController) RegisterRoutes ¶
func (s *ProxyController) RegisterRoutes(r interfaces.Router)
type ProxyReceiver ¶
type ProxyReceiver struct {
// contains filtered or unexported fields
}
func NewProxyReceiver ¶
func NewProxyReceiver(privateKeyHex, publicKeyHex lib.HexString, sessionStorage *storages.SessionStorage, aiEngine *aiengine.AiEngine) *ProxyReceiver
func (*ProxyReceiver) SessionPrompt ¶
func (s *ProxyReceiver) SessionPrompt(ctx context.Context, requestID string, userPubKey string, rq *m.SessionPromptReq, sendResponse SendResponse, sourceLog lib.ILogger) error
func (*ProxyReceiver) SessionRequest ¶
func (s *ProxyReceiver) SessionRequest(ctx context.Context, msgID string, reqID string, req *m.SessionReq, sourceLog lib.ILogger) (*msg.RpcResponse, error)
type ProxyServiceSender ¶
type ProxyServiceSender struct {
// contains filtered or unexported fields
}
func NewProxySender ¶
func NewProxySender(publicUrl *url.URL, privateKey interfaces.PrKeyProvider, logStorage *lib.Collection[*interfaces.LogStorage], sessionStorage *storages.SessionStorage, log lib.ILogger) *ProxyServiceSender
func (*ProxyServiceSender) InitiateSession ¶
func (*ProxyServiceSender) SendPrompt ¶
func (p *ProxyServiceSender) SendPrompt(ctx context.Context, resWriter ResponderFlusher, prompt *openai.ChatCompletionRequest, sessionID common.Hash) error
type ResponderFlusher ¶
type ResponderFlusher interface {
	http.ResponseWriter
	http.Flusher
}
type SendResponse ¶
type SendResponse func(*msg.RpcResponse) error
Source Files ¶
Click to show internal directories.
Click to hide internal directories.