Documentation ¶
Index ¶
- Constants
- Variables
- type ChatModel
- type Client
- func (client *Client) AutoComplete(ctx context.Context, prompt string, model ChatModel) (string, error)
- func (client *Client) GenerateImage(ctx context.Context, prompt string, model ChatModel) ([]byte, error)
- func (client *Client) Translate(ctx context.Context, prompt string, sourceLang string, targetLang string, ...) (string, error)
- type CloudFlareAiWorkerAPI
- type CloudflareApiClient
- func (api *CloudflareApiClient) GenerateImage(ctx context.Context, prompt string, model string) ([]byte, error)
- func (api *CloudflareApiClient) GenerateText(ctx context.Context, prompt string, model string) (string, error)
- func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
- type GeminiAPI
- type GeminiApiClient
- type GenerateTranslationRequest
- type GptAPI
- type LastResponse
- type MockClient
- type OpenAIApiClient
- type Request
- type Resources
- type Response
- type Service
- type TranslateResponse
Constants ¶
View Source
const (
	CF_LLAMA_2_7B_CHAT_INT8_MODEL = "@cf/meta/llama-2-7b-chat-int8"
	CF_LLAMA_3_8B_INSTRUCT_MODEL  = "@cf/meta/llama-3-8b-instruct"
	CF_LLAMA_3_1_INSTRUCT_MODEL   = "@cf/meta/llama-3.1-8b-instruct"
	CF_SQL_MODEL                  = "@cf/defog/sqlcoder-7b-2"
	CF_AWQ_MODEL                  = "@hf/thebloke/llama-2-13b-chat-awq"
	CF_OPEN_CHAT_MODEL            = "@cf/openchat/openchat-3.5-0106"
	CF_STABLE_DIFFUSION           = "@cf/stabilityai/stable-diffusion-xl-base-1.0"
	CF_META_TRANSLATION_MODEL     = "@cf/meta/m2m100-1.2b"
	CF_QWEN_MODEL                 = "@cf/qwen/qwen1.5-1.8b-chat"
)
Variables ¶
View Source
var AvaliableModels = []string{
	CHAT_MODEL_GPT.String(),
	CHAT_MODEL_GEMINI.String(),
	CHAT_MODEL_META.String(),
	CHAT_MODEL_SQL.String(),
	CHAT_MODEL_OPEN.String(),
	CHAT_MODEL_AWQ.String(),
	CHAT_MODEL_STABLE_DIFFUSION.String(),
	CHAT_MODEL_QWEN.String(),
}
View Source
var CHAT_MODEL_TO_CF_MODEL = map[ChatModel]string{
	CHAT_MODEL_SQL:              CF_SQL_MODEL,
	CHAT_MODEL_AWQ:              CF_AWQ_MODEL,
	CHAT_MODEL_META:             CF_LLAMA_3_1_INSTRUCT_MODEL,
	CHAT_MODEL_OPEN:             CF_OPEN_CHAT_MODEL,
	CHAT_MODEL_STABLE_DIFFUSION: CF_STABLE_DIFFUSION,
	CHAT_MODEL_TRANSLATIONS:     CF_META_TRANSLATION_MODEL,
	CHAT_MODEL_QWEN:             CF_QWEN_MODEL,
}
Functions ¶
This section is empty.
Types ¶
type ChatModel ¶
type ChatModel string
const (
	CHAT_MODEL_GEMINI           ChatModel = "gemini"
	CHAT_MODEL_GPT              ChatModel = "gpt"
	CHAT_MODEL_META             ChatModel = "llama"
	CHAT_MODEL_AWQ              ChatModel = "awq"
	CHAT_MODEL_TRANSLATIONS     ChatModel = "translate"
	CHAT_MODEL_OPEN             ChatModel = "open chat"
	CHAT_MODEL_SQL              ChatModel = "sql"
	CHAT_MODEL_STABLE_DIFFUSION ChatModel = "stable"
	CHAT_MODEL_QWEN             ChatModel = "qwen"
)
type Client ¶
type Client struct {
*Resources
}
func (*Client) AutoComplete ¶
func (*Client) GenerateImage ¶
type CloudFlareAiWorkerAPI ¶
type CloudflareApiClient ¶
func NewCloudflareApiClient ¶
func NewCloudflareApiClient(accountID, apiKey string) *CloudflareApiClient
func (*CloudflareApiClient) GenerateImage ¶
func (*CloudflareApiClient) GenerateText ¶
func (*CloudflareApiClient) GenerateTranslation ¶
func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
type GeminiApiClient ¶
type GeminiApiClient struct {
// contains filtered or unexported fields
}
func NewGeminiApiClient ¶
func NewGeminiApiClient(token string) *GeminiApiClient
func (*GeminiApiClient) GeminiChat ¶
func (api *GeminiApiClient) GeminiChat(ctx context.Context, prompt string) (*genai.GenerateContentResponse, error)
type LastResponse ¶
type OpenAIApiClient ¶
type OpenAIApiClient struct {
	Token        string
	OpenAIClient *openai.Client
}
func NewOpenAiApiClient ¶
func NewOpenAiApiClient(token string) *OpenAIApiClient
func (*OpenAIApiClient) AutoComplete ¶
func (api *OpenAIApiClient) AutoComplete(ctx context.Context, prompt string) (openai.ChatCompletionResponse, error)
type Resources ¶
type Resources struct {
	GPTApi              GptAPI
	GeminiAPI           GeminiAPI
	CloudflareApiClient CloudFlareAiWorkerAPI
}
type TranslateResponse ¶
Click to show internal directories.
Click to hide internal directories.