Documentation ¶
Index ¶
- Constants
- Variables
- type ChatModel
- type Client
- func (client *Client) GenerateImage(ctx context.Context, prompt string, model ImageModel) ([]byte, error)
- func (client *Client) TextGeneration(ctx context.Context, prompt string, model ChatModel) (string, error)
- func (client *Client) Translate(ctx context.Context, prompt string, sourceLang string, targetLang string, ...) (string, error)
- type CloudFlareAiWorkerAPI
- type CloudflareApiClient
- func (api *CloudflareApiClient) GenerateImage(ctx context.Context, prompt string, model string) ([]byte, error)
- func (api *CloudflareApiClient) GenerateTextWithModel(ctx context.Context, prompt string, model string) (string, error)
- func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
- type GeminiAPI
- type GeminiApiClient
- type GenerateTranslationRequest
- type GptAPI
- type ImageModel
- type LastResponse
- type MockClient
- type OpenAIApiClient
- type Request
- type Resources
- type Response
- type Service
- type TranslateResponse
Constants ¶
const (
	CF_LLAMA_2_7B_CHAT_INT8_MODEL  = "@cf/meta/llama-2-7b-chat-int8"
	CF_LLAMA_3_8B_INSTRUCT_MODEL   = "@cf/meta/llama-3-8b-instruct"
	CF_LLAMA_3_1_INSTRUCT_MODEL    = "@cf/meta/llama-3.1-8b-instruct"
	CF_LLAMA_3_2_3B_INSTRUCT_MODEL = "@cf/meta/llama-3.2-3b-instruct"
	CF_SQL_MODEL                   = "@cf/defog/sqlcoder-7b-2"
	CF_AWQ_MODEL                   = "@hf/thebloke/llama-2-13b-chat-awq"
	CF_OPEN_CHAT_MODEL             = "@cf/openchat/openchat-3.5-0106"
	CF_STABLE_DIFFUSION            = "@cf/stabilityai/stable-diffusion-xl-base-1.0"
	CF_META_TRANSLATION_MODEL      = "@cf/meta/m2m100-1.2b"
	CF_QWEN_MODEL                  = "@cf/qwen/qwen1.5-1.8b-chat"
)
const MODEL string = "gemini-1.0-pro"
Variables ¶
var AvaliableModels = []string{
	CHAT_MODEL_GPT.String(),
	CHAT_MODEL_GEMINI.String(),
	CHAT_MODEL_META.String(),
	CHAT_MODEL_SQL.String(),
	CHAT_MODEL_OPEN.String(),
	CHAT_MODEL_AWQ.String(),
	CHAT_MODEL_QWEN.String(),
	CHAT_MODEL_GPT_V4.String(),
}
var CHAT_MODEL_TO_CF_MODEL = map[ChatModel]string{
	CHAT_MODEL_SQL:          CF_SQL_MODEL,
	CHAT_MODEL_AWQ:          CF_AWQ_MODEL,
	CHAT_MODEL_META:         CF_LLAMA_3_2_3B_INSTRUCT_MODEL,
	CHAT_MODEL_OPEN:         CF_OPEN_CHAT_MODEL,
	CHAT_MODEL_TRANSLATIONS: CF_META_TRANSLATION_MODEL,
	CHAT_MODEL_QWEN:         CF_QWEN_MODEL,
}
var CHAT_MODEL_TO_OPENAI_MODEL = map[ChatModel]string{
CHAT_MODEL_GPT: openai.O1Mini,
CHAT_MODEL_GPT_V4: openai.GPT4o,
}
var IMAGE_MODEL_TO_CF_MODEL = map[ImageModel]string{
	IMAGE_MODEL_STABLE_DIFFUSION: CF_STABLE_DIFFUSION,
}
var IMAGE_MODEL_TO_OPENAI_MODEL = map[ImageModel]string{
IMAGE_MODEL_DALL_E_3: openai.CreateImageModelDallE3,
IMAGE_MODEL_DALL_E_2: openai.CreateImageModelDallE2,
}
var ImageModels = []string{
	IMAGE_MODEL_STABLE_DIFFUSION.String(),
	IMAGE_MODEL_DALL_E_3.String(),
	IMAGE_MODEL_DALL_E_2.String(),
}
var MissingContentError = errors.New("Missing content")
var StrToImageModel = map[string]ImageModel{
	IMAGE_MODEL_STABLE_DIFFUSION.String(): IMAGE_MODEL_STABLE_DIFFUSION,
	IMAGE_MODEL_DALL_E_2.String():         IMAGE_MODEL_DALL_E_2,
	IMAGE_MODEL_DALL_E_3.String():         IMAGE_MODEL_DALL_E_3,
}
Functions ¶
This section is empty.
Types ¶
type ChatModel ¶
type ChatModel string
const (
	CHAT_MODEL_GEMINI       ChatModel = "gemini"
	CHAT_MODEL_GPT          ChatModel = "gpt"
	CHAT_MODEL_META         ChatModel = "llama"
	CHAT_MODEL_AWQ          ChatModel = "awq"
	CHAT_MODEL_TRANSLATIONS ChatModel = "translate"
	CHAT_MODEL_OPEN         ChatModel = "open chat"
	CHAT_MODEL_SQL          ChatModel = "sql"
	CHAT_MODEL_QWEN         ChatModel = "qwen"
	CHAT_MODEL_GPT_V4       ChatModel = "gpt4"
)
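The ChatModel values are short, user-facing aliases; the CHAT_MODEL_TO_CF_MODEL and CHAT_MODEL_TO_OPENAI_MODEL maps (see Variables) resolve them to provider-specific model identifiers. A minimal lookup sketch, assuming the package is imported as ai from a placeholder import path:

package main

import (
	"fmt"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	// Resolve the user-facing alias to the Cloudflare Workers AI model name.
	if cfModel, ok := ai.CHAT_MODEL_TO_CF_MODEL[ai.CHAT_MODEL_META]; ok {
		fmt.Println(cfModel) // "@cf/meta/llama-3.2-3b-instruct"
	}

	// GPT aliases resolve through the OpenAI map instead.
	if oaModel, ok := ai.CHAT_MODEL_TO_OPENAI_MODEL[ai.CHAT_MODEL_GPT_V4]; ok {
		fmt.Println(oaModel)
	}
}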
type Client ¶
type Client struct {
*Resources
}
func (*Client) GenerateImage ¶
func (*Client) TextGeneration ¶
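A sketch of the Client call pattern, using the TextGeneration and GenerateImage signatures shown in the Index (the Translate method's trailing parameters are elided there, so it is not shown; the import path is a placeholder and the client is assumed to be assembled as sketched under the Resources type):

package example

import (
	"context"
	"os"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

// run shows the basic Client call pattern.
func run(ctx context.Context, client *ai.Client) (string, error) {
	// Text generation against a chat-model alias.
	text, err := client.TextGeneration(ctx, "Write a haiku about Go.", ai.CHAT_MODEL_META)
	if err != nil {
		return "", err
	}

	// Image generation returns raw image bytes.
	img, err := client.GenerateImage(ctx, "a lighthouse at dusk", ai.IMAGE_MODEL_STABLE_DIFFUSION)
	if err != nil {
		return "", err
	}
	if err := os.WriteFile("out.png", img, 0o644); err != nil {
		return "", err
	}
	return text, nil
}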
type CloudFlareAiWorkerAPI ¶
type CloudflareApiClient ¶
func NewCloudflareApiClient ¶
func NewCloudflareApiClient(accountID, apiKey string) *CloudflareApiClient
func (*CloudflareApiClient) GenerateImage ¶
func (*CloudflareApiClient) GenerateTextWithModel ¶
func (*CloudflareApiClient) GenerateTranslation ¶
func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
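A minimal sketch of driving the Cloudflare client directly, based on the constructor and GenerateTextWithModel signatures above. The environment variable names are illustrative, and the GenerateTranslationRequest fields are not listed in this index, so the translation call is omitted:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	api := ai.NewCloudflareApiClient(os.Getenv("CF_ACCOUNT_ID"), os.Getenv("CF_API_KEY"))

	// The Cloudflare client takes the raw Workers AI model name, not a ChatModel alias.
	out, err := api.GenerateTextWithModel(context.Background(), "Explain goroutines in one sentence.", ai.CF_LLAMA_3_2_3B_INSTRUCT_MODEL)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}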
type GeminiApiClient ¶
type GeminiApiClient struct {
// contains filtered or unexported fields
}
func NewGeminiApiClient ¶
func NewGeminiApiClient(token string) *GeminiApiClient
func (*GeminiApiClient) GenerateText ¶
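The index does not show GenerateText's signature; the sketch below assumes it mirrors the other text-generation methods (a context plus a prompt, returning a string and an error), which should be verified against the package source. The environment variable name is illustrative:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	gemini := ai.NewGeminiApiClient(os.Getenv("GEMINI_API_KEY"))

	// Assumed signature: GenerateText(ctx, prompt) (string, error); confirm before relying on it.
	text, err := gemini.GenerateText(context.Background(), "Summarize the Go memory model.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(text)
}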
type ImageModel ¶
type ImageModel string
const (
	IMAGE_MODEL_STABLE_DIFFUSION ImageModel = "stable"
	IMAGE_MODEL_DALL_E_2         ImageModel = "dalle2"
	IMAGE_MODEL_DALL_E_3         ImageModel = "dalle"
)
func (ImageModel) String ¶
func (c ImageModel) String() string
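String() and the StrToImageModel map (see Variables) give a round trip between ImageModel values and their user-facing names, which is handy when the model is chosen from user input. A small sketch, with a placeholder import path:

package main

import (
	"fmt"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	// From a user-supplied string to an ImageModel.
	model, ok := ai.StrToImageModel["stable"]
	if !ok {
		fmt.Println("unknown image model")
		return
	}

	// Back to the user-facing name via String().
	fmt.Println(model.String()) // "stable"
}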
type LastResponse ¶
type MockClient ¶
func (*MockClient) GenerateImage ¶
func (client *MockClient) GenerateImage(ctx context.Context, prompt string, model ImageModel) (res []byte, err error)
func (*MockClient) TextGeneration ¶
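MockClient exposes the same GenerateImage/TextGeneration method set as Client, so it can stand in wherever the caller accepts a small interface over those methods. A test-style sketch; the textGenerator interface is defined by the caller (it is not part of this package's exported API as listed here), the TextGeneration signature is assumed to match Client's, and MockClient is assumed to be usable as a zero value:

package main

import (
	"context"
	"fmt"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

// textGenerator is a caller-defined interface, not exported by the package itself.
type textGenerator interface {
	TextGeneration(ctx context.Context, prompt string, model ai.ChatModel) (string, error)
}

func describe(ctx context.Context, g textGenerator) (string, error) {
	return g.TextGeneration(ctx, "Describe the weather.", ai.CHAT_MODEL_GPT)
}

func main() {
	// In tests, a *ai.MockClient can replace the real *ai.Client.
	out, err := describe(context.Background(), &ai.MockClient{})
	fmt.Println(out, err)
}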
type OpenAIApiClient ¶
type OpenAIApiClient struct {
	Token        string
	OpenAIClient *openai.Client
}
func NewOpenAiApiClient ¶
func NewOpenAiApiClient(token string) *OpenAIApiClient
func (*OpenAIApiClient) GenerateImage ¶
func (*OpenAIApiClient) GenerateTextWithModel ¶
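A sketch for the OpenAI-backed client. GenerateTextWithModel's signature is not shown above, so this assumes it matches CloudflareApiClient's (context, prompt, model string); the environment variable name is illustrative:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	api := ai.NewOpenAiApiClient(os.Getenv("OPENAI_API_KEY"))

	// Assumed signature, mirroring CloudflareApiClient.GenerateTextWithModel.
	out, err := api.GenerateTextWithModel(context.Background(), "Name three Go proverbs.", "gpt-4o")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}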
type Request ¶
type Request struct {
	Prompt         string      `json:"prompt"`
	TargetLanguage string      `json:"target_language,omitempty"`
	SourceLanguage string      `json:"source_language,omitempty"`
	Model          ChatModel   `json:"model"`
	ImageModel     *ImageModel `json:"image_model"`
}
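Given the struct tags above, a translation request marshals as shown below; the field values are illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

func main() {
	req := ai.Request{
		Prompt:         "Bonjour tout le monde",
		SourceLanguage: "french",
		TargetLanguage: "english",
		Model:          ai.CHAT_MODEL_TRANSLATIONS,
	}

	body, err := json.Marshal(req)
	if err != nil {
		log.Fatal(err)
	}
	// {"prompt":"Bonjour tout le monde","target_language":"english","source_language":"french","model":"translate","image_model":null}
	fmt.Println(string(body))
}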
type Resources ¶
type Resources struct {
	GPTApi              GptAPI
	GeminiAPI           GeminiAPI
	CloudflareApiClient CloudFlareAiWorkerAPI
}
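Client embeds *Resources, so a client is assembled by filling Resources with concrete backends. A plausible wiring sketch, assuming the *OpenAIApiClient, *GeminiApiClient, and *CloudflareApiClient types satisfy the GptAPI, GeminiAPI, and CloudFlareAiWorkerAPI interfaces as their method sets suggest; the environment variable names are illustrative:

package main

import (
	"os"

	ai "example.com/ai" // placeholder import path; replace with the package's real module path
)

// newClient wires concrete API clients into Resources and wraps them in a Client.
func newClient() *ai.Client {
	return &ai.Client{
		Resources: &ai.Resources{
			GPTApi:              ai.NewOpenAiApiClient(os.Getenv("OPENAI_API_KEY")),
			GeminiAPI:           ai.NewGeminiApiClient(os.Getenv("GEMINI_API_KEY")),
			CloudflareApiClient: ai.NewCloudflareApiClient(os.Getenv("CF_ACCOUNT_ID"), os.Getenv("CF_API_KEY")),
		},
	}
}

func main() {
	_ = newClient()
}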
type TranslateResponse ¶