Documentation ¶
Index ¶
- Constants
- Variables
- type ChatModel
- type Client
- func (client *Client) GenerateImage(ctx context.Context, prompt string, model ImageModel) ([]byte, error)
- func (client *Client) GetLLmModel(model ChatModel) (llms.Model, []llms.CallOption)
- func (client *Client) TextGeneration(ctx context.Context, prompt string, model ChatModel) (string, error)
- func (client *Client) TextGenerationWithSystem(ctx context.Context, system string, prompt string, model ChatModel) (result string, err error)
- func (client *Client) Translate(ctx context.Context, prompt string, sourceLang string, targetLang string, ...) (string, error)
- type CloudFlareAiWorkerAPI
- type CloudflareApiClient
- func (api *CloudflareApiClient) GenerateContent(ctx context.Context, messages []llms.MessageContent, ...) (*llms.ContentResponse, error)
- func (api *CloudflareApiClient) GenerateImage(ctx context.Context, prompt string, model string) ([]byte, error)
- func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
- func (api *CloudflareApiClient) GetModel() llms.Model
- func (api *CloudflareApiClient) SetModel(model string)
- type GeminiAPI
- type GeminiApiClient
- type GenerateTranslationRequest
- type GptAPI
- type ImageModel
- type LastResponse
- type LlmContentGenerator
- type LlmModel
- type MockClient
- type OpenAIApiClient
- type Request
- type Resources
- type Response
- type Service
- type TranslateResponse
Constants ¶
View Source
const ( CF_LLAMA_2_7B_CHAT_INT8_MODEL = "@cf/meta/llama-2-7b-chat-int8" CF_LLAMA_3_8B_INSTRUCT_MODEL = "@cf/meta/llama-3-8b-instruct" CF_LLAMA_3_1_INSTRUCT_MODEL = "@cf/meta/llama-3.1-8b-instruct" CF_LLAMA_3_2_3B_INSTRUCT_MODEL = "@cf/meta/llama-3.2-3b-instruct" CF_LLAMA_3_3_70B_INSTRUCT_MODEL = "@cf/meta/llama-3.3-70b-instruct-fp8-fast" CF_SQL_MODEL = "@cf/defog/sqlcoder-7b-2" CF_AWQ_MODEL = "@hf/thebloke/llama-2-13b-chat-awq" CF_OPEN_CHAT_MODEL = "@cf/openchat/openchat-3.5-0106" CF_STABLE_DIFFUSION = "@cf/stabilityai/stable-diffusion-xl-base-1.0" CF_META_TRANSLATION_MODEL = "@cf/meta/m2m100-1.2b" CF_QWEN_MODEL = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" )
View Source
const ( VERTEX_MODEL string = "gemini-2.0-flash-exp" VERTEX_API_LOCATION string = "us-central1" IMAGE_IMAGEN_MODEL = "imagen-3.0-generate-002" )
Variables ¶
View Source
var AvaliableModels = []string{ CHAT_MODEL_GPT.String(), CHAT_MODEL_GEMINI.String(), CHAT_MODEL_META.String(), CHAT_MODEL_SQL.String(), CHAT_MODEL_OPEN.String(), CHAT_MODEL_AWQ.String(), CHAT_MODEL_QWEN.String(), CHAT_MODEL_GPT_V4.String(), }
View Source
var CHAT_MODEL_TO_CF_MODEL = map[ChatModel]string{ CHAT_MODEL_SQL: CF_SQL_MODEL, CHAT_MODEL_AWQ: CF_AWQ_MODEL, CHAT_MODEL_META: CF_LLAMA_3_3_70B_INSTRUCT_MODEL, CHAT_MODEL_OPEN: CF_OPEN_CHAT_MODEL, CHAT_MODEL_TRANSLATIONS: CF_META_TRANSLATION_MODEL, CHAT_MODEL_QWEN: CF_QWEN_MODEL, }
View Source
var CHAT_MODEL_TO_OPENAI_MODEL = map[ChatModel]string{
CHAT_MODEL_GPT: "o1-mini",
CHAT_MODEL_GPT_V4: "gpt-4o",
}
View Source
var IMAGE_MODEL_TO_CF_MODEL = map[ImageModel]string{ IMAGE_MODEL_STABLE_DIFFUSION: CF_STABLE_DIFFUSION, }
View Source
var IMAGE_MODEL_TO_OPENAI_MODEL = map[ImageModel]string{
IMAGE_MODEL_DALL_E_3: "dall-e-3",
IMAGE_MODEL_DALL_E_2: "dall-e-2",
}
View Source
var ImageModels = []string{ IMAGE_MODEL_STABLE_DIFFUSION.String(), IMAGE_MODEL_DALL_E_3.String(), IMAGE_MODEL_DALL_E_2.String(), IMAGE_MODEL_GEMINI.String(), }
View Source
var MissingContentError = errors.New("Missing content")
View Source
var StrToImageModel = map[string]ImageModel{ IMAGE_MODEL_STABLE_DIFFUSION.String(): IMAGE_MODEL_STABLE_DIFFUSION, IMAGE_MODEL_DALL_E_2.String(): IMAGE_MODEL_DALL_E_2, IMAGE_MODEL_DALL_E_3.String(): IMAGE_MODEL_DALL_E_3, IMAGE_MODEL_GEMINI.String(): IMAGE_IMAGEN_MODEL, }
Functions ¶
This section is empty.
Types ¶
type ChatModel ¶
type ChatModel string
const ( CHAT_MODEL_GEMINI ChatModel = "gemini" CHAT_MODEL_GPT ChatModel = "gpt" CHAT_MODEL_META ChatModel = "llama" CHAT_MODEL_AWQ ChatModel = "awq" CHAT_MODEL_TRANSLATIONS ChatModel = "translate" CHAT_MODEL_OPEN ChatModel = "open chat" CHAT_MODEL_SQL ChatModel = "sql" CHAT_MODEL_QWEN ChatModel = "qwen" CHAT_MODEL_GPT_V4 ChatModel = "g. p. t. version number four" )
type Client ¶
type Client struct {
*Resources
}
func (*Client) GenerateImage ¶
func (*Client) GetLLmModel ¶ added in v1.0.0
func (*Client) TextGeneration ¶
func (*Client) TextGenerationWithSystem ¶ added in v1.0.0
type CloudFlareAiWorkerAPI ¶
type CloudflareApiClient ¶
type CloudflareApiClient struct { AccountID string APIKey string LlmClient *cloudflare.LLM }
func NewCloudflareApiClient ¶
func NewCloudflareApiClient(accountID, apiKey string) *CloudflareApiClient
func (*CloudflareApiClient) GenerateContent ¶ added in v1.0.0
func (api *CloudflareApiClient) GenerateContent( ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption, ) (*llms.ContentResponse, error)
func (*CloudflareApiClient) GenerateImage ¶
func (*CloudflareApiClient) GenerateTranslation ¶
func (api *CloudflareApiClient) GenerateTranslation(ctx context.Context, req *GenerateTranslationRequest) (string, error)
func (*CloudflareApiClient) GetModel ¶ added in v1.0.0
func (api *CloudflareApiClient) GetModel() llms.Model
func (*CloudflareApiClient) SetModel ¶ added in v1.0.0
func (api *CloudflareApiClient) SetModel(model string)
type GeminiApiClient ¶
type GeminiApiClient struct { LlmClient *googlegenai.GoogleAI // contains filtered or unexported fields }
func NewGeminiApiClient ¶
func NewGeminiApiClient(credsToken string) *GeminiApiClient
func (*GeminiApiClient) GenerateContent ¶ added in v1.0.0
func (api *GeminiApiClient) GenerateContent( ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption, ) (*llms.ContentResponse, error)
func (*GeminiApiClient) GenerateImage ¶ added in v1.0.0
func (*GeminiApiClient) GetModel ¶ added in v1.0.0
func (api *GeminiApiClient) GetModel() llms.Model
type ImageModel ¶
type ImageModel string
const ( IMAGE_MODEL_STABLE_DIFFUSION ImageModel = "stable" IMAGE_MODEL_DALL_E_2 ImageModel = "dallas v2" IMAGE_MODEL_DALL_E_3 ImageModel = "dallas" IMAGE_MODEL_GEMINI ImageModel = "gemini image" )
func (ImageModel) String ¶
func (c ImageModel) String() string
type LastResponse ¶
type LlmContentGenerator ¶ added in v1.0.0
type LlmContentGenerator interface { GenerateContent( ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption, ) (*llms.ContentResponse, error) }
type MockClient ¶
func (*MockClient) GenerateImage ¶
func (client *MockClient) GenerateImage(ctx context.Context, prompt string, model ImageModel) (res []byte, err error)
func (*MockClient) TextGeneration ¶
type OpenAIApiClient ¶
type OpenAIApiClient struct { Token string LlmClient *langchain_openai.LLM }
func NewOpenAiApiClient ¶
func NewOpenAiApiClient(token string) *OpenAIApiClient
func (*OpenAIApiClient) GenerateContent ¶ added in v1.0.0
func (api *OpenAIApiClient) GenerateContent( ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption, ) (*llms.ContentResponse, error)
func (*OpenAIApiClient) GenerateImage ¶
func (*OpenAIApiClient) GetModel ¶ added in v1.0.0
func (api *OpenAIApiClient) GetModel() llms.Model
type Resources ¶
type Resources struct { GPTApi GptAPI GeminiAPI GeminiAPI CloudflareApiClient CloudFlareAiWorkerAPI }
type Service ¶
type Service interface { TextGeneration(context.Context, string, ChatModel) (string, error) TextGenerationWithSystem(context.Context, string, string, ChatModel) (string, error) GenerateImage(context.Context, string, ImageModel) ([]byte, error) Translate( ctx context.Context, prompt string, sourceLang string, targetLang string, model ChatModel, ) (string, error) }
type TranslateResponse ¶
Click to show internal directories.
Click to hide internal directories.