config

package
v0.0.0-...-f03bf9e
Published: May 29, 2024 License: Apache-2.0 Imports: 5 Imported by: 2

Documentation

Index

Constants

const (
	DefaultConfigBase = "friday.conf"
)

Variables

var FilePath string

Functions

func LocalUserPath

func LocalUserPath() string
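
A minimal sketch of how these package-level values might be wired together when resolving a configuration file. The import path, and the idea of joining LocalUserPath() with DefaultConfigBase, are assumptions for illustration, not documented behavior:

package main

import (
	"fmt"
	"path/filepath"

	"example.com/friday/config" // import path assumed; substitute the real module path
)

func main() {
	// Assumption: LocalUserPath returns a user-local directory and the
	// config file is DefaultConfigBase ("friday.conf") inside it.
	if config.FilePath == "" {
		config.FilePath = filepath.Join(config.LocalUserPath(), config.DefaultConfigBase)
	}
	fmt.Println("using config file:", config.FilePath)
}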

Types

type Config

type Config struct {
	Debug  bool `json:"debug,omitempty"`
	Logger logger.Logger

	// llm token limit
	LimitToken int `json:"limit_token,omitempty"` // used by summarization: input is split into multiple sub-documents, each summarized by the llm separately

	// openai credentials
	OpenAIBaseUrl string `json:"open_ai_base_url,omitempty"` // required if openai is used for embedding or llm; default is "https://api.openai.com"
	OpenAIKey     string `json:"open_ai_key,omitempty"`      // required if openai is used for embedding or llm

	// gemini credentials
	GeminiBaseUri string `json:"gemini_base_uri,omitempty"` // required if gemini is used for embedding or llm; default is "https://generativelanguage.googleapis.com"
	GeminiKey     string `json:"gemini_key,omitempty"`      // required if gemini is used for embedding or llm

	// embedding config
	EmbeddingConfig EmbeddingConfig `json:"embedding_config,omitempty"`

	// vector store config
	VectorStoreConfig VectorStoreConfig `json:"vector_store_config,omitempty"`

	// LLM
	LLMConfig LLMConfig `json:"llm_config,omitempty"`

	// text splitter
	TextSpliterConfig TextSpliterConfig `json:"text_spliter_config,omitempty"`
}
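
As a sketch of how the JSON tags above map onto a configuration file, the snippet below unmarshals a small document into Config. The import path and all field values are illustrative assumptions, not recommended defaults:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"example.com/friday/config" // import path assumed
)

func main() {
	raw := []byte(`{
		"debug": true,
		"limit_token": 4096,
		"open_ai_key": "sk-...",
		"llm_config": {"llm_type": "openai"},
		"embedding_config": {"embedding_type": "openai"},
		"vector_store_config": {"vector_store_type": "redis", "vector_url": "redis://localhost:6379"}
	}`)

	var cfg config.Config
	if err := json.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.LLMConfig.LLMType, cfg.VectorStoreConfig.VectorStoreType)
}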

type EmbeddingConfig

type EmbeddingConfig struct {
	EmbeddingType EmbeddingType     `json:"embedding_type"`
	OpenAI        OpenAIConfig      `json:"openai,omitempty"`
	HuggingFace   HuggingFaceConfig `json:"hugging_face,omitempty"`
	Gemini        GeminiConfig      `json:"gemini,omitempty"`
}

type EmbeddingType

type EmbeddingType string
const (
	EmbeddingOpenAI      EmbeddingType = "openai"
	EmbeddingHuggingFace EmbeddingType = "huggingface"
	EmbeddingGemini      EmbeddingType = "gemini"
)
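
A short sketch of filling EmbeddingConfig for one provider and reading its EmbeddingType back. The import path and the HuggingFace endpoint/model values are assumptions for illustration:

package main

import (
	"fmt"

	"example.com/friday/config" // import path assumed
)

func main() {
	ec := config.EmbeddingConfig{
		EmbeddingType: config.EmbeddingHuggingFace,
		HuggingFace: config.HuggingFaceConfig{
			EmbeddingUrl:   "http://localhost:8080/embed",                // illustrative value
			EmbeddingModel: "sentence-transformers/all-MiniLM-L6-v2",     // illustrative value
		},
	}
	fmt.Println("embedding provider:", ec.EmbeddingType)
}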

type GLM6BConfig

type GLM6BConfig struct {
	Url string `json:"url,omitempty"`
}

type GeminiConfig

type GeminiConfig struct {
	QueryPerMinute int     `json:"query_per_minute,omitempty"` // queries per minute (qpm), default is 3
	Burst          int     `json:"burst,omitempty"`            // burst, default is 5
	Model          *string `json:"model,omitempty"`            // gemini model; default for llm is "gemini-pro", default for embedding is "embedding-001"
}

type HuggingFaceConfig

type HuggingFaceConfig struct {
	EmbeddingUrl   string `json:"embedding_url,omitempty"`
	EmbeddingModel string `json:"embedding_model,omitempty"`
}

type LLMConfig

type LLMConfig struct {
	LLMType LLMType           `json:"llm_type"`
	Prompts map[string]string `json:"prompts,omitempty"`
	OpenAI  OpenAIConfig      `json:"openai,omitempty"`
	GLM6B   GLM6BConfig       `json:"glm6b,omitempty"`
	Gemini  GeminiConfig      `json:"gemini,omitempty"`
}

type LLMType

type LLMType string
const (
	LLMGLM6B  LLMType = "glm-6b"
	LLMOpenAI LLMType = "openai"
	LLMGemini LLMType = "gemini"
)
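
A minimal sketch of building an LLMConfig that selects the OpenAI backend. The import path, the prompt key, and the model name are illustrative assumptions; the prompt keys the package actually expects are not documented here:

package main

import (
	"fmt"

	"example.com/friday/config" // import path assumed
)

func main() {
	model := "gpt-3.5-turbo"
	lc := config.LLMConfig{
		LLMType: config.LLMOpenAI,
		OpenAI:  config.OpenAIConfig{Model: &model},
		// "summary" is an illustrative prompt key, not a documented one.
		Prompts: map[string]string{"summary": "Summarize the following text:"},
	}
	fmt.Println("llm backend:", lc.LLMType)
}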

type Loader

type Loader interface {
	GetConfig() (Config, error)
}

func NewConfigLoader

func NewConfigLoader() Loader
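
A minimal sketch of loading the configuration through the Loader interface; only the import path is assumed:

package main

import (
	"log"

	"example.com/friday/config" // import path assumed
)

func main() {
	loader := config.NewConfigLoader()
	cfg, err := loader.GetConfig()
	if err != nil {
		log.Fatalf("load config: %v", err)
	}
	log.Printf("debug=%v llm=%s", cfg.Debug, cfg.LLMConfig.LLMType)
}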

type OpenAIConfig

type OpenAIConfig struct {
	QueryPerMinute   int      `json:"query_per_minute,omitempty"` // queries per minute (qpm), default is 3
	Burst            int      `json:"burst,omitempty"`            // burst, default is 5
	Model            *string  `json:"model,omitempty"`            // openai model; default for llm is "gpt-3.5-turbo", default for embedding is "text-embedding-ada-002"
	MaxReturnToken   *int     `json:"max_return_token,omitempty"` // MaxReturnToken + VectorStoreConfig.TopK * TextSpliterConfig.SpliterChunkSize must not exceed the token limit of the llm model
	FrequencyPenalty *uint    `json:"frequency_penalty,omitempty"`
	PresencePenalty  *uint    `json:"presence_penalty,omitempty"`
	Temperature      *float32 `json:"temperature,omitempty"`
}
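
Several fields are pointers so that an unset value can be distinguished from a zero value. A small sketch of populating them; the generic helper ptr is hypothetical and not part of the package, and the values are illustrative:

package main

import (
	"fmt"

	"example.com/friday/config" // import path assumed
)

// ptr is a hypothetical helper for taking the address of a literal.
func ptr[T any](v T) *T { return &v }

func main() {
	oc := config.OpenAIConfig{
		QueryPerMinute: 3,
		Burst:          5,
		Model:          ptr("gpt-3.5-turbo"),
		MaxReturnToken: ptr(1024),
		Temperature:    ptr(float32(0.7)),
	}
	fmt.Println(*oc.Model, *oc.MaxReturnToken)
}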

type TextSpliterConfig

type TextSpliterConfig struct {
	SpliterChunkSize    int    `json:"spliter_chunk_size,omitempty"`    // size of each chunk that files are split into for storage, default is 4000
	SpliterChunkOverlap int    `json:"spliter_chunk_overlap,omitempty"` // overlap between adjacent chunks, default is 200
	SpliterSeparator    string `json:"spliter_separator,omitempty"`     // separator used to split files, default is "\n"
}

type VectorStoreConfig

type VectorStoreConfig struct {
	VectorStoreType VectorStoreType `json:"vector_store_type"`
	VectorUrl       string          `json:"vector_url"`
	TopK            *int            `json:"top_k,omitempty"`         // top-k for knn retrieval, default is 6
	EmbeddingDim    int             `json:"embedding_dim,omitempty"` // embedding dimension, default is 1536
}
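
The constraint noted on OpenAIConfig.MaxReturnToken can be checked before use. A minimal sketch; the import path and the model token limit of 4096 are illustrative assumptions that depend on the chosen model:

package main

import (
	"fmt"

	"example.com/friday/config" // import path assumed
)

func main() {
	maxReturn, topK := 1024, 6
	oc := config.OpenAIConfig{MaxReturnToken: &maxReturn}
	vc := config.VectorStoreConfig{TopK: &topK}
	tc := config.TextSpliterConfig{SpliterChunkSize: 500}

	const modelTokenLimit = 4096 // illustrative; depends on the chosen model
	budget := *oc.MaxReturnToken + *vc.TopK*tc.SpliterChunkSize
	if budget > modelTokenLimit {
		fmt.Printf("over budget: %d > %d\n", budget, modelTokenLimit)
	} else {
		fmt.Printf("within budget: %d <= %d\n", budget, modelTokenLimit)
	}
}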

type VectorStoreType

type VectorStoreType string
const (
	VectorStoreRedis    VectorStoreType = "redis"
	VectorStorePostgres VectorStoreType = "postgres"
	VectorStorePGVector VectorStoreType = "pgvector"
)
