bedrock

package
v0.0.0-...-df6d72d Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jun 24, 2024 License: Apache-2.0 Imports: 17 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	// ErrEmptyResponse is returned when the Bedrock API responds with an
	// empty completion body.
	ErrEmptyResponse = errors.New("empty response")
)

ErrEmptyResponse is returned when the Bedrock API returns an empty response.

Functions

This section is empty.

Types

type ChatCompletion

// ChatCompletion is the Bedrock chat (text generation) response schema.
type ChatCompletion struct {
	// InputTextTokenCount is the token count of the input prompt.
	InputTextTokenCount int `json:"inputTextTokenCount"`
	// Results holds one entry per generated completion.
	Results             []struct {
		TokenCount       int    `json:"tokenCount"`       // token count of this result
		OutputText       string `json:"outputText"`       // the generated text
		CompletionReason string `json:"completionReason"` // why generation stopped (e.g. length/stop sequence — per Bedrock docs)
	} `json:"results"`
}

ChatCompletion is the Bedrock chat response schema.

type ChatRequest

// ChatRequest is a Bedrock-specific request schema.
type ChatRequest struct {
	// Messages is the flattened prompt text; Bedrock expects it under the
	// "inputText" key as a single string, not a message array.
	Messages             string               `json:"inputText"`
	// TextGenerationConfig carries sampling/limit parameters for the request.
	TextGenerationConfig TextGenerationConfig `json:"textGenerationConfig"`
}

ChatRequest is a Bedrock-specific request schema

func NewChatRequestFromConfig

func NewChatRequestFromConfig(cfg *Config) *ChatRequest

NewChatRequestFromConfig fills the struct from the config. Reflection is deliberately avoided because of the performance penalty it incurs.

func (*ChatRequest) ApplyParams

func (r *ChatRequest) ApplyParams(params *schemas.ChatParams)

type Client

// Client is a client for accessing the AWS Bedrock API.
type Client struct {
	// contains filtered or unexported fields
}

Client is a client for accessing the AWS Bedrock API.

func NewClient

func NewClient(providerConfig *Config, clientConfig *clients.ClientConfig, tel *telemetry.Telemetry) (*Client, error)

NewClient creates a new client for the AWS Bedrock API.

func (*Client) Chat

func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error)

Chat sends a chat request to the specified bedrock model.

func (*Client) ChatStream

func (c *Client) ChatStream(_ context.Context, _ *schemas.ChatParams) (clients.ChatStream, error)

func (*Client) ModelName

func (c *Client) ModelName() string

func (*Client) Provider

func (c *Client) Provider() string

func (*Client) SupportChatStream

func (c *Client) SupportChatStream() bool

type Config

// Config holds the provider configuration for the Bedrock client.
// Credential fields use json:"-" so they are never serialized to JSON output.
type Config struct {
	BaseURL       string        `yaml:"base_url" json:"base_url" validate:"required"`
	ChatEndpoint  string        `yaml:"chat_endpoint" json:"chat_endpoint" validate:"required"`
	ModelName     string        `yaml:"model" json:"model" validate:"required"`
	APIKey        fields.Secret `yaml:"api_key" json:"-" validate:"required"`
	AccessKey     string        `yaml:"access_key" json:"-" validate:"required"`
	SecretKey     string        `yaml:"secret_key" json:"-" validate:"required"`
	AWSRegion     string        `yaml:"aws_region" json:"awsRegion" validate:"required"`
	// DefaultParams, when set, supplies model parameters used in the absence
	// of per-request values.
	DefaultParams *Params       `yaml:"default_params,omitempty" json:"default_params"`
}

func DefaultConfig

func DefaultConfig() *Config

DefaultConfig returns the default configuration for Bedrock models.

func (*Config) UnmarshalYAML

func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error

type Params

// Params defines Bedrock-specific model parameters.
// TODO: add value validations.
type Params struct {
	Temperature  float64  `yaml:"temperature" json:"temperature"`
	TopP         float64  `yaml:"top_p" json:"top_p"`
	MaxTokens    int      `yaml:"max_tokens" json:"max_tokens"`
	StopSequence []string `yaml:"stop_sequences" json:"stop"`
}

Params defines Bedrock-specific model parameters with validation of their values. TODO: add validations.

func DefaultParams

func DefaultParams() Params

func (*Params) UnmarshalYAML

func (p *Params) UnmarshalYAML(unmarshal func(interface{}) error) error

type TextGenerationConfig

// TextGenerationConfig mirrors Bedrock's "textGenerationConfig" request
// object, which carries sampling and limit parameters.
type TextGenerationConfig struct {
	Temperature   float64  `json:"temperature"`
	TopP          float64  `json:"topP"`
	MaxTokenCount int      `json:"maxTokenCount"`
	StopSequences []string `json:"stopSequences,omitempty"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL