Documentation ¶
Index ¶
- Variables
- func BlankResult(f *entity.Flag, evalContext models.EvalContext, msg string) *models.EvalResult
- func ErrorMessage(s string, data ...interface{}) *models.Error
- func LoadSimpleBooleanFlagTemplate(flag *entity.Flag, tx *gorm.DB) error
- func Setup(api *operations.FlagrAPI)
- type CRUD
- type DataRecordFrame
- type DataRecordFrameOptions
- type DataRecorder
- type Error
- type Eval
- type EvalCache
- type EvalCacheJSON
Constants ¶
This section is empty.
Variables ¶
var EvalFlag = func(evalContext models.EvalContext) *models.EvalResult { flag := LookupFlag(evalContext) return EvalFlagWithContext(flag, evalContext) }
// EvalFlagWithContext evaluates the given flag under evalContext and returns an
// EvalResult. A nil, disabled, or segment-less flag short-circuits to a blank
// result carrying an explanatory message. Otherwise segments are evaluated in
// order until one tells us to stop, and the matched segment/variant IDs are
// written into the result.
var EvalFlagWithContext = func(flag *entity.Flag, evalContext models.EvalContext) *models.EvalResult {
	flagID := util.SafeUint(evalContext.FlagID)
	flagKey := util.SafeString(evalContext.FlagKey)

	// Guard clauses: unknown/deleted, disabled, or empty flags produce a
	// blank result with a human-readable reason instead of an evaluation.
	if flag == nil {
		emptyFlag := &entity.Flag{Model: gorm.Model{ID: flagID}, Key: flagKey}
		return BlankResult(emptyFlag, evalContext, fmt.Sprintf("flagID %v not found or deleted", flagID))
	}
	if !flag.Enabled {
		return BlankResult(flag, evalContext, fmt.Sprintf("flagID %v is not enabled", flag.ID))
	}
	if len(flag.Segments) == 0 {
		return BlankResult(flag, evalContext, fmt.Sprintf("flagID %v has no segments", flag.ID))
	}

	// Missing entity ID: substitute a random one so evaluation can proceed
	// (presumably so distribution bucketing still has an entity key — the
	// hashing itself happens inside evalSegment, not visible here).
	if evalContext.EntityID == "" {
		evalContext.EntityID = fmt.Sprintf("randomly_generated_%d", rand.Int31())
	}
	// The flag's own entity type, when set, overrides the caller-supplied one.
	if flag.EntityType != "" {
		evalContext.EntityType = flag.EntityType
	}

	logs := []*models.SegmentDebugLog{}
	var vID int64
	var sID int64
	for _, segment := range flag.Segments {
		sID = int64(segment.ID)
		variantID, log, evalNextSegment := evalSegment(flag.ID, evalContext, segment)
		// Debug logs are collected only when both the server config and the
		// request opt in.
		if config.Config.EvalDebugEnabled && evalContext.EnableDebug {
			logs = append(logs, log)
		}
		// NOTE(review): vID keeps the last non-nil variant across iterations;
		// a later non-matching segment does not clear it.
		if variantID != nil {
			vID = int64(*variantID)
		}
		if !evalNextSegment {
			break
		}
	}

	// Assemble the final result from the last-seen segment and variant.
	evalResult := BlankResult(flag, evalContext, "")
	evalResult.EvalDebugLog.SegmentDebugLogs = logs
	evalResult.SegmentID = sID
	evalResult.VariantID = vID
	v := flag.FlagEvaluation.VariantsMap[util.SafeUint(vID)]
	if v != nil {
		evalResult.VariantAttachment = v.Attachment
		evalResult.VariantKey = v.Key
	}

	logEvalResult(evalResult, flag.DataRecordsEnabled)
	return evalResult
}
var EvalFlagsByTags = func(evalContext models.EvalContext) []*models.EvalResult { cache := GetEvalCache() fs := cache.GetByTags(evalContext.FlagTags, evalContext.FlagTagsOperator) results := []*models.EvalResult{} for _, f := range fs { results = append(results, EvalFlagWithContext(f, evalContext)) } return results }
var GetEvalCache = func() *EvalCache { singletonEvalCacheOnce.Do(func() { ec := &EvalCache{ refreshTimeout: config.Config.EvalCacheRefreshTimeout, refreshInterval: config.Config.EvalCacheRefreshInterval, } singletonEvalCache = ec }) return singletonEvalCache }
GetEvalCache gets the EvalCache
var LookupFlag = func(evalContext models.EvalContext) *entity.Flag { cache := GetEvalCache() flagID := util.SafeUint(evalContext.FlagID) flagKey := util.SafeString(evalContext.FlagKey) f := cache.GetByFlagKeyOrID(flagID) if f == nil { f = cache.GetByFlagKeyOrID(flagKey) } return f }
// NewKafkaRecorder creates a new Kafka recorder. It builds a sarama producer
// from the Recorder* config knobs (TLS, SASL, compression, acks, retries),
// drains producer errors in a background goroutine, and wires optional
// payload encryption into the returned kafkaRecorder. Calls Fatal (process
// exit) if the producer cannot be started.
var NewKafkaRecorder = func() DataRecorder {
	cfg := sarama.NewConfig()

	// TLS is enabled only when createTLSConfiguration yields a config from
	// the cert/key/CA settings.
	tlscfg := createTLSConfiguration(
		config.Config.RecorderKafkaCertFile,
		config.Config.RecorderKafkaKeyFile,
		config.Config.RecorderKafkaCAFile,
		config.Config.RecorderKafkaVerifySSL,
		config.Config.RecorderKafkaSimpleSSL,
	)
	if tlscfg != nil {
		cfg.Net.TLS.Enable = true
		cfg.Net.TLS.Config = tlscfg
	}

	// SASL auth only when both username and password are configured.
	if config.Config.RecorderKafkaSASLUsername != "" && config.Config.RecorderKafkaSASLPassword != "" {
		cfg.Net.SASL.Enable = true
		cfg.Net.SASL.User = config.Config.RecorderKafkaSASLUsername
		cfg.Net.SASL.Password = config.Config.RecorderKafkaSASLPassword
	}

	// Producer tuning straight from config; compression/acks are raw int
	// casts into sarama's enum types.
	cfg.Net.MaxOpenRequests = config.Config.RecorderKafkaMaxOpenReqs
	cfg.Producer.Compression = sarama.CompressionCodec(config.Config.RecorderKafkaCompressionCodec)
	cfg.Producer.RequiredAcks = sarama.RequiredAcks(config.Config.RecorderKafkaRequiredAcks)
	cfg.Producer.Idempotent = config.Config.RecorderKafkaIdempotent
	cfg.Producer.Retry.Max = config.Config.RecorderKafkaRetryMax
	cfg.Producer.Flush.Frequency = config.Config.RecorderKafkaFlushFrequency
	cfg.Version = mustParseKafkaVersion(config.Config.RecorderKafkaVersion)

	brokerList := strings.Split(config.Config.RecorderKafkaBrokers, ",")
	producer, err := saramaNewAsyncProducer(brokerList, cfg)
	if err != nil {
		// Fatal exits the process; the recorder is considered essential here.
		logrus.WithField("kafka_error", err).Fatal("Failed to start Sarama producer:")
	}

	// Drain async-producer errors so the error channel never blocks.
	// NOTE(review): this goroutine has no shutdown path visible here; it ends
	// only when the producer closes its Errors() channel.
	if producer != nil {
		go func() {
			for err := range producer.Errors() {
				logrus.WithField("kafka_error", err).Error("failed to write access log entry")
			}
		}()
	}

	// Encryption is active only when both the flag and a key are set.
	var encryptor dataRecordEncryptor
	if config.Config.RecorderKafkaEncrypted && config.Config.RecorderKafkaEncryptionKey != "" {
		encryptor = newSimpleboxEncryptor(config.Config.RecorderKafkaEncryptionKey)
	}

	return &kafkaRecorder{
		topic:    config.Config.RecorderKafkaTopic,
		producer: producer,
		options: DataRecordFrameOptions{
			Encrypted:       config.Config.RecorderKafkaEncrypted,
			Encryptor:       encryptor,
			FrameOutputMode: config.Config.RecorderFrameOutputMode,
		},
	}
}
NewKafkaRecorder creates a new Kafka recorder
// NewKinesisRecorder creates a new Kinesis recorder. It opens an AWS session,
// starts a batching Kinesis producer configured from the RecorderKinesis*
// settings, and logs push failures from a background goroutine. Calls Fatal
// (process exit) if the AWS session cannot be created.
var NewKinesisRecorder = func() DataRecorder {
	se, err := session.NewSession(aws.NewConfig())
	if err != nil {
		logrus.WithField("kinesis_error", err).Fatal("error creating aws session")
	}

	client := kinesis.New(se)

	// All batching/aggregation knobs come straight from config.
	p := newKinesisProducer(&producer.Config{
		StreamName:          config.Config.RecorderKinesisStreamName,
		Client:              client,
		BacklogCount:        config.Config.RecorderKinesisBacklogCount,
		MaxConnections:      config.Config.RecorderKinesisMaxConnections,
		FlushInterval:       config.Config.RecorderKinesisFlushInterval,
		BatchSize:           config.Config.RecorderKinesisBatchSize,
		BatchCount:          config.Config.RecorderKinesisBatchCount,
		AggregateBatchCount: config.Config.RecorderKinesisAggregateBatchCount,
		AggregateBatchSize:  config.Config.RecorderKinesisAggregateBatchSize,
		Verbose:             config.Config.RecorderKinesisVerbose,
		Logger:              logrus.WithField("producer", "kinesis"),
	})

	p.Start()

	// Log (but do not retry) failed puts; ends when NotifyFailures closes.
	go func() {
		for err := range p.NotifyFailures() {
			logrus.WithField("kinesis_error", err).Error("error pushing to kinesis")
		}
	}()

	// Kinesis frames are never encrypted here, unlike the Kafka recorder.
	return &kinesisRecorder{
		producer: p,
		options: DataRecordFrameOptions{
			Encrypted:       false,
			FrameOutputMode: config.Config.RecorderFrameOutputMode,
		},
	}
}
NewKinesisRecorder creates a new Kinesis recorder
var NewPubsubRecorder = func() DataRecorder { client, err := pubsubClient() if err != nil { logrus.WithField("pubsub_error", err).Fatal("error getting pubsub client") } return &pubsubRecorder{ producer: client, topic: client.Topic(config.Config.RecorderPubsubTopicName), options: DataRecordFrameOptions{ Encrypted: false, FrameOutputMode: config.Config.RecorderFrameOutputMode, }, } }
NewPubsubRecorder creates a new Pubsub recorder
Functions ¶
func BlankResult ¶
func BlankResult(f *entity.Flag, evalContext models.EvalContext, msg string) *models.EvalResult
BlankResult creates a blank result
func ErrorMessage ¶
ErrorMessage generates error messages
func LoadSimpleBooleanFlagTemplate ¶
LoadSimpleBooleanFlagTemplate loads the simple boolean flag template into a new flag. It creates a single segment, variant ('on'), and distribution.
Types ¶
type CRUD ¶
// CRUD is the CRUD interface. It gathers every flag-management handler
// (flags, tags, segments, constraints, distributions, variants); each method
// takes the generated swagger params type and returns a middleware.Responder.
type CRUD interface {
	// Flags
	FindFlags(flag.FindFlagsParams) middleware.Responder
	CreateFlag(flag.CreateFlagParams) middleware.Responder
	GetFlag(flag.GetFlagParams) middleware.Responder
	PutFlag(flag.PutFlagParams) middleware.Responder
	DeleteFlag(flag.DeleteFlagParams) middleware.Responder
	RestoreFlag(flag.RestoreFlagParams) middleware.Responder
	SetFlagEnabledState(flag.SetFlagEnabledParams) middleware.Responder
	GetFlagSnapshots(params flag.GetFlagSnapshotsParams) middleware.Responder
	GetFlagEntityTypes(params flag.GetFlagEntityTypesParams) middleware.Responder

	//Tags
	CreateTag(tag.CreateTagParams) middleware.Responder
	DeleteTag(tag.DeleteTagParams) middleware.Responder
	FindTags(tag.FindTagsParams) middleware.Responder
	FindAllTags(params tag.FindAllTagsParams) middleware.Responder

	// Segments
	CreateSegment(segment.CreateSegmentParams) middleware.Responder
	FindSegments(segment.FindSegmentsParams) middleware.Responder
	PutSegment(segment.PutSegmentParams) middleware.Responder
	DeleteSegment(segment.DeleteSegmentParams) middleware.Responder
	PutSegmentsReorder(segment.PutSegmentsReorderParams) middleware.Responder

	// Constraints
	CreateConstraint(constraint.CreateConstraintParams) middleware.Responder
	FindConstraints(constraint.FindConstraintsParams) middleware.Responder
	PutConstraint(params constraint.PutConstraintParams) middleware.Responder
	DeleteConstraint(params constraint.DeleteConstraintParams) middleware.Responder

	// Distributions
	FindDistributions(distribution.FindDistributionsParams) middleware.Responder
	PutDistributions(distribution.PutDistributionsParams) middleware.Responder

	// Variants
	CreateVariant(variant.CreateVariantParams) middleware.Responder
	FindVariants(variant.FindVariantsParams) middleware.Responder
	PutVariant(variant.PutVariantParams) middleware.Responder
	DeleteVariant(variant.DeleteVariantParams) middleware.Responder
}
CRUD is the CRUD interface
type DataRecordFrame ¶
// DataRecordFrame represents the structure we can json.Marshal into data
// recorders. Its fields are unexported; construct one via
// DataRecorder.NewDataRecordFrame.
type DataRecordFrame struct {
	// contains filtered or unexported fields
}
DataRecordFrame represents the structure we can json.Marshal into data recorders
func (*DataRecordFrame) GetPartitionKey ¶
func (drf *DataRecordFrame) GetPartitionKey() string
GetPartitionKey gets the partition key from entityID
func (*DataRecordFrame) MarshalJSON ¶
func (drf *DataRecordFrame) MarshalJSON() ([]byte, error)
MarshalJSON defines the behavior of MarshalJSON for DataRecordFrame
func (*DataRecordFrame) Output ¶
func (drf *DataRecordFrame) Output() ([]byte, error)
Output sets the payload using its input and returns the json marshal bytes
type DataRecordFrameOptions ¶
// DataRecordFrameOptions represents the options we can set to create a
// DataRecordFrame.
type DataRecordFrameOptions struct {
	// Encrypted indicates the frame payload should be encrypted
	// (set from e.g. RecorderKafkaEncrypted; Kinesis/Pubsub pass false).
	Encrypted bool
	// Encryptor performs the payload encryption; nil when Encrypted is false.
	Encryptor dataRecordEncryptor
	// FrameOutputMode selects the frame's output serialization mode
	// (copied from config.Config.RecorderFrameOutputMode).
	FrameOutputMode string
}
DataRecordFrameOptions represents the options we can set to create a DataRecordFrame
type DataRecorder ¶
// DataRecorder can record and produce the evaluation result.
type DataRecorder interface {
	// AsyncRecord records the evaluation result without blocking the caller.
	AsyncRecord(models.EvalResult)
	// NewDataRecordFrame builds the DataRecordFrame for an evaluation result.
	NewDataRecordFrame(models.EvalResult) DataRecordFrame
}
DataRecorder can record and produce the evaluation result
type Eval ¶
// Eval is the Eval interface: the evaluation API handlers for single and
// batch flag evaluation requests.
type Eval interface {
	PostEvaluation(evaluation.PostEvaluationParams) middleware.Responder
	PostEvaluationBatch(evaluation.PostEvaluationBatchParams) middleware.Responder
}
Eval is the Eval interface
type EvalCache ¶
// EvalCache is the in-memory cache just for evaluation. Its fields are
// unexported; obtain the singleton via GetEvalCache.
type EvalCache struct {
	// contains filtered or unexported fields
}
EvalCache is the in-memory cache just for evaluation
func GenFixtureEvalCache ¶
func GenFixtureEvalCache() *EvalCache
GenFixtureEvalCache generates a fixture
func (*EvalCache) GetByFlagKeyOrID ¶
GetByFlagKeyOrID gets the flag by Key or ID
type EvalCacheJSON ¶
EvalCacheJSON is the JSON serialization format of EvalCache's flags