Documentation ¶
Overview ¶
Package proto includes all proto definitions used in the golang package in one large package.
It uses go generate tools to generate it from the source code, but we include the generated files in github, so one doesn't need to install anything.
Index ¶
- Constants
- Variables
- type Header
- func (*Header) Descriptor() ([]byte, []int) (deprecated)
- func (x *Header) GetInitialPredictions() []float32
- func (x *Header) GetLoss() Loss
- func (x *Header) GetNodeFormat() string
- func (x *Header) GetNumNodeShards() int32
- func (x *Header) GetNumTrees() int64
- func (x *Header) GetNumTreesPerIter() int32
- func (x *Header) GetOutputLogits() bool
- func (x *Header) GetTrainingLogs() *TrainingLogs
- func (x *Header) GetValidationLoss() float32
- func (*Header) ProtoMessage()
- func (x *Header) ProtoReflect() protoreflect.Message
- func (x *Header) Reset()
- func (x *Header) String() string
- type Loss
- func (Loss) Descriptor() protoreflect.EnumDescriptor
- func (x Loss) Enum() *Loss
- func (Loss) EnumDescriptor() ([]byte, []int) (deprecated)
- func (x Loss) Number() protoreflect.EnumNumber
- func (x Loss) String() string
- func (Loss) Type() protoreflect.EnumType
- func (x *Loss) UnmarshalJSON(b []byte) error (deprecated)
- type TrainingLogs
- func (*TrainingLogs) Descriptor() ([]byte, []int) (deprecated)
- func (x *TrainingLogs) GetEntries() []*TrainingLogs_Entry
- func (x *TrainingLogs) GetNumberOfTreesInFinalModel() int32
- func (x *TrainingLogs) GetSecondaryMetricNames() []string
- func (*TrainingLogs) ProtoMessage()
- func (x *TrainingLogs) ProtoReflect() protoreflect.Message
- func (x *TrainingLogs) Reset()
- func (x *TrainingLogs) String() string
- type TrainingLogs_Entry
- func (*TrainingLogs_Entry) Descriptor() ([]byte, []int) (deprecated)
- func (x *TrainingLogs_Entry) GetMeanAbsPrediction() float64
- func (x *TrainingLogs_Entry) GetNumberOfTrees() int32
- func (x *TrainingLogs_Entry) GetSubsampleFactor() float32
- func (x *TrainingLogs_Entry) GetTrainingLoss() float32
- func (x *TrainingLogs_Entry) GetTrainingSecondaryMetrics() []float32
- func (x *TrainingLogs_Entry) GetValidationLoss() float32
- func (x *TrainingLogs_Entry) GetValidationSecondaryMetrics() []float32
- func (*TrainingLogs_Entry) ProtoMessage()
- func (x *TrainingLogs_Entry) ProtoReflect() protoreflect.Message
- func (x *TrainingLogs_Entry) Reset()
- func (x *TrainingLogs_Entry) String() string
Constants ¶
View Source
const (
	Default_Header_NumTreesPerIter = int32(1)
	Default_Header_NodeFormat      = string("TFE_RECORDIO")
	Default_Header_OutputLogits    = bool(false)
)
Default values for Header fields.
View Source
const (
Default_TrainingLogs_Entry_SubsampleFactor = float32(1)
)
Default values for TrainingLogs_Entry fields.
Variables ¶
View Source
var (
	Loss_name = map[int32]string{
		0: "DEFAULT",
		1: "BINOMIAL_LOG_LIKELIHOOD",
		2: "SQUARED_ERROR",
		3: "MULTINOMIAL_LOG_LIKELIHOOD",
		4: "LAMBDA_MART_NDCG5",
		5: "XE_NDCG_MART",
		6: "BINARY_FOCAL_LOSS",
		7: "POISSON",
	}
	Loss_value = map[string]int32{
		"DEFAULT":                    0,
		"BINOMIAL_LOG_LIKELIHOOD":    1,
		"SQUARED_ERROR":              2,
		"MULTINOMIAL_LOG_LIKELIHOOD": 3,
		"LAMBDA_MART_NDCG5":          4,
		"XE_NDCG_MART":               5,
		"BINARY_FOCAL_LOSS":          6,
		"POISSON":                    7,
	}
)
Enum value maps for Loss.
View Source
var File_yggdrasil_decision_forests_model_gradient_boosted_trees_gradient_boosted_trees_proto protoreflect.FileDescriptor
Functions ¶
This section is empty.
Types ¶
type Header ¶
type Header struct {
	// Number of shards used to store the nodes.
	NumNodeShards *int32 `protobuf:"varint,1,opt,name=num_node_shards,json=numNodeShards" json:"num_node_shards,omitempty"`
	// Number of trees.
	NumTrees *int64 `protobuf:"varint,2,opt,name=num_trees,json=numTrees" json:"num_trees,omitempty"`
	// Loss used to train the model.
	Loss *Loss `` /* 127-byte string literal not displayed */
	// Initial predictions of the model (before any tree is applied). The semantics
	// of the prediction depend on the "loss".
	InitialPredictions []float32 `protobuf:"fixed32,4,rep,name=initial_predictions,json=initialPredictions" json:"initial_predictions,omitempty"`
	// Number of trees extracted at each gradient boosting operation.
	NumTreesPerIter *int32 `protobuf:"varint,5,opt,name=num_trees_per_iter,json=numTreesPerIter,def=1" json:"num_trees_per_iter,omitempty"`
	// Loss evaluated on the validation dataset. Only available if a validation
	// dataset was provided during training.
	ValidationLoss *float32 `protobuf:"fixed32,6,opt,name=validation_loss,json=validationLoss" json:"validation_loss,omitempty"`
	// Container used to store the trees' nodes.
	NodeFormat *string `protobuf:"bytes,7,opt,name=node_format,json=nodeFormat,def=TFE_RECORDIO" json:"node_format,omitempty"`
	// Evaluation metrics and other meta-data computed during training.
	TrainingLogs *TrainingLogs `protobuf:"bytes,8,opt,name=training_logs,json=trainingLogs" json:"training_logs,omitempty"`
	// If true, call to predict methods return logits (e.g. instead of probability
	// in the case of classification).
	OutputLogits *bool `protobuf:"varint,9,opt,name=output_logits,json=outputLogits,def=0" json:"output_logits,omitempty"`
	// contains filtered or unexported fields
}
Header for the gradient boosted trees model.
func (*Header) Descriptor
deprecated
func (*Header) GetInitialPredictions ¶
func (*Header) GetNodeFormat ¶
func (*Header) GetNumNodeShards ¶
func (*Header) GetNumTrees ¶
func (*Header) GetNumTreesPerIter ¶
func (*Header) GetOutputLogits ¶
func (*Header) GetTrainingLogs ¶
func (x *Header) GetTrainingLogs() *TrainingLogs
func (*Header) GetValidationLoss ¶
func (*Header) ProtoMessage ¶
func (*Header) ProtoMessage()
func (*Header) ProtoReflect ¶
func (x *Header) ProtoReflect() protoreflect.Message
type Loss ¶
type Loss int32
const (
	// Selects the most adapted loss according to the nature of the task and the
	// statistics of the label.
	// - Binary classification -> BINOMIAL_LOG_LIKELIHOOD.
	Loss_DEFAULT Loss = 0
	// Binomial log likelihood. Only valid for binary classification.
	Loss_BINOMIAL_LOG_LIKELIHOOD Loss = 1
	// Least square loss. Only valid for regression.
	Loss_SQUARED_ERROR Loss = 2
	// Multinomial log likelihood i.e. cross-entropy.
	Loss_MULTINOMIAL_LOG_LIKELIHOOD Loss = 3
	// LambdaMART with NDCG5
	Loss_LAMBDA_MART_NDCG5 Loss = 4
	// XE_NDCG_MART [arxiv.org/abs/1911.09798]
	Loss_XE_NDCG_MART Loss = 5
	// EXPERIMENTAL. Focal loss. Only valid for binary classification.
	// [https://arxiv.org/pdf/1708.02002.pdf]
	Loss_BINARY_FOCAL_LOSS Loss = 6
	// Poisson loss. Only valid for regression.
	Loss_POISSON Loss = 7
)
func (Loss) Descriptor ¶
func (Loss) Descriptor() protoreflect.EnumDescriptor
func (Loss) EnumDescriptor
deprecated
func (Loss) Number ¶
func (x Loss) Number() protoreflect.EnumNumber
func (Loss) Type ¶
func (Loss) Type() protoreflect.EnumType
func (*Loss) UnmarshalJSON
deprecated
type TrainingLogs ¶
type TrainingLogs struct {
	// Measurements of the model size and performances during the training.
	Entries []*TrainingLogs_Entry `protobuf:"bytes,1,rep,name=entries" json:"entries,omitempty"`
	// Names of the metrics stored in "secondary_metrics" field. The secondary
	// metrics depend on the task (e.g. classification) and are accessible with
	// "SecondaryMetricNames()". The i-th metric name of "secondary_metric_names"
	// corresponds to the i-th metric value in "training_secondary_metrics" and
	// "validation_secondary_metrics".
	SecondaryMetricNames []string `protobuf:"bytes,2,rep,name=secondary_metric_names,json=secondaryMetricNames" json:"secondary_metric_names,omitempty"`
	// Number of trees in the final model. Without early stopping,
	// "number_of_trees_in_final_model" is equal to the "number_of_trees" of the
	// last "entries".
	NumberOfTreesInFinalModel *int32 `` /* 138-byte string literal not displayed */
	// contains filtered or unexported fields
}
Log of the training. This proto is generated during the training of the model and optionally exported (as a plot) in the training logs directory.
func (*TrainingLogs) Descriptor
deprecated
func (*TrainingLogs) Descriptor() ([]byte, []int)
Deprecated: Use TrainingLogs.ProtoReflect.Descriptor instead.
func (*TrainingLogs) GetEntries ¶
func (x *TrainingLogs) GetEntries() []*TrainingLogs_Entry
func (*TrainingLogs) GetNumberOfTreesInFinalModel ¶
func (x *TrainingLogs) GetNumberOfTreesInFinalModel() int32
func (*TrainingLogs) GetSecondaryMetricNames ¶
func (x *TrainingLogs) GetSecondaryMetricNames() []string
func (*TrainingLogs) ProtoMessage ¶
func (*TrainingLogs) ProtoMessage()
func (*TrainingLogs) ProtoReflect ¶
func (x *TrainingLogs) ProtoReflect() protoreflect.Message
func (*TrainingLogs) Reset ¶
func (x *TrainingLogs) Reset()
func (*TrainingLogs) String ¶
func (x *TrainingLogs) String() string
type TrainingLogs_Entry ¶
type TrainingLogs_Entry struct {
	// Number of trees. In the case of multi-dimensional gradients,
	// "number_of_trees" is the number of training steps.
	NumberOfTrees *int32 `protobuf:"varint,1,opt,name=number_of_trees,json=numberOfTrees" json:"number_of_trees,omitempty"`
	// Performance of the model on the training dataset.
	TrainingLoss             *float32  `protobuf:"fixed32,2,opt,name=training_loss,json=trainingLoss" json:"training_loss,omitempty"`
	TrainingSecondaryMetrics []float32 `` /* 130-byte string literal not displayed */
	// Performance of the model on the validation dataset.
	ValidationLoss             *float32  `protobuf:"fixed32,4,opt,name=validation_loss,json=validationLoss" json:"validation_loss,omitempty"`
	ValidationSecondaryMetrics []float32 `` /* 136-byte string literal not displayed */
	// Average of the absolute value of the new tree predictions estimated on
	// the training dataset. See Dart paper
	// (http://proceedings.mlr.press/v38/korlakaivinayak15.pdf) for details on
	// how to interpret it.
	MeanAbsPrediction *float64 `protobuf:"fixed64,6,opt,name=mean_abs_prediction,json=meanAbsPrediction" json:"mean_abs_prediction,omitempty"`
	// Sub-sampling factor applied during training on top of the "sampling"
	// hyper-parameter. Currently, the "subsample_factor" is only controlled by
	// the "adapt_subsample_for_maximum_training_duration" field i.e. the
	// "subsample_factor" factor (default to 1) is reduced so the training
	// finishes in "maximum_training_duration".
	SubsampleFactor *float32 `protobuf:"fixed32,7,opt,name=subsample_factor,json=subsampleFactor,def=1" json:"subsample_factor,omitempty"`
	// contains filtered or unexported fields
}
func (*TrainingLogs_Entry) Descriptor
deprecated
func (*TrainingLogs_Entry) Descriptor() ([]byte, []int)
Deprecated: Use TrainingLogs_Entry.ProtoReflect.Descriptor instead.
func (*TrainingLogs_Entry) GetMeanAbsPrediction ¶
func (x *TrainingLogs_Entry) GetMeanAbsPrediction() float64
func (*TrainingLogs_Entry) GetNumberOfTrees ¶
func (x *TrainingLogs_Entry) GetNumberOfTrees() int32
func (*TrainingLogs_Entry) GetSubsampleFactor ¶
func (x *TrainingLogs_Entry) GetSubsampleFactor() float32
func (*TrainingLogs_Entry) GetTrainingLoss ¶
func (x *TrainingLogs_Entry) GetTrainingLoss() float32
func (*TrainingLogs_Entry) GetTrainingSecondaryMetrics ¶
func (x *TrainingLogs_Entry) GetTrainingSecondaryMetrics() []float32
func (*TrainingLogs_Entry) GetValidationLoss ¶
func (x *TrainingLogs_Entry) GetValidationLoss() float32
func (*TrainingLogs_Entry) GetValidationSecondaryMetrics ¶
func (x *TrainingLogs_Entry) GetValidationSecondaryMetrics() []float32
func (*TrainingLogs_Entry) ProtoMessage ¶
func (*TrainingLogs_Entry) ProtoMessage()
func (*TrainingLogs_Entry) ProtoReflect ¶
func (x *TrainingLogs_Entry) ProtoReflect() protoreflect.Message
func (*TrainingLogs_Entry) Reset ¶
func (x *TrainingLogs_Entry) Reset()
func (*TrainingLogs_Entry) String ¶
func (x *TrainingLogs_Entry) String() string
Click to show internal directories.
Click to hide internal directories.