Documentation ¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type DecisionTreeNode ¶
type DecisionTreeNode struct {
    Type      NodeType
    Children  map[string]*DecisionTreeNode
    ClassDist map[string]int
    Class     string
    ClassAttr base.Attribute
    SplitRule *DecisionTreeRule
}
DecisionTreeNode represents a given portion of a decision tree.
func InferID3Tree ¶
func InferID3Tree(from base.FixedDataGrid, with RuleGenerator) *DecisionTreeNode
InferID3Tree builds a decision tree using a RuleGenerator from a set of Instances (implements the ID3 algorithm)
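A minimal sketch of building a tree directly with InferID3Tree (the import paths assume this package lives at github.com/sjwhitworth/golearn/trees, and the CSV path is a placeholder):

    package main

    import (
        "fmt"

        "github.com/sjwhitworth/golearn/base"
        "github.com/sjwhitworth/golearn/trees"
    )

    func main() {
        // Load a dataset with a header row; the path and file are placeholders.
        instances, err := base.ParseCSVToInstances("datasets/tennis.csv", true)
        if err != nil {
            panic(err)
        }
        // Grow the tree using the information-gain rule generator.
        root := trees.InferID3Tree(instances, new(trees.InformationGainRuleGenerator))
        fmt.Println(root) // String() prints the node and its children
    }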
func (*DecisionTreeNode) Predict ¶
func (d *DecisionTreeNode) Predict(what base.FixedDataGrid) (base.FixedDataGrid, error)
Predict outputs a base.FixedDataGrid containing predictions from this tree
func (*DecisionTreeNode) Prune ¶
func (d *DecisionTreeNode) Prune(using base.FixedDataGrid)
Prune eliminates branches which hurt accuracy
func (*DecisionTreeNode) String ¶
func (d *DecisionTreeNode) String() string
String returns a human-readable representation of a given node and its children
type DecisionTreeRule ¶
DecisionTreeRule represents the "decision" in "decision tree".
func (*DecisionTreeRule) String ¶
func (d *DecisionTreeRule) String() string
String returns a human-readable summary of this rule.
type GiniCoefficientRuleGenerator ¶
type GiniCoefficientRuleGenerator struct{}
GiniCoefficientRuleGenerator generates DecisionTreeRules which minimise the Gini impurity at each node.
func (*GiniCoefficientRuleGenerator) GenerateSplitRule ¶
func (g *GiniCoefficientRuleGenerator) GenerateSplitRule(f base.FixedDataGrid) *DecisionTreeRule
GenerateSplitRule returns the non-class Attribute-based DecisionTreeRule which minimises the Gini impurity.
IMPORTANT: passing a base.Instances with no Attributes other than the class variable will panic()
func (*GiniCoefficientRuleGenerator) GetSplitRuleFromSelection ¶
func (g *GiniCoefficientRuleGenerator) GetSplitRuleFromSelection(consideredAttributes []base.Attribute, f base.FixedDataGrid) *DecisionTreeRule
GetSplitRuleFromSelection returns the DecisionTreeRule which minimises the Gini impurity amongst consideredAttributes
IMPORTANT: passing a zero-length consideredAttributes parameter will panic()
type ID3DecisionTree ¶
type ID3DecisionTree struct {
    base.BaseClassifier

    Root       *DecisionTreeNode
    PruneSplit float64
    Rule       RuleGenerator
}
ID3DecisionTree represents an ID3-based decision tree using the Information Gain metric to select which attributes to split on at each node.
func NewID3DecisionTree ¶
func NewID3DecisionTree(prune float64) *ID3DecisionTree
NewID3DecisionTree returns a new ID3DecisionTree with the specified test-prune ratio and InformationGain as the rule generator. If the ratio is less than 0.001, the tree isn't pruned.
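A minimal end-to-end sketch of training and evaluating an ID3 tree (the import paths, CSV file, split proportion and 0.6 prune ratio are illustrative assumptions; evaluation is the sibling golearn package):

    package main

    import (
        "fmt"

        "github.com/sjwhitworth/golearn/base"
        "github.com/sjwhitworth/golearn/evaluation"
        "github.com/sjwhitworth/golearn/trees"
    )

    func main() {
        rawData, err := base.ParseCSVToInstances("datasets/iris_headers.csv", true)
        if err != nil {
            panic(err)
        }
        // Hold some data out for testing.
        trainData, testData := base.InstancesTrainTestSplit(rawData, 0.60)

        tree := trees.NewID3DecisionTree(0.6) // test-prune ratio of 0.6
        if err := tree.Fit(trainData); err != nil {
            panic(err)
        }

        predictions, err := tree.Predict(testData)
        if err != nil {
            panic(err)
        }
        cm, err := evaluation.GetConfusionMatrix(testData, predictions)
        if err != nil {
            panic(err)
        }
        fmt.Println(evaluation.GetSummary(cm))
    }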
func NewID3DecisionTreeFromRule ¶
func NewID3DecisionTreeFromRule(prune float64, rule RuleGenerator) *ID3DecisionTree
NewID3DecisionTreeFromRule returns a new ID3DecisionTree with the specified test-prune ratio and the given rule generator.
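The same workflow shown above works with any RuleGenerator; for instance, a sketch swapping in the Gini-based generator (trainData as in the previous sketch):

    tree := trees.NewID3DecisionTreeFromRule(0.6, new(trees.GiniCoefficientRuleGenerator))
    if err := tree.Fit(trainData); err != nil {
        panic(err)
    }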
func (*ID3DecisionTree) Fit ¶
func (t *ID3DecisionTree) Fit(on base.FixedDataGrid) error
Fit builds the ID3 decision tree
func (*ID3DecisionTree) Predict ¶
func (t *ID3DecisionTree) Predict(what base.FixedDataGrid) (base.FixedDataGrid, error)
Predict outputs predictions from the ID3 decision tree
func (*ID3DecisionTree) String ¶
func (t *ID3DecisionTree) String() string
String returns a human-readable version of this ID3 tree
type InformationGainRatioRuleGenerator ¶
type InformationGainRatioRuleGenerator struct{}
InformationGainRatioRuleGenerator generates DecisionTreeRules which maximise the information gain ratio at each node.
func (*InformationGainRatioRuleGenerator) GenerateSplitRule ¶
func (r *InformationGainRatioRuleGenerator) GenerateSplitRule(f base.FixedDataGrid) *DecisionTreeRule
GenerateSplitRule returns a DecisionTreeRule which maximises information gain ratio considering every available Attribute.
IMPORTANT: passing a base.Instances with no Attributes other than the class variable will panic()
func (*InformationGainRatioRuleGenerator) GetSplitRuleFromSelection ¶
func (r *InformationGainRatioRuleGenerator) GetSplitRuleFromSelection(consideredAttributes []base.Attribute, f base.FixedDataGrid) *DecisionTreeRule
GetSplitRuleFromSelection returns the DecisionTreeRule which maximises the information gain ratio, considering only a subset of Attributes.
IMPORTANT: passing a zero-length consideredAttributes parameter will panic()
type InformationGainRuleGenerator ¶
type InformationGainRuleGenerator struct{}
InformationGainRuleGenerator generates DecisionTreeRules which maximize information gain at each node.
func (*InformationGainRuleGenerator) GenerateSplitRule ¶
func (r *InformationGainRuleGenerator) GenerateSplitRule(f base.FixedDataGrid) *DecisionTreeRule
GenerateSplitRule returns a DecisionTreeRule based on the non-class Attribute which maximises the information gain.
IMPORTANT: passing a base.Instances with no Attributes other than the class variable will panic()
func (*InformationGainRuleGenerator) GetSplitRuleFromSelection ¶
func (r *InformationGainRuleGenerator) GetSplitRuleFromSelection(consideredAttributes []base.Attribute, f base.FixedDataGrid) *DecisionTreeRule
GetSplitRuleFromSelection returns a DecisionTreeRule which maximises the information gain amongst the considered Attributes.
IMPORTANT: passing a zero-length consideredAttributes parameter will panic()
type RandomTree ¶
type RandomTree struct {
    base.BaseClassifier

    Root *DecisionTreeNode
    Rule *RandomTreeRuleGenerator
}
RandomTree builds a decision tree by considering a fixed number of randomly-chosen attributes at each node
func NewRandomTree ¶
func NewRandomTree(attrs int) *RandomTree
NewRandomTree returns a new RandomTree which considers attrs randomly chosen attributes at each node.
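A short sketch of the RandomTree lifecycle (trainData, pruneData and testData are assumed to be base.FixedDataGrid values split from one dataset; the names are illustrative):

    rt := trees.NewRandomTree(2) // consider 2 random attributes at each node
    if err := rt.Fit(trainData); err != nil {
        panic(err)
    }
    rt.Prune(pruneData) // optional: prune against held-out data
    predictions, err := rt.Predict(testData)
    if err != nil {
        panic(err)
    }
    fmt.Println(predictions)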
func (*RandomTree) Fit ¶
func (rt *RandomTree) Fit(from base.FixedDataGrid) error
Fit builds a RandomTree suitable for prediction
func (*RandomTree) Predict ¶
func (rt *RandomTree) Predict(from base.FixedDataGrid) (base.FixedDataGrid, error)
Predict returns a base.FixedDataGrid containing predictions
func (*RandomTree) Prune ¶
func (rt *RandomTree) Prune(with base.FixedDataGrid)
Prune removes nodes from the tree which reduce prediction accuracy on the supplied test set (with)
func (*RandomTree) String ¶
func (rt *RandomTree) String() string
String returns a human-readable representation of this structure
type RandomTreeRuleGenerator ¶
type RandomTreeRuleGenerator struct {
    Attributes int
    // contains filtered or unexported fields
}
RandomTreeRuleGenerator is used to generate decision rules for Random Trees
func (*RandomTreeRuleGenerator) GenerateSplitRule ¶
func (r *RandomTreeRuleGenerator) GenerateSplitRule(f base.FixedDataGrid) *DecisionTreeRule
GenerateSplitRule returns the DecisionTreeRule which maximises information gain amongst a randomly chosen subset of Attributes
type RuleGenerator ¶
type RuleGenerator interface {
GenerateSplitRule(base.FixedDataGrid) *DecisionTreeRule
}
RuleGenerator implementations analyse instances and determine the best value to split on.
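Because the interface has a single method, a custom generator can wrap an existing one; a hypothetical sketch (loggingRuleGenerator is not part of this package):

    // loggingRuleGenerator delegates to InformationGainRuleGenerator and
    // prints each rule it chooses.
    type loggingRuleGenerator struct {
        inner trees.InformationGainRuleGenerator
    }

    func (l *loggingRuleGenerator) GenerateSplitRule(f base.FixedDataGrid) *trees.DecisionTreeRule {
        rule := l.inner.GenerateSplitRule(f)
        fmt.Println("split chosen:", rule)
        return rule
    }

A *loggingRuleGenerator can then be passed anywhere a RuleGenerator is expected, for example to NewID3DecisionTreeFromRule or InferID3Tree.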