Documentation ¶
Index ¶
- Variables
- func MaxScore(m map[string]float64) string
- func ParseOntonotes(ontoPath string, outPath string) error
- func Round(val float64, roundOn float64, places int) (newVal float64)
- type AveragedPerceptron
- type BaseTagger
- type FeatureClass
- type Leaves
- type Pair
- type PairList
- type PerceptronTagger
- type PosTagger
- type RegexpTokenizer
- type SplitTokenizer
- type SyntaxParser
- type Tag
- type TaggedToken
- type Token
- type Tokenizer
- type Tree
- type TreeBankTokenizer
- type WordsTags
Constants ¶
This section is empty.
Variables ¶
View Source
var (
	ErrBadThing        = errors.New("error description")
	ErrAnotherBadThing = errors.New("error description")
)
View Source
var (
	START          = []string{"-START-", "-START2-"}
	END            = []string{"-END-", "-END2-"}
	MODEL_GOB_PATH = "avp_model.gob"
)
Functions ¶
func ParseOntonotes ¶
Types ¶
type AveragedPerceptron ¶
type AveragedPerceptron struct {
	Classes map[string]struct{}
	// contains filtered or unexported fields
}
func NewAveragedPerceptron ¶
func NewAveragedPerceptron() *AveragedPerceptron
func (*AveragedPerceptron) AverageWeights ¶
func (ap *AveragedPerceptron) AverageWeights()
type BaseTagger ¶
type BaseTagger struct {
// contains filtered or unexported fields
}
type FeatureClass ¶
type FeatureClass struct {
// contains filtered or unexported fields
}
type PerceptronTagger ¶
type PerceptronTagger struct {
	Model  *AveragedPerceptron
	TagMap map[string]string
	BaseTagger
	// contains filtered or unexported fields
}
func NewPerceptronTagger ¶
func NewPerceptronTagger(tokenizer Tokenizer, load bool, path string) (*PerceptronTagger, error)
type RegexpTokenizer ¶
type RegexpTokenizer struct {
// contains filtered or unexported fields
}
func NewRegexpTokenizer ¶
func NewRegexpTokenizer(pattern string) *RegexpTokenizer
func (*RegexpTokenizer) Tokenize ¶
func (t *RegexpTokenizer) Tokenize(s string) []Token
type SplitTokenizer ¶
type SplitTokenizer struct {
// contains filtered or unexported fields
}
func NewSplitTokenizer ¶
func NewSplitTokenizer(sep string) *SplitTokenizer
func (*SplitTokenizer) Tokenize ¶
func (t *SplitTokenizer) Tokenize(s string) []Token
type SyntaxParser ¶
type SyntaxParser struct{}
func NewSyntaxParser ¶
func NewSyntaxParser() *SyntaxParser
func (*SyntaxParser) Parse ¶
func (sp *SyntaxParser) Parse(taggedTokens []TaggedToken) (Tree, error)
func (*SyntaxParser) PrettyPrint ¶
func (sp *SyntaxParser) PrettyPrint(tree Tree, level int)
type TaggedToken ¶
type TreeBankTokenizer ¶
type TreeBankTokenizer struct {
// contains filtered or unexported fields
}
func NewTreeBankTokenizer ¶
func NewTreeBankTokenizer() *TreeBankTokenizer
func (*TreeBankTokenizer) Tokenize ¶
func (t *TreeBankTokenizer) Tokenize(s string) []Token
Source Files ¶
Click to show internal directories.
Click to hide internal directories.