Documentation ¶
Index ¶
- Variables
- func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer)
- type AnalyzeEntitiesRequest
- func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int)
- func (m *AnalyzeEntitiesRequest) GetDocument() *Document
- func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType
- func (*AnalyzeEntitiesRequest) ProtoMessage()
- func (m *AnalyzeEntitiesRequest) Reset()
- func (m *AnalyzeEntitiesRequest) String() string
- func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown()
- func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message)
- func (m *AnalyzeEntitiesRequest) XXX_Size() int
- func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error
- type AnalyzeEntitiesResponse
- func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int)
- func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity
- func (m *AnalyzeEntitiesResponse) GetLanguage() string
- func (*AnalyzeEntitiesResponse) ProtoMessage()
- func (m *AnalyzeEntitiesResponse) Reset()
- func (m *AnalyzeEntitiesResponse) String() string
- func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown()
- func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message)
- func (m *AnalyzeEntitiesResponse) XXX_Size() int
- func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error
- type AnalyzeSentimentRequest
- func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int)
- func (m *AnalyzeSentimentRequest) GetDocument() *Document
- func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType
- func (*AnalyzeSentimentRequest) ProtoMessage()
- func (m *AnalyzeSentimentRequest) Reset()
- func (m *AnalyzeSentimentRequest) String() string
- func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown()
- func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeSentimentRequest) XXX_Merge(src proto.Message)
- func (m *AnalyzeSentimentRequest) XXX_Size() int
- func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error
- type AnalyzeSentimentResponse
- func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int)
- func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment
- func (m *AnalyzeSentimentResponse) GetLanguage() string
- func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence
- func (*AnalyzeSentimentResponse) ProtoMessage()
- func (m *AnalyzeSentimentResponse) Reset()
- func (m *AnalyzeSentimentResponse) String() string
- func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown()
- func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeSentimentResponse) XXX_Merge(src proto.Message)
- func (m *AnalyzeSentimentResponse) XXX_Size() int
- func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error
- type AnalyzeSyntaxRequest
- func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int)
- func (m *AnalyzeSyntaxRequest) GetDocument() *Document
- func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType
- func (*AnalyzeSyntaxRequest) ProtoMessage()
- func (m *AnalyzeSyntaxRequest) Reset()
- func (m *AnalyzeSyntaxRequest) String() string
- func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown()
- func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message)
- func (m *AnalyzeSyntaxRequest) XXX_Size() int
- func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error
- type AnalyzeSyntaxResponse
- func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int)
- func (m *AnalyzeSyntaxResponse) GetLanguage() string
- func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence
- func (m *AnalyzeSyntaxResponse) GetTokens() []*Token
- func (*AnalyzeSyntaxResponse) ProtoMessage()
- func (m *AnalyzeSyntaxResponse) Reset()
- func (m *AnalyzeSyntaxResponse) String() string
- func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown()
- func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message)
- func (m *AnalyzeSyntaxResponse) XXX_Size() int
- func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error
- type AnnotateTextRequest
- func (*AnnotateTextRequest) Descriptor() ([]byte, []int)
- func (m *AnnotateTextRequest) GetDocument() *Document
- func (m *AnnotateTextRequest) GetEncodingType() EncodingType
- func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features
- func (*AnnotateTextRequest) ProtoMessage()
- func (m *AnnotateTextRequest) Reset()
- func (m *AnnotateTextRequest) String() string
- func (m *AnnotateTextRequest) XXX_DiscardUnknown()
- func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnnotateTextRequest) XXX_Merge(src proto.Message)
- func (m *AnnotateTextRequest) XXX_Size() int
- func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error
- type AnnotateTextRequest_Features
- func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int)
- func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool
- func (m *AnnotateTextRequest_Features) GetExtractEntities() bool
- func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool
- func (*AnnotateTextRequest_Features) ProtoMessage()
- func (m *AnnotateTextRequest_Features) Reset()
- func (m *AnnotateTextRequest_Features) String() string
- func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown()
- func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnnotateTextRequest_Features) XXX_Merge(src proto.Message)
- func (m *AnnotateTextRequest_Features) XXX_Size() int
- func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error
- type AnnotateTextResponse
- func (*AnnotateTextResponse) Descriptor() ([]byte, []int)
- func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment
- func (m *AnnotateTextResponse) GetEntities() []*Entity
- func (m *AnnotateTextResponse) GetLanguage() string
- func (m *AnnotateTextResponse) GetSentences() []*Sentence
- func (m *AnnotateTextResponse) GetTokens() []*Token
- func (*AnnotateTextResponse) ProtoMessage()
- func (m *AnnotateTextResponse) Reset()
- func (m *AnnotateTextResponse) String() string
- func (m *AnnotateTextResponse) XXX_DiscardUnknown()
- func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *AnnotateTextResponse) XXX_Merge(src proto.Message)
- func (m *AnnotateTextResponse) XXX_Size() int
- func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error
- type DependencyEdge
- func (*DependencyEdge) Descriptor() ([]byte, []int)
- func (m *DependencyEdge) GetHeadTokenIndex() int32
- func (m *DependencyEdge) GetLabel() DependencyEdge_Label
- func (*DependencyEdge) ProtoMessage()
- func (m *DependencyEdge) Reset()
- func (m *DependencyEdge) String() string
- func (m *DependencyEdge) XXX_DiscardUnknown()
- func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DependencyEdge) XXX_Merge(src proto.Message)
- func (m *DependencyEdge) XXX_Size() int
- func (m *DependencyEdge) XXX_Unmarshal(b []byte) error
- type DependencyEdge_Label
- type Document
- func (*Document) Descriptor() ([]byte, []int)
- func (m *Document) GetContent() string
- func (m *Document) GetGcsContentUri() string
- func (m *Document) GetLanguage() string
- func (m *Document) GetSource() isDocument_Source
- func (m *Document) GetType() Document_Type
- func (*Document) ProtoMessage()
- func (m *Document) Reset()
- func (m *Document) String() string
- func (m *Document) XXX_DiscardUnknown()
- func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Document) XXX_Merge(src proto.Message)
- func (*Document) XXX_OneofWrappers() []interface{}
- func (m *Document) XXX_Size() int
- func (m *Document) XXX_Unmarshal(b []byte) error
- type Document_Content
- type Document_GcsContentUri
- type Document_Type
- type EncodingType
- type Entity
- func (*Entity) Descriptor() ([]byte, []int)
- func (m *Entity) GetMentions() []*EntityMention
- func (m *Entity) GetMetadata() map[string]string
- func (m *Entity) GetName() string
- func (m *Entity) GetSalience() float32
- func (m *Entity) GetType() Entity_Type
- func (*Entity) ProtoMessage()
- func (m *Entity) Reset()
- func (m *Entity) String() string
- func (m *Entity) XXX_DiscardUnknown()
- func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Entity) XXX_Merge(src proto.Message)
- func (m *Entity) XXX_Size() int
- func (m *Entity) XXX_Unmarshal(b []byte) error
- type EntityMention
- func (*EntityMention) Descriptor() ([]byte, []int)
- func (m *EntityMention) GetText() *TextSpan
- func (m *EntityMention) GetType() EntityMention_Type
- func (*EntityMention) ProtoMessage()
- func (m *EntityMention) Reset()
- func (m *EntityMention) String() string
- func (m *EntityMention) XXX_DiscardUnknown()
- func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *EntityMention) XXX_Merge(src proto.Message)
- func (m *EntityMention) XXX_Size() int
- func (m *EntityMention) XXX_Unmarshal(b []byte) error
- type EntityMention_Type
- type Entity_Type
- type LanguageServiceClient
- type LanguageServiceServer
- type PartOfSpeech
- func (*PartOfSpeech) Descriptor() ([]byte, []int)
- func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect
- func (m *PartOfSpeech) GetCase() PartOfSpeech_Case
- func (m *PartOfSpeech) GetForm() PartOfSpeech_Form
- func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender
- func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood
- func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number
- func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person
- func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper
- func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity
- func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag
- func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense
- func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice
- func (*PartOfSpeech) ProtoMessage()
- func (m *PartOfSpeech) Reset()
- func (m *PartOfSpeech) String() string
- func (m *PartOfSpeech) XXX_DiscardUnknown()
- func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *PartOfSpeech) XXX_Merge(src proto.Message)
- func (m *PartOfSpeech) XXX_Size() int
- func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error
- type PartOfSpeech_Aspect
- type PartOfSpeech_Case
- type PartOfSpeech_Form
- type PartOfSpeech_Gender
- type PartOfSpeech_Mood
- type PartOfSpeech_Number
- type PartOfSpeech_Person
- type PartOfSpeech_Proper
- type PartOfSpeech_Reciprocity
- type PartOfSpeech_Tag
- type PartOfSpeech_Tense
- type PartOfSpeech_Voice
- type Sentence
- func (*Sentence) Descriptor() ([]byte, []int)
- func (m *Sentence) GetSentiment() *Sentiment
- func (m *Sentence) GetText() *TextSpan
- func (*Sentence) ProtoMessage()
- func (m *Sentence) Reset()
- func (m *Sentence) String() string
- func (m *Sentence) XXX_DiscardUnknown()
- func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Sentence) XXX_Merge(src proto.Message)
- func (m *Sentence) XXX_Size() int
- func (m *Sentence) XXX_Unmarshal(b []byte) error
- type Sentiment
- func (*Sentiment) Descriptor() ([]byte, []int)
- func (m *Sentiment) GetMagnitude() float32
- func (m *Sentiment) GetPolarity() float32
- func (m *Sentiment) GetScore() float32
- func (*Sentiment) ProtoMessage()
- func (m *Sentiment) Reset()
- func (m *Sentiment) String() string
- func (m *Sentiment) XXX_DiscardUnknown()
- func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Sentiment) XXX_Merge(src proto.Message)
- func (m *Sentiment) XXX_Size() int
- func (m *Sentiment) XXX_Unmarshal(b []byte) error
- type TextSpan
- func (*TextSpan) Descriptor() ([]byte, []int)
- func (m *TextSpan) GetBeginOffset() int32
- func (m *TextSpan) GetContent() string
- func (*TextSpan) ProtoMessage()
- func (m *TextSpan) Reset()
- func (m *TextSpan) String() string
- func (m *TextSpan) XXX_DiscardUnknown()
- func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *TextSpan) XXX_Merge(src proto.Message)
- func (m *TextSpan) XXX_Size() int
- func (m *TextSpan) XXX_Unmarshal(b []byte) error
- type Token
- func (*Token) Descriptor() ([]byte, []int)
- func (m *Token) GetDependencyEdge() *DependencyEdge
- func (m *Token) GetLemma() string
- func (m *Token) GetPartOfSpeech() *PartOfSpeech
- func (m *Token) GetText() *TextSpan
- func (*Token) ProtoMessage()
- func (m *Token) Reset()
- func (m *Token) String() string
- func (m *Token) XXX_DiscardUnknown()
- func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Token) XXX_Merge(src proto.Message)
- func (m *Token) XXX_Size() int
- func (m *Token) XXX_Unmarshal(b []byte) error
- type UnimplementedLanguageServiceServer
- func (*UnimplementedLanguageServiceServer) AnalyzeEntities(ctx context.Context, req *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
- func (*UnimplementedLanguageServiceServer) AnalyzeSentiment(ctx context.Context, req *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error)
- func (*UnimplementedLanguageServiceServer) AnalyzeSyntax(ctx context.Context, req *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
- func (*UnimplementedLanguageServiceServer) AnnotateText(ctx context.Context, req *AnnotateTextRequest) (*AnnotateTextResponse, error)
Constants ¶
This section is empty.
Variables ¶
var DependencyEdge_Label_name = map[int32]string{
0: "UNKNOWN",
1: "ABBREV",
2: "ACOMP",
3: "ADVCL",
4: "ADVMOD",
5: "AMOD",
6: "APPOS",
7: "ATTR",
8: "AUX",
9: "AUXPASS",
10: "CC",
11: "CCOMP",
12: "CONJ",
13: "CSUBJ",
14: "CSUBJPASS",
15: "DEP",
16: "DET",
17: "DISCOURSE",
18: "DOBJ",
19: "EXPL",
20: "GOESWITH",
21: "IOBJ",
22: "MARK",
23: "MWE",
24: "MWV",
25: "NEG",
26: "NN",
27: "NPADVMOD",
28: "NSUBJ",
29: "NSUBJPASS",
30: "NUM",
31: "NUMBER",
32: "P",
33: "PARATAXIS",
34: "PARTMOD",
35: "PCOMP",
36: "POBJ",
37: "POSS",
38: "POSTNEG",
39: "PRECOMP",
40: "PRECONJ",
41: "PREDET",
42: "PREF",
43: "PREP",
44: "PRONL",
45: "PRT",
46: "PS",
47: "QUANTMOD",
48: "RCMOD",
49: "RCMODREL",
50: "RDROP",
51: "REF",
52: "REMNANT",
53: "REPARANDUM",
54: "ROOT",
55: "SNUM",
56: "SUFF",
57: "TMOD",
58: "TOPIC",
59: "VMOD",
60: "VOCATIVE",
61: "XCOMP",
62: "SUFFIX",
63: "TITLE",
64: "ADVPHMOD",
65: "AUXCAUS",
66: "AUXVV",
67: "DTMOD",
68: "FOREIGN",
69: "KW",
70: "LIST",
71: "NOMC",
72: "NOMCSUBJ",
73: "NOMCSUBJPASS",
74: "NUMC",
75: "COP",
76: "DISLOCATED",
}
var DependencyEdge_Label_value = map[string]int32{
"UNKNOWN": 0,
"ABBREV": 1,
"ACOMP": 2,
"ADVCL": 3,
"ADVMOD": 4,
"AMOD": 5,
"APPOS": 6,
"ATTR": 7,
"AUX": 8,
"AUXPASS": 9,
"CC": 10,
"CCOMP": 11,
"CONJ": 12,
"CSUBJ": 13,
"CSUBJPASS": 14,
"DEP": 15,
"DET": 16,
"DISCOURSE": 17,
"DOBJ": 18,
"EXPL": 19,
"GOESWITH": 20,
"IOBJ": 21,
"MARK": 22,
"MWE": 23,
"MWV": 24,
"NEG": 25,
"NN": 26,
"NPADVMOD": 27,
"NSUBJ": 28,
"NSUBJPASS": 29,
"NUM": 30,
"NUMBER": 31,
"P": 32,
"PARATAXIS": 33,
"PARTMOD": 34,
"PCOMP": 35,
"POBJ": 36,
"POSS": 37,
"POSTNEG": 38,
"PRECOMP": 39,
"PRECONJ": 40,
"PREDET": 41,
"PREF": 42,
"PREP": 43,
"PRONL": 44,
"PRT": 45,
"PS": 46,
"QUANTMOD": 47,
"RCMOD": 48,
"RCMODREL": 49,
"RDROP": 50,
"REF": 51,
"REMNANT": 52,
"REPARANDUM": 53,
"ROOT": 54,
"SNUM": 55,
"SUFF": 56,
"TMOD": 57,
"TOPIC": 58,
"VMOD": 59,
"VOCATIVE": 60,
"XCOMP": 61,
"SUFFIX": 62,
"TITLE": 63,
"ADVPHMOD": 64,
"AUXCAUS": 65,
"AUXVV": 66,
"DTMOD": 67,
"FOREIGN": 68,
"KW": 69,
"LIST": 70,
"NOMC": 71,
"NOMCSUBJ": 72,
"NOMCSUBJPASS": 73,
"NUMC": 74,
"COP": 75,
"DISLOCATED": 76,
}
var Document_Type_name = map[int32]string{
0: "TYPE_UNSPECIFIED",
1: "PLAIN_TEXT",
2: "HTML",
}
var Document_Type_value = map[string]int32{
"TYPE_UNSPECIFIED": 0,
"PLAIN_TEXT": 1,
"HTML": 2,
}
var EncodingType_name = map[int32]string{
0: "NONE",
1: "UTF8",
2: "UTF16",
3: "UTF32",
}
var EncodingType_value = map[string]int32{
"NONE": 0,
"UTF8": 1,
"UTF16": 2,
"UTF32": 3,
}
var EntityMention_Type_name = map[int32]string{
0: "TYPE_UNKNOWN",
1: "PROPER",
2: "COMMON",
}
var EntityMention_Type_value = map[string]int32{
"TYPE_UNKNOWN": 0,
"PROPER": 1,
"COMMON": 2,
}
var Entity_Type_name = map[int32]string{
0: "UNKNOWN",
1: "PERSON",
2: "LOCATION",
3: "ORGANIZATION",
4: "EVENT",
5: "WORK_OF_ART",
6: "CONSUMER_GOOD",
7: "OTHER",
}
var Entity_Type_value = map[string]int32{
"UNKNOWN": 0,
"PERSON": 1,
"LOCATION": 2,
"ORGANIZATION": 3,
"EVENT": 4,
"WORK_OF_ART": 5,
"CONSUMER_GOOD": 6,
"OTHER": 7,
}
var PartOfSpeech_Aspect_name = map[int32]string{
0: "ASPECT_UNKNOWN",
1: "PERFECTIVE",
2: "IMPERFECTIVE",
3: "PROGRESSIVE",
}
var PartOfSpeech_Aspect_value = map[string]int32{
"ASPECT_UNKNOWN": 0,
"PERFECTIVE": 1,
"IMPERFECTIVE": 2,
"PROGRESSIVE": 3,
}
var PartOfSpeech_Case_name = map[int32]string{
0: "CASE_UNKNOWN",
1: "ACCUSATIVE",
2: "ADVERBIAL",
3: "COMPLEMENTIVE",
4: "DATIVE",
5: "GENITIVE",
6: "INSTRUMENTAL",
7: "LOCATIVE",
8: "NOMINATIVE",
9: "OBLIQUE",
10: "PARTITIVE",
11: "PREPOSITIONAL",
12: "REFLEXIVE_CASE",
13: "RELATIVE_CASE",
14: "VOCATIVE",
}
var PartOfSpeech_Case_value = map[string]int32{
"CASE_UNKNOWN": 0,
"ACCUSATIVE": 1,
"ADVERBIAL": 2,
"COMPLEMENTIVE": 3,
"DATIVE": 4,
"GENITIVE": 5,
"INSTRUMENTAL": 6,
"LOCATIVE": 7,
"NOMINATIVE": 8,
"OBLIQUE": 9,
"PARTITIVE": 10,
"PREPOSITIONAL": 11,
"REFLEXIVE_CASE": 12,
"RELATIVE_CASE": 13,
"VOCATIVE": 14,
}
var PartOfSpeech_Form_name = map[int32]string{
0: "FORM_UNKNOWN",
1: "ADNOMIAL",
2: "AUXILIARY",
3: "COMPLEMENTIZER",
4: "FINAL_ENDING",
5: "GERUND",
6: "REALIS",
7: "IRREALIS",
8: "SHORT",
9: "LONG",
10: "ORDER",
11: "SPECIFIC",
}
var PartOfSpeech_Form_value = map[string]int32{
"FORM_UNKNOWN": 0,
"ADNOMIAL": 1,
"AUXILIARY": 2,
"COMPLEMENTIZER": 3,
"FINAL_ENDING": 4,
"GERUND": 5,
"REALIS": 6,
"IRREALIS": 7,
"SHORT": 8,
"LONG": 9,
"ORDER": 10,
"SPECIFIC": 11,
}
var PartOfSpeech_Gender_name = map[int32]string{
0: "GENDER_UNKNOWN",
1: "FEMININE",
2: "MASCULINE",
3: "NEUTER",
}
var PartOfSpeech_Gender_value = map[string]int32{
"GENDER_UNKNOWN": 0,
"FEMININE": 1,
"MASCULINE": 2,
"NEUTER": 3,
}
var PartOfSpeech_Mood_name = map[int32]string{
0: "MOOD_UNKNOWN",
1: "CONDITIONAL_MOOD",
2: "IMPERATIVE",
3: "INDICATIVE",
4: "INTERROGATIVE",
5: "JUSSIVE",
6: "SUBJUNCTIVE",
}
var PartOfSpeech_Mood_value = map[string]int32{
"MOOD_UNKNOWN": 0,
"CONDITIONAL_MOOD": 1,
"IMPERATIVE": 2,
"INDICATIVE": 3,
"INTERROGATIVE": 4,
"JUSSIVE": 5,
"SUBJUNCTIVE": 6,
}
var PartOfSpeech_Number_name = map[int32]string{
0: "NUMBER_UNKNOWN",
1: "SINGULAR",
2: "PLURAL",
3: "DUAL",
}
var PartOfSpeech_Number_value = map[string]int32{
"NUMBER_UNKNOWN": 0,
"SINGULAR": 1,
"PLURAL": 2,
"DUAL": 3,
}
var PartOfSpeech_Person_name = map[int32]string{
0: "PERSON_UNKNOWN",
1: "FIRST",
2: "SECOND",
3: "THIRD",
4: "REFLEXIVE_PERSON",
}
var PartOfSpeech_Person_value = map[string]int32{
"PERSON_UNKNOWN": 0,
"FIRST": 1,
"SECOND": 2,
"THIRD": 3,
"REFLEXIVE_PERSON": 4,
}
var PartOfSpeech_Proper_name = map[int32]string{
0: "PROPER_UNKNOWN",
1: "PROPER",
2: "NOT_PROPER",
}
var PartOfSpeech_Proper_value = map[string]int32{
"PROPER_UNKNOWN": 0,
"PROPER": 1,
"NOT_PROPER": 2,
}
var PartOfSpeech_Reciprocity_name = map[int32]string{
0: "RECIPROCITY_UNKNOWN",
1: "RECIPROCAL",
2: "NON_RECIPROCAL",
}
var PartOfSpeech_Reciprocity_value = map[string]int32{
"RECIPROCITY_UNKNOWN": 0,
"RECIPROCAL": 1,
"NON_RECIPROCAL": 2,
}
var PartOfSpeech_Tag_name = map[int32]string{
0: "UNKNOWN",
1: "ADJ",
2: "ADP",
3: "ADV",
4: "CONJ",
5: "DET",
6: "NOUN",
7: "NUM",
8: "PRON",
9: "PRT",
10: "PUNCT",
11: "VERB",
12: "X",
13: "AFFIX",
}
var PartOfSpeech_Tag_value = map[string]int32{
"UNKNOWN": 0,
"ADJ": 1,
"ADP": 2,
"ADV": 3,
"CONJ": 4,
"DET": 5,
"NOUN": 6,
"NUM": 7,
"PRON": 8,
"PRT": 9,
"PUNCT": 10,
"VERB": 11,
"X": 12,
"AFFIX": 13,
}
var PartOfSpeech_Tense_name = map[int32]string{
0: "TENSE_UNKNOWN",
1: "CONDITIONAL_TENSE",
2: "FUTURE",
3: "PAST",
4: "PRESENT",
5: "IMPERFECT",
6: "PLUPERFECT",
}
var PartOfSpeech_Tense_value = map[string]int32{
"TENSE_UNKNOWN": 0,
"CONDITIONAL_TENSE": 1,
"FUTURE": 2,
"PAST": 3,
"PRESENT": 4,
"IMPERFECT": 5,
"PLUPERFECT": 6,
}
var PartOfSpeech_Voice_name = map[int32]string{
0: "VOICE_UNKNOWN",
1: "ACTIVE",
2: "CAUSATIVE",
3: "PASSIVE",
}
var PartOfSpeech_Voice_value = map[string]int32{
"VOICE_UNKNOWN": 0,
"ACTIVE": 1,
"CAUSATIVE": 2,
"PASSIVE": 3,
}
Functions ¶
func RegisterLanguageServiceServer ¶
func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer)
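Example (not part of the generated file): a minimal server sketch, assuming the package is imported as languagepb from google.golang.org/genproto/googleapis/cloud/language/v1beta1. The server type, stubbed response, and listen address are illustrative only.

package main

import (
	"context"
	"log"
	"net"

	languagepb "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
	"google.golang.org/grpc"
)

// langServer is a hypothetical implementation. Embedding
// UnimplementedLanguageServiceServer supplies default stubs for the
// methods that are not overridden here.
type langServer struct {
	languagepb.UnimplementedLanguageServiceServer
}

// AnalyzeSentiment overrides a single method with a fixed response.
func (s *langServer) AnalyzeSentiment(ctx context.Context, req *languagepb.AnalyzeSentimentRequest) (*languagepb.AnalyzeSentimentResponse, error) {
	return &languagepb.AnalyzeSentimentResponse{
		DocumentSentiment: &languagepb.Sentiment{Score: 0.8, Magnitude: 0.8},
	}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":10000") // illustrative address
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	languagepb.RegisterLanguageServiceServer(s, &langServer{})
	log.Fatal(s.Serve(lis))
}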
Types ¶
type AnalyzeEntitiesRequest ¶
type AnalyzeEntitiesRequest struct { // Input document. Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` // The encoding type used by the API to calculate offsets. EncodingType EncodingType `` /* 146-byte string literal not displayed */ XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The entity analysis request message.
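Example (illustrative sketch, using the languagepb import alias assumed above): building an entity analysis request from inline plain text.

req := &languagepb.AnalyzeEntitiesRequest{
	Document: &languagepb.Document{
		Type:   languagepb.Document_PLAIN_TEXT,
		Source: &languagepb.Document_Content{Content: "Grace Hopper worked at Harvard."},
	},
	// UTF8 makes the returned begin offsets byte offsets, which matches Go strings.
	EncodingType: languagepb.EncodingType_UTF8,
}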
func (*AnalyzeEntitiesRequest) Descriptor ¶
func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int)
func (*AnalyzeEntitiesRequest) GetDocument ¶
func (m *AnalyzeEntitiesRequest) GetDocument() *Document
func (*AnalyzeEntitiesRequest) GetEncodingType ¶
func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType
func (*AnalyzeEntitiesRequest) ProtoMessage ¶
func (*AnalyzeEntitiesRequest) ProtoMessage()
func (*AnalyzeEntitiesRequest) Reset ¶
func (m *AnalyzeEntitiesRequest) Reset()
func (*AnalyzeEntitiesRequest) String ¶
func (m *AnalyzeEntitiesRequest) String() string
func (*AnalyzeEntitiesRequest) XXX_DiscardUnknown ¶
func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown()
func (*AnalyzeEntitiesRequest) XXX_Marshal ¶
func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeEntitiesRequest) XXX_Merge ¶
func (m *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message)
func (*AnalyzeEntitiesRequest) XXX_Size ¶
func (m *AnalyzeEntitiesRequest) XXX_Size() int
func (*AnalyzeEntitiesRequest) XXX_Unmarshal ¶
func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error
type AnalyzeEntitiesResponse ¶
type AnalyzeEntitiesResponse struct { // The recognized entities in the input document. Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. // See [Document.language][google.cloud.language.v1beta1.Document.language] // field for more details. Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The entity analysis response message.
func (*AnalyzeEntitiesResponse) Descriptor ¶
func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int)
func (*AnalyzeEntitiesResponse) GetEntities ¶
func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity
func (*AnalyzeEntitiesResponse) GetLanguage ¶
func (m *AnalyzeEntitiesResponse) GetLanguage() string
func (*AnalyzeEntitiesResponse) ProtoMessage ¶
func (*AnalyzeEntitiesResponse) ProtoMessage()
func (*AnalyzeEntitiesResponse) Reset ¶
func (m *AnalyzeEntitiesResponse) Reset()
func (*AnalyzeEntitiesResponse) String ¶
func (m *AnalyzeEntitiesResponse) String() string
func (*AnalyzeEntitiesResponse) XXX_DiscardUnknown ¶
func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown()
func (*AnalyzeEntitiesResponse) XXX_Marshal ¶
func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeEntitiesResponse) XXX_Merge ¶
func (m *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message)
func (*AnalyzeEntitiesResponse) XXX_Size ¶
func (m *AnalyzeEntitiesResponse) XXX_Size() int
func (*AnalyzeEntitiesResponse) XXX_Unmarshal ¶
func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error
type AnalyzeSentimentRequest ¶
type AnalyzeSentimentRequest struct { // Input document. Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` // The encoding type used by the API to calculate sentence offsets for the // sentence sentiment. EncodingType EncodingType `` /* 146-byte string literal not displayed */ XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The sentiment analysis request message.
func (*AnalyzeSentimentRequest) Descriptor ¶
func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int)
func (*AnalyzeSentimentRequest) GetDocument ¶
func (m *AnalyzeSentimentRequest) GetDocument() *Document
func (*AnalyzeSentimentRequest) GetEncodingType ¶
func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType
func (*AnalyzeSentimentRequest) ProtoMessage ¶
func (*AnalyzeSentimentRequest) ProtoMessage()
func (*AnalyzeSentimentRequest) Reset ¶
func (m *AnalyzeSentimentRequest) Reset()
func (*AnalyzeSentimentRequest) String ¶
func (m *AnalyzeSentimentRequest) String() string
func (*AnalyzeSentimentRequest) XXX_DiscardUnknown ¶
func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown()
func (*AnalyzeSentimentRequest) XXX_Marshal ¶
func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeSentimentRequest) XXX_Merge ¶
func (m *AnalyzeSentimentRequest) XXX_Merge(src proto.Message)
func (*AnalyzeSentimentRequest) XXX_Size ¶
func (m *AnalyzeSentimentRequest) XXX_Size() int
func (*AnalyzeSentimentRequest) XXX_Unmarshal ¶
func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error
type AnalyzeSentimentResponse ¶
type AnalyzeSentimentResponse struct { // The overall sentiment of the input document. DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. // See [Document.language][google.cloud.language.v1beta1.Document.language] // field for more details. Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` // The sentiment for all the sentences in the document. Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences,proto3" json:"sentences,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The sentiment analysis response message.
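Example (illustrative sketch; resp is assumed to be an *AnalyzeSentimentResponse, and the snippet relies on the generated getters being nil-safe):

// Overall document sentiment.
fmt.Printf("language=%s score=%.2f magnitude=%.2f\n",
	resp.GetLanguage(),
	resp.GetDocumentSentiment().GetScore(),
	resp.GetDocumentSentiment().GetMagnitude())

// Sentence-level sentiment.
for _, s := range resp.GetSentences() {
	fmt.Printf("%q -> %.2f\n", s.GetText().GetContent(), s.GetSentiment().GetScore())
}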
func (*AnalyzeSentimentResponse) Descriptor ¶
func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int)
func (*AnalyzeSentimentResponse) GetDocumentSentiment ¶
func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment
func (*AnalyzeSentimentResponse) GetLanguage ¶
func (m *AnalyzeSentimentResponse) GetLanguage() string
func (*AnalyzeSentimentResponse) GetSentences ¶
func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence
func (*AnalyzeSentimentResponse) ProtoMessage ¶
func (*AnalyzeSentimentResponse) ProtoMessage()
func (*AnalyzeSentimentResponse) Reset ¶
func (m *AnalyzeSentimentResponse) Reset()
func (*AnalyzeSentimentResponse) String ¶
func (m *AnalyzeSentimentResponse) String() string
func (*AnalyzeSentimentResponse) XXX_DiscardUnknown ¶
func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown()
func (*AnalyzeSentimentResponse) XXX_Marshal ¶
func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeSentimentResponse) XXX_Merge ¶
func (m *AnalyzeSentimentResponse) XXX_Merge(src proto.Message)
func (*AnalyzeSentimentResponse) XXX_Size ¶
func (m *AnalyzeSentimentResponse) XXX_Size() int
func (*AnalyzeSentimentResponse) XXX_Unmarshal ¶
func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error
type AnalyzeSyntaxRequest ¶
type AnalyzeSyntaxRequest struct { // Input document. Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` // The encoding type used by the API to calculate offsets. EncodingType EncodingType `` /* 146-byte string literal not displayed */ XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The syntax analysis request message.
func (*AnalyzeSyntaxRequest) Descriptor ¶
func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int)
func (*AnalyzeSyntaxRequest) GetDocument ¶
func (m *AnalyzeSyntaxRequest) GetDocument() *Document
func (*AnalyzeSyntaxRequest) GetEncodingType ¶
func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType
func (*AnalyzeSyntaxRequest) ProtoMessage ¶
func (*AnalyzeSyntaxRequest) ProtoMessage()
func (*AnalyzeSyntaxRequest) Reset ¶
func (m *AnalyzeSyntaxRequest) Reset()
func (*AnalyzeSyntaxRequest) String ¶
func (m *AnalyzeSyntaxRequest) String() string
func (*AnalyzeSyntaxRequest) XXX_DiscardUnknown ¶
func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown()
func (*AnalyzeSyntaxRequest) XXX_Marshal ¶
func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeSyntaxRequest) XXX_Merge ¶
func (m *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message)
func (*AnalyzeSyntaxRequest) XXX_Size ¶
func (m *AnalyzeSyntaxRequest) XXX_Size() int
func (*AnalyzeSyntaxRequest) XXX_Unmarshal ¶
func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error
type AnalyzeSyntaxResponse ¶
type AnalyzeSyntaxResponse struct { // Sentences in the input document. Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` // Tokens, along with their syntactic information, in the input document. Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. // See [Document.language][google.cloud.language.v1beta1.Document.language] // field for more details. Language string `protobuf:"bytes,3,opt,name=language,proto3" json:"language,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The syntax analysis response message.
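Example (illustrative sketch; resp is assumed to be an *AnalyzeSyntaxResponse):

for i, tok := range resp.GetTokens() {
	fmt.Printf("%d: %q lemma=%s tag=%s label=%s head=%d\n",
		i,
		tok.GetText().GetContent(),
		tok.GetLemma(),
		tok.GetPartOfSpeech().GetTag(),
		tok.GetDependencyEdge().GetLabel(),
		tok.GetDependencyEdge().GetHeadTokenIndex())
}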
func (*AnalyzeSyntaxResponse) Descriptor ¶
func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int)
func (*AnalyzeSyntaxResponse) GetLanguage ¶
func (m *AnalyzeSyntaxResponse) GetLanguage() string
func (*AnalyzeSyntaxResponse) GetSentences ¶
func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence
func (*AnalyzeSyntaxResponse) GetTokens ¶
func (m *AnalyzeSyntaxResponse) GetTokens() []*Token
func (*AnalyzeSyntaxResponse) ProtoMessage ¶
func (*AnalyzeSyntaxResponse) ProtoMessage()
func (*AnalyzeSyntaxResponse) Reset ¶
func (m *AnalyzeSyntaxResponse) Reset()
func (*AnalyzeSyntaxResponse) String ¶
func (m *AnalyzeSyntaxResponse) String() string
func (*AnalyzeSyntaxResponse) XXX_DiscardUnknown ¶
func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown()
func (*AnalyzeSyntaxResponse) XXX_Marshal ¶
func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnalyzeSyntaxResponse) XXX_Merge ¶
func (m *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message)
func (*AnalyzeSyntaxResponse) XXX_Size ¶
func (m *AnalyzeSyntaxResponse) XXX_Size() int
func (*AnalyzeSyntaxResponse) XXX_Unmarshal ¶
func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error
type AnnotateTextRequest ¶
type AnnotateTextRequest struct { // Input document. Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` // The enabled features. Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features,proto3" json:"features,omitempty"` // The encoding type used by the API to calculate offsets. EncodingType EncodingType `` /* 146-byte string literal not displayed */ XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The request message for the text annotation API, which can perform multiple analysis types (sentiment, entities, and syntax) in one call.
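Example (illustrative sketch): a combined request; which analyses run is controlled entirely by Features.

req := &languagepb.AnnotateTextRequest{
	Document: &languagepb.Document{
		Type:   languagepb.Document_PLAIN_TEXT,
		Source: &languagepb.Document_Content{Content: "The quick brown fox jumps over the lazy dog."},
	},
	Features: &languagepb.AnnotateTextRequest_Features{
		ExtractSyntax:            true,
		ExtractEntities:          true,
		ExtractDocumentSentiment: true,
	},
	EncodingType: languagepb.EncodingType_UTF8,
}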
func (*AnnotateTextRequest) Descriptor ¶
func (*AnnotateTextRequest) Descriptor() ([]byte, []int)
func (*AnnotateTextRequest) GetDocument ¶
func (m *AnnotateTextRequest) GetDocument() *Document
func (*AnnotateTextRequest) GetEncodingType ¶
func (m *AnnotateTextRequest) GetEncodingType() EncodingType
func (*AnnotateTextRequest) GetFeatures ¶
func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features
func (*AnnotateTextRequest) ProtoMessage ¶
func (*AnnotateTextRequest) ProtoMessage()
func (*AnnotateTextRequest) Reset ¶
func (m *AnnotateTextRequest) Reset()
func (*AnnotateTextRequest) String ¶
func (m *AnnotateTextRequest) String() string
func (*AnnotateTextRequest) XXX_DiscardUnknown ¶
func (m *AnnotateTextRequest) XXX_DiscardUnknown()
func (*AnnotateTextRequest) XXX_Marshal ¶
func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnnotateTextRequest) XXX_Merge ¶
func (m *AnnotateTextRequest) XXX_Merge(src proto.Message)
func (*AnnotateTextRequest) XXX_Size ¶
func (m *AnnotateTextRequest) XXX_Size() int
func (*AnnotateTextRequest) XXX_Unmarshal ¶
func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error
type AnnotateTextRequest_Features ¶
type AnnotateTextRequest_Features struct { // Extract syntax information. ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax,proto3" json:"extract_syntax,omitempty"` // Extract entities. ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities,proto3" json:"extract_entities,omitempty"` // Extract document-level sentiment. ExtractDocumentSentiment bool `` /* 136-byte string literal not displayed */ XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
All available features for sentiment, syntax, and semantic analysis. Setting each one to true will enable that specific analysis for the input.
func (*AnnotateTextRequest_Features) Descriptor ¶
func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int)
func (*AnnotateTextRequest_Features) GetExtractDocumentSentiment ¶
func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool
func (*AnnotateTextRequest_Features) GetExtractEntities ¶
func (m *AnnotateTextRequest_Features) GetExtractEntities() bool
func (*AnnotateTextRequest_Features) GetExtractSyntax ¶
func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool
func (*AnnotateTextRequest_Features) ProtoMessage ¶
func (*AnnotateTextRequest_Features) ProtoMessage()
func (*AnnotateTextRequest_Features) Reset ¶
func (m *AnnotateTextRequest_Features) Reset()
func (*AnnotateTextRequest_Features) String ¶
func (m *AnnotateTextRequest_Features) String() string
func (*AnnotateTextRequest_Features) XXX_DiscardUnknown ¶
func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown()
func (*AnnotateTextRequest_Features) XXX_Marshal ¶
func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnnotateTextRequest_Features) XXX_Merge ¶
func (m *AnnotateTextRequest_Features) XXX_Merge(src proto.Message)
func (*AnnotateTextRequest_Features) XXX_Size ¶
func (m *AnnotateTextRequest_Features) XXX_Size() int
func (*AnnotateTextRequest_Features) XXX_Unmarshal ¶
func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error
type AnnotateTextResponse ¶
type AnnotateTextResponse struct { // Sentences in the input document. Populated if the user enables // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_syntax]. Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` // Tokens, along with their syntactic information, in the input document. // Populated if the user enables // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_syntax]. Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` // Entities, along with their semantic information, in the input document. // Populated if the user enables // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_entities]. Entities []*Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` // The overall sentiment for the document. Populated if the user enables // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment]. DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. // See [Document.language][google.cloud.language.v1beta1.Document.language] // field for more details. Language string `protobuf:"bytes,5,opt,name=language,proto3" json:"language,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
The text annotations response message.
func (*AnnotateTextResponse) Descriptor ¶
func (*AnnotateTextResponse) Descriptor() ([]byte, []int)
func (*AnnotateTextResponse) GetDocumentSentiment ¶
func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment
func (*AnnotateTextResponse) GetEntities ¶
func (m *AnnotateTextResponse) GetEntities() []*Entity
func (*AnnotateTextResponse) GetLanguage ¶
func (m *AnnotateTextResponse) GetLanguage() string
func (*AnnotateTextResponse) GetSentences ¶
func (m *AnnotateTextResponse) GetSentences() []*Sentence
func (*AnnotateTextResponse) GetTokens ¶
func (m *AnnotateTextResponse) GetTokens() []*Token
func (*AnnotateTextResponse) ProtoMessage ¶
func (*AnnotateTextResponse) ProtoMessage()
func (*AnnotateTextResponse) Reset ¶
func (m *AnnotateTextResponse) Reset()
func (*AnnotateTextResponse) String ¶
func (m *AnnotateTextResponse) String() string
func (*AnnotateTextResponse) XXX_DiscardUnknown ¶
func (m *AnnotateTextResponse) XXX_DiscardUnknown()
func (*AnnotateTextResponse) XXX_Marshal ¶
func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*AnnotateTextResponse) XXX_Merge ¶
func (m *AnnotateTextResponse) XXX_Merge(src proto.Message)
func (*AnnotateTextResponse) XXX_Size ¶
func (m *AnnotateTextResponse) XXX_Size() int
func (*AnnotateTextResponse) XXX_Unmarshal ¶
func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error
type DependencyEdge ¶
type DependencyEdge struct { // Represents the head of this token in the dependency tree. // This is the index of the token which has an arc going to this token. // The index is the position of the token in the array of tokens returned // by the API method. If this token is a root token, then the // `head_token_index` is its own index. HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex,proto3" json:"head_token_index,omitempty"` // The parse label for the token. Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,proto3,enum=google.cloud.language.v1beta1.DependencyEdge_Label" json:"label,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
Represents dependency parse tree information for a token.
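Example (illustrative sketch): resolving each token's head via head_token_index; tokens is assumed to be the []*Token slice from an AnalyzeSyntaxResponse.

for i, tok := range tokens {
	edge := tok.GetDependencyEdge()
	head := int(edge.GetHeadTokenIndex())
	if head == i {
		// A root token's head_token_index is its own index.
		fmt.Printf("%q (ROOT)\n", tok.GetText().GetContent())
		continue
	}
	if head >= 0 && head < len(tokens) {
		fmt.Printf("%q --%s--> %q\n",
			tok.GetText().GetContent(), edge.GetLabel(),
			tokens[head].GetText().GetContent())
	}
}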
func (*DependencyEdge) Descriptor ¶
func (*DependencyEdge) Descriptor() ([]byte, []int)
func (*DependencyEdge) GetHeadTokenIndex ¶
func (m *DependencyEdge) GetHeadTokenIndex() int32
func (*DependencyEdge) GetLabel ¶
func (m *DependencyEdge) GetLabel() DependencyEdge_Label
func (*DependencyEdge) ProtoMessage ¶
func (*DependencyEdge) ProtoMessage()
func (*DependencyEdge) Reset ¶
func (m *DependencyEdge) Reset()
func (*DependencyEdge) String ¶
func (m *DependencyEdge) String() string
func (*DependencyEdge) XXX_DiscardUnknown ¶
func (m *DependencyEdge) XXX_DiscardUnknown()
func (*DependencyEdge) XXX_Marshal ¶
func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DependencyEdge) XXX_Merge ¶
func (m *DependencyEdge) XXX_Merge(src proto.Message)
func (*DependencyEdge) XXX_Size ¶
func (m *DependencyEdge) XXX_Size() int
func (*DependencyEdge) XXX_Unmarshal ¶
func (m *DependencyEdge) XXX_Unmarshal(b []byte) error
type DependencyEdge_Label ¶
type DependencyEdge_Label int32
The parse label enum for the token.
const ( // Unknown DependencyEdge_UNKNOWN DependencyEdge_Label = 0 // Abbreviation modifier DependencyEdge_ABBREV DependencyEdge_Label = 1 // Adjectival complement DependencyEdge_ACOMP DependencyEdge_Label = 2 // Adverbial clause modifier DependencyEdge_ADVCL DependencyEdge_Label = 3 // Adverbial modifier DependencyEdge_ADVMOD DependencyEdge_Label = 4 // Adjectival modifier of an NP DependencyEdge_AMOD DependencyEdge_Label = 5 // Appositional modifier of an NP DependencyEdge_APPOS DependencyEdge_Label = 6 // Attribute dependent of a copular verb DependencyEdge_ATTR DependencyEdge_Label = 7 // Auxiliary (non-main) verb DependencyEdge_AUX DependencyEdge_Label = 8 // Passive auxiliary DependencyEdge_AUXPASS DependencyEdge_Label = 9 // Coordinating conjunction DependencyEdge_CC DependencyEdge_Label = 10 // Clausal complement of a verb or adjective DependencyEdge_CCOMP DependencyEdge_Label = 11 // Conjunct DependencyEdge_CONJ DependencyEdge_Label = 12 // Clausal subject DependencyEdge_CSUBJ DependencyEdge_Label = 13 // Clausal passive subject DependencyEdge_CSUBJPASS DependencyEdge_Label = 14 // Dependency (unable to determine) DependencyEdge_DEP DependencyEdge_Label = 15 // Determiner DependencyEdge_DET DependencyEdge_Label = 16 // Discourse DependencyEdge_DISCOURSE DependencyEdge_Label = 17 // Direct object DependencyEdge_DOBJ DependencyEdge_Label = 18 // Expletive DependencyEdge_EXPL DependencyEdge_Label = 19 // Goes with (part of a word in a text not well edited) DependencyEdge_GOESWITH DependencyEdge_Label = 20 // Indirect object DependencyEdge_IOBJ DependencyEdge_Label = 21 // Marker (word introducing a subordinate clause) DependencyEdge_MARK DependencyEdge_Label = 22 // Multi-word expression DependencyEdge_MWE DependencyEdge_Label = 23 // Multi-word verbal expression DependencyEdge_MWV DependencyEdge_Label = 24 // Negation modifier DependencyEdge_NEG DependencyEdge_Label = 25 // Noun compound modifier DependencyEdge_NN DependencyEdge_Label = 26 // Noun phrase used as an adverbial modifier DependencyEdge_NPADVMOD DependencyEdge_Label = 27 // Nominal subject DependencyEdge_NSUBJ DependencyEdge_Label = 28 // Passive nominal subject DependencyEdge_NSUBJPASS DependencyEdge_Label = 29 // Numeric modifier of a noun DependencyEdge_NUM DependencyEdge_Label = 30 // Element of compound number DependencyEdge_NUMBER DependencyEdge_Label = 31 // Punctuation mark DependencyEdge_P DependencyEdge_Label = 32 // Parataxis relation DependencyEdge_PARATAXIS DependencyEdge_Label = 33 // Participial modifier DependencyEdge_PARTMOD DependencyEdge_Label = 34 // The complement of a preposition is a clause DependencyEdge_PCOMP DependencyEdge_Label = 35 // Object of a preposition DependencyEdge_POBJ DependencyEdge_Label = 36 // Possession modifier DependencyEdge_POSS DependencyEdge_Label = 37 // Postverbal negative particle DependencyEdge_POSTNEG DependencyEdge_Label = 38 // Predicate complement DependencyEdge_PRECOMP DependencyEdge_Label = 39 // Preconjunt DependencyEdge_PRECONJ DependencyEdge_Label = 40 // Predeterminer DependencyEdge_PREDET DependencyEdge_Label = 41 // Prefix DependencyEdge_PREF DependencyEdge_Label = 42 // Prepositional modifier DependencyEdge_PREP DependencyEdge_Label = 43 // The relationship between a verb and verbal morpheme DependencyEdge_PRONL DependencyEdge_Label = 44 // Particle DependencyEdge_PRT DependencyEdge_Label = 45 // Associative or possessive marker DependencyEdge_PS DependencyEdge_Label = 46 // Quantifier phrase modifier DependencyEdge_QUANTMOD DependencyEdge_Label = 47 // Relative clause modifier DependencyEdge_RCMOD DependencyEdge_Label = 48 // Complementizer in relative clause DependencyEdge_RCMODREL DependencyEdge_Label = 49 // Ellipsis without a preceding predicate DependencyEdge_RDROP DependencyEdge_Label = 50 // Referent DependencyEdge_REF DependencyEdge_Label = 51 // Remnant DependencyEdge_REMNANT DependencyEdge_Label = 52 // Reparandum DependencyEdge_REPARANDUM DependencyEdge_Label = 53 // Root DependencyEdge_ROOT DependencyEdge_Label = 54 // Suffix specifying a unit of number DependencyEdge_SNUM DependencyEdge_Label = 55 // Suffix DependencyEdge_SUFF DependencyEdge_Label = 56 // Temporal modifier DependencyEdge_TMOD DependencyEdge_Label = 57 // Topic marker DependencyEdge_TOPIC DependencyEdge_Label = 58 // Clause headed by an infinite form of the verb that modifies a noun DependencyEdge_VMOD DependencyEdge_Label = 59 // Vocative DependencyEdge_VOCATIVE DependencyEdge_Label = 60 // Open clausal complement DependencyEdge_XCOMP DependencyEdge_Label = 61 // Name suffix DependencyEdge_SUFFIX DependencyEdge_Label = 62 // Name title DependencyEdge_TITLE DependencyEdge_Label = 63 // Adverbial phrase modifier DependencyEdge_ADVPHMOD DependencyEdge_Label = 64 // Causative auxiliary DependencyEdge_AUXCAUS DependencyEdge_Label = 65 // Helper auxiliary DependencyEdge_AUXVV DependencyEdge_Label = 66 // Rentaishi (Prenominal modifier) DependencyEdge_DTMOD DependencyEdge_Label = 67 // Foreign words DependencyEdge_FOREIGN DependencyEdge_Label = 68 // Keyword DependencyEdge_KW DependencyEdge_Label = 69 // List for chains of comparable items DependencyEdge_LIST DependencyEdge_Label = 70 // Nominalized clause DependencyEdge_NOMC DependencyEdge_Label = 71 // Nominalized clausal subject DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72 // Nominalized clausal passive DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73 // Compound of numeric modifier DependencyEdge_NUMC DependencyEdge_Label = 74 // Copula DependencyEdge_COP DependencyEdge_Label = 75 // Dislocated relation (for fronted/topicalized elements) DependencyEdge_DISLOCATED DependencyEdge_Label = 76 )
func (DependencyEdge_Label) EnumDescriptor ¶
func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int)
func (DependencyEdge_Label) String ¶
func (x DependencyEdge_Label) String() string
type Document ¶
type Document struct { // Required. If the type is not set or is `TYPE_UNSPECIFIED`, // returns an `INVALID_ARGUMENT` error. Type Document_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.language.v1beta1.Document_Type" json:"type,omitempty"` // The source of the document: a string containing the content or a // Google Cloud Storage URI. // // Types that are valid to be assigned to Source: // *Document_Content // *Document_GcsContentUri Source isDocument_Source `protobuf_oneof:"source"` // The language of the document (if not specified, the language is // automatically detected). Both ISO and BCP-47 language codes are // accepted.<br> // [Language // Support](https://cloud.google.com/natural-language/docs/languages) lists // currently supported languages for each API method. If the language (either // specified by the caller or automatically detected) is not supported by the // called API method, an `INVALID_ARGUMENT` error is returned. Language string `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
Represents the input to API methods.
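Example (illustrative sketch): the two ways of populating the source oneof, inline content versus a Cloud Storage URI (the bucket and object below are hypothetical).

// Inline plain text.
doc := &languagepb.Document{
	Type:   languagepb.Document_PLAIN_TEXT,
	Source: &languagepb.Document_Content{Content: "Hello, world."},
}

// HTML stored in Cloud Storage, with automatic language detection skipped.
gcsDoc := &languagepb.Document{
	Type:     languagepb.Document_HTML,
	Source:   &languagepb.Document_GcsContentUri{GcsContentUri: "gs://my-bucket/page.html"},
	Language: "en",
}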
func (*Document) Descriptor ¶
func (*Document) Descriptor() ([]byte, []int)
func (*Document) GetContent ¶
func (m *Document) GetContent() string
func (*Document) GetGcsContentUri ¶
func (m *Document) GetGcsContentUri() string
func (*Document) GetLanguage ¶
func (m *Document) GetLanguage() string
func (*Document) GetType ¶
func (m *Document) GetType() Document_Type
func (*Document) ProtoMessage ¶
func (*Document) ProtoMessage()
func (*Document) XXX_DiscardUnknown ¶
func (m *Document) XXX_DiscardUnknown()
func (*Document) XXX_Marshal ¶
func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Document) XXX_OneofWrappers ¶
func (*Document) XXX_OneofWrappers() []interface{}
XXX_OneofWrappers is for the internal use of the proto package.
func (*Document) XXX_Unmarshal ¶
func (m *Document) XXX_Unmarshal(b []byte) error
type Document_Content ¶
type Document_Content struct {
Content string `protobuf:"bytes,2,opt,name=content,proto3,oneof"`
}
type Document_GcsContentUri ¶
type Document_GcsContentUri struct {
GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,proto3,oneof"`
}
type Document_Type ¶
type Document_Type int32
The document types enum.
const ( // The content type is not specified. Document_TYPE_UNSPECIFIED Document_Type = 0 // Plain text Document_PLAIN_TEXT Document_Type = 1 // HTML Document_HTML Document_Type = 2 )
func (Document_Type) EnumDescriptor ¶
func (Document_Type) EnumDescriptor() ([]byte, []int)
func (Document_Type) String ¶
func (x Document_Type) String() string
type EncodingType ¶
type EncodingType int32
Represents the text encoding that the caller uses to process the output. Providing an `EncodingType` is recommended because the API provides the beginning offsets for various outputs, such as tokens and mentions, and languages that natively use different text encodings may access offsets differently.
const ( // If `EncodingType` is not specified, encoding-dependent information (such as // `begin_offset`) will be set at `-1`. EncodingType_NONE EncodingType = 0 // Encoding-dependent information (such as `begin_offset`) is calculated based // on the UTF-8 encoding of the input. C++ and Go are examples of languages // that use this encoding natively. EncodingType_UTF8 EncodingType = 1 // Encoding-dependent information (such as `begin_offset`) is calculated based // on the UTF-16 encoding of the input. Java and Javascript are examples of // languages that use this encoding natively. EncodingType_UTF16 EncodingType = 2 // Encoding-dependent information (such as `begin_offset`) is calculated based // on the UTF-32 encoding of the input. Python is an example of a language // that uses this encoding natively. EncodingType_UTF32 EncodingType = 3 )
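For Go callers EncodingType_UTF8 is the natural choice: for a PLAIN_TEXT document the returned begin_offset values are then byte offsets into the input, matching Go string indexing. A small sketch (span is assumed to be a *TextSpan from such a response, and text the original input string):

off := int(span.GetBeginOffset())
if off >= 0 && off+len(span.GetContent()) <= len(text) {
	// With EncodingType_UTF8, begin_offset is a byte index into the input.
	fmt.Println(text[off : off+len(span.GetContent())])
}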
func (EncodingType) EnumDescriptor ¶
func (EncodingType) EnumDescriptor() ([]byte, []int)
func (EncodingType) String ¶
func (x EncodingType) String() string
type Entity ¶
type Entity struct { // The representative name for the entity. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The entity type. Type Entity_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta1.Entity_Type" json:"type,omitempty"` // Metadata associated with the entity. // // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if // available. The associated keys are "wikipedia_url" and "mid", respectively. Metadata map[string]string `` /* 157-byte string literal not displayed */ // The salience score associated with the entity in the [0, 1.0] range. // // The salience score for an entity provides information about the // importance or centrality of that entity to the entire document text. // Scores closer to 0 are less salient, while scores closer to 1.0 are highly // salient. Salience float32 `protobuf:"fixed32,4,opt,name=salience,proto3" json:"salience,omitempty"` // The mentions of this entity in the input document. The API currently // supports proper noun mentions. Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions,proto3" json:"mentions,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
Represents a phrase in the text that is a known entity, such as a person, an organization, or location. The API associates information, such as salience and mentions, with entities.
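Example (illustrative sketch; entities is assumed to be the []*Entity slice from an AnalyzeEntitiesResponse):

for _, ent := range entities {
	fmt.Printf("%s (%s) salience=%.3f\n", ent.GetName(), ent.GetType(), ent.GetSalience())
	// "wikipedia_url" and "mid" are the metadata keys documented above.
	if url, ok := ent.GetMetadata()["wikipedia_url"]; ok {
		fmt.Println("   ", url)
	}
	for _, m := range ent.GetMentions() {
		fmt.Printf("    mention %q at offset %d (%s)\n",
			m.GetText().GetContent(), m.GetText().GetBeginOffset(), m.GetType())
	}
}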
func (*Entity) Descriptor ¶
func (*Entity) Descriptor() ([]byte, []int)
func (*Entity) GetMentions ¶
func (m *Entity) GetMentions() []*EntityMention
func (*Entity) GetMetadata ¶
func (m *Entity) GetMetadata() map[string]string
func (*Entity) GetSalience ¶
func (m *Entity) GetSalience() float32
func (*Entity) GetType ¶
func (m *Entity) GetType() Entity_Type
func (*Entity) ProtoMessage ¶
func (*Entity) ProtoMessage()
func (*Entity) XXX_DiscardUnknown ¶
func (m *Entity) XXX_DiscardUnknown()
func (*Entity) XXX_Marshal ¶
func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Entity) XXX_Unmarshal ¶
func (m *Entity) XXX_Unmarshal(b []byte) error
type EntityMention ¶
type EntityMention struct { // The mention text. Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` // The type of the entity mention. Type EntityMention_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta1.EntityMention_Type" json:"type,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` }
Represents a mention for an entity in the text. Currently, proper noun mentions are supported.
func (*EntityMention) Descriptor ¶
func (*EntityMention) Descriptor() ([]byte, []int)
func (*EntityMention) GetText ¶
func (m *EntityMention) GetText() *TextSpan
func (*EntityMention) GetType ¶
func (m *EntityMention) GetType() EntityMention_Type
func (*EntityMention) ProtoMessage ¶
func (*EntityMention) ProtoMessage()
func (*EntityMention) Reset ¶
func (m *EntityMention) Reset()
func (*EntityMention) String ¶
func (m *EntityMention) String() string
func (*EntityMention) XXX_DiscardUnknown ¶
func (m *EntityMention) XXX_DiscardUnknown()
func (*EntityMention) XXX_Marshal ¶
func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*EntityMention) XXX_Merge ¶
func (m *EntityMention) XXX_Merge(src proto.Message)
func (*EntityMention) XXX_Size ¶
func (m *EntityMention) XXX_Size() int
func (*EntityMention) XXX_Unmarshal ¶
func (m *EntityMention) XXX_Unmarshal(b []byte) error
type EntityMention_Type ¶
type EntityMention_Type int32
The supported types of mentions.
const ( // Unknown EntityMention_TYPE_UNKNOWN EntityMention_Type = 0 // Proper name EntityMention_PROPER EntityMention_Type = 1 // Common noun (or noun compound) EntityMention_COMMON EntityMention_Type = 2 )
func (EntityMention_Type) EnumDescriptor ¶
func (EntityMention_Type) EnumDescriptor() ([]byte, []int)
func (EntityMention_Type) String ¶
func (x EntityMention_Type) String() string
type Entity_Type ¶
type Entity_Type int32
The type of the entity.
const ( // Unknown Entity_UNKNOWN Entity_Type = 0 // Person Entity_PERSON Entity_Type = 1 // Location Entity_LOCATION Entity_Type = 2 // Organization Entity_ORGANIZATION Entity_Type = 3 // Event Entity_EVENT Entity_Type = 4 // Work of art Entity_WORK_OF_ART Entity_Type = 5 // Consumer goods Entity_CONSUMER_GOOD Entity_Type = 6 // Other types Entity_OTHER Entity_Type = 7 )
func (Entity_Type) EnumDescriptor ¶
func (Entity_Type) EnumDescriptor() ([]byte, []int)
func (Entity_Type) String ¶
func (x Entity_Type) String() string
type LanguageServiceClient ¶
type LanguageServiceClient interface { // Analyzes the sentiment of the provided text. AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) // Finds named entities (currently proper names and common nouns) in the text // along with entity types, salience, mentions for each entity, and // other properties. AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) // Analyzes the syntax of the text and provides sentence boundaries and // tokenization along with part of speech tags, dependency trees, and other // properties. AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) // A convenience method that provides all the features that analyzeSentiment, // analyzeEntities, and analyzeSyntax provide in one call. AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) }
LanguageServiceClient is the client API for LanguageService service.
For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
func NewLanguageServiceClient ¶
func NewLanguageServiceClient(cc grpc.ClientConnInterface) LanguageServiceClient
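Example (illustrative sketch): calling the service through the generated client. Establishing conn, including TLS and OAuth credentials for the real endpoint, is outside the scope of the generated code and is assumed here; the snippet also assumes the context, fmt, grpc, and languagepb imports.

func analyzeSentiment(ctx context.Context, conn grpc.ClientConnInterface) error {
	client := languagepb.NewLanguageServiceClient(conn)
	resp, err := client.AnalyzeSentiment(ctx, &languagepb.AnalyzeSentimentRequest{
		Document: &languagepb.Document{
			Type:   languagepb.Document_PLAIN_TEXT,
			Source: &languagepb.Document_Content{Content: "I love this library."},
		},
		EncodingType: languagepb.EncodingType_UTF8,
	})
	if err != nil {
		return err
	}
	fmt.Printf("score=%.2f magnitude=%.2f\n",
		resp.GetDocumentSentiment().GetScore(),
		resp.GetDocumentSentiment().GetMagnitude())
	return nil
}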
type LanguageServiceServer ¶
type LanguageServiceServer interface { // Analyzes the sentiment of the provided text. AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error) // Finds named entities (currently proper names and common nouns) in the text // along with entity types, salience, mentions for each entity, and // other properties. AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error) // Analyzes the syntax of the text and provides sentence boundaries and // tokenization along with part of speech tags, dependency trees, and other // properties. AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) // A convenience method that provides all the features that analyzeSentiment, // analyzeEntities, and analyzeSyntax provide in one call. AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error) }
LanguageServiceServer is the server API for LanguageService service.
type PartOfSpeech ¶
type PartOfSpeech struct {
    // The part of speech tag.
    Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Tag" json:"tag,omitempty"`
    // The grammatical aspect.
    Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Aspect" json:"aspect,omitempty"`
    // The grammatical case.
    Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Case" json:"case,omitempty"`
    // The grammatical form.
    Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Form" json:"form,omitempty"`
    // The grammatical gender.
    Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Gender" json:"gender,omitempty"`
    // The grammatical mood.
    Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Mood" json:"mood,omitempty"`
    // The grammatical number.
    Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Number" json:"number,omitempty"`
    // The grammatical person.
    Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Person" json:"person,omitempty"`
    // The grammatical properness.
    Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Proper" json:"proper,omitempty"`
    // The grammatical reciprocity.
    Reciprocity PartOfSpeech_Reciprocity `` /* 137-byte string literal not displayed */
    // The grammatical tense.
    Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Tense" json:"tense,omitempty"`
    // The grammatical voice.
    Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Voice" json:"voice,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized     []byte   `json:"-"`
    XXX_sizecache        int32    `json:"-"`
}
Represents part of speech information for a token.
func (*PartOfSpeech) Descriptor ¶
func (*PartOfSpeech) Descriptor() ([]byte, []int)
func (*PartOfSpeech) GetAspect ¶
func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect
func (*PartOfSpeech) GetCase ¶
func (m *PartOfSpeech) GetCase() PartOfSpeech_Case
func (*PartOfSpeech) GetForm ¶
func (m *PartOfSpeech) GetForm() PartOfSpeech_Form
func (*PartOfSpeech) GetGender ¶
func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender
func (*PartOfSpeech) GetMood ¶
func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood
func (*PartOfSpeech) GetNumber ¶
func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number
func (*PartOfSpeech) GetPerson ¶
func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person
func (*PartOfSpeech) GetProper ¶
func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper
func (*PartOfSpeech) GetReciprocity ¶
func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity
func (*PartOfSpeech) GetTag ¶
func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag
func (*PartOfSpeech) GetTense ¶
func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense
func (*PartOfSpeech) GetVoice ¶
func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice
func (*PartOfSpeech) ProtoMessage ¶
func (*PartOfSpeech) ProtoMessage()
func (*PartOfSpeech) Reset ¶
func (m *PartOfSpeech) Reset()
func (*PartOfSpeech) String ¶
func (m *PartOfSpeech) String() string
func (*PartOfSpeech) XXX_DiscardUnknown ¶
func (m *PartOfSpeech) XXX_DiscardUnknown()
func (*PartOfSpeech) XXX_Marshal ¶
func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*PartOfSpeech) XXX_Merge ¶
func (m *PartOfSpeech) XXX_Merge(src proto.Message)
func (*PartOfSpeech) XXX_Size ¶
func (m *PartOfSpeech) XXX_Size() int
func (*PartOfSpeech) XXX_Unmarshal ¶
func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error
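Since every grammatical dimension is exposed both as a typed getter and as an enum with a String() method, a compact way to inspect a token's morphology is to print only the dimensions you care about. A minimal sketch, with the formatting choices and the languagepb alias as illustrative assumptions:

package example

import (
    "fmt"

    languagepb "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
)

// describePartOfSpeech renders a few commonly used grammatical features;
// dimensions the API leaves unset simply print their *_UNKNOWN enum name.
func describePartOfSpeech(p *languagepb.PartOfSpeech) string {
    return fmt.Sprintf("tag=%s number=%s tense=%s voice=%s",
        p.GetTag(), p.GetNumber(), p.GetTense(), p.GetVoice())
}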
type PartOfSpeech_Aspect ¶
type PartOfSpeech_Aspect int32
The characteristic of a verb that expresses time flow during an event.
const (
    // Aspect is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0
    // Perfective
    PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1
    // Imperfective
    PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2
    // Progressive
    PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3
)
func (PartOfSpeech_Aspect) EnumDescriptor ¶
func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Aspect) String ¶
func (x PartOfSpeech_Aspect) String() string
type PartOfSpeech_Case ¶
type PartOfSpeech_Case int32
The grammatical function performed by a noun or pronoun in a phrase, clause, or sentence. In some languages, other parts of speech, such as adjective and determiner, take case inflection in agreement with the noun.
const (
    // Case is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0
    // Accusative
    PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1
    // Adverbial
    PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2
    // Complementive
    PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3
    // Dative
    PartOfSpeech_DATIVE PartOfSpeech_Case = 4
    // Genitive
    PartOfSpeech_GENITIVE PartOfSpeech_Case = 5
    // Instrumental
    PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6
    // Locative
    PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7
    // Nominative
    PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8
    // Oblique
    PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9
    // Partitive
    PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10
    // Prepositional
    PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11
    // Reflexive
    PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12
    // Relative
    PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13
    // Vocative
    PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14
)
func (PartOfSpeech_Case) EnumDescriptor ¶
func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Case) String ¶
func (x PartOfSpeech_Case) String() string
type PartOfSpeech_Form ¶
type PartOfSpeech_Form int32
Depending on the language, Form can categorize different forms of verbs, adjectives, adverbs, and so on: for example, inflected endings of verbs and adjectives, or the distinction between short and long forms of adjectives and participles.
const (
    // Form is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0
    // Adnomial
    PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1
    // Auxiliary
    PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2
    // Complementizer
    PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3
    // Final ending
    PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4
    // Gerund
    PartOfSpeech_GERUND PartOfSpeech_Form = 5
    // Realis
    PartOfSpeech_REALIS PartOfSpeech_Form = 6
    // Irrealis
    PartOfSpeech_IRREALIS PartOfSpeech_Form = 7
    // Short form
    PartOfSpeech_SHORT PartOfSpeech_Form = 8
    // Long form
    PartOfSpeech_LONG PartOfSpeech_Form = 9
    // Order form
    PartOfSpeech_ORDER PartOfSpeech_Form = 10
    // Specific form
    PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11
)
func (PartOfSpeech_Form) EnumDescriptor ¶
func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Form) String ¶
func (x PartOfSpeech_Form) String() string
type PartOfSpeech_Gender ¶
type PartOfSpeech_Gender int32
Gender classes of nouns reflected in the behaviour of associated words.
const (
    // Gender is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0
    // Feminine
    PartOfSpeech_FEMININE PartOfSpeech_Gender = 1
    // Masculine
    PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2
    // Neuter
    PartOfSpeech_NEUTER PartOfSpeech_Gender = 3
)
func (PartOfSpeech_Gender) EnumDescriptor ¶
func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Gender) String ¶
func (x PartOfSpeech_Gender) String() string
type PartOfSpeech_Mood ¶
type PartOfSpeech_Mood int32
The grammatical feature of verbs, used for showing modality and attitude.
const (
    // Mood is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0
    // Conditional
    PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1
    // Imperative
    PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2
    // Indicative
    PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3
    // Interrogative
    PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4
    // Jussive
    PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5
    // Subjunctive
    PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6
)
func (PartOfSpeech_Mood) EnumDescriptor ¶
func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Mood) String ¶
func (x PartOfSpeech_Mood) String() string
type PartOfSpeech_Number ¶
type PartOfSpeech_Number int32
Count distinctions.
const (
    // Number is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0
    // Singular
    PartOfSpeech_SINGULAR PartOfSpeech_Number = 1
    // Plural
    PartOfSpeech_PLURAL PartOfSpeech_Number = 2
    // Dual
    PartOfSpeech_DUAL PartOfSpeech_Number = 3
)
func (PartOfSpeech_Number) EnumDescriptor ¶
func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Number) String ¶
func (x PartOfSpeech_Number) String() string
type PartOfSpeech_Person ¶
type PartOfSpeech_Person int32
The distinction between the speaker, second person, third person, etc.
const (
    // Person is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0
    // First
    PartOfSpeech_FIRST PartOfSpeech_Person = 1
    // Second
    PartOfSpeech_SECOND PartOfSpeech_Person = 2
    // Third
    PartOfSpeech_THIRD PartOfSpeech_Person = 3
    // Reflexive
    PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4
)
func (PartOfSpeech_Person) EnumDescriptor ¶
func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Person) String ¶
func (x PartOfSpeech_Person) String() string
type PartOfSpeech_Proper ¶
type PartOfSpeech_Proper int32
This category shows whether the token is part of a proper name.
const (
    // Proper is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0
    // Proper
    PartOfSpeech_PROPER PartOfSpeech_Proper = 1
    // Not proper
    PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2
)
func (PartOfSpeech_Proper) EnumDescriptor ¶
func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Proper) String ¶
func (x PartOfSpeech_Proper) String() string
type PartOfSpeech_Reciprocity ¶
type PartOfSpeech_Reciprocity int32
Reciprocal features of a pronoun.
const (
    // Reciprocity is not applicable in the analyzed language or is not
    // predicted.
    PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0
    // Reciprocal
    PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1
    // Non-reciprocal
    PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2
)
func (PartOfSpeech_Reciprocity) EnumDescriptor ¶
func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Reciprocity) String ¶
func (x PartOfSpeech_Reciprocity) String() string
type PartOfSpeech_Tag ¶
type PartOfSpeech_Tag int32
The part of speech tags enum.
const (
    // Unknown
    PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0
    // Adjective
    PartOfSpeech_ADJ PartOfSpeech_Tag = 1
    // Adposition (preposition and postposition)
    PartOfSpeech_ADP PartOfSpeech_Tag = 2
    // Adverb
    PartOfSpeech_ADV PartOfSpeech_Tag = 3
    // Conjunction
    PartOfSpeech_CONJ PartOfSpeech_Tag = 4
    // Determiner
    PartOfSpeech_DET PartOfSpeech_Tag = 5
    // Noun (common and proper)
    PartOfSpeech_NOUN PartOfSpeech_Tag = 6
    // Cardinal number
    PartOfSpeech_NUM PartOfSpeech_Tag = 7
    // Pronoun
    PartOfSpeech_PRON PartOfSpeech_Tag = 8
    // Particle or other function word
    PartOfSpeech_PRT PartOfSpeech_Tag = 9
    // Punctuation
    PartOfSpeech_PUNCT PartOfSpeech_Tag = 10
    // Verb (all tenses and modes)
    PartOfSpeech_VERB PartOfSpeech_Tag = 11
    // Other: foreign words, typos, abbreviations
    PartOfSpeech_X PartOfSpeech_Tag = 12
    // Affix
    PartOfSpeech_AFFIX PartOfSpeech_Tag = 13
)
func (PartOfSpeech_Tag) EnumDescriptor ¶
func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Tag) String ¶
func (x PartOfSpeech_Tag) String() string
type PartOfSpeech_Tense ¶
type PartOfSpeech_Tense int32
Time reference.
const (
    // Tense is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0
    // Conditional
    PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1
    // Future
    PartOfSpeech_FUTURE PartOfSpeech_Tense = 2
    // Past
    PartOfSpeech_PAST PartOfSpeech_Tense = 3
    // Present
    PartOfSpeech_PRESENT PartOfSpeech_Tense = 4
    // Imperfect
    PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5
    // Pluperfect
    PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6
)
func (PartOfSpeech_Tense) EnumDescriptor ¶
func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Tense) String ¶
func (x PartOfSpeech_Tense) String() string
type PartOfSpeech_Voice ¶
type PartOfSpeech_Voice int32
The relationship between the action that a verb expresses and the participants identified by its arguments.
const (
    // Voice is not applicable in the analyzed language or is not predicted.
    PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0
    // Active
    PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1
    // Causative
    PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2
    // Passive
    PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3
)
func (PartOfSpeech_Voice) EnumDescriptor ¶
func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int)
func (PartOfSpeech_Voice) String ¶
func (x PartOfSpeech_Voice) String() string
type Sentence ¶
type Sentence struct {
    // The sentence text.
    Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
    // For calls to [AnalyzeSentiment][] or if
    // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment]
    // is set to true, this field will contain the sentiment for the sentence.
    Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment,proto3" json:"sentiment,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized     []byte   `json:"-"`
    XXX_sizecache        int32    `json:"-"`
}
Represents a sentence in the input document.
func (*Sentence) Descriptor ¶
func (*Sentence) Descriptor() ([]byte, []int)
func (*Sentence) GetSentiment ¶
func (m *Sentence) GetSentiment() *Sentiment
func (*Sentence) ProtoMessage ¶
func (*Sentence) ProtoMessage()
func (*Sentence) XXX_DiscardUnknown ¶
func (m *Sentence) XXX_DiscardUnknown()
func (*Sentence) XXX_Marshal ¶
func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Sentence) XXX_Unmarshal ¶
func (m *Sentence) XXX_Unmarshal(b []byte) error
type Sentiment ¶
type Sentiment struct {
    // DEPRECATED FIELD - This field is being deprecated in
    // favor of score. Please refer to our documentation at
    // https://cloud.google.com/natural-language/docs for more information.
    Polarity float32 `protobuf:"fixed32,1,opt,name=polarity,proto3" json:"polarity,omitempty"`
    // A non-negative number in the [0, +inf) range, which represents
    // the absolute magnitude of sentiment regardless of score (positive or
    // negative).
    Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"`
    // Sentiment score between -1.0 (negative sentiment) and 1.0
    // (positive sentiment).
    Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized     []byte   `json:"-"`
    XXX_sizecache        int32    `json:"-"`
}
Represents the feeling associated with the entire text or entities in the text.
func (*Sentiment) Descriptor ¶
func (*Sentiment) Descriptor() ([]byte, []int)
func (*Sentiment) GetMagnitude ¶
func (m *Sentiment) GetMagnitude() float32
func (*Sentiment) GetPolarity ¶
func (m *Sentiment) GetPolarity() float32
func (*Sentiment) ProtoMessage ¶
func (*Sentiment) ProtoMessage()
func (*Sentiment) XXX_DiscardUnknown ¶
func (m *Sentiment) XXX_DiscardUnknown()
func (*Sentiment) XXX_Marshal ¶
func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Sentiment) XXX_Unmarshal ¶
func (m *Sentiment) XXX_Unmarshal(b []byte) error
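Because Polarity is deprecated in favor of Score, new code should read Score and Magnitude. The sketch below walks the per-sentence results of an AnalyzeSentiment response; the GetText, GetScore, and GetSentences accessors are the usual generated getters for fields shown in the structs above and are assumed here, since this extraction omits some of their index entries.

package example

import (
    "fmt"

    languagepb "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
)

// printSentenceSentiment lists each sentence with its score and magnitude.
func printSentenceSentiment(resp *languagepb.AnalyzeSentimentResponse) {
    for _, s := range resp.GetSentences() {
        sent := s.GetSentiment()
        fmt.Printf("%q score=%.2f magnitude=%.2f\n",
            s.GetText().GetContent(), sent.GetScore(), sent.GetMagnitude())
    }
}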
type TextSpan ¶
type TextSpan struct {
    // The content of the output text.
    Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"`
    // The API calculates the beginning offset of the content in the original
    // document according to the
    // [EncodingType][google.cloud.language.v1beta1.EncodingType] specified in the
    // API request.
    BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset,proto3" json:"begin_offset,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized     []byte   `json:"-"`
    XXX_sizecache        int32    `json:"-"`
}
Represents an output piece of text.
func (*TextSpan) Descriptor ¶
func (*TextSpan) Descriptor() ([]byte, []int)
func (*TextSpan) GetBeginOffset ¶
func (m *TextSpan) GetBeginOffset() int32
func (*TextSpan) GetContent ¶
func (m *TextSpan) GetContent() string
func (*TextSpan) ProtoMessage ¶
func (*TextSpan) ProtoMessage()
func (*TextSpan) XXX_DiscardUnknown ¶
func (m *TextSpan) XXX_DiscardUnknown()
func (*TextSpan) XXX_Marshal ¶
func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*TextSpan) XXX_Unmarshal ¶
func (m *TextSpan) XXX_Unmarshal(b []byte) error
type Token ¶
type Token struct {
    // The token text.
    Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
    // Parts of speech tag for this token.
    PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech,proto3" json:"part_of_speech,omitempty"`
    // Dependency tree parse for this token.
    DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge,proto3" json:"dependency_edge,omitempty"`
    // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
    Lemma string `protobuf:"bytes,4,opt,name=lemma,proto3" json:"lemma,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized     []byte   `json:"-"`
    XXX_sizecache        int32    `json:"-"`
}
Represents the smallest syntactic building block of the text.
func (*Token) Descriptor ¶
func (*Token) Descriptor() ([]byte, []int)
func (*Token) GetDependencyEdge ¶
func (m *Token) GetDependencyEdge() *DependencyEdge
func (*Token) GetPartOfSpeech ¶
func (m *Token) GetPartOfSpeech() *PartOfSpeech
func (*Token) ProtoMessage ¶
func (*Token) ProtoMessage()
func (*Token) XXX_DiscardUnknown ¶
func (m *Token) XXX_DiscardUnknown()
func (*Token) XXX_Marshal ¶
func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Token) XXX_Unmarshal ¶
func (m *Token) XXX_Unmarshal(b []byte) error
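To see how Token, PartOfSpeech, and DependencyEdge fit together, here is a hedged sketch that walks the tokens of an AnalyzeSyntaxResponse. GetTokens on the response, GetLemma on Token, and GetHeadTokenIndex on DependencyEdge follow the generated-getter pattern and are assumptions, since this extraction does not list every accessor.

package example

import (
    "fmt"

    languagepb "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
)

// printTokens prints each token's surface form, part-of-speech tag, lemma,
// and the index of its head token in the dependency parse.
func printTokens(resp *languagepb.AnalyzeSyntaxResponse) {
    for i, tok := range resp.GetTokens() {
        fmt.Printf("%3d %-12s tag=%-5s lemma=%-12s head=%d\n",
            i,
            tok.GetText().GetContent(),
            tok.GetPartOfSpeech().GetTag(),
            tok.GetLemma(),
            tok.GetDependencyEdge().GetHeadTokenIndex(),
        )
    }
}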
type UnimplementedLanguageServiceServer ¶
type UnimplementedLanguageServiceServer struct { }
UnimplementedLanguageServiceServer can be embedded to have forward compatible implementations.
func (*UnimplementedLanguageServiceServer) AnalyzeEntities ¶
func (*UnimplementedLanguageServiceServer) AnalyzeEntities(ctx context.Context, req *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
func (*UnimplementedLanguageServiceServer) AnalyzeSentiment ¶
func (*UnimplementedLanguageServiceServer) AnalyzeSentiment(ctx context.Context, req *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error)
func (*UnimplementedLanguageServiceServer) AnalyzeSyntax ¶
func (*UnimplementedLanguageServiceServer) AnalyzeSyntax(ctx context.Context, req *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
func (*UnimplementedLanguageServiceServer) AnnotateText ¶
func (*UnimplementedLanguageServiceServer) AnnotateText(ctx context.Context, req *AnnotateTextRequest) (*AnnotateTextResponse, error)
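Putting the server-side pieces together: the sketch below embeds UnimplementedLanguageServiceServer so the RPCs it does not override answer with a standard error, implements one method, and registers the type with RegisterLanguageServiceServer. The stubLanguageServer name, the listen address, and the canned response are assumptions for illustration, not part of this package.

package main

import (
    "context"
    "log"
    "net"

    languagepb "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
    "google.golang.org/grpc"
)

// stubLanguageServer overrides AnalyzeSentiment only; the embedded
// UnimplementedLanguageServiceServer handles the remaining RPCs, keeping the
// type forward compatible as the service grows.
type stubLanguageServer struct {
    languagepb.UnimplementedLanguageServiceServer
}

func (s *stubLanguageServer) AnalyzeSentiment(ctx context.Context, req *languagepb.AnalyzeSentimentRequest) (*languagepb.AnalyzeSentimentResponse, error) {
    // A real implementation would inspect req.GetDocument(); this stub
    // returns a fixed neutral sentiment.
    return &languagepb.AnalyzeSentimentResponse{
        DocumentSentiment: &languagepb.Sentiment{Score: 0, Magnitude: 0},
        Language:          "en",
    }, nil
}

func main() {
    lis, err := net.Listen("tcp", "localhost:8080")
    if err != nil {
        log.Fatal(err)
    }
    srv := grpc.NewServer()
    languagepb.RegisterLanguageServiceServer(srv, &stubLanguageServer{})
    log.Fatal(srv.Serve(lis))
}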