Documentation ¶
Overview ¶
Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
For more information, go here: https://github.com/alecthomas/chroma
Index ¶
- Constants
- Variables
- func Words(prefix, suffix string, words ...string) string
- type Analyser
- type Colour
- func (c Colour) Blue() uint8
- func (c Colour) Brighten(factor float64) Colour
- func (c Colour) Brightness() float64
- func (c Colour) Distance(e2 Colour) float64
- func (c Colour) GoString() string
- func (c Colour) Green() uint8
- func (c Colour) IsSet() bool
- func (c Colour) Red() uint8
- func (c Colour) String() string
- type Colours
- type CompiledRule
- type CompiledRules
- type Config
- type Emitter
- type EmitterFunc
- type Formatter
- type FormatterFunc
- type Iterator
- type Lexer
- type LexerMutator
- type LexerState
- type Lexers
- type Mutator
- type MutatorFunc
- type PrioritisedLexers
- type RegexLexer
- func (r *RegexLexer) AnalyseText(text string) float32
- func (r *RegexLexer) Config() *Config
- func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
- func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)
- func (r *RegexLexer) Trace(trace bool) *RegexLexer
- type Rule
- type Rules
- type Style
- type StyleBuilder
- func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder
- func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder
- func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
- func (s *StyleBuilder) Build() (*Style, error)
- func (s *StyleBuilder) Get(ttype TokenType) StyleEntry
- type StyleEntries
- type StyleEntry
- type Token
- type TokenType
- func (t TokenType) Category() TokenType
- func (t TokenType) Emit(groups []string, lexer Lexer) Iterator
- func (t TokenType) InCategory(other TokenType) bool
- func (t TokenType) InSubCategory(other TokenType) bool
- func (t *TokenType) MarshalJSON() ([]byte, error)
- func (t TokenType) Parent() TokenType
- func (i TokenType) String() string
- func (t TokenType) SubCategory() TokenType
- func (t *TokenType) UnmarshalJSON(data []byte) error
- type TokeniseOptions
- type Trilean
- type TypeMapping
Constants ¶
const ( Whitespace = TextWhitespace Date = LiteralDate String = LiteralString StringAffix = LiteralStringAffix StringBacktick = LiteralStringBacktick StringChar = LiteralStringChar StringDelimiter = LiteralStringDelimiter StringDoc = LiteralStringDoc StringDouble = LiteralStringDouble StringEscape = LiteralStringEscape StringHeredoc = LiteralStringHeredoc StringInterpol = LiteralStringInterpol StringOther = LiteralStringOther StringRegex = LiteralStringRegex StringSingle = LiteralStringSingle StringSymbol = LiteralStringSymbol Number = LiteralNumber NumberBin = LiteralNumberBin NumberFloat = LiteralNumberFloat NumberHex = LiteralNumberHex NumberInteger = LiteralNumberInteger NumberIntegerLong = LiteralNumberIntegerLong NumberOct = LiteralNumberOct )
Aliases.
Variables ¶
var ANSI2RGB = map[string]string{
"#ansiblack": "000000",
"#ansidarkred": "7f0000",
"#ansidarkgreen": "007f00",
"#ansibrown": "7f7fe0",
"#ansidarkblue": "00007f",
"#ansipurple": "7f007f",
"#ansiteal": "007f7f",
"#ansilightgray": "e5e5e5",
"#ansidarkgray": "555555",
"#ansired": "ff0000",
"#ansigreen": "00ff00",
"#ansiyellow": "ffff00",
"#ansiblue": "0000ff",
"#ansifuchsia": "ff00ff",
"#ansiturquoise": "00ffff",
"#ansiwhite": "ffffff",
"#black": "000000",
"#darkred": "7f0000",
"#darkgreen": "007f00",
"#brown": "7f7fe0",
"#darkblue": "00007f",
"#purple": "7f007f",
"#teal": "007f7f",
"#lightgray": "e5e5e5",
"#darkgray": "555555",
"#red": "ff0000",
"#green": "00ff00",
"#yellow": "ffff00",
"#blue": "0000ff",
"#fuchsia": "ff00ff",
"#turquoise": "00ffff",
"#white": "ffffff",
}
ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
var ( StandardTypes = map[TokenType]string{ Background: "chroma", LineNumbers: "ln", LineNumbersTable: "lnt", LineHighlight: "hl", LineTable: "lntable", LineTableTD: "lntd", Text: "", Whitespace: "w", Error: "err", Other: "x", Keyword: "k", KeywordConstant: "kc", KeywordDeclaration: "kd", KeywordNamespace: "kn", KeywordPseudo: "kp", KeywordReserved: "kr", KeywordType: "kt", Name: "n", NameAttribute: "na", NameBuiltin: "nb", NameBuiltinPseudo: "bp", NameClass: "nc", NameConstant: "no", NameDecorator: "nd", NameEntity: "ni", NameException: "ne", NameFunction: "nf", NameFunctionMagic: "fm", NameProperty: "py", NameLabel: "nl", NameNamespace: "nn", NameOther: "nx", NameTag: "nt", NameVariable: "nv", NameVariableClass: "vc", NameVariableGlobal: "vg", NameVariableInstance: "vi", NameVariableMagic: "vm", Literal: "l", LiteralDate: "ld", String: "s", StringAffix: "sa", StringBacktick: "sb", StringChar: "sc", StringDelimiter: "dl", StringDoc: "sd", StringDouble: "s2", StringEscape: "se", StringHeredoc: "sh", StringInterpol: "si", StringOther: "sx", StringRegex: "sr", StringSingle: "s1", StringSymbol: "ss", Number: "m", NumberBin: "mb", NumberFloat: "mf", NumberHex: "mh", NumberInteger: "mi", NumberIntegerLong: "il", NumberOct: "mo", Operator: "o", OperatorWord: "ow", Punctuation: "p", Comment: "c", CommentHashbang: "ch", CommentMultiline: "cm", CommentPreproc: "cp", CommentPreprocFile: "cpf", CommentSingle: "c1", CommentSpecial: "cs", Generic: "g", GenericDeleted: "gd", GenericEmph: "ge", GenericError: "gr", GenericHeading: "gh", GenericInserted: "gi", GenericOutput: "go", GenericPrompt: "gp", GenericStrong: "gs", GenericSubheading: "gu", GenericTraceback: "gt", } )
Functions ¶
Types ¶
type Colour ¶
type Colour int32
Colour represents an RGB colour.
func MustParseColour ¶
MustParseColour is like ParseColour except it panics if the colour is invalid.
Will panic if colour is in an invalid format.
func ParseColour ¶
ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>. Will return an "unset" colour if invalid.
func (Colour) Brighten ¶
Brighten returns a copy of this colour with its brightness adjusted.
If factor is negative, the colour is darkened.
Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
func (Colour) Brightness ¶
Brightness of the colour (roughly) in the range 0.0 to 1.0.
func (Colour) Distance ¶
Distance between this colour and another.
This uses the approach described here (https://www.compuphase.com/cmetric.htm). This is not as accurate as LAB, et al. but is *vastly* simpler and sufficient for our needs.
type CompiledRule ¶
A CompiledRule is a Rule with a pre-compiled regex.
Note that regular expressions are lazily compiled on first use of the lexer.
type CompiledRules ¶
type CompiledRules map[string][]*CompiledRule
type Config ¶
type Config struct { // Name of the lexer. Name string // Shortcuts for the lexer Aliases []string // File name globs Filenames []string // Secondary file name globs AliasFilenames []string // MIME types MimeTypes []string // Regex matching is case-insensitive. CaseInsensitive bool // Regex matches all characters. DotAll bool // Regex does not match across lines ($ matches EOL). // // Defaults to multiline. NotMultiline bool // Make sure that the input ends with a newline. This // is required for some lexers that consume input linewise. EnsureNL bool // Priority of lexer. // // If this is 0 it will be treated as a default of 1. Priority float32 }
Config for a lexer.
type Emitter ¶
type Emitter interface { // Emit tokens for the given regex groups. Emit(groups []string, lexer Lexer) Iterator }
An Emitter takes group matches and returns tokens.
type EmitterFunc ¶
EmitterFunc is a function that is an Emitter.
type Formatter ¶
type Formatter interface { // Format returns a formatting function for tokens. // // If the iterator panics, the Formatter should recover. Format(w io.Writer, style *Style, iterator Iterator) error }
A Formatter for Chroma lexers.
func RecoveringFormatter ¶
RecoveringFormatter wraps a formatter with panic recovery.
type FormatterFunc ¶
A FormatterFunc is a Formatter implemented as a function.
Guards against iterator panics.
type Iterator ¶
type Iterator func() *Token
An Iterator across tokens.
nil will be returned at the end of the Token stream.
If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
func Concaterator ¶
Concaterator concatenates tokens from a series of iterators.
type Lexer ¶
type Lexer interface { // Config describing the features of the Lexer. Config() *Config // Tokenise returns an Iterator over tokens in text. Tokenise(options *TokeniseOptions, text string) (Iterator, error) }
A Lexer for tokenising source code.
func Coalesce ¶
Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
func RemappingLexer ¶ added in v0.2.0
RemappingLexer remaps a token to a set of, potentially empty, tokens.
func TypeRemappingLexer ¶ added in v0.2.0
func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer
TypeRemappingLexer remaps types of tokens coming from a parent Lexer.
eg. Map "defvaralias" tokens of type NameVariable to NameFunction:
mapping := TypeMapping{ {NameVariable, NameFunction, []string{"defvaralias"}, } lexer = TypeRemappingLexer(lexer, mapping)
type LexerMutator ¶
type LexerMutator interface { // Rules are the lexer rules, state is the state key for the rule the mutator is associated with. MutateLexer(rules CompiledRules, state string, rule int) error }
A LexerMutator is an additional interface that a Mutator can implement to modify the lexer when it is compiled.
type LexerState ¶
type LexerState struct { Lexer *RegexLexer Text []rune Pos int Rules CompiledRules Stack []string State string Rule int // Group matches. Groups []string // Custom context for mutators. MutatorContext map[interface{}]interface{} // contains filtered or unexported fields }
func (*LexerState) Get ¶
func (l *LexerState) Get(key interface{}) interface{}
func (*LexerState) Iterator ¶
func (l *LexerState) Iterator() *Token
func (*LexerState) Set ¶
func (l *LexerState) Set(key interface{}, value interface{})
type Mutator ¶
type Mutator interface { // Mutate the lexer state machine as it is processing. Mutate(state *LexerState) error }
A Mutator modifies the behaviour of the lexer.
type MutatorFunc ¶
type MutatorFunc func(state *LexerState) error
A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing.
func Mutators ¶
func Mutators(modifiers ...Mutator) MutatorFunc
Mutators applies a set of Mutators in order.
func (MutatorFunc) Mutate ¶
func (m MutatorFunc) Mutate(state *LexerState) error
type PrioritisedLexers ¶ added in v0.2.0
type PrioritisedLexers []Lexer
PrioritisedLexers is a slice of lexers sortable by priority.
func (PrioritisedLexers) Len ¶ added in v0.2.0
func (l PrioritisedLexers) Len() int
func (PrioritisedLexers) Less ¶ added in v0.2.0
func (l PrioritisedLexers) Less(i, j int) bool
func (PrioritisedLexers) Swap ¶ added in v0.2.0
func (l PrioritisedLexers) Swap(i, j int)
type RegexLexer ¶
type RegexLexer struct {
// contains filtered or unexported fields
}
func MustNewLexer ¶
func MustNewLexer(config *Config, rules Rules) *RegexLexer
MustNewLexer creates a new Lexer or panics.
func NewLexer ¶
func NewLexer(config *Config, rules Rules) (*RegexLexer, error)
NewLexer creates a new regex-based Lexer.
"rules" is a state machine transitition map. Each key is a state. Values are sets of rules that match input, optionally modify lexer state, and output tokens.
func (*RegexLexer) AnalyseText ¶
func (r *RegexLexer) AnalyseText(text string) float32
func (*RegexLexer) Config ¶
func (r *RegexLexer) Config() *Config
func (*RegexLexer) SetAnalyser ¶
func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
SetAnalyser sets the analyser function used to perform content inspection.
func (*RegexLexer) Tokenise ¶
func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)
func (*RegexLexer) Trace ¶
func (r *RegexLexer) Trace(trace bool) *RegexLexer
type Style ¶
type Style struct { Name string // contains filtered or unexported fields }
A Style definition.
See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical.
func MustNewStyle ¶
func MustNewStyle(name string, entries StyleEntries) *Style
MustNewStyle creates a new style or panics.
func NewStyle ¶
func NewStyle(name string, entries StyleEntries) (*Style, error)
NewStyle creates a new style definition.
func (*Style) Builder ¶
func (s *Style) Builder() *StyleBuilder
Builder creates a mutable builder from this Style.
The builder can then be safely modified. This is a cheap operation.
func (*Style) Get ¶
func (s *Style) Get(ttype TokenType) StyleEntry
Get a style entry. Will try sub-category or category if an exact match is not found, and finally return the Background.
type StyleBuilder ¶
type StyleBuilder struct {
// contains filtered or unexported fields
}
A StyleBuilder is a mutable structure for building styles.
Once built, a Style is immutable.
func NewStyleBuilder ¶
func NewStyleBuilder(name string) *StyleBuilder
func (*StyleBuilder) Add ¶
func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder
Add an entry to the Style map.
See http://pygments.org/docs/styles/#style-rules for details.
func (*StyleBuilder) AddAll ¶
func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder
func (*StyleBuilder) AddEntry ¶
func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
func (*StyleBuilder) Build ¶
func (s *StyleBuilder) Build() (*Style, error)
func (*StyleBuilder) Get ¶
func (s *StyleBuilder) Get(ttype TokenType) StyleEntry
type StyleEntries ¶
StyleEntries mapping TokenType to colour definition.
type StyleEntry ¶
type StyleEntry struct { // Hex colours. Colour Colour Background Colour Border Colour Bold Trilean Italic Trilean Underline Trilean NoInherit bool }
A StyleEntry in the Style map.
func ParseStyleEntry ¶
func ParseStyleEntry(entry string) (StyleEntry, error)
ParseStyleEntry parses a Pygments style entry.
func (StyleEntry) Inherit ¶
func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry
Inherit styles from ancestors.
Ancestors should be provided from oldest to newest.
func (StyleEntry) IsZero ¶
func (s StyleEntry) IsZero() bool
func (StyleEntry) String ¶
func (s StyleEntry) String() string
func (StyleEntry) Sub ¶
func (s StyleEntry) Sub(e StyleEntry) StyleEntry
type Token ¶
Token output to formatter.
type TokenType ¶
type TokenType int
TokenType is the type of token to highlight.
It is also an Emitter, emitting a single token of itself
const ( // Default background style. Background TokenType = -1 - iota // Line numbers in output. LineNumbers // Line numbers in output when in table. LineNumbersTable // Line highlight style. LineHighlight // Line numbers table wrapper style. LineTable // Line numbers table TD wrapper style. LineTableTD // Input that could not be tokenised. Error // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate. Other // No highlighting. None )
Meta token types.
const ( Keyword TokenType = 1000 + iota KeywordConstant KeywordDeclaration KeywordNamespace KeywordPseudo KeywordReserved KeywordType )
Keywords.
const ( Name TokenType = 2000 + iota NameAttribute NameBuiltin NameBuiltinPseudo NameClass NameConstant NameDecorator NameEntity NameException NameFunction NameFunctionMagic NameKeyword NameLabel NameNamespace NameOperator NameOther NamePseudo NameProperty NameTag NameVariable NameVariableAnonymous NameVariableClass NameVariableGlobal NameVariableInstance NameVariableMagic )
Names.
const ( LiteralString TokenType = 3100 + iota LiteralStringAffix LiteralStringAtom LiteralStringBacktick LiteralStringBoolean LiteralStringChar LiteralStringDelimiter LiteralStringDoc LiteralStringDouble LiteralStringEscape LiteralStringHeredoc LiteralStringInterpol LiteralStringName LiteralStringOther LiteralStringRegex LiteralStringSingle LiteralStringSymbol )
Strings.
const ( LiteralNumber TokenType = 3200 + iota LiteralNumberBin LiteralNumberFloat LiteralNumberHex LiteralNumberInteger LiteralNumberIntegerLong LiteralNumberOct )
Literals.
const ( Comment TokenType = 6000 + iota CommentHashbang CommentMultiline CommentSingle CommentSpecial )
Comments.
const ( Generic TokenType = 7000 + iota GenericDeleted GenericEmph GenericError GenericHeading GenericInserted GenericOutput GenericPrompt GenericStrong GenericSubheading GenericTraceback GenericUnderline )
Generic tokens.
func (TokenType) InCategory ¶
func (TokenType) InSubCategory ¶
func (*TokenType) MarshalJSON ¶ added in v0.2.1
func (TokenType) SubCategory ¶
func (*TokenType) UnmarshalJSON ¶ added in v0.2.1
type TokeniseOptions ¶
type TypeMapping ¶ added in v0.2.0
Source Files ¶
Directories ¶
Path | Synopsis |
---|---|
_tools
|
|
cmd
|
|
Package lexers contains the registry of all lexers.
|
Package lexers contains the registry of all lexers. |
Package quick provides simple, no-configuration source code highlighting.
|
Package quick provides simple, no-configuration source code highlighting. |