Documentation ¶
Overview ¶
Package parse is the top-level package for the Cogent Core parsing system.
The code is organized into sub-packages dealing with the different stages of parsing.
Sub-package languages has the parsers for specific languages, including Go, Markdown, and TeX (the latter two are lexer-only).
Note that the GUI editor framework for creating and testing parsers is currently in the piv subpackage in Cogent Code: https://github.com/cogentcore/cogent/tree/main/code/piv
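For orientation, the following is a minimal sketch of parsing a single Go file through the language support hub. The import paths, file name, and error handling are assumptions and may differ in your setup; see the type documentation below for the exact signatures.

package main

import (
	"fmt"
	"os"

	"cogentcore.org/core/base/fileinfo" // assumed import path
	"cogentcore.org/core/parse"         // assumed import path
)

func main() {
	// load the standard compiled-in parsers (Go, Markdown, TeX, ...)
	if err := parse.LanguageSupport.OpenStandard(); err != nil {
		panic(err)
	}
	props, err := parse.LanguageSupport.Properties(fileinfo.Go)
	if err != nil {
		panic(err)
	}
	// FileStates double-buffers the parse state for one file
	fss := parse.NewFileStates("main.go", "", fileinfo.Go) // hypothetical file
	txt, _ := os.ReadFile("main.go")
	props.Lang.ParseFile(fss, txt) // lex + parse + symbol processing
	fmt.Println(fss.Done().ParseErrReport())
}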
Index ¶
- Variables
- type FileState
- func (fs *FileState) ClearAST()
- func (fs *FileState) Destroy()
- func (fs *FileState) FindAnyChildren(sym *syms.Symbol, seed string, scope syms.SymMap, kids *syms.SymMap) bool
- func (fs *FileState) FindChildren(sym *syms.Symbol, seed string, scope syms.SymMap, kids *syms.SymMap) bool
- func (fs *FileState) FindNamePrefixScoped(seed string, scope syms.SymMap, matches *syms.SymMap)
- func (fs *FileState) FindNameScoped(nm string, scope syms.SymMap) (*syms.Symbol, bool)
- func (fs *FileState) Init()
- func (fs *FileState) LexAtEnd() bool
- func (fs *FileState) LexErrReport() string
- func (fs *FileState) LexHasErrs() bool
- func (fs *FileState) LexLine(ln int) lexer.Line
- func (fs *FileState) LexLineString() string
- func (fs *FileState) LexNextSrcLine() string
- func (fs *FileState) NextAnonName(ctxt string) string
- func (fs *FileState) ParseAtEnd() bool
- func (fs *FileState) ParseErrReport() string
- func (fs *FileState) ParseErrReportAll() string
- func (fs *FileState) ParseErrReportDetailed() string
- func (fs *FileState) ParseHasErrs() bool
- func (fs *FileState) ParseNextSrcLine() string
- func (fs *FileState) ParseRuleString(full bool) string
- func (fs *FileState) PassTwoErrReport() string
- func (fs *FileState) PassTwoHasErrs() bool
- func (fs *FileState) PathMapLoad(path string) (string, bool)
- func (fs *FileState) PathMapStore(path, abs string)
- func (fs *FileState) SetSrc(src [][]rune, fname, basepath string, sup fileinfo.Known)
- type FileStates
- func (fs *FileStates) DeleteMetaData(key string)
- func (fs *FileStates) Done() *FileState
- func (fs *FileStates) DoneNoLock() *FileState
- func (fs *FileStates) EndProc()
- func (fs *FileStates) MetaData(key string) (string, bool)
- func (fs *FileStates) Proc() *FileState
- func (fs *FileStates) ProcNoLock() *FileState
- func (fs *FileStates) SetMetaData(key, value string)
- func (fs *FileStates) SetSrc(fname, basepath string, sup fileinfo.Known)
- func (fs *FileStates) StartProc(txt []byte) *FileState
- func (fs *FileStates) Switch()
- type Language
- type LanguageDirOptions
- type LanguageFlags
- func (i LanguageFlags) Desc() string
- func (i LanguageFlags) Int64() int64
- func (i LanguageFlags) MarshalText() ([]byte, error)
- func (i *LanguageFlags) SetInt64(in int64)
- func (i *LanguageFlags) SetString(s string) error
- func (i LanguageFlags) String() string
- func (i *LanguageFlags) UnmarshalText(text []byte) error
- func (i LanguageFlags) Values() []enums.Enum
- type LanguageProperties
- type LanguageSupporter
- type Parser
- func (pr *Parser) DoPassTwo(fs *FileState)
- func (pr *Parser) Init()
- func (pr *Parser) InitAll()
- func (pr *Parser) LexAll(fs *FileState)
- func (pr *Parser) LexInit(fs *FileState)
- func (pr *Parser) LexLine(fs *FileState, ln int, txt []rune) lexer.Line
- func (pr *Parser) LexNext(fs *FileState) *lexer.Rule
- func (pr *Parser) LexNextLine(fs *FileState) *lexer.Rule
- func (pr *Parser) LexRun(fs *FileState)
- func (pr *Parser) OpenJSON(filename string) error
- func (pr *Parser) ParseAll(fs *FileState)
- func (pr *Parser) ParseLine(fs *FileState, ln int) *FileState
- func (pr *Parser) ParseNext(fs *FileState) *parser.Rule
- func (pr *Parser) ParseRun(fs *FileState)
- func (pr *Parser) ParseString(str string, fname string, sup fileinfo.Known) *FileState
- func (pr *Parser) ParserInit(fs *FileState) bool
- func (pr *Parser) ReadJSON(b []byte) error
- func (pr *Parser) SaveGrammar(filename string) error
- func (pr *Parser) SaveJSON(filename string) error
Constants ¶
This section is empty.
Variables ¶
var LanguageSupport = LanguageSupporter{}
LanguageSupport is the main language support hub for accessing parse support interfaces for each supported language
var StandardLanguageProperties = map[fileinfo.Known]*LanguageProperties{
	fileinfo.Ada:        {fileinfo.Ada, "--", "", "", nil, nil, nil},
	fileinfo.Bash:       {fileinfo.Bash, "# ", "", "", nil, nil, nil},
	fileinfo.Csh:        {fileinfo.Csh, "# ", "", "", nil, nil, nil},
	fileinfo.C:          {fileinfo.C, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.CSharp:     {fileinfo.CSharp, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.D:          {fileinfo.D, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.ObjC:       {fileinfo.ObjC, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.Go:         {fileinfo.Go, "// ", "/* ", " */", []LanguageFlags{IndentTab}, nil, nil},
	fileinfo.Java:       {fileinfo.Java, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.JavaScript: {fileinfo.JavaScript, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.Eiffel:     {fileinfo.Eiffel, "--", "", "", nil, nil, nil},
	fileinfo.Haskell:    {fileinfo.Haskell, "--", "{- ", "-}", nil, nil, nil},
	fileinfo.Lisp:       {fileinfo.Lisp, "; ", "", "", nil, nil, nil},
	fileinfo.Lua:        {fileinfo.Lua, "--", "---[[ ", "--]]", nil, nil, nil},
	fileinfo.Makefile:   {fileinfo.Makefile, "# ", "", "", []LanguageFlags{IndentTab}, nil, nil},
	fileinfo.Matlab:     {fileinfo.Matlab, "% ", "%{ ", " %}", nil, nil, nil},
	fileinfo.OCaml:      {fileinfo.OCaml, "", "(* ", " *)", nil, nil, nil},
	fileinfo.Pascal:     {fileinfo.Pascal, "// ", " ", " }", nil, nil, nil},
	fileinfo.Perl:       {fileinfo.Perl, "# ", "", "", nil, nil, nil},
	fileinfo.Python:     {fileinfo.Python, "# ", "", "", []LanguageFlags{IndentSpace}, nil, nil},
	fileinfo.Php:        {fileinfo.Php, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.R:          {fileinfo.R, "# ", "", "", nil, nil, nil},
	fileinfo.Ruby:       {fileinfo.Ruby, "# ", "", "", nil, nil, nil},
	fileinfo.Rust:       {fileinfo.Rust, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.Scala:      {fileinfo.Scala, "// ", "/* ", " */", nil, nil, nil},
	fileinfo.Html:       {fileinfo.Html, "", "<!-- ", " -->", nil, nil, nil},
	fileinfo.TeX:        {fileinfo.TeX, "% ", "", "", nil, nil, nil},
	fileinfo.Markdown:   {fileinfo.Markdown, "", "<!--- ", " -->", []LanguageFlags{IndentSpace}, nil, nil},
	fileinfo.Yaml:       {fileinfo.Yaml, "#", "", "", []LanguageFlags{IndentSpace}, nil, nil},
}
StandardLanguageProperties is the standard compiled-in set of language properties
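As an illustrative sketch (assuming client code with the parse and fileinfo packages imported), the comment syntax for a given language can be looked up directly from this map:

// look up the comment delimiters registered for Go
if props, ok := parse.StandardLanguageProperties[fileinfo.Go]; ok {
	fmt.Println(props.CommentLn)                  // single-line comment prefix: "// "
	fmt.Println(props.CommentSt, props.CommentEd) // block comment delimiters: "/* " and " */"
}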
Functions ¶
This section is empty.
Types ¶
type FileState ¶
type FileState struct {
	// the source to be parsed -- also holds the full lexed tokens
	Src lexer.File `json:"-" xml:"-"`

	// state for lexing
	LexState lexer.State `json:"_" xml:"-"`

	// state for second pass nesting depth and EOS matching
	TwoState lexer.TwoState `json:"-" xml:"-"`

	// state for parsing
	ParseState parser.State `json:"-" xml:"-"`

	// ast output tree from parsing
	AST *parser.AST `json:"-" xml:"-"`

	// symbols contained within this file -- initialized at start of parsing and created by AddSymbol or PushNewScope actions. These are then processed after parsing by the language-specific code, via Lang interface.
	Syms syms.SymMap `json:"-" xml:"-"`

	// External symbols that are entirely maintained in a language-specific way by the Lang interface code. These are only here as a convenience and are not accessed in any way by the language-general parse code.
	ExtSyms syms.SymMap `json:"-" xml:"-"`

	// mutex protecting updates / reading of Syms symbols
	SymsMu sync.RWMutex `display:"-" json:"-" xml:"-"`

	// waitgroup for coordinating processing of other items
	WaitGp sync.WaitGroup `display:"-" json:"-" xml:"-"`

	// anonymous counter -- counts up
	AnonCtr int `display:"-" json:"-" xml:"-"`

	// path mapping cache -- for other files referred to by this file, this stores the full path associated with a logical path (e.g., in go, the logical import path -> local path with actual files) -- protected for access from any thread
	PathMap sync.Map `display:"-" json:"-" xml:"-"`
}
FileState contains the full lexing and parsing state information for a given file. It is the master state record for everything that happens in parse. One of these should be maintained for each file; texteditor.Buf has one as ParseState field.
Separate State structs are maintained for each stage (Lexing, PassTwo, Parsing) and the final output of Parsing goes into the AST and Syms fields.
The Src lexer.File field maintains all the info about the source file, and the basic tokenized version of the source produced initially by lexing and updated by the remaining passes. It has everything that is maintained at a line-by-line level.
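The following sketch shows one way a FileState can be filled in and run through a Parser directly; the source text is hypothetical, the parse and fileinfo imports are assumed as in the overview sketch, and pr is assumed to come from an initialized language (e.g., LanguageSupport.Properties(fileinfo.Go).Parser).

fs := parse.NewFileState()
src := [][]rune{[]rune("package main")} // source as runes, one line per entry
fs.SetSrc(src, "example.go", "", fileinfo.Go)

pr.LexAll(fs) // lexing passes
if fs.LexHasErrs() {
	fmt.Println(fs.LexErrReport())
}
// pr.DoPassTwo(fs) // may be needed here if LexAll does not already include pass two (assumption)
pr.ParseAll(fs) // parsing, assuming lexing is done
if fs.ParseHasErrs() {
	fmt.Println(fs.ParseErrReport())
}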
func NewFileState ¶
func NewFileState() *FileState
NewFileState returns a new initialized file state
func (*FileState) FindAnyChildren ¶
func (fs *FileState) FindAnyChildren(sym *syms.Symbol, seed string, scope syms.SymMap, kids *syms.SymMap) bool
FindAnyChildren fills out map with either direct children of given symbol or those of the type of this symbol -- useful for completion. If seed is non-empty it is used as a prefix for filtering children names. Returns false if no children were found.
func (*FileState) FindChildren ¶
func (fs *FileState) FindChildren(sym *syms.Symbol, seed string, scope syms.SymMap, kids *syms.SymMap) bool
FindChildren fills out the map with direct children of the given symbol. If seed is non-empty it is used as a prefix for filtering children names. Returns false if no children were found.
func (*FileState) FindNamePrefixScoped ¶
FindNamePrefixScoped looks for the given symbol name prefix within the given scope map first (if non-nil) and then in the fs.Syms and ExtSyms maps, including any children of those global maps that are of subcategory token.NameScope (i.e., namespace, module, package, library). Matches are added to the given matches map, for more efficient recursive use.
func (*FileState) FindNameScoped ¶
FindNameScoped looks for the given symbol name within the given scope map first (if non-nil) and then in the fs.Syms and ExtSyms maps, including any children of those global maps that are of subcategory token.NameScope (i.e., namespace, module, package, library).
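A small sketch of symbol lookup after parsing, assuming fs from the FileState sketch above; the symbol name and prefix are hypothetical, and the syms import path is assumed.

// exact-name lookup in the file's symbols (nil scope searches fs.Syms / ExtSyms)
if sym, ok := fs.FindNameScoped("MyFunc", nil); ok {
	fmt.Println(sym)
}

// prefix lookup, e.g. for gathering completion candidates
var matches syms.SymMap
fs.FindNamePrefixScoped("My", nil, &matches)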
func (*FileState) LexErrReport ¶
LexErrReport returns a report of all the lexing errors -- these should only occur during development of lexer so we use a detailed report format
func (*FileState) LexHasErrs ¶
LexHasErrs returns true if there were errors from lexing
func (*FileState) LexLine ¶
LexLine returns the lexing output for given line, combining comments and all other tokens and allocating new memory using clone
func (*FileState) LexLineString ¶
LexLineString returns a string rep of the current lexing output for the current line
func (*FileState) LexNextSrcLine ¶
LexNextSrcLine returns the next line of source that the lexer is currently at
func (*FileState) NextAnonName ¶
NextAnonName returns the next anonymous name for this file, using counter here and given context name (e.g., package name)
func (*FileState) ParseAtEnd ¶
ParseAtEnd returns true if parsing state is now at end of source
func (*FileState) ParseErrReport ¶
ParseErrReport returns at most 10 parsing errors in end-user format, sorted
func (*FileState) ParseErrReportAll ¶
ParseErrReportAll returns all parsing errors in end-user format, sorted
func (*FileState) ParseErrReportDetailed ¶
ParseErrReportDetailed returns at most 10 parsing errors in detailed format, sorted
func (*FileState) ParseHasErrs ¶
ParseHasErrs returns true if there were errors from parsing
func (*FileState) ParseNextSrcLine ¶
ParseNextSrcLine returns the next line of source that the parser is currently at
func (*FileState) ParseRuleString ¶
ParseRuleString returns the rule info for the entire source; if full is true it includes the full stack at each point, otherwise just the top of the stack.
func (*FileState) PassTwoErrReport ¶
PassTwoErrReport returns all the pass-two errors as a string; these should only occur during development, so a detailed report format is used.
func (*FileState) PassTwoHasErrs ¶
PassTwoHasErrs returns true if there were errors from pass two processing
func (*FileState) PathMapLoad ¶
PathMapLoad does a mutex-protected load of PathMap for given string, returning value and true if found
func (*FileState) PathMapStore ¶
PathMapStore does a mutex-protected store of abs path for given path key
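A sketch of the path-cache pattern these two methods support, assuming fs from the FileState sketch above (the logical and absolute paths are hypothetical):

// resolve a logical import path to an absolute directory, caching the result
logical := "github.com/example/pkg"
if abs, ok := fs.PathMapLoad(logical); ok {
	fmt.Println("cached:", abs)
} else {
	abs := "/home/user/go/src/github.com/example/pkg" // resolved by language-specific code
	fs.PathMapStore(logical, abs)
}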
type FileStates ¶
type FileStates struct {
	// the filename
	Filename string

	// the known file type, if known (typically only known files are processed)
	Known fileinfo.Known

	// base path for reporting file names -- this must be set externally e.g., by gide for the project root path
	BasePath string

	// index of the state that is done
	DoneIndex int

	// one filestate
	FsA FileState

	// one filestate
	FsB FileState

	// mutex locking the switching of Done vs. Proc states
	SwitchMu sync.Mutex

	// mutex locking the parsing of Proc state -- reading states can happen fine with this locked, but no switching
	ProcMu sync.Mutex

	// extra meta data associated with this FileStates
	Meta map[string]string
}
FileStates contains two FileState structs: one is being processed while the other is being used externally. FileStates maintains a common set of file information that is set in each FileState when it is used.
func NewFileStates ¶
func NewFileStates(fname, basepath string, sup fileinfo.Known) *FileStates
NewFileStates returns a new FileStates for given filename, basepath, and known file type.
func (*FileStates) DeleteMetaData ¶
func (fs *FileStates) DeleteMetaData(key string)
DeleteMetaData deletes given meta data record
func (*FileStates) Done ¶
func (fs *FileStates) Done() *FileState
Done returns the filestate that is done being updated, and is ready for use by external clients etc. Proc is the other one which is currently being processed by the parser and is not ready to be used externally. The state is accessed under a lock, and as long as any use of state is fast enough, it should be usable over next two switches (typically true).
func (*FileStates) DoneNoLock ¶
func (fs *FileStates) DoneNoLock() *FileState
DoneNoLock returns the filestate that is done being updated, and is ready for use by external clients etc. Proc is the other one which is currently being processed by the parser and is not ready to be used externally. The state is accessed under a lock, and as long as any use of state is fast enough, it should be usable over next two switches (typically true).
func (*FileStates) EndProc ¶
func (fs *FileStates) EndProc()
EndProc is called when primary processing (parsing) has been completed -- there still may be ongoing updating of symbols after this point but parse is done. This calls Switch to move Proc over to done, under cover of ProcMu Lock
func (*FileStates) MetaData ¶
func (fs *FileStates) MetaData(key string) (string, bool)
MetaData returns the meta data string for the given key; the bool return value is true if present, false if not.
func (*FileStates) Proc ¶
func (fs *FileStates) Proc() *FileState
Proc returns the filestate that is currently being processed by the parser etc and is not ready for external use. Access is protected by a lock so it will wait if currently switching. The state is accessed under a lock, and as long as any use of state is fast enough, it should be usable over next two switches (typically true).
func (*FileStates) ProcNoLock ¶
func (fs *FileStates) ProcNoLock() *FileState
ProcNoLock returns the filestate that is currently being processed by the parser etc and is not ready for external use. Access is protected by a lock so it will wait if currently switching. The state is accessed under a lock, and as long as any use of state is fast enough, it should be usable over next two switches (typically true).
func (*FileStates) SetMetaData ¶
func (fs *FileStates) SetMetaData(key, value string)
SetMetaData sets given meta data record
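A brief sketch of the meta data accessors, assuming fss from the overview sketch (the key and value are hypothetical):

fss.SetMetaData("encoding", "utf-8")
if v, ok := fss.MetaData("encoding"); ok {
	fmt.Println(v)
}
fss.DeleteMetaData("encoding")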
func (*FileStates) SetSrc ¶
func (fs *FileStates) SetSrc(fname, basepath string, sup fileinfo.Known)
SetSrc sets the source that is processed by this FileStates. If basepath is empty then it is set to the path for the filename.
func (*FileStates) StartProc ¶
func (fs *FileStates) StartProc(txt []byte) *FileState
StartProc should be called when starting to process the file, and returns the FileState to use for processing. It locks the Proc state, sets the current source code, and returns the filestate for subsequent processing.
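A sketch of the typical StartProc / EndProc / Done cycle, as a Language implementation might run it; pr, txt, and fss are assumed from the earlier sketches, and the exact lexing and parsing calls depend on the language.

pfs := fss.StartProc(txt) // locks Proc and sets the current source
pr.LexAll(pfs)            // process into the Proc FileState
pr.ParseAll(pfs)
fss.EndProc()      // switches Proc over to Done
done := fss.Done() // now safe for external use (highlighting, completion, ...)
_ = done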
func (*FileStates) Switch ¶
func (fs *FileStates) Switch()
Switch switches so that the current Proc() filestate is now the Done() one. It is assumed to be called under ProcMu lock cover, and it also does the SwitchMu locking.
type Language ¶ added in v0.2.3
type Language interface {
	// Parser returns the [Parser] for this language
	Parser() *Parser

	// ParseFile does the complete processing of a given single file, given by txt bytes,
	// as appropriate for the language -- e.g., runs the lexer followed by the parser, and
	// manages any symbol output from parsing as appropriate for the language / format.
	// This is to be used for files of "primary interest" -- it does full type inference
	// and symbol resolution etc. The Proc() FileState is locked during parsing,
	// and Switch is called after, so Done() will contain the processed info after this call.
	// If txt is nil then any existing source in fs is used.
	ParseFile(fs *FileStates, txt []byte)

	// HighlightLine does the lexing and potentially parsing of a given line of the file,
	// for purposes of syntax highlighting -- uses Done() FileState of existing context
	// if available from prior lexing / parsing. Line is in 0-indexed "internal" line indexes,
	// and provides relevant context for the overall parsing, which is performed
	// on the given line of text runes, and also updates corresponding source in FileState
	// (via a copy). If txt is nil then any existing source in fs is used.
	HighlightLine(fs *FileStates, line int, txt []rune) lexer.Line

	// CompleteLine provides the list of relevant completions for given text
	// which is at given position within the file.
	// Typically the language will call ParseLine on that line, and use the AST
	// to guide the selection of relevant symbols that can complete the code at
	// the given point.
	CompleteLine(fs *FileStates, text string, pos lexer.Pos) complete.Matches

	// CompleteEdit returns the completion edit data for integrating the
	// selected completion into the source
	CompleteEdit(fs *FileStates, text string, cp int, comp complete.Completion, seed string) (ed complete.Edit)

	// Lookup returns lookup results for given text which is at given position
	// within the file. This can either be a file and position in file to
	// open and view, or direct text to show.
	Lookup(fs *FileStates, text string, pos lexer.Pos) complete.Lookup

	// IndentLine returns the indentation level for given line based on
	// previous line's indentation level, and any delta change based on
	// e.g., brackets starting or ending the previous or current line, or
	// other language-specific keywords. See lexer.BracketIndentLine for example.
	// Indent level is in increments of tabSz for spaces, and tabs for tabs.
	// Operates on rune source with markup lex tags per line.
	IndentLine(fs *FileStates, src [][]rune, tags []lexer.Line, ln int, tabSz int) (pInd, delInd, pLn int, ichr indent.Character)

	// AutoBracket returns what to do when a user types a starting bracket character
	// (bracket, brace, paren) while typing.
	// pos = position where bra will be inserted, and curLn is the current line
	// match = insert the matching ket, and newLine = insert a new line.
	AutoBracket(fs *FileStates, bra rune, pos lexer.Pos, curLn []rune) (match, newLine bool)

	// ParseDir does the complete processing of a given directory, optionally including
	// subdirectories, and optionally forcing the re-processing of the directory(s),
	// instead of using cached symbols. Typically the cache will be used unless files
	// have a more recent modification date than the cache file. This returns the
	// language-appropriate set of symbols for the directory(s), which could then provide
	// the symbols for a given package, library, or module at that path.
	ParseDir(fs *FileState, path string, opts LanguageDirOptions) *syms.Symbol

	// LexLine is a lower-level call (mostly used internally to the language) that
	// does just the lexing of a given line of the file, using existing context
	// if available from prior lexing / parsing.
	// Line is in 0-indexed "internal" line indexes.
	// The rune source is updated from the given text if non-nil.
	LexLine(fs *FileState, line int, txt []rune) lexer.Line

	// ParseLine is a lower-level call (mostly used internally to the language) that
	// does complete parser processing of a single line from given file, and returns
	// the FileState for just that line. Line is in 0-indexed "internal" line indexes.
	// The rune source information is assumed to have already been updated in FileState.
	// Existing context information from full-file parsing is used as appropriate, but
	// the results will NOT be used to update any existing full-file AST representation --
	// should call ParseFile to update that as appropriate.
	ParseLine(fs *FileState, line int) *FileState
}
Language provides a general interface for language-specific management of the lexing, parsing, and symbol lookup process. The parse lexer and parser machinery is entirely language-general but specific languages may need specific ways of managing these processes, and processing their outputs, to best support the features of those languages. That is what this interface provides.
Each language defines a type supporting this interface, which is in turn registered with the StandardLanguageProperties map. Each supported language has its own .go file in this parse package that defines its own implementation of the interface and any other associated functionality.
The Language is responsible for accessing the appropriate Parser for this language (initialized and managed via LanguageSupport.OpenStandard() etc.), and the FileState structure contains all the input and output state information for a given file.
This interface is likely to evolve as we expand the range of supported languages.
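As a sketch of client usage (assuming props and fss from the overview sketch above, plus an assumed lexer import; the lexer.Pos field names Ln and Ch are assumptions):

lang := props.Lang // the Language implementation for this file type

// syntax highlighting for line 0, given its current runes
tags := lang.HighlightLine(fss, 0, []rune("fmt.Println(x)"))
_ = tags

// completion candidates at a cursor position within a line
matches := lang.CompleteLine(fss, "fmt.Pr", lexer.Pos{Ln: 0, Ch: 6})
_ = matches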
type LanguageDirOptions ¶ added in v0.2.3
type LanguageDirOptions struct {
	// process subdirectories -- otherwise not
	Subdirs bool

	// rebuild the symbols by reprocessing from scratch instead of using cache
	Rebuild bool

	// do not update the cache with results from processing
	Nocache bool
}
LanguageDirOptions provides options for the [Language.ParseDir] method
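For example, a hedged sketch of parsing a whole directory of sources into a symbol tree; the path is hypothetical, and lang and fs are assumed from the earlier sketches.

// parse a directory and its subdirectories, using the cache where valid
dirSym := lang.ParseDir(fs, "/home/user/go/src/github.com/example/pkg",
	parse.LanguageDirOptions{Subdirs: true})
_ = dirSym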
type LanguageFlags ¶ added in v0.2.3
type LanguageFlags int32 //enums:enum
LanguageFlags are special properties of a given language
const (
	// NoFlags = nothing special
	NoFlags LanguageFlags = iota

	// IndentSpace means that spaces must be used for this language
	IndentSpace

	// IndentTab means that tabs must be used for this language
	IndentTab

	// ReAutoIndent causes current line to be re-indented during AutoIndent for Enter
	// (newline) -- this should only be set for strongly indented languages where
	// the previous + current line can tell you exactly what indent the current line
	// should be at.
	ReAutoIndent
)
LanguageFlags values.
const LanguageFlagsN LanguageFlags = 4
LanguageFlagsN is the highest valid value for type LanguageFlags, plus one.
func LanguageFlagsValues ¶ added in v0.2.3
func LanguageFlagsValues() []LanguageFlags
LanguageFlagsValues returns all possible values for the type LanguageFlags.
func (LanguageFlags) Desc ¶ added in v0.2.3
func (i LanguageFlags) Desc() string
Desc returns the description of the LanguageFlags value.
func (LanguageFlags) Int64 ¶ added in v0.2.3
func (i LanguageFlags) Int64() int64
Int64 returns the LanguageFlags value as an int64.
func (LanguageFlags) MarshalText ¶ added in v0.2.3
func (i LanguageFlags) MarshalText() ([]byte, error)
MarshalText implements the encoding.TextMarshaler interface.
func (*LanguageFlags) SetInt64 ¶ added in v0.2.3
func (i *LanguageFlags) SetInt64(in int64)
SetInt64 sets the LanguageFlags value from an int64.
func (*LanguageFlags) SetString ¶ added in v0.2.3
func (i *LanguageFlags) SetString(s string) error
SetString sets the LanguageFlags value from its string representation, and returns an error if the string is invalid.
func (LanguageFlags) String ¶ added in v0.2.3
func (i LanguageFlags) String() string
String returns the string representation of this LanguageFlags value.
func (*LanguageFlags) UnmarshalText ¶ added in v0.2.3
func (i *LanguageFlags) UnmarshalText(text []byte) error
UnmarshalText implements the encoding.TextUnmarshaler interface.
func (LanguageFlags) Values ¶ added in v0.2.3
func (i LanguageFlags) Values() []enums.Enum
Values returns all possible values for the type LanguageFlags.
type LanguageProperties ¶ added in v0.2.3
type LanguageProperties struct {
	// known language -- must be a supported one from Known list
	Known fileinfo.Known

	// character(s) that start a single-line comment -- if empty then multi-line comment syntax will be used
	CommentLn string

	// character(s) that start a multi-line comment or one that requires both start and end
	CommentSt string

	// character(s) that end a multi-line comment or one that requires both start and end
	CommentEd string

	// special properties for this language -- as an explicit list of options to make them easier to see and set in defaults
	Flags []LanguageFlags

	// Lang interface for this language
	Lang Language `json:"-" xml:"-"`

	// parser for this language -- initialized in OpenStandard
	Parser *Parser `json:"-" xml:"-"`
}
LanguageProperties contains properties of languages supported by the parser framework
func (*LanguageProperties) HasFlag ¶ added in v0.2.3
func (lp *LanguageProperties) HasFlag(flg LanguageFlags) bool
HasFlag returns true if given flag is set in Flags
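For example, a sketch of using HasFlag to choose an indentation string (props is assumed from the overview sketch):

indentStr := "\t" // default to tabs
if props.HasFlag(parse.IndentSpace) {
	indentStr = "    " // language requires spaces
}
_ = indentStr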
type LanguageSupporter ¶ added in v0.2.3
type LanguageSupporter struct{}
LanguageSupporter provides general support for supported languages. e.g., looking up lexers and parsers by name. Also implements the lexer.LangLexer interface to provide access to other Guest Lexers
func (*LanguageSupporter) LexerByName ¶ added in v0.2.3
func (ll *LanguageSupporter) LexerByName(lang string) *lexer.Rule
LexerByName looks up Lexer for given language by name (with case-insensitive fallback). Returns nil if not supported.
func (*LanguageSupporter) OpenStandard ¶ added in v0.2.3
func (ll *LanguageSupporter) OpenStandard() error
OpenStandard opens all the standard parsers for languages, from the langs/ directory
func (*LanguageSupporter) Properties ¶ added in v0.2.3
func (ll *LanguageSupporter) Properties(sup fileinfo.Known) (*LanguageProperties, error)
Properties looks up language properties by the fileinfo.Known constant.
func (*LanguageSupporter) PropertiesByName ¶ added in v0.2.3
func (ll *LanguageSupporter) PropertiesByName(lang string) (*LanguageProperties, error)
PropertiesByName looks up language properties by string name of language (with case-insensitive fallback). Returns error if not supported.
type Parser ¶
type Parser struct {
	// lexer rules for first pass of lexing file
	Lexer *lexer.Rule

	// second pass after lexing -- computes nesting depth and EOS finding
	PassTwo lexer.PassTwo

	// parser rules for parsing lexed tokens
	Parser *parser.Rule

	// file name for overall parser (not file being parsed!)
	Filename string

	// if true, reports errors after parsing, to stdout
	ReportErrs bool

	// when loaded from file, this is the modification time of the parser -- re-processes cache if parser is newer than cached files
	ModTime time.Time `json:"-" xml:"-"`
}
Parser is the overall parser for managing the parsing
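A sketch of loading a custom grammar from a JSON-formatted .parse file and running it on some source. The grammar file name and source are hypothetical, fileinfo.Unknown is assumed to be a valid Known value, and whether DoPassTwo must be called separately from LexAll is an assumption.

pr := &parse.Parser{}
if err := pr.OpenJSON("mylang.parse"); err != nil { // hypothetical grammar file
	panic(err)
}
pr.InitAll()

fs := parse.NewFileState()
fs.SetSrc([][]rune{[]rune("hello world")}, "test.txt", "", fileinfo.Unknown)
pr.LexAll(fs)
// pr.DoPassTwo(fs) // if not already included in LexAll (assumption)
pr.ParseAll(fs)
fmt.Println(fs.ParseRuleString(false))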
func (*Parser) Init ¶
func (pr *Parser) Init()
Init initializes the parser -- must be called after creation
func (*Parser) InitAll ¶
func (pr *Parser) InitAll()
InitAll initializes everything about the parser -- call this when setting up a new parser after it has been loaded etc
func (*Parser) LexLine ¶
LexLine runs the lexer for the given single line of source, which is updated from the given text (if non-nil). Returns merged regular and token comment lines, cloned and ready for use.
func (*Parser) LexNext ¶
LexNext does the next step of lexing; returns the lowest-level rule that matched, and nil upon a no-match error or at the end of the source input.
func (*Parser) LexNextLine ¶
LexNextLine does the next line of lexing; returns the lowest-level rule that matched at the end, and nil upon a no-match error or at the end of the source input.
func (*Parser) OpenJSON ¶
OpenJSON opens lexer and parser rules from the given filename, in a standard JSON-formatted file
func (*Parser) ParseAll ¶
ParseAll does full parsing, including ParseInit and ParseRun, assuming LexAll has been done already
func (*Parser) ParseLine ¶
ParseLine runs the parser for the given single line of source. It does the parsing in a separate FileState and returns that, with the AST etc. (or nil if nothing was parsed). Assumes LexLine has already been run on the given line.
func (*Parser) ParseNext ¶
ParseNext does the next step of parsing; returns the lowest-level rule that matched, or nil upon a no-match error or at the end.
func (*Parser) ParseString ¶
ParseString runs lexer and parser on given string of text, returning FileState of results (can be nil if string is empty or no lexical tokens). Also takes supporting contextual info for file / language that this string is associated with (only for reference)
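A brief sketch (pr is assumed to be an initialized parser for Go, as in the earlier sketches):

sfs := pr.ParseString("a := 1 + 2", "snippet.go", fileinfo.Go)
if sfs != nil { // nil if the string is empty or produced no tokens
	fmt.Println(sfs.ParseRuleString(false))
}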
func (*Parser) ParserInit ¶
ParserInit initializes the parser prior to running
func (*Parser) ReadJSON ¶
ReadJSON reads lexer and parser rules from the given bytes, in the standard JSON format.
func (*Parser) SaveGrammar ¶
SaveGrammar saves lexer and parser grammar rules to BNF-like .parsegrammar file
Source Files ¶
Directories ¶
Path | Synopsis
---|---
cmd |
cmd/update | Command update updates all of the .parse files within or beneath the current directory by opening and saving them.
lexer | Package lexer provides all the lexing functions that transform text into lexical tokens, using token types defined in the token package.
lsp | Package lsp contains types for the Language Server Protocol LSP: https://microsoft.github.io/language-server-protocol/specification and mappings from these elements into the token.Tokens types which are used internally in parse.
parser | Package parse does the parsing stage after lexing.
supportedlanguages | Package supportedlanguages includes all the supported languages for parse -- need to import this package to get those all included in a given target.
syms | Package syms defines the symbols and their properties that are accumulated from a parsed file, and are then used for e.g., completion lookup, etc.
token | Package token defines a complete set of all lexical tokens for any kind of language! It is based on the alecthomas/chroma / pygments lexical tokens plus all the more detailed tokens needed for actually parsing languages.