mirror of https://github.com/alecthomas/chroma.git
Add tracing + better error recovery.
commit 60797cc03f
parent e5e7b57c46
@@ -28,6 +28,7 @@ var (
 	profileFlag    = kingpin.Flag("profile", "Enable profiling to file.").Hidden().String()
 	listFlag       = kingpin.Flag("list", "List lexers, styles and formatters.").Bool()
 	unbufferedFlag = kingpin.Flag("unbuffered", "Do not buffer output.").Bool()
+	traceFlag      = kingpin.Flag("trace", "Trace lexer states as they are traversed.").Bool()
 
 	lexerFlag = kingpin.Flag("lexer", "Lexer to use when formatting.").PlaceHolder("autodetect").Short('l').Enum(lexers.Names(true)...)
 	styleFlag = kingpin.Flag("style", "Style to use for formatting.").Short('s').Default("swapoff").Enum(styles.Names()...)

@@ -198,6 +199,9 @@ func lex(path string, contents string) chroma.Iterator {
 	if lexer == nil {
 		lexer = lexers.Fallback
 	}
+	if rel, ok := lexer.(*chroma.RegexLexer); ok {
+		rel.Trace(*traceFlag)
+	}
 	lexer = chroma.Coalesce(lexer)
 	it, err := lexer.Tokenise(nil, string(contents))
 	kingpin.FatalIfError(err, "")

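The two hunks above wire a --trace flag into the command-line tool. Below is a minimal sketch, not taken from this commit, of how library code could do the same thing directly; it assumes the lexers subpackage is imported from github.com/alecthomas/chroma/lexers and uses the fallback lexer purely as a stand-in for whichever lexer a real caller would pick. With tracing enabled, each lexer step reports its state, position, and remaining text on stderr (see the regexp.go hunks further down).

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
		"github.com/alecthomas/chroma/lexers"
	)

	func main() {
		var lexer chroma.Lexer = lexers.Fallback // stand-in; any registered lexer works
		// Only a RegexLexer supports tracing, so guard with a type assertion,
		// mirroring the change to lex() above.
		if rel, ok := lexer.(*chroma.RegexLexer); ok {
			rel.Trace(true)
		}
		lexer = chroma.Coalesce(lexer)
		it, err := lexer.Tokenise(nil, "some input text")
		if err != nil {
			panic(err)
		}
		// Drain the iterator; it returns nil once the input is exhausted.
		for t := it(); t != nil; t = it() {
			fmt.Println(t)
		}
	}
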
@@ -4,8 +4,8 @@ import (
 	. "github.com/alecthomas/chroma" // nolint
 )
 
-// Xml lexer.
-var Xml = Register(MustNewLexer(
+// XML lexer.
+var XML = Register(MustNewLexer(
 	&Config{
 		Name:    "XML",
 		Aliases: []string{"xml"},

@@ -86,6 +86,9 @@ func Push(states ...string) MutatorFunc {
 // Pop state from the stack when rule matches.
 func Pop(n int) MutatorFunc {
 	return func(state *LexerState) error {
+		if len(state.Stack) == 0 {
+			return fmt.Errorf("nothing to pop")
+		}
 		state.Stack = state.Stack[:len(state.Stack)-n]
 		return nil
 	}

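As a further illustration of the error-recovery theme, hypothetical and not part of the diff: calling the mutator that Pop builds against an empty stack now returns an ordinary error rather than failing on the slice operation. The sketch assumes only what the hunk shows, namely that LexerState has an exported Stack field and that MutatorFunc is a plain function type.

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
	)

	func main() {
		pop := chroma.Pop(1)          // mutator built by the function changed above
		state := &chroma.LexerState{} // zero value: empty state stack
		if err := pop(state); err != nil {
			fmt.Println(err) // "nothing to pop" instead of a slice-bounds panic
		}
	}
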
regexp.go (19 lines changed)
@@ -2,6 +2,7 @@ package chroma
 
 import (
 	"fmt"
+	"os"
 	"regexp"
 	"strings"
 	"sync"

@@ -129,6 +130,11 @@ func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
 	}, nil
 }
 
+func (r *RegexLexer) Trace(trace bool) *RegexLexer {
+	r.trace = trace
+	return r
+}
+
 // A CompiledRule is a Rule with a pre-compiled regex.
 //
 // Note that regular expressions are lazily compiled on first use of the lexer.

@@ -166,7 +172,7 @@ func (l *LexerState) Iterator() Iterator {
 	iteratorStack := []Iterator{}
 	return func() *Token {
 		for l.Pos < len(l.Text) && len(l.Stack) > 0 {
-			// Exhaust the IteratorStack, if any.
+			// Exhaust the iterator stack, if any.
 			for len(iteratorStack) > 0 {
 				n := len(iteratorStack) - 1
 				t := iteratorStack[n]()

@@ -178,6 +184,9 @@ func (l *LexerState) Iterator() Iterator {
 			}
 
 			l.State = l.Stack[len(l.Stack)-1]
+			if l.Lexer.trace {
+				fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q\n", l.State, l.Pos, string(l.Text[l.Pos:]))
+			}
 			ruleIndex, rule, groups := matchRules(l.Text[l.Pos:], l.Rules[l.State])
 			// No match.
 			if groups == nil {

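With tracing switched on, the Fprintf added here writes one line per step to stderr. Purely as an illustration with made-up values (the state name, position, and input below are hypothetical), such a line would look like:

	root: pos=0, text="<hello>world</hello>"
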
@@ -207,6 +216,13 @@ func (l *LexerState) Iterator() Iterator {
 			}
 			return t
 		}
+
+		// If we get to here and we still have text, return it as an error.
+		if l.Pos != len(l.Text) && len(l.Stack) == 0 {
+			value := string(l.Text[l.Pos:])
+			l.Pos = len(l.Text)
+			return &Token{Type: Error, Value: value}
+		}
 		return nil
 	}
 }

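This hunk is the error-recovery half of the commit: when the state stack empties before the input is exhausted, the remaining text now comes back as a single token of type Error instead of being silently dropped. A small sketch, hedged as before (fallback lexer as a stand-in, lexers import path assumed), of how a caller might watch for such tokens:

	package main

	import (
		"fmt"
		"os"

		"github.com/alecthomas/chroma"
		"github.com/alecthomas/chroma/lexers"
	)

	func main() {
		lexer := lexers.Fallback // stand-in for whichever lexer applies
		it, err := lexer.Tokenise(nil, "input that may end in unlexable text")
		if err != nil {
			panic(err)
		}
		for t := it(); t != nil; t = it() {
			if t.Type == chroma.Error {
				// With this commit, text the lexer could not match shows up
				// here instead of disappearing from the output.
				fmt.Fprintf(os.Stderr, "unlexable input: %q\n", t.Value)
				continue
			}
			fmt.Print(t.Value)
		}
	}
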
@@ -214,6 +230,7 @@ func (l *LexerState) Iterator() Iterator {
 type RegexLexer struct {
 	config   *Config
 	analyser func(text string) float32
+	trace    bool
 
 	mu       sync.Mutex
 	compiled bool
Loading…
x
Reference in New Issue
Block a user