Mirror of https://github.com/alecthomas/chroma.git — synced 2025-07-01 00:35:06 +02:00

Document and add iterator panic recovery.

This commit is contained in:
Alec Thomas
2017-09-20 22:30:25 +10:00
parent cc0e4a59ab
commit e2d6abaa64
10 changed files with 75 additions and 27 deletions

View File

@ -18,9 +18,6 @@ func main() {
kingpin.CommandLine.Help = "Exercise linters against a list of files."
kingpin.Parse()
writer, err := formatters.NoOp.Format(ioutil.Discard, styles.SwapOff)
kingpin.FatalIfError(err, "")
for _, file := range *filesArgs {
lexer := lexers.Match(file)
if lexer == nil {
@ -29,8 +26,10 @@ func main() {
}
text, err := ioutil.ReadFile(file)
kingpin.FatalIfError(err, "")
err = lexer.Tokenise(nil, string(text), writer)
it, err := lexer.Tokenise(nil, string(text))
kingpin.FatalIfError(err, "%s failed to tokenise %q", lexer.Config().Name, file)
err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
kingpin.FatalIfError(err, "%s failed to format %q", lexer.Config().Name, file)
fmt.Printf("ok: %q\n", file)
}
}

View File

@ -1,13 +1,9 @@
package chroma
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
func Coalesce(lexer Lexer) Lexer {
return &coalescer{lexer}
}
// Coalesce wraps lexer so that runs of adjacent tokens of the same type
// are collapsed into single tokens.
func Coalesce(lexer Lexer) Lexer {
	return &coalescer{Lexer: lexer}
}
type coalescer struct {
Lexer
}
// coalescer merges runs of same-typed tokens emitted by the embedded Lexer.
type coalescer struct{ Lexer }
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
var prev *Token
@ -22,6 +18,10 @@ func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, e
} else {
if prev.Type == token.Type && len(prev.Value) < 8192 {
prev.Value += token.Value
} else {
out := prev
prev = token
return out
}
}
}

View File

@ -7,10 +7,37 @@ import (
// A Formatter for Chroma lexers.
type Formatter interface {
	// Format renders the tokens produced by iterator to w in the given style.
	//
	// If the iterator panics, the Formatter should recover.
	Format(w io.Writer, style *Style, iterator Iterator) error
}
// A FormatterFunc is a Formatter implemented as a function.
//
// Its Format method guards against iterator panics, recovering them
// and returning them as errors.
type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error
func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) error { return f(w, s, it) }
// Format invokes f, recovering any panic raised by the iterator (or the
// function itself). If the panic value is an error it is returned as err;
// otherwise the original value is re-panicked so it is not lost.
func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) {
	defer func() {
		if perr := recover(); perr != nil {
			// A bare perr.(error) assertion would itself panic with a
			// TypeAssertionError when the panic value is not an error,
			// discarding the original value. Re-panic with it instead.
			e, ok := perr.(error)
			if !ok {
				panic(perr)
			}
			err = e
		}
	}()
	return f(w, s, it)
}
// recoveringFormatter wraps another Formatter, converting panics raised
// during formatting (e.g. from a failing Iterator) into returned errors.
type recoveringFormatter struct {
	Formatter
}
// Format delegates to the wrapped Formatter, recovering any panic. If the
// panic value is an error it is returned as err; otherwise the original
// value is re-panicked so it is not lost.
func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) {
	defer func() {
		if perr := recover(); perr != nil {
			// A bare perr.(error) assertion would itself panic with a
			// TypeAssertionError when the panic value is not an error,
			// discarding the original value. Re-panic with it instead.
			e, ok := perr.(error)
			if !ok {
				panic(perr)
			}
			err = e
		}
	}()
	return r.Formatter.Format(w, s, it)
}
// RecoveringFormatter returns a Formatter that behaves like formatter but
// recovers panics raised while formatting, reporting them as errors.
func RecoveringFormatter(formatter Formatter) Formatter {
	return recoveringFormatter{Formatter: formatter}
}

View File

@ -67,8 +67,13 @@ func (h highlightRanges) Len() int { return len(h) }
// Swap and Less complete sort.Interface for highlightRanges, ordering
// ranges by their start line (element 0).
func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) error {
return f.writeHTML(w, style, chroma.Flatten(iterator))
// Format drains iterator and writes the tokens as HTML to w in the given
// style. A panic raised by the iterator is recovered: if the panic value
// is an error it is returned as err; otherwise the original value is
// re-panicked so it is not lost.
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
	defer func() {
		if perr := recover(); perr != nil {
			// A bare perr.(error) assertion would itself panic with a
			// TypeAssertionError when the panic value is not an error,
			// discarding the original value. Re-panic with it instead.
			e, ok := perr.(error)
			if !ok {
				panic(perr)
			}
			err = e
		}
	}()
	return f.writeHTML(w, style, iterator.Tokens())
}
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma.Token) error { // nolint: gocyclo

View File

@ -1,6 +1,7 @@
package html
import (
"errors"
"io/ioutil"
"testing"
@ -49,3 +50,11 @@ func TestSplitTokensIntoLines(t *testing.T) {
actual := splitTokensIntoLines(in)
assert.Equal(t, expected, actual)
}
// TestIteratorPanicRecovery verifies that Format converts a panic raised
// by the iterator into a returned error rather than crashing.
func TestIteratorPanicRecovery(t *testing.T) {
	boom := errors.New("bad")
	failing := func() *chroma.Token { panic(boom) }
	err := New().Format(ioutil.Discard, styles.Fallback, failing)
	assert.Error(t, err)
}

View File

@ -234,7 +234,12 @@ type indexedTTYFormatter struct {
table *ttyTable
}
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) error {
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
defer func() {
if perr := recover(); perr != nil {
err = perr.(error)
}
}()
theme := styleToEscapeSequence(c.table, style)
for token := it(); token != nil; token = it() {
// TODO: Cache token lookups?

View File

@ -3,8 +3,19 @@ package chroma
// An Iterator across tokens.
//
// nil will be returned at the end of the Token stream.
//
// If an error occurs within an Iterator, it may propagate this in a panic.
// Formatters are expected to recover such panics and report them as errors.
type Iterator func() *Token
// Tokens drains the iterator, returning every remaining token as a slice.
// The result is never nil; an exhausted iterator yields an empty slice.
func (i Iterator) Tokens() []*Token {
	tokens := []*Token{}
	for {
		t := i()
		if t == nil {
			return tokens
		}
		tokens = append(tokens, t)
	}
}
// Concaterator concatenates tokens from a series of iterators.
func Concaterator(iterators ...Iterator) Iterator {
return func() *Token {
@ -30,12 +41,3 @@ func Literator(tokens ...*Token) Iterator {
return token
}
}
// Flatten an Iterator into its tokens.
//
// Deprecated: use the Iterator.Tokens method instead.
func Flatten(iterator Iterator) []*Token {
	out := []*Token{}
	for t := iterator(); t != nil; t = iterator() {
		out = append(out, t)
	}
	return out
}

View File

@ -8,9 +8,9 @@ import (
var Makefile = Register(MustNewLexer(
&Config{
Name: "Base Makefile",
Aliases: []string{"make"},
Filenames: []string{},
MimeTypes: []string{},
Aliases: []string{"make", "makefile", "mf", "bsdmake"},
Filenames: []string{"*.mak", "*.mk", "Makefile", "makefile", "Makefile.*", "GNUmakefile"},
MimeTypes: []string{"text/x-makefile"},
},
Rules{
"root": {

View File

@ -55,7 +55,7 @@ func handleCodeblock(groups []string, lexer Lexer) Iterator {
if err != nil {
panic(err)
}
iterators = append(iterators, sub)
iterators = append(iterators, Literator(tokens...), sub)
}
iterators = append(iterators, Literator(&Token{String, groups[5]}))
return Concaterator(iterators...)

View File

@ -267,6 +267,7 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
options = defaultOptions
}
state := &LexerState{
Lexer: r,
Text: []rune(text),
Stack: []string{options.State},
Rules: r.rules,