package chroma
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
|
2017-09-20 14:30:25 +02:00
|
|
|
func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }
// coalescer wraps a Lexer; its Tokenise merges adjacent tokens of the
// same type into single tokens (see Coalesce).
type coalescer struct {
	Lexer
}
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
|
2018-11-04 01:22:51 +02:00
|
|
|
var prev Token
|
2017-09-20 14:19:36 +02:00
|
|
|
it, err := d.Lexer.Tokenise(options, text)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2018-11-04 01:22:51 +02:00
|
|
|
return func() Token {
|
|
|
|
for token := it(); token != (EOF); token = it() {
|
2018-03-19 01:32:36 +02:00
|
|
|
if len(token.Value) == 0 {
|
|
|
|
continue
|
|
|
|
}
|
2018-11-04 01:22:51 +02:00
|
|
|
if prev == EOF {
|
2017-07-20 08:51:16 +02:00
|
|
|
prev = token
|
2017-09-20 14:19:36 +02:00
|
|
|
} else {
|
|
|
|
if prev.Type == token.Type && len(prev.Value) < 8192 {
|
|
|
|
prev.Value += token.Value
|
2017-09-20 14:30:25 +02:00
|
|
|
} else {
|
|
|
|
out := prev
|
|
|
|
prev = token
|
|
|
|
return out
|
2017-09-20 14:19:36 +02:00
|
|
|
}
|
2017-06-02 07:15:15 +02:00
|
|
|
}
|
2017-06-01 16:17:21 +02:00
|
|
|
}
|
2017-09-20 14:19:36 +02:00
|
|
|
out := prev
|
2018-11-04 01:22:51 +02:00
|
|
|
prev = EOF
|
2017-09-20 14:19:36 +02:00
|
|
|
return out
|
|
|
|
}, nil
|
2017-06-01 16:17:21 +02:00
|
|
|
}