chroma/regexp_test.go
Daniel Eloff 9c3abeae1d Tokens by value (#187)
This results in about an 8% improvement in speed.
2018-11-04 10:22:51 +11:00
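
For context on the commit note above, here is a minimal, standalone sketch of the difference between holding tokens behind pointers and holding them by value. The Token shape (Type, Value) mirrors what the test below expects, but the pointer-based variant and the helper names are hypothetical, used only to illustrate why the by-value form avoids a per-token heap allocation.

package main

import "fmt"

// TokenType stands in for chroma's token categories; the concrete value
// used below is illustrative only.
type TokenType int

// Token mirrors the (Type, Value) shape used in the test in this file.
type Token struct {
	Type  TokenType
	Value string
}

// tokensByPointer is the hypothetical "before" form: each token is a
// separate heap allocation referenced through the slice.
func tokensByPointer(words []string) []*Token {
	out := make([]*Token, 0, len(words))
	for _, w := range words {
		out = append(out, &Token{Type: 1, Value: w})
	}
	return out
}

// tokensByValue is the "after" form the commit describes: tokens are stored
// inline in the slice's backing array, avoiding per-token allocations and
// improving cache locality.
func tokensByValue(words []string) []Token {
	out := make([]Token, 0, len(words))
	for _, w := range words {
		out = append(out, Token{Type: 1, Value: w})
	}
	return out
}

func main() {
	words := []string{"hello", "world"}
	fmt.Println(len(tokensByPointer(words)), len(tokensByValue(words)))
}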

package chroma

import (
	"testing"

	"github.com/alecthomas/assert"
)

// TestNewlineAtEndOfFile checks that a lexer configured with EnsureNL: true
// appends a trailing newline before tokenising, so the final rule can match,
// and that without EnsureNL the same input falls through to an Error token.
func TestNewlineAtEndOfFile(t *testing.T) {
	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err := l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())

	l = Coalesce(MustNewLexer(nil, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err = l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Error, "hello"}}, it.Tokens())
}