package chroma

import (
	"testing"

	"github.com/alecthomas/assert"
)

func TestNewlineAtEndOfFile(t *testing.T) {
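	// With EnsureNL set, the lexer ensures the input ends in a newline, so
	// the (\w+)(\n) rule can match the bare word "hello".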
	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err := l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())
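
	// Without EnsureNL no trailing newline is added, the rule cannot match,
	// and the input is emitted as a single Error token.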
	l = Coalesce(MustNewLexer(nil, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err = l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Error, "hello"}}, it.Tokens())
}