mirror of https://github.com/alecthomas/chroma.git
parent 528bed2b60
commit 573c1d157d
@@ -3,6 +3,7 @@ package main
 import (
 	"fmt"
 	"io/ioutil"
+	"os"
 
 	"github.com/alecthomas/chroma/formatters"
 	"github.com/alecthomas/chroma/lexers"
@@ -24,12 +25,14 @@ func main() {
 			fmt.Printf("warning: could not find lexer for %q\n", file)
 			continue
 		}
+		fmt.Printf("%s: ", file)
+		os.Stdout.Sync()
 		text, err := ioutil.ReadFile(file)
 		kingpin.FatalIfError(err, "")
 		it, err := lexer.Tokenise(nil, string(text))
 		kingpin.FatalIfError(err, "%s failed to tokenise %q", lexer.Config().Name, file)
 		err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
 		kingpin.FatalIfError(err, "%s failed to format %q", lexer.Config().Name, file)
-		fmt.Printf("ok: %q\n", file)
+		fmt.Printf("ok\n")
 	}
 }
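(Presumably the point of printing the filename and calling os.Stdout.Sync() before tokenising is to make it obvious which file a misbehaving lexer is stuck on, such as the recursive VHDL rule fixed later in this commit.)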
@@ -3,17 +3,17 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestCoalesce(t *testing.T) {
-	lexer := Coalesce(MustNewLexer(nil, Rules{
+	lexer := Coalesce(MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root": []Rule{
 			{`[!@#$%^&*()]`, Punctuation, nil},
 		},
 	}))
 	actual, err := Tokenise(lexer, nil, "!@#$")
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{{Punctuation, "!@#$"}}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
@@ -3,7 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestColourRGB(t *testing.T) {
@@ -5,7 +5,7 @@ import (
 	"io/ioutil"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 
 	"github.com/alecthomas/chroma"
 	"github.com/alecthomas/chroma/lexers"
lexer.go
@@ -46,7 +46,7 @@ type Config struct {
 
 	// Make sure that the input does not end with a newline. This
 	// is required for some lexers that consume input linewise.
-	// DontEnsureNL bool
+	DontEnsureNL bool
 
 	// If given and greater than 0, expand tabs in the input.
 	// TabSize int
@@ -3,13 +3,13 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestTokenTypeClassifiers(t *testing.T) {
-	require.True(t, GenericDeleted.InCategory(Generic))
-	require.True(t, LiteralStringBacktick.InSubCategory(String))
-	require.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
+	assert.True(t, GenericDeleted.InCategory(Generic))
+	assert.True(t, LiteralStringBacktick.InSubCategory(String))
+	assert.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
 }
 
 func TestSimpleLexer(t *testing.T) {
@@ -28,13 +28,13 @@ func TestSimpleLexer(t *testing.T) {
 		},
 	},
 	)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	actual, err := Tokenise(lexer, nil, `
 	; this is a comment
 	[section]
 	a = 10
 `)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{
 		{Whitespace, "\n\t"},
 		{Comment, "; this is a comment"},
@@ -48,5 +48,5 @@ func TestSimpleLexer(t *testing.T) {
 		{LiteralString, "10"},
 		{Whitespace, "\n"},
 	}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
@@ -4,7 +4,7 @@ import (
 	"io/ioutil"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 
 	"github.com/alecthomas/chroma/formatters"
 	"github.com/alecthomas/chroma/lexers"
@@ -3,7 +3,7 @@ package lexers
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 const lexerBenchSource = `package chroma
@@ -33,7 +33,9 @@ var VHDL = Register(MustNewLexer(
 			{`(entity|component)(\s+)([a-z_]\w*)`, ByGroups(Keyword, Text, NameClass), nil},
 			{`(architecture|configuration)(\s+)([a-z_]\w*)(\s+)(of)(\s+)([a-z_]\w*)(\s+)(is)`, ByGroups(Keyword, Text, NameClass, Text, Keyword, Text, NameClass, Text, Keyword), nil},
 			{`([a-z_]\w*)(:)(\s+)(process|for)`, ByGroups(NameClass, Operator, Text, Keyword), nil},
-			{`(end)(\s+)`, ByGroups(UsingSelf("root"), Text), Push("endblock")},
+			// This seems to cause a recursive loop.
+			// {`(end)(\s+)`, ByGroups(UsingSelf("root"), Text), Push("endblock")},
+			{`(end)(\s+)`, ByGroups(Keyword, Text), Push("endblock")},
 			Include("types"),
 			Include("keywords"),
 			Include("numbers"),
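A plausible reading of the loop: UsingSelf("root") re-tokenises the matched "end" with the root state, where this same rule matches it again and recurses without consuming anything new; emitting Keyword directly breaks the cycle.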
@@ -3,8 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestInclude(t *testing.T) {
@@ -18,7 +17,7 @@ func TestInclude(t *testing.T) {
 	}
 	lexer := &RegexLexer{rules: actual}
 	err := include.Mutator.(LexerMutator).MutateLexer(lexer.rules, "root", 0)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := CompiledRules{
 		"root": {
 			{Rule: Rule{
@@ -41,18 +40,18 @@ func TestInclude(t *testing.T) {
 			}},
 		},
 	}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
 
 func TestCombine(t *testing.T) {
-	l := MustNewLexer(nil, Rules{
+	l := MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
 		"world": {{`world`, Name, nil}},
 		"bye":   {{`bye`, Name, nil}},
 		"space": {{`\s+`, Whitespace, nil}},
 	})
 	it, err := l.Tokenise(nil, "hello world")
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{{String, `hello`}, {Whitespace, ` `}, {Name, `world`}}
 	assert.Equal(t, expected, it.Tokens())
 }
@@ -300,6 +300,9 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
 	if options == nil {
 		options = defaultOptions
 	}
+	if !r.config.DontEnsureNL && !strings.HasSuffix(text, "\n") {
+		text += "\n"
+	}
 	state := &LexerState{
 		Lexer: r,
 		Text:  []rune(text),
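This guard is the other half of the DontEnsureNL change, and it explains why TestSimpleLexer's expected tokens above end with {Whitespace, "\n"}. A minimal sketch of the default path, not part of this commit (the function name is made up for illustration):

package chroma

// Sketch only: with a nil Config, DontEnsureNL defaults to false, so
// Tokenise lexes "a = 10" as if it were "a = 10\n" and the final token
// is the appended newline.
func tokeniseWithDefaultNL() ([]*Token, error) {
	lexer := MustNewLexer(nil, Rules{
		"root": []Rule{
			{`\w+`, Name, nil},
			{`\s+`, Whitespace, nil},
			{`=`, Operator, nil},
		},
	})
	// Expected to end with {Whitespace, "\n"}, matching what
	// TestSimpleLexer asserts above.
	return Tokenise(lexer, nil, "a = 10")
}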
@@ -3,11 +3,11 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestRemappingLexer(t *testing.T) {
-	var lexer Lexer = MustNewLexer(nil, Rules{
+	var lexer Lexer = MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`\w+`, Name, nil},
@@ -3,7 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestStyleInherit(t *testing.T) {