Mirror of https://github.com/alecthomas/chroma.git, synced 2025-04-23 12:08:58 +02:00

parent 528bed2b60
commit 573c1d157d
@@ -3,6 +3,7 @@ package main
 import (
 	"fmt"
 	"io/ioutil"
+	"os"
 
 	"github.com/alecthomas/chroma/formatters"
 	"github.com/alecthomas/chroma/lexers"
@@ -24,12 +25,14 @@ func main() {
 			fmt.Printf("warning: could not find lexer for %q\n", file)
 			continue
 		}
+		fmt.Printf("%s: ", file)
+		os.Stdout.Sync()
 		text, err := ioutil.ReadFile(file)
 		kingpin.FatalIfError(err, "")
 		it, err := lexer.Tokenise(nil, string(text))
 		kingpin.FatalIfError(err, "%s failed to tokenise %q", lexer.Config().Name, file)
 		err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
 		kingpin.FatalIfError(err, "%s failed to format %q", lexer.Config().Name, file)
-		fmt.Printf("ok: %q\n", file)
+		fmt.Printf("ok\n")
 	}
 }
|
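The two added lines print each filename and flush stdout before tokenising begins, so if a lexer panics or hangs, the last thing on screen names the offending file; the final status line can then shrink to a bare "ok". A minimal sketch of the same flush-before-risky-work pattern, with doWork as a hypothetical stand-in for the lexing step:

package main

import (
	"fmt"
	"os"
)

// doWork is a hypothetical stand-in for a step that may panic or hang.
func doWork(file string) {}

func main() {
	for _, file := range os.Args[1:] {
		// Name the file and flush before doing anything risky, so the
		// culprit is visible even if doWork never returns.
		fmt.Printf("%s: ", file)
		os.Stdout.Sync()
		doWork(file)
		fmt.Printf("ok\n")
	}
}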
@@ -3,17 +3,17 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestCoalesce(t *testing.T) {
-	lexer := Coalesce(MustNewLexer(nil, Rules{
+	lexer := Coalesce(MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root": []Rule{
 			{`[!@#$%^&*()]`, Punctuation, nil},
 		},
 	}))
 	actual, err := Tokenise(lexer, nil, "!@#$")
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{{Punctuation, "!@#$"}}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
|
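Two separate changes land in this test. First, github.com/alecthomas/assert replaces testify; throughout this commit it is used as a drop-in for both testify's assert and require packages. Second, the lexer is now built with DontEnsureNL: true, because the RegexLexer.Tokenise hunk below makes the default behaviour append a trailing "\n" to input that lacks one, which would add an unexpected trailing token and break the exact comparison against []*Token{{Punctuation, "!@#$"}}. A sketch of the difference, assuming the chroma API as it stands at this commit:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	rules := chroma.Rules{
		"root": {
			{`[!@#$%^&*()]`, chroma.Punctuation, nil},
			{`\n`, chroma.Whitespace, nil},
		},
	}

	// Default config: Tokenise normalises the input to end with "\n".
	withNL, _ := chroma.Tokenise(chroma.Coalesce(chroma.MustNewLexer(nil, rules)), nil, "!@#$")

	// DontEnsureNL: the input is lexed exactly as given.
	noNL, _ := chroma.Tokenise(chroma.Coalesce(chroma.MustNewLexer(&chroma.Config{DontEnsureNL: true}, rules)), nil, "!@#$")

	// 2 vs 1: the appended newline becomes its own Whitespace token.
	fmt.Println(len(withNL), len(noNL))
}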
@@ -3,7 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestColourRGB(t *testing.T) {
|
@@ -5,7 +5,7 @@ import (
 	"io/ioutil"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 
 	"github.com/alecthomas/chroma"
 	"github.com/alecthomas/chroma/lexers"
|
lexer.go
@@ -46,7 +46,7 @@ type Config struct {
 
 	// Make sure that the input does not end with a newline. This
 	// is required for some lexers that consume input linewise.
-	// DontEnsureNL bool
+	DontEnsureNL bool
 
 	// If given and greater than 0, expand tabs in the input.
 	// TabSize int
|
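This is the other half of the feature: DontEnsureNL graduates from a commented-out placeholder to a real Config field (TabSize stays commented out). The default normalisation it opts out of exists because rules written linewise, such as `.*\n`, can never match a final line that is missing its terminator. A sketch of that failure mode, assuming the same hand-built-lexer API the tests in this commit use:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	// A linewise rule: it only consumes text that ends in "\n".
	lexer := chroma.MustNewLexer(nil, chroma.Rules{
		"root": {{`.*\n`, chroma.Text, nil}},
	})

	// With the default config the input is normalised to "last line\n",
	// so the rule matches the whole input; with DontEnsureNL: true the
	// final line would be left to the lexer's error handling instead.
	tokens, err := chroma.Tokenise(lexer, nil, "last line")
	fmt.Println(len(tokens), err)
}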
@@ -3,13 +3,13 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestTokenTypeClassifiers(t *testing.T) {
-	require.True(t, GenericDeleted.InCategory(Generic))
-	require.True(t, LiteralStringBacktick.InSubCategory(String))
-	require.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
+	assert.True(t, GenericDeleted.InCategory(Generic))
+	assert.True(t, LiteralStringBacktick.InSubCategory(String))
+	assert.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
 }
 
 func TestSimpleLexer(t *testing.T) {
@@ -28,13 +28,13 @@ func TestSimpleLexer(t *testing.T) {
 			},
 		},
 	)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	actual, err := Tokenise(lexer, nil, `
 	; this is a comment
 	[section]
 	a = 10
 `)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{
 		{Whitespace, "\n\t"},
 		{Comment, "; this is a comment"},
@@ -48,5 +48,5 @@ func TestSimpleLexer(t *testing.T) {
 		{LiteralString, "10"},
 		{Whitespace, "\n"},
 	}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
|
@@ -4,7 +4,7 @@ import (
 	"io/ioutil"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 
 	"github.com/alecthomas/chroma/formatters"
 	"github.com/alecthomas/chroma/lexers"
|
@@ -3,7 +3,7 @@ package lexers
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 const lexerBenchSource = `package chroma
|
@@ -33,7 +33,9 @@ var VHDL = Register(MustNewLexer(
 			{`(entity|component)(\s+)([a-z_]\w*)`, ByGroups(Keyword, Text, NameClass), nil},
 			{`(architecture|configuration)(\s+)([a-z_]\w*)(\s+)(of)(\s+)([a-z_]\w*)(\s+)(is)`, ByGroups(Keyword, Text, NameClass, Text, Keyword, Text, NameClass, Text, Keyword), nil},
 			{`([a-z_]\w*)(:)(\s+)(process|for)`, ByGroups(NameClass, Operator, Text, Keyword), nil},
-			{`(end)(\s+)`, ByGroups(UsingSelf("root"), Text), Push("endblock")},
+			// This seems to cause a recursive loop.
+			// {`(end)(\s+)`, ByGroups(UsingSelf("root"), Text), Push("endblock")},
+			{`(end)(\s+)`, ByGroups(Keyword, Text), Push("endblock")},
 			Include("types"),
 			Include("keywords"),
 			Include("numbers"),
|
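UsingSelf("root") re-tokenises the captured group with the lexer's own root state, which can feed the root rules straight back into text they just matched; with the group that matched `end`, the author observed the recursive loop the new comment calls out, and emitting a plain Keyword breaks the cycle. UsingSelf remains the right tool when the captured group holds a genuinely different construct, as in this hypothetical rule (not part of this commit):

			// Hypothetical rule: re-lex the body of a ${...} interpolation
			// with the same lexer. Unlike the `end` rule, each recursion
			// here runs on a strictly smaller piece of input, so it
			// bottoms out instead of looping.
			{`(\$\{)([^}]*)(\})`, ByGroups(Punctuation, UsingSelf("root"), Punctuation), nil},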
@@ -3,8 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
+	"github.com/alecthomas/assert"
 )
 
 func TestInclude(t *testing.T) {
@@ -18,7 +17,7 @@ func TestInclude(t *testing.T) {
 	}
 	lexer := &RegexLexer{rules: actual}
 	err := include.Mutator.(LexerMutator).MutateLexer(lexer.rules, "root", 0)
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := CompiledRules{
 		"root": {
 			{Rule: Rule{
@@ -41,18 +40,18 @@ func TestInclude(t *testing.T) {
 			}},
 		},
 	}
-	require.Equal(t, expected, actual)
+	assert.Equal(t, expected, actual)
 }
 
 func TestCombine(t *testing.T) {
-	l := MustNewLexer(nil, Rules{
+	l := MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
 		"world": {{`world`, Name, nil}},
 		"bye":   {{`bye`, Name, nil}},
 		"space": {{`\s+`, Whitespace, nil}},
 	})
 	it, err := l.Tokenise(nil, "hello world")
-	require.NoError(t, err)
+	assert.NoError(t, err)
 	expected := []*Token{{String, `hello`}, {Whitespace, ` `}, {Name, `world`}}
 	assert.Equal(t, expected, it.Tokens())
 }
|
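Beyond the assert and DontEnsureNL changes, TestCombine is a compact illustration of the Combined mutator: it builds an anonymous state out of the named ones and pushes it, so after `hello` matches, the rules of "world", "bye", and "space" are all live at once with no explicit transitions. Roughly how "hello world" is consumed under this lexer:

	// "hello" matches in root        -> String, push Combined("world", "bye", "space")
	// " "     matches the space rule -> Whitespace
	// "world" matches the world rule -> Name
	// (the bye rule simply never fires on this input)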
@@ -300,6 +300,9 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
 	if options == nil {
 		options = defaultOptions
 	}
+	if !r.config.DontEnsureNL && !strings.HasSuffix(text, "\n") {
+		text += "\n"
+	}
 	state := &LexerState{
 		Lexer: r,
 		Text:  []rune(text),
|
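This is the mechanism every DontEnsureNL: true above opts out of: unless a lexer's config says otherwise, Tokenise normalises the input to end with a newline before building the LexerState. (The hunk does not touch the import block, so strings is presumably already imported in this file.) The guard in isolation, as a runnable sketch:

package main

import (
	"fmt"
	"strings"
)

// ensureNL mirrors the guard added to RegexLexer.Tokenise: append a
// trailing newline unless the caller opted out or one is already there.
func ensureNL(text string, dontEnsureNL bool) string {
	if !dontEnsureNL && !strings.HasSuffix(text, "\n") {
		return text + "\n"
	}
	return text
}

func main() {
	fmt.Printf("%q\n", ensureNL("a = 10", false))   // "a = 10\n"
	fmt.Printf("%q\n", ensureNL("a = 10", true))    // "a = 10"
	fmt.Printf("%q\n", ensureNL("a = 10\n", false)) // "a = 10\n" (unchanged)
}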
@@ -3,11 +3,11 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestRemappingLexer(t *testing.T) {
-	var lexer Lexer = MustNewLexer(nil, Rules{
+	var lexer Lexer = MustNewLexer(&Config{DontEnsureNL: true}, Rules{
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`\w+`, Name, nil},
|
@@ -3,7 +3,7 @@ package chroma
 import (
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/alecthomas/assert"
 )
 
 func TestStyleInherit(t *testing.T) {
|