mirror of https://github.com/alecthomas/chroma.git

commit e0f32fbcb5 (parent e56590a815)
Make lexer fixture running cleaner.
@@ -9,22 +9,31 @@ import (
 	"testing"
 
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 
 	"github.com/alecthomas/chroma"
 )
 
 // Test source files are in the form <key>.<key> and validation data is in the form <key>.<key>.expected.
 func TestLexers(t *testing.T) {
-	for _, lexer := range Registry.Lexers {
-		name := strings.ToLower(lexer.Config().Name)
-		filename := filepath.Join("testdata", name+"."+name)
-		expectedFilename := filepath.Join("testdata", name+".expected")
-		if _, err := os.Stat(filename); err != nil {
+	files, err := ioutil.ReadDir("testdata")
+	require.NoError(t, err)
+
+	for _, file := range files {
+		ext := filepath.Ext(file.Name())[1:]
+		if ext != "actual" {
+			continue
+		}
+
+		lexer := Get(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())))
+		if !assert.NotNil(t, lexer) {
 			continue
 		}
+
+		filename := filepath.Join("testdata", file.Name())
+		expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
+
 		lexer = chroma.Coalesce(lexer)
 		t.Run(lexer.Config().Name, func(t *testing.T) {
 			// Read and tokenise source text.
 			actualText, err := ioutil.ReadFile(filename)
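The hunk ends just as the per-fixture comparison begins. As a rough sketch of that step (not the commit's verbatim code; it assumes chroma's iterator-style `Tokenise` API, an `Iterator.Tokens()` helper, and a stringable `TokenType`), the comparison amounts to tokenising the `.actual` file and checking the result against the JSON in `.expected`:

```go
package lexers

import (
	"encoding/json"
	"io/ioutil"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/alecthomas/chroma"
)

// checkFixture is a hedged sketch of the fixture-comparison step the
// truncated hunk above leads into; the chroma API details are assumptions.
func checkFixture(t *testing.T, lexer chroma.Lexer, filename, expectedFilename string) {
	// Tokenise the .actual source file.
	source, err := ioutil.ReadFile(filename)
	require.NoError(t, err)
	it, err := lexer.Tokenise(nil, string(source))
	require.NoError(t, err)

	// Re-shape tokens as {"type": ..., "value": ...} maps so they compare
	// directly against the JSON in the .expected file without depending on
	// Token's own (un)marshalling.
	var actual []map[string]string
	for _, tok := range it.Tokens() {
		actual = append(actual, map[string]string{"type": tok.Type.String(), "value": tok.Value})
	}

	expectedData, err := ioutil.ReadFile(expectedFilename)
	require.NoError(t, err)
	var expected []map[string]string
	require.NoError(t, json.Unmarshal(expectedData, &expected))
	require.Equal(t, expected, actual)
}
```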
lexers/testdata/README.md (vendored, 49 lines changed)
@@ -2,59 +2,32 @@
 
 This directory contains input source and expected output lexer tokens.
 
-Input filenames for lexers are in the form `<name>.<name>`. Expected output filenames are in the form `<name>.expected`.
+Input filenames for lexers are in the form `<name>.actual`. Expected output filenames are in the form `<name>.expected`.
 
 Each input filename is parsed by the corresponding lexer and checked against the expected JSON-encoded token list.
 
 
 To add/update tests do the following:
 
-1. `export LEXER=css`
-1. Create/edit a file `lexers/testdata/${LEXER}.${LEXER}` (eg. `css.css`).
-2. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.${LEXER} > lexers/testdata/${LEXER}.expected`.
-3. Run `go test -v ./lexers`.
+1. `export LEXER=csharp`
+1. Create/edit a file `lexers/testdata/${LEXER}.actual` (eg. `csharp.actual`).
+2. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected`.
+3. Run `go test -v -run TestLexers ./lexers`.
 
 
 eg.
 
 ```bash
-$ export LEXER=css
+$ export LEXER=csharp
-$ go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.${LEXER} > lexers/testdata/${LEXER}.expected
-$ cat lexers/testdata/${LEXER}.expected
-[
-{"type":"Punctuation","value":":"},
-{"type":"NameDecorator","value":"root"},
-{"type":"Text","value":" "},
-{"type":"Punctuation","value":"{"},
-{"type":"Text","value":"\n    "},
-{"type":"NameVariable","value":"--variable-name"},
-{"type":"Text","value":""},
-{"type":"Punctuation","value":":"},
-{"type":"Text","value":" "},
-{"type":"LiteralNumberHex","value":"#fff"},
-{"type":"Punctuation","value":";"},
-{"type":"Text","value":"\n"},
-{"type":"Punctuation","value":"}"},
-{"type":"Text","value":"\n"}
-]
-$ go test -v ./lexers
-=== RUN TestDiffLexerWithoutTralingNewLine
---- PASS: TestDiffLexerWithoutTralingNewLine (0.00s)
+$ go test -v -run TestLexers ./lexers
 === RUN TestLexers
+=== RUN TestLexers/C#
+=== RUN TestLexers/CSS
---- PASS: TestLexers (0.00s)
+--- PASS: TestLexers (0.01s)
+--- PASS: TestLexers/C# (0.00s)
+--- PASS: TestLexers/CSS (0.00s)
-=== RUN TestCompileAllRegexes
---- PASS: TestCompileAllRegexes (0.61s)
-=== RUN TestGet
-=== RUN TestGet/ByName
-=== RUN TestGet/ByAlias
-=== RUN TestGet/ViaFilename
---- PASS: TestGet (0.00s)
---- PASS: TestGet/ByName (0.00s)
---- PASS: TestGet/ByAlias (0.00s)
---- PASS: TestGet/ViaFilename (0.00s)
 PASS
-ok  	github.com/alecthomas/chroma/lexers	0.649s
+ok  	github.com/alecthomas/chroma/lexers	0.032s
 ```
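The README's recipe regenerates one lexer's fixtures at a time. A hypothetical helper (not part of the repo) could regenerate every `.expected` file by shelling out to the CLI with the same `--lexer`/`--json` flags documented above; run it from the repository root:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)

func main() {
	// Find every input fixture under the new <name>.actual convention.
	actuals, err := filepath.Glob("lexers/testdata/*.actual")
	if err != nil {
		panic(err)
	}
	for _, in := range actuals {
		name := strings.TrimSuffix(filepath.Base(in), ".actual")
		out := filepath.Join("lexers", "testdata", name+".expected")

		// Equivalent to the documented command:
		//   go run ./cmd/chroma/main.go --lexer $name --json $in > $out
		cmd := exec.Command("go", "run", "./cmd/chroma/main.go", "--lexer", name, "--json", in)
		f, err := os.Create(out)
		if err != nil {
			panic(err)
		}
		cmd.Stdout = f
		cmd.Stderr = os.Stderr
		if err := cmd.Run(); err != nil {
			panic(err)
		}
		f.Close()
		fmt.Println("regenerated", out)
	}
}
```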
lexers/testdata/csharp.actual (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
+DriveInfo[] drives = DriveInfo.GetDrives();
+foreach (DriveInfo drive in drives)
+{
+    IEnumerable<string> driveFolders =
+        Directory.EnumerateDirectories(drive.RootDirectory.ToString());
+
+    foreach (string dir in driveFolders)
+    {
+        Console.WriteLine(dir);
+    }
+}
lexers/testdata/csharp.expected (vendored, new file, 73 lines)
@@ -0,0 +1,73 @@
+[
+{"type":"Name","value":"DriveInfo"},
+{"type":"NameAttribute","value":"[]"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"drives"},
+{"type":"Text","value":" "},
+{"type":"Punctuation","value":"="},
+{"type":"Text","value":" "},
+{"type":"NameClass","value":"DriveInfo"},
+{"type":"Punctuation","value":"."},
+{"type":"Name","value":"GetDrives"},
+{"type":"Punctuation","value":"();"},
+{"type":"Text","value":"\n"},
+{"type":"Keyword","value":"foreach"},
+{"type":"Text","value":" "},
+{"type":"Punctuation","value":"("},
+{"type":"Name","value":"DriveInfo"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"drive"},
+{"type":"Text","value":" "},
+{"type":"Keyword","value":"in"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"drives"},
+{"type":"Punctuation","value":")"},
+{"type":"Text","value":"\n"},
+{"type":"Punctuation","value":"{"},
+{"type":"Text","value":"\n    "},
+{"type":"Name","value":"IEnumerable"},
+{"type":"Punctuation","value":"\u003c"},
+{"type":"KeywordType","value":"string"},
+{"type":"Punctuation","value":"\u003e"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"driveFolders"},
+{"type":"Text","value":" "},
+{"type":"Punctuation","value":"="},
+{"type":"Text","value":"\n        "},
+{"type":"NameClass","value":"Directory"},
+{"type":"Punctuation","value":"."},
+{"type":"Name","value":"EnumerateDirectories"},
+{"type":"Punctuation","value":"("},
+{"type":"NameClass","value":"drive"},
+{"type":"Punctuation","value":"."},
+{"type":"NameClass","value":"RootDirectory"},
+{"type":"Punctuation","value":"."},
+{"type":"Name","value":"ToString"},
+{"type":"Punctuation","value":"());"},
+{"type":"Text","value":"\n\n    "},
+{"type":"Keyword","value":"foreach"},
+{"type":"Text","value":" "},
+{"type":"Punctuation","value":"("},
+{"type":"KeywordType","value":"string"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"dir"},
+{"type":"Text","value":" "},
+{"type":"Keyword","value":"in"},
+{"type":"Text","value":" "},
+{"type":"Name","value":"driveFolders"},
+{"type":"Punctuation","value":")"},
+{"type":"Text","value":"\n    "},
+{"type":"Punctuation","value":"{"},
+{"type":"Text","value":"\n        "},
+{"type":"NameClass","value":"Console"},
+{"type":"Punctuation","value":"."},
+{"type":"Name","value":"WriteLine"},
+{"type":"Punctuation","value":"("},
+{"type":"Name","value":"dir"},
+{"type":"Punctuation","value":");"},
+{"type":"Text","value":"\n    "},
+{"type":"Punctuation","value":"}"},
+{"type":"Text","value":"\n"},
+{"type":"Punctuation","value":"}"},
+{"type":"Text","value":"\n"}
+]
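A note on the `\u003c`/`\u003e` escapes around the generic parameter in the fixture above: Go's encoding/json escapes `<`, `>`, and `&` by default, so any fixture produced via `--json` stores `<` in escaped form. A quick self-contained demonstration:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// json.Marshal escapes <, > and & by default (safe for HTML embedding),
	// which is why the .expected fixture stores "<" as "\u003c".
	b, _ := json.Marshal(map[string]string{"type": "Punctuation", "value": "<"})
	fmt.Println(string(b)) // {"type":"Punctuation","value":"\u003c"}

	// An Encoder can disable the escaping; hand-edited fixtures must match
	// whichever form the generator actually emitted.
	enc := json.NewEncoder(os.Stdout)
	enc.SetEscapeHTML(false)
	enc.Encode(map[string]string{"type": "Punctuation", "value": "<"}) // {"type":"Punctuation","value":"<"}
}
```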