From e0f32fbcb59bc99c521910291b7ba8ecdb17ca34 Mon Sep 17 00:00:00 2001
From: Alec Thomas
Date: Tue, 2 Jan 2018 15:11:41 +1100
Subject: [PATCH] Make lexer fixture running cleaner.

---
 lexers/lexers_test.go                   | 19 +++++--
 lexers/testdata/README.md               | 49 ++++-------------
 lexers/testdata/csharp.actual           | 11 ++++
 lexers/testdata/csharp.expected         | 73 +++++++++++++++++++++++++
 lexers/testdata/{css.css => css.actual} |  0
 5 files changed, 109 insertions(+), 43 deletions(-)
 create mode 100644 lexers/testdata/csharp.actual
 create mode 100644 lexers/testdata/csharp.expected
 rename lexers/testdata/{css.css => css.actual} (100%)

diff --git a/lexers/lexers_test.go b/lexers/lexers_test.go
index 94ed31f..6975118 100644
--- a/lexers/lexers_test.go
+++ b/lexers/lexers_test.go
@@ -9,22 +9,31 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/alecthomas/chroma"
 )
 
 // Test source files are in the form <name>.<name> and validation data is in the form <name>.expected.
 func TestLexers(t *testing.T) {
-	for _, lexer := range Registry.Lexers {
-		name := strings.ToLower(lexer.Config().Name)
-		filename := filepath.Join("testdata", name+"."+name)
-		expectedFilename := filepath.Join("testdata", name+".expected")
-		if _, err := os.Stat(filename); err != nil {
+	files, err := ioutil.ReadDir("testdata")
+	require.NoError(t, err)
+
+	for _, file := range files {
+		ext := filepath.Ext(file.Name())[1:]
+		if ext != "actual" {
 			continue
 		}
+
+		lexer := Get(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())))
 		if !assert.NotNil(t, lexer) {
 			continue
 		}
+
+		filename := filepath.Join("testdata", file.Name())
+		expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
+
+		lexer = chroma.Coalesce(lexer)
 		t.Run(lexer.Config().Name, func(t *testing.T) {
 			// Read and tokenise source text.
 			actualText, err := ioutil.ReadFile(filename)
diff --git a/lexers/testdata/README.md b/lexers/testdata/README.md
index b8392bb..52ceeb1 100644
--- a/lexers/testdata/README.md
+++ b/lexers/testdata/README.md
@@ -2,59 +2,32 @@
 
 This directory contains input source and expected output lexer tokens.
 
-Input filenames for lexers are in the form `<name>.<name>`. Expected output filenames are in the form `<name>.expected`.
+Input filenames for lexers are in the form `<name>.actual`. Expected output filenames are in the form `<name>.expected`.
 
 Each input filename is parsed by the corresponding lexer and checked against the expected JSON-encoded token list.
 
 To add/update tests do the following:
 
-1. `export LEXER=css`
-1. Create/edit a file `lexers/testdata/${LEXER}.${LEXER}` (eg. `css.css`).
-2. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.${LEXER} > lexers/testdata/${LEXER}.expected`.
-3. Run `go test -v ./lexers`.
+1. `export LEXER=csharp`
+2. Create/edit a file `lexers/testdata/${LEXER}.actual` (eg. `csharp.actual`).
+3. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected`.
+4. Run `go test -v -run TestLexers ./lexers`.
 
 eg.
 
 ```bash
-$ export LEXER=css
+$ export LEXER=csharp
 $ go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.${LEXER} > lexers/testdata/${LEXER}.expected
-$ cat lexers/testdata/${LEXER}.expected
-[
-  {"type":"Punctuation","value":":"},
-  {"type":"NameDecorator","value":"root"},
-  {"type":"Text","value":" "},
-  {"type":"Punctuation","value":"{"},
-  {"type":"Text","value":"\n "},
-  {"type":"NameVariable","value":"--variable-name"},
-  {"type":"Text","value":""},
-  {"type":"Punctuation","value":":"},
-  {"type":"Text","value":" "},
-  {"type":"LiteralNumberHex","value":"#fff"},
-  {"type":"Punctuation","value":";"},
-  {"type":"Text","value":"\n"},
-  {"type":"Punctuation","value":"}"},
-  {"type":"Text","value":"\n"}
-]
-$ go test -v ./lexers
-=== RUN   TestDiffLexerWithoutTralingNewLine
---- PASS: TestDiffLexerWithoutTralingNewLine (0.00s)
+$ go test -v -run TestLexers ./lexers
 === RUN   TestLexers
+=== RUN   TestLexers/C#
 === RUN   TestLexers/CSS
---- PASS: TestLexers (0.00s)
+--- PASS: TestLexers (0.01s)
+    --- PASS: TestLexers/C# (0.00s)
     --- PASS: TestLexers/CSS (0.00s)
-=== RUN   TestCompileAllRegexes
---- PASS: TestCompileAllRegexes (0.61s)
-=== RUN   TestGet
-=== RUN   TestGet/ByName
-=== RUN   TestGet/ByAlias
-=== RUN   TestGet/ViaFilename
---- PASS: TestGet (0.00s)
-    --- PASS: TestGet/ByName (0.00s)
-    --- PASS: TestGet/ByAlias (0.00s)
-    --- PASS: TestGet/ViaFilename (0.00s)
 PASS
-ok      github.com/alecthomas/chroma/lexers     0.649s
+ok      github.com/alecthomas/chroma/lexers     0.032s
 ```
diff --git a/lexers/testdata/csharp.actual b/lexers/testdata/csharp.actual
new file mode 100644
index 0000000..283f6d9
--- /dev/null
+++ b/lexers/testdata/csharp.actual
@@ -0,0 +1,11 @@
+DriveInfo[] drives = DriveInfo.GetDrives();
+foreach (DriveInfo drive in drives)
+{
+    IEnumerable<string> driveFolders =
+        Directory.EnumerateDirectories(drive.RootDirectory.ToString());
+
+    foreach (string dir in driveFolders)
+    {
+        Console.WriteLine(dir);
+    }
+}
diff --git a/lexers/testdata/csharp.expected b/lexers/testdata/csharp.expected
new file mode 100644
index 0000000..d2ced80
--- /dev/null
+++ b/lexers/testdata/csharp.expected
@@ -0,0 +1,73 @@
+[
+  {"type":"Name","value":"DriveInfo"},
+  {"type":"NameAttribute","value":"[]"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"drives"},
+  {"type":"Text","value":" "},
+  {"type":"Punctuation","value":"="},
+  {"type":"Text","value":" "},
+  {"type":"NameClass","value":"DriveInfo"},
+  {"type":"Punctuation","value":"."},
+  {"type":"Name","value":"GetDrives"},
+  {"type":"Punctuation","value":"();"},
+  {"type":"Text","value":"\n"},
+  {"type":"Keyword","value":"foreach"},
+  {"type":"Text","value":" "},
+  {"type":"Punctuation","value":"("},
+  {"type":"Name","value":"DriveInfo"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"drive"},
+  {"type":"Text","value":" "},
+  {"type":"Keyword","value":"in"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"drives"},
+  {"type":"Punctuation","value":")"},
+  {"type":"Text","value":"\n"},
+  {"type":"Punctuation","value":"{"},
+  {"type":"Text","value":"\n    "},
+  {"type":"Name","value":"IEnumerable"},
+  {"type":"Punctuation","value":"\u003c"},
+  {"type":"KeywordType","value":"string"},
+  {"type":"Punctuation","value":"\u003e"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"driveFolders"},
+  {"type":"Text","value":" "},
+  {"type":"Punctuation","value":"="},
+  {"type":"Text","value":"\n        "},
+  {"type":"NameClass","value":"Directory"},
+  {"type":"Punctuation","value":"."},
+  {"type":"Name","value":"EnumerateDirectories"},
+  {"type":"Punctuation","value":"("},
+  {"type":"NameClass","value":"drive"},
+  {"type":"Punctuation","value":"."},
+  {"type":"NameClass","value":"RootDirectory"},
+  {"type":"Punctuation","value":"."},
+  {"type":"Name","value":"ToString"},
+  {"type":"Punctuation","value":"());"},
+  {"type":"Text","value":"\n\n    "},
+  {"type":"Keyword","value":"foreach"},
+  {"type":"Text","value":" "},
+  {"type":"Punctuation","value":"("},
+  {"type":"KeywordType","value":"string"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"dir"},
+  {"type":"Text","value":" "},
+  {"type":"Keyword","value":"in"},
+  {"type":"Text","value":" "},
+  {"type":"Name","value":"driveFolders"},
+  {"type":"Punctuation","value":")"},
+  {"type":"Text","value":"\n    "},
+  {"type":"Punctuation","value":"{"},
+  {"type":"Text","value":"\n        "},
+  {"type":"NameClass","value":"Console"},
+  {"type":"Punctuation","value":"."},
+  {"type":"Name","value":"WriteLine"},
+  {"type":"Punctuation","value":"("},
+  {"type":"Name","value":"dir"},
+  {"type":"Punctuation","value":");"},
+  {"type":"Text","value":"\n    "},
+  {"type":"Punctuation","value":"}"},
+  {"type":"Text","value":"\n"},
+  {"type":"Punctuation","value":"}"},
+  {"type":"Text","value":"\n"}
+]
diff --git a/lexers/testdata/css.css b/lexers/testdata/css.actual
similarity index 100%
rename from lexers/testdata/css.css
rename to lexers/testdata/css.actual