1
0
mirror of https://github.com/alecthomas/chroma.git synced 2025-03-17 20:58:08 +02:00

Add set text analyser for MySQL lexer

This commit is contained in:
Carlos Henrique Guardão Gandarez 2020-12-14 14:35:47 -03:00 committed by Alec Thomas
parent d964e7c774
commit a566061e65
4 changed files with 83 additions and 1 deletion

View File

@ -5,6 +5,7 @@ import (
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"testing"
@ -79,6 +80,11 @@ func TestLexers(t *testing.T) {
assert.NoError(t, err)
for _, file := range files {
// skip text analysis test files
if file.Name() == "analysis" {
continue
}
if file.IsDir() {
dirname := filepath.Join("testdata", file.Name())
lexer := lexers.Get(file.Name())
@ -117,3 +123,52 @@ func TestLexers(t *testing.T) {
}
}
}
// FileTestAnalysis runs the text analyser of lexer against the contents of
// actualFilepath and compares the resulting score with the float32 value
// stored in expectedFilepath. When the RECORD environment variable is set to
// "true", the expected file is rewritten with the score produced by this run
// instead of being compared.
func FileTestAnalysis(t *testing.T, lexer chroma.Lexer, actualFilepath, expectedFilepath string) {
	t.Helper()

	t.Run(lexer.Config().Name+"/"+actualFilepath, func(t *testing.T) {
		expectedData, err := ioutil.ReadFile(expectedFilepath)
		assert.NoError(t, err)

		analyser, ok := lexer.(chroma.Analyser)
		assert.True(t, ok, "lexer %q does not set analyser", lexer.Config().Name)

		data, err := ioutil.ReadFile(actualFilepath)
		assert.NoError(t, err)

		actual := analyser.AnalyseText(string(data))

		if os.Getenv("RECORD") == "true" {
			// Update the expected file with the generated output of this lexer.
			f, err := os.Create(expectedFilepath)
			// Check the error before deferring Close: Create may have failed,
			// leaving f unusable.
			assert.NoError(t, err)
			defer f.Close() // nolint: gosec

			// Scores are float32; format at 32-bit precision so the recorded
			// value round-trips exactly through ParseFloat below.
			_, err = f.WriteString(strconv.FormatFloat(float64(actual), 'f', -1, 32))
			assert.NoError(t, err)
		} else {
			// Parse at 32-bit precision to match how the value was recorded.
			expected, err := strconv.ParseFloat(strings.TrimSpace(string(expectedData)), 32)
			assert.NoError(t, err)
			assert.Equal(t, float32(expected), actual)
		}
	})
}
// TestLexersTextAnalyser exercises every lexer that has a recorded analysis
// fixture. For each testdata/analysis/<name>.actual file it resolves the
// lexer by name and checks the analyser score against the matching
// <name>.expected file.
func TestLexersTextAnalyser(t *testing.T) {
	files, err := filepath.Glob(filepath.Join("testdata", "analysis", "*.actual"))
	assert.NoError(t, err)

	for _, actualFilepath := range files {
		filename := filepath.Base(actualFilepath)
		baseFilename := strings.TrimSuffix(filename, filepath.Ext(filename))
		// Fixture names may carry extra dot-separated qualifiers; the lexer
		// name is always the first segment.
		lexerName := strings.Split(baseFilename, ".")[0]

		lexer := lexers.Get(lexerName)
		assert.NotNil(t, lexer, "no lexer found for name %q", lexerName)

		// Use filepath.Join (as the rest of this file does) rather than
		// concatenating with a hard-coded '/'.
		expectedFilepath := filepath.Join("testdata", "analysis", baseFilename+".expected")
		FileTestAnalysis(t, lexer, actualFilepath, expectedFilepath)
	}
}

View File

@ -1,10 +1,17 @@
package m
import (
"regexp"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var (
	// Matches an identifier quoted with backticks, e.g. `my_table` —
	// MySQL's native quoting style.
	mysqlAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`")
	// Matches an identifier quoted with square brackets, e.g. [my_table] —
	// the T-SQL quoting style, used as a negative signal for MySQL.
	mysqlAnalyserNameBetweenBracketRe = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`)
)
// MySQL lexer.
var MySQL = internal.Register(MustNewLazyLexer(
&Config{
@ -16,7 +23,25 @@ var MySQL = internal.Register(MustNewLazyLexer(
CaseInsensitive: true,
},
mySQLRules,
))
).SetAnalyser(func(text string) float32 {
nameBetweenBacktickCount := len(mysqlAnalyserNameBetweenBacktickRe.FindAllString(text, -1))
nameBetweenBracketCount := len(mysqlAnalyserNameBetweenBracketRe.FindAllString(text, -1))
var result float32
// Same logic as above in the TSQL analysis.
dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount
if dialectNameCount >= 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) {
// Found at least twice as many `name` as [name].
result += 0.5
} else if nameBetweenBacktickCount > nameBetweenBracketCount {
result += 0.2
} else if nameBetweenBacktickCount > 0 {
result += 0.1
}
return result
}))
func mySQLRules() Rules {
return Rules{

View File

@ -0,0 +1 @@
CREATE TABLE `my_table` (id INT);

View File

@ -0,0 +1 @@
0.5