Mirror of https://github.com/alecthomas/chroma.git
Add set text analyser for MySQL lexer
parent d964e7c774
commit a566061e65
lexers/lexers_test.go

@@ -5,6 +5,7 @@ import (
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"strconv"
 	"strings"
 	"testing"
 
@@ -79,6 +80,11 @@ func TestLexers(t *testing.T) {
 	assert.NoError(t, err)
 
 	for _, file := range files {
+		// skip text analysis test files
+		if file.Name() == "analysis" {
+			continue
+		}
+
 		if file.IsDir() {
 			dirname := filepath.Join("testdata", file.Name())
 			lexer := lexers.Get(file.Name())
@@ -117,3 +123,52 @@ func TestLexers(t *testing.T) {
 		}
 	}
 }
+
+func FileTestAnalysis(t *testing.T, lexer chroma.Lexer, actualFilepath, expectedFilepath string) {
+	t.Helper()
+	t.Run(lexer.Config().Name+"/"+actualFilepath, func(t *testing.T) {
+		expectedData, err := ioutil.ReadFile(expectedFilepath)
+		assert.NoError(t, err)
+
+		analyser, ok := lexer.(chroma.Analyser)
+		assert.True(t, ok, "lexer %q does not set analyser", lexer.Config().Name)
+
+		data, err := ioutil.ReadFile(actualFilepath)
+		assert.NoError(t, err)
+
+		actual := analyser.AnalyseText(string(data))
+
+		if os.Getenv("RECORD") == "true" {
+			// Update the expected file with the generated output of this lexer
+			f, err := os.Create(expectedFilepath)
+			defer f.Close() // nolint: gosec
+			assert.NoError(t, err)
+
+			_, err = f.WriteString(strconv.FormatFloat(float64(actual), 'f', -1, 32))
+			assert.NoError(t, err)
+		} else {
+			expected, err := strconv.ParseFloat(strings.TrimSpace(string(expectedData)), 32)
+			assert.NoError(t, err)
+
+			assert.Equal(t, float32(expected), actual)
+		}
+	})
+}
+
+func TestLexersTextAnalyser(t *testing.T) {
+	files, err := filepath.Glob("testdata/analysis/*.actual")
+	assert.NoError(t, err)
+
+	for _, actualFilepath := range files {
+		filename := filepath.Base(actualFilepath)
+		baseFilename := strings.TrimSuffix(filename, filepath.Ext(filename))
+		lexerName := strings.Split(baseFilename, ".")[0]
+
+		lexer := lexers.Get(lexerName)
+		assert.NotNil(t, lexer, "no lexer found for name %q", lexerName)
+
+		expectedFilepath := "testdata/analysis/" + baseFilename + ".expected"
+
+		FileTestAnalysis(t, lexer, actualFilepath, expectedFilepath)
+	}
+}
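The new TestLexersTextAnalyser walks lexers/testdata/analysis/, derives the lexer name from the part of each *.actual filename before the first dot, and compares the analyser's score against the matching *.expected file; running the suite with RECORD=true rewrites the .expected files from the current output, as the os.Getenv check above shows. The type assertion lexer.(chroma.Analyser) implies an interface shaped roughly like the sketch below, inferred from how this diff uses it rather than copied from the chroma sources:

// Sketch only: doc comments and file location in the chroma package may
// differ, but the method set follows from how the test and the SetAnalyser
// callback use it (a text sample in, a float32 confidence score out).
type Analyser interface {
	AnalyseText(text string) float32
}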
lexers/m/mysql.go

@@ -1,10 +1,17 @@
 package m
 
 import (
+	"regexp"
+
 	. "github.com/alecthomas/chroma" // nolint
 	"github.com/alecthomas/chroma/lexers/internal"
 )
 
+var (
+	mysqlAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`")
+	mysqlAnalyserNameBetweenBracketRe  = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`)
+)
+
 // MySQL lexer.
 var MySQL = internal.Register(MustNewLazyLexer(
 	&Config{
@@ -16,7 +23,25 @@ var MySQL = internal.Register(MustNewLazyLexer(
 		CaseInsensitive: true,
 	},
 	mySQLRules,
-))
+).SetAnalyser(func(text string) float32 {
+	nameBetweenBacktickCount := len(mysqlAnalyserNameBetweenBacktickRe.FindAllString(text, -1))
+	nameBetweenBracketCount := len(mysqlAnalyserNameBetweenBracketRe.FindAllString(text, -1))
+
+	var result float32
+
+	// Same logic as above in the TSQL analysis.
+	dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount
+	if dialectNameCount >= 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) {
+		// Found at least twice as many `name` as [name].
+		result += 0.5
+	} else if nameBetweenBacktickCount > nameBetweenBracketCount {
+		result += 0.2
+	} else if nameBetweenBacktickCount > 0 {
+		result += 0.1
+	}
+
+	return result
+}))
 
 func mySQLRules() Rules {
 	return Rules{
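The two regexes count backtick-quoted identifiers (MySQL style) versus bracket-quoted identifiers (T-SQL style), and the returned score is what chroma's content-based lexer detection compares across all registered lexers. A minimal sketch of exercising the new analyser from calling code, assuming the lexers.Analyse helper from this version of the package (it returns the best-scoring lexer, or nil if nothing matches):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Backtick-quoted identifiers should now nudge detection towards MySQL.
	lexer := lexers.Analyse("CREATE TABLE `my_table` (id INT);")
	if lexer != nil {
		// Expected to print the MySQL lexer's configured name, assuming no
		// other registered analyser scores this snippet higher.
		fmt.Println(lexer.Config().Name)
	}
}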
lexers/testdata/analysis/mysql.backtick.actual (vendored, new file, 1 line)

@@ -0,0 +1 @@
+CREATE TABLE `my_table` (id INT);

lexers/testdata/analysis/mysql.backtick.expected (vendored, new file, 1 line)

@@ -0,0 +1 @@
+0.5
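For this sample the backtick regex finds one match (`my_table`) and the bracket regex finds none, so dialectNameCount is 1 and 1 >= 2*0 holds, which is the branch that returns 0.5; that is the value recorded in the .expected file. Further cases can be added the same way: drop a <lexer>.<case>.actual file into lexers/testdata/analysis and run the tests with RECORD=true to generate the matching .expected file.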