
Update golangci-lint so we can force use of LazyLexer.

Alec Thomas 2021-04-29 12:07:50 +10:00
parent f3874594e7
commit 7e282be495
15 changed files with 193 additions and 167 deletions
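In short: golangci-lint moves from v1.26.0 to v1.37.0, a new forbidigo rule bans the eager NewLexer/MustNewLexer constructors, and the dylan and mcfunction lexers migrate to the lazy variants so their rules are built on first use instead of at package init. A condensed before/after sketch of the conversion (ExampleEager/ExampleLazy are hypothetical names; the shape is taken from the Dylan and mcfunction hunks below):

// Before: rules are constructed eagerly when the package loads.
var ExampleEager = internal.Register(MustNewLexer( // nolint: forbidigo
	&Config{Name: "Example"},
	Rules{
		"root": {{`\w+`, Name, nil}},
	},
))

// After: the Rules func runs only when the lexer is first used.
var ExampleLazy = internal.Register(MustNewLazyLexer(
	&Config{Name: "Example"},
	func() Rules {
		return Rules{
			"root": {{`\w+`, Name, nil}},
		}
	},
))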

@@ -22,7 +22,7 @@ jobs:
       - run:
           name: Prepare
           command: |
-            curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
+            curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.37.0
             mkdir ~/report
           when: always
       - run:

@@ -25,6 +25,17 @@ linters:
     - testpackage
     - godot
     - nestif
+    - paralleltest
+    - nlreturn
+    - cyclop
+    - exhaustivestruct
+    - gci
+    - gofumpt
+    - errorlint
+    - exhaustive
+    - ifshort
+    - wrapcheck
+    - stylecheck
 
 linters-settings:
   govet:
@@ -36,6 +47,11 @@ linters-settings:
   goconst:
     min-len: 8
     min-occurrences: 3
+
+  forbidigo:
+    forbid:
+      - (Must)?NewLexer
+    exclude_godoc_examples: false
 
 issues:
   max-per-linter: 0
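forbidigo flags any call whose identifier matches a listed pattern, and exclude_godoc_examples: false extends the check into godoc example code. Call sites that must keep the deprecated constructors, such as the tests below, opt out per line. A minimal sketch of the effect (hypothetical snippet inside the chroma package):

var rules = Rules{"root": {{`\w+`, Name, nil}}}

// Flagged: the call matches the (Must)?NewLexer pattern.
var a = MustNewLexer(nil, rules)

// Allowed: the check is suppressed at this call site.
var b = MustNewLexer(nil, rules) // nolint: forbidigo

// Allowed: MustNewLazyLexer does not contain the substring NewLexer.
var c = MustNewLazyLexer(nil, func() Rules { return rules })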

@@ -7,7 +7,7 @@ import (
 )
 
 func TestCoalesce(t *testing.T) {
-	lexer := Coalesce(MustNewLexer(nil, Rules{
+	lexer := Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": []Rule{
 			{`[!@#$%^&*()]`, Punctuation, nil},
 		},

@@ -7,7 +7,7 @@ import (
 )
 
 func makeDelegationTestLexers() (lang Lexer, root Lexer) {
-	return MustNewLexer(nil, Rules{
+	return MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`\<\?`, CommentPreproc, Push("inside")},
 			{`.`, Other, nil},
@@ -18,7 +18,7 @@ func makeDelegationTestLexers() (lang Lexer, root Lexer) {
 			{`\s+`, Whitespace, nil},
 		},
 	}),
-		MustNewLexer(nil, Rules{
+		MustNewLexer(nil, Rules{ // nolint: forbidigo
 			"root": {
 				{`\bhello\b`, Keyword, nil},
 				{`\b(world|there)\b`, Name, nil},

@@ -120,7 +120,7 @@ func maxLineWidth(lines [][]chroma.Token) int {
 	for _, tokens := range lines {
 		length := 0
 		for _, token := range tokens {
-			length += len(strings.Replace(token.String(), ` `, " ", -1))
+			length += len(strings.ReplaceAll(token.String(), ` `, " "))
 		}
 		if length > maxWidth {
 			maxWidth = length
@@ -136,7 +136,7 @@ func (f *Formatter) writeTokenBackgrounds(w io.Writer, lines [][]chroma.Token, style *chroma.Style) {
 	for index, tokens := range lines {
 		lineLength := 0
 		for _, token := range tokens {
-			length := len(strings.Replace(token.String(), ` `, " ", -1))
+			length := len(strings.ReplaceAll(token.String(), ` `, " "))
 			tokenBackground := style.Get(token.Type).Background
 			if tokenBackground.IsSet() && tokenBackground != style.Get(chroma.Background).Background {
 				fmt.Fprintf(w, "<rect id=\"%s\" x=\"%dch\" y=\"%fem\" width=\"%dch\" height=\"1.2em\" fill=\"%s\" />\n", escapeString(token.String()), lineLength, 1.2*float64(index)+0.25, length, style.Get(token.Type).Background.String())
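strings.ReplaceAll(s, old, new), added in Go 1.12, is defined as strings.Replace(s, old, new, -1), so this is a behaviour-preserving cleanup, presumably prompted by the upgraded linters. For illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	s := "a b c"
	// Equivalent: ReplaceAll is Replace with n = -1 (replace every occurrence).
	fmt.Println(strings.Replace(s, " ", "_", -1)) // a_b_c
	fmt.Println(strings.ReplaceAll(s, " ", "_"))  // a_b_c
}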

@@ -13,7 +13,7 @@ func TestTokenTypeClassifiers(t *testing.T) {
 }
 
 func TestSimpleLexer(t *testing.T) {
-	lexer, err := NewLexer(
+	lexer, err := NewLexer( // nolint: forbidigo
 		&Config{
 			Name:    "INI",
 			Aliases: []string{"ini", "cfg"},

@@ -6,7 +6,7 @@ import (
 )
 
 // Dylan lexer.
-var Dylan = internal.Register(MustNewLexer(
+var Dylan = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Dylan",
 		Aliases: []string{"dylan"},
@@ -14,7 +14,8 @@ var Dylan = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-dylan"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	func() Rules {
+		return Rules{
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`//.*?\n`, CommentSingle, nil},
@@ -70,5 +71,6 @@ var Dylan = internal.Register(MustNewLexer(
 			{`\\\n`, LiteralString, nil},
 			{`\\`, LiteralString, nil},
 		},
+		}
 	},
 ))
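The rule set itself is untouched; it is only wrapped in a func() Rules, so building the rules is deferred from package init to the lexer's first use. A usage sketch of the converted lexer (Tokenise and Tokens as used in the tests in this diff; the sample text is illustrative):

// Importing the lexers package is now cheaper: Dylan's rules
// are not built until the first call that needs them.
it, err := Dylan.Tokenise(nil, "define method foo() end;\n")
if err != nil {
	panic(err)
}
_ = it.Tokens() // the Rules func ran during the first use above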

@@ -61,7 +61,7 @@ var JavascriptRules = Rules{
 }
 
 // Javascript lexer.
-var Javascript = internal.Register(MustNewLexer(
+var Javascript = internal.Register(MustNewLexer( // nolint: forbidigo
 	&Config{
 		Name:    "JavaScript",
 		Aliases: []string{"js", "javascript"},

@@ -3,7 +3,7 @@
 // Sub-packages contain lexer implementations.
 package lexers
 
-// nolint: golint
+// nolint
 import (
 	"github.com/alecthomas/chroma"
 	_ "github.com/alecthomas/chroma/lexers/a"

@@ -6,7 +6,7 @@ import (
 )
 
 // mcfunction lexer.
-var MCFunction = internal.Register(MustNewLexer(
+var MCFunction = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "mcfunction",
 		Aliases: []string{"mcfunction"},
@@ -15,7 +15,8 @@ var MCFunction = internal.Register(MustNewLexer(
 		NotMultiline: true,
 		DotAll:       true,
 	},
-	Rules{
+	func() Rules {
+		return Rules{
 		"simplevalue": {
 			{`(true|false)`, KeywordConstant, nil},
 			{`[01]b`, LiteralNumber, nil},
@@ -103,5 +104,6 @@ var MCFunction = internal.Register(MustNewLexer(
 			Include("simplevalue"),
 			{`\s+`, TextWhitespace, nil},
 		},
+		}
 	},
 ))

@@ -44,7 +44,7 @@ func TestInclude(t *testing.T) {
 }
 
 func TestCombine(t *testing.T) {
-	l := MustNewLexer(nil, Rules{
+	l := MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
 		"world": {{`world`, Name, nil}},
 		"bye":   {{`bye`, Name, nil}},

@@ -162,10 +162,10 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error) {
 type Rules map[string][]Rule
 
 // Rename clones rules then renames a rule.
-func (r Rules) Rename(old, new string) Rules {
+func (r Rules) Rename(oldRule, newRule string) Rules {
 	r = r.Clone()
-	r[new] = r[old]
-	delete(r, old)
+	r[newRule] = r[oldRule]
+	delete(r, oldRule)
 	return r
 }
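The behaviour is unchanged: Rename clones the receiver, copies the entry under the new key, then deletes the old key. Only the parameter names change, presumably because new shadows Go's predeclared identifier and trips the newly enabled style linters. A usage sketch with a hypothetical rule set:

rules := Rules{
	"root":    {{`\w+`, Name, nil}},
	"strings": {{`"[^"]*"`, LiteralString, nil}},
}
renamed := rules.Rename("strings", "str")
// renamed contains "str" but not "strings"; the original map is
// untouched because Rename mutates a clone.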
@@ -209,8 +209,10 @@ func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
 }
 
 // MustNewLexer creates a new Lexer or panics.
-func MustNewLexer(config *Config, rules Rules) *RegexLexer {
-	lexer, err := NewLexer(config, rules)
+//
+// Deprecated: Use MustNewLazyLexer instead.
+func MustNewLexer(config *Config, rules Rules) *RegexLexer { // nolint: forbidigo
+	lexer, err := NewLexer(config, rules) // nolint: forbidigo
 	if err != nil {
 		panic(err)
 	}
@@ -221,7 +223,9 @@ func MustNewLexer(config *Config, rules Rules) *RegexLexer {
 //
 // "rules" is a state machine transition map. Each key is a state. Values are sets of rules
 // that match input, optionally modify lexer state, and output tokens.
-func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
+//
+// Deprecated: Use NewLazyLexer instead.
+func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { // nolint: forbidigo
 	return NewLazyLexer(config, func() Rules { return rules })
 }
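Both eager constructors now funnel into the lazy path: NewLexer simply captures its Rules value in a closure. Calling the lazy constructor directly looks like this, a minimal sketch using only identifiers from this diff ("Example" is a hypothetical lexer name):

lexer, err := NewLazyLexer(
	&Config{Name: "Example", Aliases: []string{"example"}},
	func() Rules {
		return Rules{
			"root": {
				{`\w+`, Name, nil},
				{`\s+`, Whitespace, nil},
			},
		}
	},
)
if err != nil {
	panic(err) // handle construction errors
}
_ = lexer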

@@ -7,7 +7,7 @@ import (
 )
 
 func TestNewlineAtEndOfFile(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{
+	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -16,7 +16,7 @@ func TestNewlineAtEndOfFile(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Equal(t, []Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())
 
-	l = Coalesce(MustNewLexer(nil, Rules{
+	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -27,7 +27,7 @@ func TestNewlineAtEndOfFile(t *testing.T) {
 }
 
 func TestMatchingAtStart(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{}, Rules{
+	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`^-`, Punctuation, Push("directive")},
@@ -45,7 +45,7 @@ func TestMatchingAtStart(t *testing.T) {
 }
 
 func TestEnsureLFOption(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{}, Rules{
+	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -62,7 +62,7 @@ func TestEnsureLFOption(t *testing.T) {
 		{Whitespace, "\n"},
 	}, it.Tokens())
 
-	l = Coalesce(MustNewLexer(nil, Rules{
+	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
 		},

@@ -7,7 +7,7 @@ import (
 )
 
 func TestRemappingLexer(t *testing.T) {
-	var lexer Lexer = MustNewLexer(nil, Rules{
+	var lexer Lexer = MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`\w+`, Name, nil},

@@ -287,8 +287,10 @@ func (s *Style) synthesise(ttype TokenType) StyleEntry {
 	// If we don't have line numbers, use the text colour but 20% brighter/darker
 	case LineNumbers, LineNumbersTable:
 		return text
-	}
+	default:
+		return StyleEntry{}
+	}
 }
 
 func (s *Style) synthesisable(ttype TokenType) bool {