Mirror of https://github.com/alecthomas/chroma.git

Add golangci-lint and fix all lint issues.

Author: Alec Thomas
Date: 2018-12-31 22:44:27 +11:00
Parent: e27f19c12f
Commit: da5ac60d8c
18 changed files with 118 additions and 45 deletions

regexp.go

@@ -11,6 +11,7 @@ import (
"github.com/dlclark/regexp2"
)
// A Rule is the fundamental matching unit of the Regex lexer state machine.
type Rule struct {
Pattern string
Type Emitter
@@ -56,26 +57,26 @@ func ByGroups(emitters ...Emitter) Emitter {
//
// Example:
//
// var Markdown = internal.Register(MustNewLexer(
// &Config{
// Name: "markdown",
// Aliases: []string{"md", "mkd"},
// Filenames: []string{"*.md", "*.mkd", "*.markdown"},
// MimeTypes: []string{"text/x-markdown"},
// },
// Rules{
// "root": {
// {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
// UsingByGroup(
// internal.Get,
// 2, 4,
// String, String, String, Text, String,
// ),
// nil,
// },
// },
// },
// ))
//
// See the lexers/m/markdown.go for the complete example.
//
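
A minimal sketch of driving a lexer built this way, assuming the markdown lexer is registered under the name "markdown" (the sample input is illustrative); tokens are pulled from the Iterator until EOF, matching the Iterator and Tokenise code later in this file:

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
		"github.com/alecthomas/chroma/lexers"
	)

	func main() {
		// A fenced block whose body UsingByGroup should delegate to the Go lexer.
		source := "```go\nfmt.Println(\"hi\")\n```\n"
		lexer := lexers.Get("markdown")
		if lexer == nil {
			lexer = lexers.Fallback // plain-text lexer if markdown is absent
		}
		it, err := lexer.Tokenise(nil, source)
		if err != nil {
			panic(err)
		}
		// Pull tokens until the lexer signals EOF.
		for tok := it(); tok != chroma.EOF; tok = it() {
			fmt.Printf("%-30s %q\n", tok.Type, tok.Value)
		}
	}
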
@@ -155,6 +156,7 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error) {
// Rules maps from state to a sequence of Rules.
type Rules map[string][]Rule
// Clone returns a clone of the Rules.
func (r Rules) Clone() Rules {
out := map[string][]Rule{}
for key, rules := range r {
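
Although the hunk cuts off inside Clone, the copy is per state: each key gets its own rule slice, so a clone can be extended without touching the original. A small sketch, assuming the unkeyed Rule{Pattern, Type, Mutator} layout used in the doc example above:

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
	)

	func main() {
		base := chroma.Rules{
			"root": {
				{`\bfunc\b`, chroma.Keyword, nil},
			},
		}
		// Extend a clone; the original rule set is left untouched.
		derived := base.Clone()
		derived["root"] = append(derived["root"], chroma.Rule{
			Pattern: `\d+`,
			Type:    chroma.Number,
			Mutator: nil,
		})
		fmt.Println(len(base["root"]), len(derived["root"])) // 1 2
	}
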
@@ -207,6 +209,7 @@ func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
}, nil
}
// Trace enables debug tracing.
func (r *RegexLexer) Trace(trace bool) *RegexLexer {
r.trace = trace
return r
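
A sketch of the newly documented Trace knob in use. The config and rules here are hypothetical; the two-argument constructor matches the NewLexer signature in the previous hunk, with MustNewLexer as its panicking wrapper:

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
	)

	func main() {
		lexer := chroma.MustNewLexer(&chroma.Config{Name: "Example"}, chroma.Rules{
			"root": {
				{`\w+`, chroma.Name, nil},
				{`\s+`, chroma.TextWhitespace, nil},
			},
		})
		lexer.Trace(true) // print rule-matching debug output while tokenising
		it, err := lexer.Tokenise(nil, "hello world")
		if err != nil {
			panic(err)
		}
		for tok := it(); tok != chroma.EOF; tok = it() {
			fmt.Println(tok)
		}
	}
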
@@ -221,8 +224,10 @@
flags string
}
// CompiledRules maps a state name to the sequence of compiled rules for that state.
type CompiledRules map[string][]*CompiledRule
// LexerState contains the state for a single lex.
type LexerState struct {
Lexer *RegexLexer
Text []rune
@@ -238,14 +243,17 @@ type LexerState struct {
iteratorStack []Iterator
}
// Set mutator context.
func (l *LexerState) Set(key interface{}, value interface{}) {
l.MutatorContext[key] = value
}
// Get mutator context.
func (l *LexerState) Get(key interface{}) interface{} {
return l.MutatorContext[key]
}
// Iterator returns the next Token from the lexer.
func (l *LexerState) Iterator() Token {
for l.Pos < len(l.Text) && len(l.Stack) > 0 {
// Exhaust the iterator stack, if any.
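
Set and Get give Mutators a scratch space scoped to a single tokenisation run. A sketch with a counting mutator: countWords is invented for illustration, but MutatorFunc is chroma's existing function adapter for the Mutator interface, and Pos is the LexerState field used in the loop above:

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
	)

	// countWords is a hypothetical mutator: each time its rule fires, it bumps
	// a counter stashed in the MutatorContext via LexerState.Set/Get.
	var countWords chroma.MutatorFunc = func(state *chroma.LexerState) error {
		n, _ := state.Get("words").(int) // zero value when the key is unset
		state.Set("words", n+1)
		fmt.Printf("word %d matched near rune offset %d\n", n+1, state.Pos)
		return nil
	}

	func main() {
		lexer := chroma.MustNewLexer(&chroma.Config{Name: "Counter"}, chroma.Rules{
			"root": {
				{`\w+`, chroma.Name, countWords},
				{`\s+`, chroma.TextWhitespace, nil},
			},
		})
		it, err := lexer.Tokenise(nil, "one two three")
		if err != nil {
			panic(err)
		}
		for tok := it(); tok != chroma.EOF; tok = it() {
			_ = tok // drain the iterator; the mutator prints as rules fire
		}
	}
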
@@ -306,6 +314,7 @@ func (l *LexerState) Iterator() Token {
return EOF
}
// RegexLexer is the default lexer implementation used in Chroma.
type RegexLexer struct {
config *Config
analyser func(text string) float32
@@ -322,14 +331,14 @@ func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
return r
}
-func (r *RegexLexer) AnalyseText(text string) float32 {
+func (r *RegexLexer) AnalyseText(text string) float32 { // nolint
if r.analyser != nil {
return r.analyser(text)
}
return 0.0
}
-func (r *RegexLexer) Config() *Config {
+func (r *RegexLexer) Config() *Config { // nolint
return r.config
}
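
The analyser installed via SetAnalyser (whose signature opens this hunk) is what AnalyseText consults, returning a score from 0.0 (not this language) to 1.0 (certain). A sketch with a hypothetical shebang heuristic and a hypothetical "Shell-ish" config:

	package main

	import (
		"fmt"
		"strings"

		"github.com/alecthomas/chroma"
	)

	func main() {
		lexer := chroma.MustNewLexer(&chroma.Config{Name: "Shell-ish"}, chroma.Rules{
			"root": {
				{`.+`, chroma.Text, nil},
			},
		}).SetAnalyser(func(text string) float32 {
			// Hypothetical heuristic: a shell shebang is a strong signal.
			if strings.HasPrefix(text, "#!/bin/sh") {
				return 1.0
			}
			return 0.0
		})
		fmt.Println(lexer.AnalyseText("#!/bin/sh\necho hi")) // 1
	}
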
@@ -374,7 +383,7 @@ restart:
return nil
}
-func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
+func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint
if err := r.maybeCompile(); err != nil {
return nil, err
}
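
Finally, a small helper wrapping the Tokenise method touched here, collecting every token into a slice. This assumes TokeniseOptions.State names the initial lexer state and that passing nil options defaults it to "root":

	package tokens

	import "github.com/alecthomas/chroma"

	// All drains a lexer's Iterator into a slice of tokens.
	func All(lexer chroma.Lexer, source string) ([]chroma.Token, error) {
		it, err := lexer.Tokenise(&chroma.TokeniseOptions{State: "root"}, source)
		if err != nil {
			return nil, err
		}
		var out []chroma.Token
		for tok := it(); tok != chroma.EOF; tok = it() {
			out = append(out, tok)
		}
		return out, nil
	}
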