mirror of https://github.com/alecthomas/chroma.git
Update golangci-lint so we can force use of LazyLexer.
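This commit bumps golangci-lint in CI from v1.26.0 to v1.37.0, enables a batch of linters that ship with the newer release, and configures the new forbidigo linter to ban calls matching (Must)?NewLexer. The lexers touched here (Dylan, mcfunction) move to the lazy constructors, and call sites that must keep the old API gain nolint escapes. A minimal sketch of the pattern the lint rule pushes toward; the Example names are hypothetical, but both shapes appear verbatim in the hunks below:

// Eager (now forbidden by forbidigo): the Rules value is built at package init.
var Example = internal.Register(MustNewLexer( // nolint: forbidigo
	&Config{Name: "Example"},
	Rules{
		"root": {
			{`\s+`, Whitespace, nil},
		},
	},
))

// Lazy (preferred): the callback defers building the rules until first use.
var ExampleLazy = internal.Register(MustNewLazyLexer(
	&Config{Name: "ExampleLazy"},
	func() Rules {
		return Rules{
			"root": {
				{`\s+`, Whitespace, nil},
			},
		}
	},
))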
@@ -22,7 +22,7 @@ jobs:
       - run:
           name: Prepare
           command: |
-            curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
+            curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.37.0
             mkdir ~/report
           when: always
       - run:
@@ -25,6 +25,17 @@ linters:
     - testpackage
     - godot
     - nestif
+    - paralleltest
+    - nlreturn
+    - cyclop
+    - exhaustivestruct
+    - gci
+    - gofumpt
+    - errorlint
+    - exhaustive
+    - ifshort
+    - wrapcheck
+    - stylecheck
 
 linters-settings:
   govet:
@@ -36,6 +47,11 @@ linters-settings:
   goconst:
     min-len: 8
     min-occurrences: 3
+  forbidigo:
+    forbid:
+      - (Must)?NewLexer
+    exclude_godoc_examples: false
+
 
 issues:
   max-per-linter: 0
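The forbidigo stanza is what enforces the migration: any call whose callee matches (Must)?NewLexer is reported, and exclude_godoc_examples: false extends the check into godoc Example functions, which forbidigo skips by default. Call sites that legitimately still need the deprecated constructors, such as the tests below, opt out per line. A hedged sketch of both outcomes, with rules as a placeholder chroma.Rules value:

// Reported: the callee matches the forbidden pattern (Must)?NewLexer.
func eager() chroma.Lexer {
	return chroma.MustNewLexer(nil, rules)
}

// Accepted: the directive suppresses the report at this call site only.
func eagerAllowed() chroma.Lexer {
	return chroma.MustNewLexer(nil, rules) // nolint: forbidigo
}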
@@ -7,7 +7,7 @@ import (
 )
 
 func TestCoalesce(t *testing.T) {
-	lexer := Coalesce(MustNewLexer(nil, Rules{
+	lexer := Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": []Rule{
 			{`[!@#$%^&*()]`, Punctuation, nil},
 		},
@@ -7,7 +7,7 @@ import (
 )
 
 func makeDelegationTestLexers() (lang Lexer, root Lexer) {
-	return MustNewLexer(nil, Rules{
+	return MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`\<\?`, CommentPreproc, Push("inside")},
 			{`.`, Other, nil},
@@ -18,7 +18,7 @@ func makeDelegationTestLexers() (lang Lexer, root Lexer) {
 			{`\s+`, Whitespace, nil},
 		},
 	}),
-		MustNewLexer(nil, Rules{
+		MustNewLexer(nil, Rules{ // nolint: forbidigo
 			"root": {
 				{`\bhello\b`, Keyword, nil},
 				{`\b(world|there)\b`, Name, nil},
@@ -120,7 +120,7 @@ func maxLineWidth(lines [][]chroma.Token) int {
 	for _, tokens := range lines {
 		length := 0
 		for _, token := range tokens {
-			length += len(strings.Replace(token.String(), ` `, " ", -1))
+			length += len(strings.ReplaceAll(token.String(), ` `, " "))
 		}
 		if length > maxWidth {
 			maxWidth = length
@@ -136,7 +136,7 @@ func (f *Formatter) writeTokenBackgrounds(w io.Writer, lines [][]chroma.Token, s
 	for index, tokens := range lines {
 		lineLength := 0
 		for _, token := range tokens {
-			length := len(strings.Replace(token.String(), ` `, " ", -1))
+			length := len(strings.ReplaceAll(token.String(), ` `, " "))
 			tokenBackground := style.Get(token.Type).Background
 			if tokenBackground.IsSet() && tokenBackground != style.Get(chroma.Background).Background {
 				fmt.Fprintf(w, "<rect id=\"%s\" x=\"%dch\" y=\"%fem\" width=\"%dch\" height=\"1.2em\" fill=\"%s\" />\n", escapeString(token.String()), lineLength, 1.2*float64(index)+0.25, length, style.Get(token.Type).Background.String())
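Both formatter edits are behaviour-preserving: strings.ReplaceAll(s, old, new), added in Go 1.12, is documented as equivalent to strings.Replace(s, old, new, -1). Which of the newly enabled linters prompted the rewrite is an assumption; gocritic's wrapperFunc check is the usual suspect, but gocritic does not appear in this diff. A tiny self-contained illustration of the equivalence:

// underscore always returns true: the two calls are equivalent by definition
// in the standard library docs.
func underscore(s string) bool {
	return strings.Replace(s, " ", "_", -1) == strings.ReplaceAll(s, " ", "_")
}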
@@ -13,7 +13,7 @@ func TestTokenTypeClassifiers(t *testing.T) {
 }
 
 func TestSimpleLexer(t *testing.T) {
-	lexer, err := NewLexer(
+	lexer, err := NewLexer( // nolint: forbidigo
 		&Config{
 			Name:    "INI",
 			Aliases: []string{"ini", "cfg"},
@@ -6,7 +6,7 @@ import (
 )
 
 // Dylan lexer.
-var Dylan = internal.Register(MustNewLexer(
+var Dylan = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:            "Dylan",
 		Aliases:         []string{"dylan"},
@@ -14,61 +14,63 @@ var Dylan = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-dylan"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	func() Rules {
+		return Rules{
 			"root": {
 				{`\s+`, Whitespace, nil},
 				{`//.*?\n`, CommentSingle, nil},
 				{`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil},
 				Default(Push("code")),
 			},
 			"code": {
 				{`\s+`, Whitespace, nil},
 				{`//.*?\n`, CommentSingle, nil},
 				{`/\*`, CommentMultiline, Push("comment")},
 				{`"`, LiteralString, Push("string")},
 				{`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
 				{`#b[01]+`, LiteralNumberBin, nil},
 				{`#o[0-7]+`, LiteralNumberOct, nil},
 				{`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil},
 				{`[-+]?\d+`, LiteralNumberInteger, nil},
 				{`#x[0-9a-f]+`, LiteralNumberHex, nil},
 
 				{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`,
 					ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil},
 				{`(\?)(:)(token|name|variable|expression|body|case-body|\*)`,
 					ByGroups(Operator, Operator, NameVariable), nil},
 				{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil},
 
 				{`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil},
 				{`:=`, Operator, nil},
 				{`#[tf]`, Literal, nil},
 				{`#"`, LiteralStringSymbol, Push("symbol")},
 				{`#[a-z0-9-]+`, Keyword, nil},
 				{`#(all-keys|include|key|next|rest)`, Keyword, nil},
 				{`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil},
 				{`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil},
 				{`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil},
 				{`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil},
 				{`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil},
 				{`(error|signal|return|break)`, NameException, nil},
 				{`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil},
 			},
 			"comment": {
 				{`[^*/]`, CommentMultiline, nil},
 				{`/\*`, CommentMultiline, Push()},
 				{`\*/`, CommentMultiline, Pop(1)},
 				{`[*/]`, CommentMultiline, nil},
 			},
 			"symbol": {
 				{`"`, LiteralStringSymbol, Pop(1)},
 				{`[^\\"]+`, LiteralStringSymbol, nil},
 			},
 			"string": {
 				{`"`, LiteralString, Pop(1)},
 				{`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil},
 				{`[^\\"\n]+`, LiteralString, nil},
 				{`\\\n`, LiteralString, nil},
 				{`\\`, LiteralString, nil},
 			},
+		}
 	},
 ))
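The payoff of the conversion is lazy construction: with MustNewLazyLexer the Rules value is produced by the callback only when the lexer is first used, rather than being allocated for every registered language at package init. The sketch below illustrates the deferral idea only and is not chroma's implementation (the real constructors appear in the regexp.go hunks later in this diff):

// Illustration of deferred, once-only rule construction; assumes chroma's
// Rules type and a "sync" import.
type lazyRules struct {
	build func() Rules // supplied by the lexer definition
	once  sync.Once
	rules Rules
}

// get builds the rules on first call and caches the result.
func (l *lazyRules) get() Rules {
	l.once.Do(func() { l.rules = l.build() })
	return l.rules
}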
@@ -61,7 +61,7 @@ var JavascriptRules = Rules{
 }
 
 // Javascript lexer.
-var Javascript = internal.Register(MustNewLexer(
+var Javascript = internal.Register(MustNewLexer( // nolint: forbidigo
 	&Config{
 		Name:    "JavaScript",
 		Aliases: []string{"js", "javascript"},
@@ -3,7 +3,7 @@
 // Sub-packages contain lexer implementations.
 package lexers
 
-// nolint: golint
+// nolint
 import (
 	"github.com/alecthomas/chroma"
 	_ "github.com/alecthomas/chroma/lexers/a"
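Dropping the golint qualifier widens the directive from one linter to all of them, so the long generated import list of lexer sub-packages stays quiet under the enlarged lint set; that motivation is inferred from the config change above rather than stated in the commit.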
@@ -6,7 +6,7 @@ import (
 )
 
 // mcfunction lexer.
-var MCFunction = internal.Register(MustNewLexer(
+var MCFunction = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "mcfunction",
 		Aliases: []string{"mcfunction"},
@@ -15,93 +15,95 @@ var MCFunction = internal.Register(MustNewLexer(
 		NotMultiline: true,
 		DotAll:       true,
 	},
-	Rules{
+	func() Rules {
+		return Rules{
 			"simplevalue": {
 				{`(true|false)`, KeywordConstant, nil},
 				{`[01]b`, LiteralNumber, nil},
 				{`-?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)`, LiteralNumberFloat, nil},
 				{`(-?\d+)(\.\.)(-?\d+)`, ByGroups(LiteralNumberInteger, Punctuation, LiteralNumberInteger), nil},
 				{`-?(0|[1-9]\d*)`, LiteralNumberInteger, nil},
 				{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
 				{`'[^']+'`, LiteralStringSingle, nil},
 				{`([!#]?)(\w+)`, ByGroups(Punctuation, Text), nil},
 			},
 			"nbtobjectattribute": {
 				Include("nbtvalue"),
 				{`:`, Punctuation, nil},
 				{`,`, Punctuation, Pop(1)},
 				{`\}`, Punctuation, Pop(2)},
 			},
 			"nbtobjectvalue": {
 				{`("(\\\\|\\"|[^"])*"|[a-zA-Z0-9_]+)`, NameTag, Push("nbtobjectattribute")},
 				{`\}`, Punctuation, Pop(1)},
 			},
 			"nbtarrayvalue": {
 				Include("nbtvalue"),
 				{`,`, Punctuation, nil},
 				{`\]`, Punctuation, Pop(1)},
 			},
 			"nbtvalue": {
 				Include("simplevalue"),
 				{`\{`, Punctuation, Push("nbtobjectvalue")},
 				{`\[`, Punctuation, Push("nbtarrayvalue")},
 			},
 			"argumentvalue": {
 				Include("simplevalue"),
 				{`,`, Punctuation, Pop(1)},
 				{`[}\]]`, Punctuation, Pop(2)},
 			},
 			"argumentlist": {
 				{`(nbt)(={)`, ByGroups(NameAttribute, Punctuation), Push("nbtobjectvalue")},
 				{`([A-Za-z0-9/_!]+)(={)`, ByGroups(NameAttribute, Punctuation), Push("argumentlist")},
 				{`([A-Za-z0-9/_!]+)(=)`, ByGroups(NameAttribute, Punctuation), Push("argumentvalue")},
 				Include("simplevalue"),
 				{`,`, Punctuation, nil},
 				{`[}\]]`, Punctuation, Pop(1)},
 			},
 			"root": {
 				{`#.*?\n`, CommentSingle, nil},
 				{Words(`/?`, `\b`, `ability`, `attributes`, `advancement`,
 					`ban`, `ban-ip`, `banlist`, `bossbar`,
 					`camerashake`, `classroommode`, `clear`,
 					`clearspawnpoint`, `clone`, `code`, `collect`,
 					`createagent`, `data`, `datapack`, `debug`,
 					`defaultgamemode`, `deop`, `destroy`, `detect`,
 					`detectredstone`, `difficulty`, `dropall`,
 					`effect`, `enchant`, `event`, `execute`,
 					`experience`, `fill`, `flog`, `forceload`,
 					`function`, `gamemode`, `gamerule`,
 					`geteduclientinfo`, `give`, `help`, `item`,
 					`immutableworld`, `kick`, `kill`, `list`,
 					`locate`, `locatebiome`, `loot`, `me`, `mixer`,
 					`mobevent`, `move`, `msg`, `music`, `op`,
 					`pardon`, `particle`, `playanimation`,
 					`playsound`, `position`, `publish`,
 					`raytracefog`, `recipe`, `reload`, `remove`,
 					`replaceitem`, `ride`, `save`, `save-all`,
 					`save-off`, `save-on`, `say`, `schedule`,
 					`scoreboard`, `seed`, `setblock`,
 					`setidletimeout`, `setmaxplayers`,
 					`setworldspawn`, `spawnpoint`, `spectate`,
 					`spreadplayers`, `stop`, `stopsound`,
 					`structure`, `summon`, `tag`, `team`, `teammsg`,
 					`teleport`, `tell`, `tellraw`, `testfor`,
 					`testforblock`, `testforblocks`, `tickingarea`,
 					`time`, `title`, `toggledownfall`, `tp`,
 					`tpagent`, `transfer`, `transferserver`,
 					`trigger`, `turn`, `w`, `weather`, `whitelist`,
 					`worldborder`, `worldbuilder`, `wsserver`, `xp`,
 				), KeywordReserved, nil},
 				{Words(``, ``, `@p`, `@r`, `@a`, `@e`, `@s`, `@c`, `@v`),
 					KeywordConstant, nil},
 				{`\[`, Punctuation, Push("argumentlist")},
 				{`{`, Punctuation, Push("nbtobjectvalue")},
 				{`~`, NameBuiltin, nil},
 				{`([a-zA-Z_]+:)?[a-zA-Z_]+\b`, Text, nil},
 				{`([a-z]+)(\.)([0-9]+)\b`, ByGroups(Text, Punctuation, LiteralNumber), nil},
 				{`([<>=]|<=|>=)`, Punctuation, nil},
 				Include("simplevalue"),
 				{`\s+`, TextWhitespace, nil},
 			},
+		}
 	},
 ))
@@ -44,7 +44,7 @@ func TestInclude(t *testing.T) {
 }
 
 func TestCombine(t *testing.T) {
-	l := MustNewLexer(nil, Rules{
+	l := MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
 		"world": {{`world`, Name, nil}},
 		"bye":   {{`bye`, Name, nil}},
regexp.go
@@ -162,10 +162,10 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
 type Rules map[string][]Rule
 
 // Rename clones rules then a rule.
-func (r Rules) Rename(old, new string) Rules {
+func (r Rules) Rename(oldRule, newRule string) Rules {
 	r = r.Clone()
-	r[new] = r[old]
-	delete(r, old)
+	r[newRule] = r[oldRule]
+	delete(r, oldRule)
 	return r
 }
 
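The parameter rename is not cosmetic: new is a predeclared Go identifier (the built-in allocation function), and a parameter named new shadows it for the whole function body, which linters such as predeclared flag; exactly which of the newly enabled checks fired here is an assumption. A small illustration of the shadowing:

// Inside this function the builtin new() is unavailable.
func rename(old, new string) string {
	// p := new(int) // would not compile: `new` is a string parameter here
	return old + new
}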
@@ -209,8 +209,10 @@ func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
 }
 
 // MustNewLexer creates a new Lexer or panics.
-func MustNewLexer(config *Config, rules Rules) *RegexLexer {
-	lexer, err := NewLexer(config, rules)
+//
+// Deprecated: Use MustNewLazyLexer instead.
+func MustNewLexer(config *Config, rules Rules) *RegexLexer { // nolint: forbidigo
+	lexer, err := NewLexer(config, rules) // nolint: forbidigo
 	if err != nil {
 		panic(err)
 	}
@@ -221,7 +223,9 @@ func MustNewLexer(config *Config, rules Rules) *RegexLexer {
 //
 // "rules" is a state machine transitition map. Each key is a state. Values are sets of rules
 // that match input, optionally modify lexer state, and output tokens.
-func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
+//
+// Deprecated: Use NewLazyLexer instead.
+func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { // nolint: forbidigo
 	return NewLazyLexer(config, func() Rules { return rules })
 }
 
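With both constructors reduced to deprecated shims over the lazy API, migrating a caller is mechanical. A sketch with cfg and rules as placeholder values:

// Before: deprecated, and flagged by forbidigo.
l, err := NewLexer(cfg, rules) // nolint: forbidigo

// After: identical behaviour, with the rules handed over lazily.
l, err = NewLazyLexer(cfg, func() Rules { return rules })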
@@ -7,7 +7,7 @@ import (
 )
 
 func TestNewlineAtEndOfFile(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{
+	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -16,7 +16,7 @@ func TestNewlineAtEndOfFile(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Equal(t, []Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())
 
-	l = Coalesce(MustNewLexer(nil, Rules{
+	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -27,7 +27,7 @@ func TestNewlineAtEndOfFile(t *testing.T) {
 }
 
 func TestMatchingAtStart(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{}, Rules{
+	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`^-`, Punctuation, Push("directive")},
@@ -45,7 +45,7 @@ func TestMatchingAtStart(t *testing.T) {
 }
 
 func TestEnsureLFOption(t *testing.T) {
-	l := Coalesce(MustNewLexer(&Config{}, Rules{
+	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -62,7 +62,7 @@ func TestEnsureLFOption(t *testing.T) {
 		{Whitespace, "\n"},
 	}, it.Tokens())
 
-	l = Coalesce(MustNewLexer(nil, Rules{
+	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
 		},
@@ -7,7 +7,7 @@ import (
 )
 
 func TestRemappingLexer(t *testing.T) {
-	var lexer Lexer = MustNewLexer(nil, Rules{
+	var lexer Lexer = MustNewLexer(nil, Rules{ // nolint: forbidigo
 		"root": {
 			{`\s+`, Whitespace, nil},
 			{`\w+`, Name, nil},
style.go
@@ -287,8 +287,10 @@ func (s *Style) synthesise(ttype TokenType) StyleEntry {
 	// If we don't have line numbers, use the text colour but 20% brighter/darker
 	case LineNumbers, LineNumbersTable:
 		return text
+
+	default:
+		return StyleEntry{}
 	}
-	return StyleEntry{}
 }
 
 func (s *Style) synthesisable(ttype TokenType) bool {
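The synthesise change is behaviour-preserving: every path that previously fell through the switch to the trailing return StyleEntry{} now lands in the new default arm instead. Whether the fold was demanded by one of the newly enabled linters (plausibly nlreturn, which wants padding around returns, or exhaustive) or was just done in passing is not stated in the commit.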