Mirror of https://github.com/alecthomas/chroma.git
Add golangci-lint and fix all lint issues.
parent e27f19c12f
commit da5ac60d8c
.gitignore (vendored, 1 change)
@@ -16,3 +16,4 @@
 _models/
 _examples/
.golangci.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
+run:
+  tests: true
+  skip-dirs:
+    - _examples
+
+output:
+  print-issued-lines: false
+
+linters:
+  enable-all: true
+  disable:
+    - maligned
+    - megacheck
+    - lll
+    - gocyclo
+    - dupl
+
+linters-settings:
+  govet:
+    check-shadowing: true
+  gocyclo:
+    min-complexity: 10
+  dupl:
+    threshold: 100
+  goconst:
+    min-len: 8
+    min-occurrences: 3
+
+issues:
+  max-per-linter: 0
+  max-same: 0
+  exclude-use-default: false
+  exclude:
+    # Captured by errcheck.
+    - '^(G104|G204):'
+    # Very commonly not checked.
+    - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
+    - 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported'
+    - 'composite literal uses unkeyed fields'
+    - 'declaration of "err" shadows declaration'
+    - 'should not use dot imports'
+    - 'Potential file inclusion via variable'
+    - 'should have comment or be unexported'
+    - 'comment on exported var .* should be of the form'
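Aside (not part of the commit): a minimal, self-contained sketch of how a per-line suppression interacts with this configuration. golangci-lint honours the `// nolint: <linter>` directives that appear throughout the changes below; the file and function here are purely illustrative.

package main

import (
	"bufio"
	"fmt"
	"os"
)

func main() {
	w := bufio.NewWriter(os.Stdout)
	// Suppress the errcheck finding for this one deferred call,
	// mirroring the directives added later in this commit.
	defer w.Flush() // nolint: errcheck
	_, _ = fmt.Fprintln(w, "hello")
}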
@@ -1,5 +1,9 @@
 sudo: false
 language: go
 script:
   - go test -v ./...
+  - curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.10.2
+  - ./bin/golangci-lint run
 after_success:
   go get github.com/goreleaser/goreleaser && goreleaser
@@ -14,8 +14,8 @@ import (
 	"strings"

 	"github.com/alecthomas/kong"
-	colorable "github.com/mattn/go-colorable"
-	isatty "github.com/mattn/go-isatty"
+	"github.com/mattn/go-colorable"
+	"github.com/mattn/go-isatty"

 	"github.com/alecthomas/chroma"
 	"github.com/alecthomas/chroma/formatters"
@@ -88,7 +88,8 @@ func main() {
 	if cli.Profile != "" {
 		f, err := os.Create(cli.Profile)
 		ctx.FatalIfErrorf(err)
-		pprof.StartCPUProfile(f)
+		err = pprof.StartCPUProfile(f)
+		ctx.FatalIfErrorf(err)
 		signals := make(chan os.Signal, 1)
 		signal.Notify(signals, os.Interrupt)
 		go func() {
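Aside (not part of the commit): a self-contained sketch of the profiling setup above, with the previously ignored StartCPUProfile error now checked; the helper name, file name, and exit code are illustrative assumptions.

package main

import (
	"log"
	"os"
	"os/signal"
	"runtime/pprof"
)

// startProfile mirrors the pattern in the hunk above: create the profile
// file, check the error from StartCPUProfile, and stop the profile cleanly
// when an interrupt arrives.
func startProfile(path string) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	if err = pprof.StartCPUProfile(f); err != nil {
		return err
	}
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	go func() {
		<-signals
		pprof.StopCPUProfile()
		os.Exit(1)
	}()
	return nil
}

func main() {
	if err := startProfile("cpu.prof"); err != nil {
		log.Fatal(err)
	}
	defer pprof.StopCPUProfile()
	// ... do work ...
}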
@@ -109,7 +110,7 @@ func main() {
 	} else {
 		w = bufio.NewWriterSize(out, 16384)
 	}
-	defer w.Flush()
+	defer w.Flush() // nolint: errcheck

 	if cli.JSON {
 		cli.Formatter = "json"
@@ -133,7 +134,8 @@ func main() {
 	// Dump styles.
 	if cli.HTMLStyles {
 		formatter := html.New(html.WithClasses())
-		formatter.WriteCSS(w, style)
+		err = formatter.WriteCSS(w, style)
+		ctx.FatalIfErrorf(err)
 		return
 	}

@@ -238,7 +240,7 @@ func lex(ctx *kong.Context, path string, contents string) chroma.Iterator {
 		rel.Trace(cli.Trace)
 	}
 	lexer = chroma.Coalesce(lexer)
-	it, err := lexer.Tokenise(nil, string(contents))
+	it, err := lexer.Tokenise(nil, contents)
 	ctx.FatalIfErrorf(err)
 	return it
 }
@@ -127,6 +127,7 @@ func MustParseColour(colour string) Colour {
 	return parsed
 }

+// IsSet returns true if the colour is set.
 func (c Colour) IsSet() bool { return c != 0 }

 func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) }
@@ -17,7 +17,7 @@ type Formatter interface {
 // Guards against iterator panics.
 type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error

-func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) {
+func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint
 	defer func() {
 		if perr := recover(); perr != nil {
 			err = perr.(error)
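Aside (not part of the commit): a minimal sketch of the recover-to-error guard that FormatterFunc.Format uses above. It assumes, as the diff does, that the recovered panic value is an error; the wrapper name is hypothetical.

package main

import (
	"errors"
	"fmt"
)

// guard runs fn and converts a panic into a returned error.
func guard(fn func() error) (err error) {
	defer func() {
		if perr := recover(); perr != nil {
			err = perr.(error) // assumes the panic value is an error
		}
	}()
	return fn()
}

func main() {
	err := guard(func() error { panic(errors.New("iterator failed")) })
	fmt.Println(err) // iterator failed
}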
@@ -19,7 +19,7 @@ var (
 		return nil
 	}))
 	// Default HTML formatter outputs self-contained HTML.
-	htmlFull = Register("html", html.New(html.Standalone(), html.WithClasses()))
+	htmlFull = Register("html", html.New(html.Standalone(), html.WithClasses())) // nolint
 )

 // Fallback formatter.
@@ -113,7 +113,10 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 	fmt.Fprint(w, "<html>\n")
 	if f.Classes {
 		fmt.Fprint(w, "<style type=\"text/css\">\n")
-		f.WriteCSS(w, style)
+		err = f.WriteCSS(w, style)
+		if err != nil {
+			return err
+		}
 		fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background])
 		fmt.Fprint(w, "</style>")
 	}
@@ -243,7 +246,7 @@ func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.Toke
 		if cls == "" {
 			return ""
 		}
-		return string(fmt.Sprintf(` class="%s"`, cls))
+		return fmt.Sprintf(` class="%s"`, cls)
 	}
 	if _, ok := styles[tt]; !ok {
 		tt = tt.SubCategory()
@@ -8,6 +8,7 @@ import (
 	"testing"

+	"github.com/alecthomas/assert"

 	"github.com/alecthomas/chroma"
 	"github.com/alecthomas/chroma/lexers"
 	"github.com/alecthomas/chroma/styles"
@@ -89,7 +90,8 @@ func TestClassPrefix(t *testing.T) {
 	}

 	var styleBuf bytes.Buffer
-	withPrefix.WriteCSS(&styleBuf, styles.Fallback)
+	err := withPrefix.WriteCSS(&styleBuf, styles.Fallback)
+	assert.NoError(t, err)
 	if !strings.Contains(styleBuf.String(), ".some-prefix-chroma ") {
 		t.Error("Stylesheets should have a class prefix")
 	}
@@ -44,8 +44,9 @@ func Literator(tokens ...Token) Iterator {
 	}
 }

+// SplitTokensIntoLines splits tokens containing newlines in two.
 func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
-	var line []Token
+	var line []Token // nolint: prealloc
 	for _, token := range tokens {
 		for strings.Contains(token.Value, "\n") {
 			parts := strings.SplitAfterN(token.Value, "\n", 2)
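Aside (not part of the commit): a runnable sketch of the newline-splitting loop shown above, using a stand-in token type. The handling of the trailing partial line is an assumption, not taken from the diff.

package main

import (
	"fmt"
	"strings"
)

// token stands in for chroma.Token; only the Value field matters here.
type token struct{ Value string }

// splitIntoLines splits any token containing a newline in two, so that each
// returned slice holds the tokens of exactly one line.
func splitIntoLines(tokens []token) (out [][]token) {
	var line []token // nolint: prealloc
	for _, t := range tokens {
		for strings.Contains(t.Value, "\n") {
			parts := strings.SplitAfterN(t.Value, "\n", 2)
			line = append(line, token{Value: parts[0]})
			out = append(out, line)
			line = nil
			t.Value = parts[1]
		}
		if t.Value != "" {
			line = append(line, t)
		}
	}
	if len(line) > 0 {
		out = append(out, line)
	}
	return
}

func main() {
	fmt.Println(splitIntoLines([]token{{Value: "a\nb"}, {Value: "c"}}))
}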
lexer.go (3 changes)
@@ -66,12 +66,15 @@ type Token struct {
 func (t *Token) String() string { return t.Value }
 func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

+// Clone returns a clone of the Token.
 func (t *Token) Clone() Token {
 	return *t
 }

+// EOF is returned by lexers at the end of input.
 var EOF Token

+// TokeniseOptions contains options for tokenisers.
 type TokeniseOptions struct {
 	// State to start tokenisation in. Defaults to "root".
 	State string
@@ -8,11 +8,10 @@ import (
)

var (
scalaOp = "[-~\\^\\*!%&\\\\<>\\|+=:/?@\xa6-\xa7\xa9\xac\xae\xb0-\xb1\xb6\xd7\xf7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+"
scalaUpper = "[A-Z\\$_\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\
ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]"
scalaLetter = `[a-zA-Z\\$_ªµºÀ-ÖØ-öø-ʯͰ-ͳͶ-ͷͻ-ͽΆΈ-ϵϷ-ҁҊ-Ֆա-ևא-ײء-ؿف-يٮ-ٯٱ-ۓەۮ-ۯۺ-ۼۿܐܒ-ܯݍ-ޥޱߊ-ߪऄ-हऽॐक़-ॡॲ-ॿঅ-হঽৎড়-ৡৰ-ৱਅ-ਹਖ਼-ਫ਼ੲ-ੴઅ-હઽૐ-ૡଅ-ହଽଡ଼-ୡୱஃ-ஹௐఅ-ఽౘ-ౡಅ-ಹಽೞ-ೡഅ-ഽൠ-ൡൺ-ൿඅ-ෆก-ะา-ำเ-ๅກ-ະາ-ຳຽ-ໄໜ-ༀཀ-ཬྈ-ྋက-ဪဿၐ-ၕၚ-ၝၡၥ-ၦၮ-ၰၵ-ႁႎႠ-ჺᄀ-ፚᎀ-ᎏᎠ-ᙬᙯ-ᙶᚁ-ᚚᚠ-ᛪᛮ-ᜑᜠ-ᜱᝀ-ᝑᝠ-ᝰក-ឳៜᠠ-ᡂᡄ-ᢨᢪ-ᤜᥐ-ᦩᧁ-ᧇᨀ-ᨖᬅ-ᬳᭅ-ᭋᮃ-ᮠᮮ-ᮯᰀ-ᰣᱍ-ᱏᱚ-ᱷᴀ-ᴫᵢ-ᵷᵹ-ᶚḀ-ᾼιῂ-ῌῐ-Ίῠ-Ῥῲ-ῼⁱⁿℂℇℊ-ℓℕℙ-ℝℤΩℨK-ℭℯ-ℹℼ-ℿⅅ-ⅉⅎⅠ-ↈⰀ-ⱼⲀ-ⳤⴀ-ⵥⶀ-ⷞ〆-〇〡-〩〸-〺〼ぁ-ゖゟァ-ヺヿ-ㆎㆠ-ㆷㇰ-ㇿ㐀-䶵一-ꀔꀖ-ꒌꔀ-ꘋꘐ-ꘟꘪ-ꙮꚀ-ꚗꜢ-ꝯꝱ-ꞇꞋ-ꠁꠃ-ꠅꠇ-ꠊꠌ-ꠢꡀ-ꡳꢂ-ꢳꤊ-ꤥꤰ-ꥆꨀ-ꨨꩀ-ꩂꩄ-ꩋ가-힣豈-יִײַ-ﬨשׁ-ﴽﵐ-ﷻﹰ-ﻼA-Za-zヲ-ッア-ンᅠ-ᅵ]`
scalaIDRest = fmt.Sprintf(`%s(?:%s|[0-9])*(?:(?<=_)%s)?`, scalaLetter, scalaLetter, scalaOp)
-	scalaLetterLetterDigit = fmt.Sprintf(`%s(?:%s|\d)*`, scalaLetter, scalaLetter)
)

// Scala lexer.
@@ -5,8 +5,8 @@ import (
 	"github.com/alecthomas/chroma/lexers/internal"
 )

-// Sql lexer.
-var Sql = internal.Register(MustNewLexer(
+// SQL lexer.
+var SQL = internal.Register(MustNewLexer(
 	&Config{
 		Name:    "SQL",
 		Aliases: []string{"sql"},
@@ -21,7 +21,7 @@ type LexerMutator interface {
 // A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing.
 type MutatorFunc func(state *LexerState) error

-func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) }
+func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) } // nolint

 // Mutators applies a set of Mutators in order.
 func Mutators(modifiers ...Mutator) MutatorFunc {
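Aside (not part of the commit): MutatorFunc above follows the common Go adapter idiom in which a named function type satisfies an interface by calling itself. A generic sketch with hypothetical names and a simplified signature:

package main

import "fmt"

// Mutator is a stand-in single-method interface.
type Mutator interface {
	Mutate(state string) error
}

// MutatorFunc lets a plain function satisfy Mutator, as in the diff above.
type MutatorFunc func(state string) error

func (m MutatorFunc) Mutate(state string) error { return m(state) }

func main() {
	var m Mutator = MutatorFunc(func(state string) error {
		fmt.Println("mutating", state)
		return nil
	})
	_ = m.Mutate("root")
}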
regexp.go (55 changes)
@@ -11,6 +11,7 @@ import (
 	"github.com/dlclark/regexp2"
 )

+// A Rule is the fundamental matching unit of the Regex lexer state machine.
 type Rule struct {
 	Pattern string
 	Type    Emitter
@@ -56,26 +57,26 @@ func ByGroups(emitters ...Emitter) Emitter {
 //
 // Example:
 //
 //   var Markdown = internal.Register(MustNewLexer(
 //     &Config{
 //       Name:      "markdown",
 //       Aliases:   []string{"md", "mkd"},
 //       Filenames: []string{"*.md", "*.mkd", "*.markdown"},
 //       MimeTypes: []string{"text/x-markdown"},
 //     },
 //     Rules{
 //       "root": {
 //         {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
 //           UsingByGroup(
 //             internal.Get,
 //             2, 4,
 //             String, String, String, Text, String,
 //           ),
 //           nil,
 //         },
 //       },
 //     },
 //   ))
 //
 // See the lexers/m/markdown.go for the complete example.
 //
@@ -155,6 +156,7 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
 // Rules maps from state to a sequence of Rules.
 type Rules map[string][]Rule

+// Clone returns a clone of the Rules.
 func (r Rules) Clone() Rules {
 	out := map[string][]Rule{}
 	for key, rules := range r {
@@ -207,6 +209,7 @@ func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
 	}, nil
 }

+// Trace enables debug tracing.
 func (r *RegexLexer) Trace(trace bool) *RegexLexer {
 	r.trace = trace
 	return r
@@ -221,8 +224,10 @@ type CompiledRule struct {
 	flags string
 }

+// CompiledRules is a map of rule name to sequence of compiled rules in that rule.
 type CompiledRules map[string][]*CompiledRule

+// LexerState contains the state for a single lex.
 type LexerState struct {
 	Lexer *RegexLexer
 	Text  []rune
@@ -238,14 +243,17 @@ type LexerState struct {
 	iteratorStack []Iterator
 }

+// Set mutator context.
 func (l *LexerState) Set(key interface{}, value interface{}) {
 	l.MutatorContext[key] = value
 }

+// Get mutator context.
 func (l *LexerState) Get(key interface{}) interface{} {
 	return l.MutatorContext[key]
 }

+// Iterator returns the next Token from the lexer.
 func (l *LexerState) Iterator() Token {
 	for l.Pos < len(l.Text) && len(l.Stack) > 0 {
 		// Exhaust the iterator stack, if any.
@@ -306,6 +314,7 @@ func (l *LexerState) Iterator() Token {
 	return EOF
 }

+// RegexLexer is the default lexer implementation used in Chroma.
 type RegexLexer struct {
 	config   *Config
 	analyser func(text string) float32
@@ -322,14 +331,14 @@ func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
 	return r
 }

-func (r *RegexLexer) AnalyseText(text string) float32 {
+func (r *RegexLexer) AnalyseText(text string) float32 { // nolint
 	if r.analyser != nil {
 		return r.analyser(text)
 	}
 	return 0.0
 }

-func (r *RegexLexer) Config() *Config {
+func (r *RegexLexer) Config() *Config { // nolint
 	return r.config
 }

@@ -374,7 +383,7 @@ restart:
 	return nil
 }

-func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
+func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint
 	if err := r.maybeCompile(); err != nil {
 		return nil, err
 	}
remap.go (1 change)
@@ -36,6 +36,7 @@ func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterat
 	}, nil
 }

+// TypeMapping defines type maps for the TypeRemappingLexer.
 type TypeMapping []struct {
 	From, To TokenType
 	Words    []string
style.go (3 changes)
@@ -8,6 +8,7 @@ import (
 // Trilean value for StyleEntry value inheritance.
 type Trilean uint8

+// Trilean states.
 const (
 	Pass Trilean = iota
 	Yes
@@ -25,6 +26,7 @@ func (t Trilean) String() string {
 	}
 }

+// Prefix returns s with "no" as a prefix if Trilean is no.
 func (t Trilean) Prefix(s string) string {
 	if t == Yes {
 		return s
@@ -73,6 +75,7 @@ func (s StyleEntry) String() string {
 	return strings.Join(out, " ")
 }

+// Sub subtracts e from s where elements match.
 func (s StyleEntry) Sub(e StyleEntry) StyleEntry {
 	out := StyleEntry{}
 	if e.Colour != s.Colour {
@@ -4,8 +4,8 @@ import (
 	"github.com/alecthomas/chroma"
 )

-// Algol_Nu style.
-var Algol_Nu = Register(chroma.MustNewStyle("algol_nu", chroma.StyleEntries{
+// AlgolNu style.
+var AlgolNu = Register(chroma.MustNewStyle("algol_nu", chroma.StyleEntries{
 	chroma.Comment:        "italic #888",
 	chroma.CommentPreproc: "bold noitalic #888",
 	chroma.CommentSpecial: "bold noitalic #888",