Mirror of https://github.com/alecthomas/chroma.git
3044bf5f32
This PR changes `CommentSingle` so that it no longer consumes the trailing newline as part of the comment. That fixes single-line comments not being parsed at the end of a line or at the end of the file, which was reported earlier as the reason single-line comments were not highlighted properly. With `EnsureNL: true` disabled, no unnecessary newline element is added to `Text` or `CommentSingle` tokens. Before this change, using chroma in a console with syntax highlighting was unusable, because typing e.g. `b := ` added a newline each time the input ended in a space when the host app asked `quick` for the highlighted text.

Token behaviour:

<table>
<tr>
<td> Before </td>
<td> After </td>
</tr>
<tr>
<td>

```go
t.Run("Single space", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, " ")
	expected := []chroma.Token{
		{chroma.Text, " \n"},
	}
	assert.Equal(t, expected, tokens)
})

t.Run("Assignment unfinished", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, "i = ")
	expected := []chroma.Token{
		{chroma.NameOther, "i"},
		{chroma.Text, " "},
		{chroma.Punctuation, "="},
		{chroma.Text, " \n"},
	}
	assert.Equal(t, expected, tokens)
})

t.Run("Single comment", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, "// W")
	expected := []chroma.Token{
		{chroma.CommentSingle, "// W\n"},
	}
	assert.Equal(t, expected, tokens)
})
```

</td>
<td>

```go
t.Run("Single space", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, " ")
	expected := []chroma.Token{
		{chroma.Text, " "},
	}
	assert.Equal(t, expected, tokens)
})

t.Run("Assignment unfinished", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, "i = ")
	expected := []chroma.Token{
		{chroma.NameOther, "i"},
		{chroma.Text, " "},
		{chroma.Punctuation, "="},
		{chroma.Text, " "},
	}
	assert.Equal(t, expected, tokens)
})

t.Run("Single comment", func(t *testing.T) {
	tokens, _ := chroma.Tokenise(Go, nil, "// W")
	expected := []chroma.Token{
		{chroma.CommentSingle, "// W"},
	}
	assert.Equal(t, expected, tokens)
})
```

</td>
</tr>
</table>
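For context, a minimal sketch of the console scenario described above, assuming the `quick` helper package from chroma v2; the `"terminal"` formatter and `"monokai"` style are illustrative choices, not anything mandated by this change:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	// A console host application re-highlights the current input line on
	// every keystroke. With this change, a partial line such as "b := "
	// no longer comes back with an extra trailing newline appended.
	if err := quick.Highlight(os.Stdout, "b := ", "go", "terminal", "monokai"); err != nil {
		panic(err)
	}
}
```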
82 lines
3.2 KiB
Go
package lexers

import (
	"strings"

	. "github.com/alecthomas/chroma/v2" // nolint
)

// Go lexer.
var Go = Register(MustNewLexer(
	&Config{
		Name:      "Go",
		Aliases:   []string{"go", "golang"},
		Filenames: []string{"*.go"},
		MimeTypes: []string{"text/x-gosrc"},
	},
	goRules,
).SetAnalyser(func(text string) float32 {
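	// Cheap content-based scoring: a package clause combined with a call
	// into the fmt package is a strong hint that the text is Go (0.5);
	// a package clause alone is a weak hint (0.1).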
	if strings.Contains(text, "fmt.") && strings.Contains(text, "package ") {
		return 0.5
	}
	if strings.Contains(text, "package ") {
		return 0.1
	}
	return 0.0
}))

func goRules() Rules {
	return Rules{
		"root": {
			{`\n`, Text, nil},
			{`\s+`, Text, nil},
			{`\\\n`, Text, nil},
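			// Single-line comments stop before the newline: the regex below
			// excludes \n and \r, so the trailing newline is emitted as a
			// separate Text token by the `\n` rule above (see the commit
			// description).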
			{`//[^\n\r]*`, CommentSingle, nil},
			{`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil},
			{`(import|package)\b`, KeywordNamespace, nil},
			{`(var|func|struct|map|chan|type|interface|const)\b`, KeywordDeclaration, nil},
			{Words(``, `\b`, `break`, `default`, `select`, `case`, `defer`, `go`, `else`, `goto`, `switch`, `fallthrough`, `if`, `range`, `continue`, `for`, `return`), Keyword, nil},
			{`(true|false|iota|nil)\b`, KeywordConstant, nil},
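			// Built-in type names and functions immediately followed by `(`
			// are emitted as NameBuiltin plus Punctuation; bare type names
			// fall through to the KeywordType rule below.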
			{Words(``, `\b(\()`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`, `print`, `println`, `panic`, `recover`, `close`, `complex`, `real`, `imag`, `len`, `cap`, `append`, `copy`, `delete`, `new`, `make`, `clear`, `min`, `max`), ByGroups(NameBuiltin, Punctuation), nil},
			{Words(``, `\b`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`), KeywordType, nil},
			{`\d+i`, LiteralNumber, nil},
			{`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
			{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
			{`\d+[Ee][-+]\d+i`, LiteralNumber, nil},
			{`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
			{`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
			{`0[0-7]+`, LiteralNumberOct, nil},
			{`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, nil},
			{`0b[01_]+`, LiteralNumberBin, nil},
			{`(0|[1-9][0-9_]*)`, LiteralNumberInteger, nil},
			{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
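			// Back-quoted (raw) strings: the contents are re-lexed with the Go
			// text template lexer so template actions are highlighted, and
			// tokens it reports as Other are remapped to LiteralString.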
			{"(`)([^`]*)(`)", ByGroups(LiteralString, UsingLexer(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil},
			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
			{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
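			// An identifier directly followed by `(` is a call: the name is
			// NameFunction, any whitespace in between is re-lexed with this
			// same root state, and the `(` is Punctuation.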
			{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
			{`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
			{`[^\W\d]\w*`, NameOther, nil},
		},
	}
}

var GoHTMLTemplate = Register(DelegatingLexer(HTML, MustNewXMLLexer(
	embedded,
	"embedded/go_template.xml",
).SetConfig(
	&Config{
		Name:    "Go HTML Template",
		Aliases: []string{"go-html-template"},
	},
)))

var GoTextTemplate = Register(MustNewXMLLexer(
	embedded,
	"embedded/go_template.xml",
).SetConfig(
	&Config{
		Name:    "Go Text Template",
		Aliases: []string{"go-text-template"},
	},
))
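As a quick way to exercise the lexer above, here is a minimal, illustrative sketch (not part of the file) that looks the lexer up by its registered alias and dumps the tokens for a single-line comment; per the After column in the commit description, the `CommentSingle` token should not include a trailing newline:

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/v2/lexers"
)

func main() {
	lexer := lexers.Get("go") // resolved via the Aliases registered above
	it, err := lexer.Tokenise(nil, "// W")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-14s %q\n", tok.Type, tok.Value)
	}
}
```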