
Go lexer: single line comment without consuming endline, disable EnsureNL (#984)

This PR changes `CommentSingle` so that it no longer consumes the trailing newline as part of the comment.
This fixes single-line comments not being parsed when they appear at the end of a line or at the end of the file, which was reported earlier as the reason single-line comments were not highlighted properly.
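
For illustration, the difference between the old and the new comment rule can be reproduced with plain regular expressions. The sketch below uses the standard library `regexp` package for simplicity (chroma uses its own regexp engine, but these two patterns should behave the same here): the old rule requires a trailing newline, so a comment at the very end of the input never matches.

``` go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldRule := regexp.MustCompile(`//(.*?)\n`)  // old rule: comment must end with "\n"
	newRule := regexp.MustCompile(`//[^\n\r]*`) // new rule: match up to, but not including, the line end

	src := "x := 1 // no trailing newline"
	fmt.Printf("old: %q\n", oldRule.FindString(src)) // old: "" (no match at end of input)
	fmt.Printf("new: %q\n", newRule.FindString(src)) // new: "// no trailing newline"
}
```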

Removing `EnsureNL: true` means chroma no longer appends an unnecessary newline to trailing `Text` and `CommentSingle` tokens. This made chroma effectively unusable for console syntax highlighting: typing e.g. `b := ` produced an extra newline every time the input ended with a space and the host app asked `quick` for the highlighted text.
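
A rough sketch of that console scenario, assuming a host app that re-highlights the current line on each keystroke via `quick.Highlight` (the formatter and style names below are arbitrary placeholders, not taken from this PR):

``` go
// Hypothetical host-app sketch, not part of this PR: re-highlight the partial
// console input each time the user types. With EnsureNL previously enabled,
// highlighting "b := " gained a trailing newline token, breaking single-line output.
package main

import (
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	partialInput := "b := " // what the user has typed so far
	if err := quick.Highlight(os.Stdout, partialInput, "go", "terminal16m", "monokai"); err != nil {
		panic(err)
	}
}
```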

Token behavior:
<table>
<tr>
<td> Before </td> <td> After </td>
</tr>
<tr>
<td>

``` go
t.Run("Single space", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, " ")
        expected := []chroma.Token{
                {chroma.Text, " \n"},
        }
        assert.Equal(t, expected, tokens)
})
t.Run("Assignment unfinished", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, "i = ")
        expected := []chroma.Token{
                { chroma.NameOther, "i" },
                { chroma.Text, " " },
                { chroma.Punctuation, "=" },
                { chroma.Text, " \n" },
        }
        assert.Equal(t, expected, tokens)
})
t.Run("Single comment", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, "// W")
        expected := []chroma.Token{
                { chroma.CommentSingle, "// W\n" },
        }
        assert.Equal(t, expected, tokens)
})
```

</td>
<td>
    
``` go
t.Run("Single space", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, " ")
        expected := []chroma.Token{
                {chroma.Text, " "},
        }
        assert.Equal(t, expected, tokens)
})
t.Run("Assignment unfinished", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, "i = ")
        expected := []chroma.Token{
                { chroma.NameOther, "i" },
                { chroma.Text, " " },
                { chroma.Punctuation, "=" },
                { chroma.Text, " " },
        }
        assert.Equal(t, expected, tokens)
})
t.Run("Single comment", func(t *testing.T) {
        tokens, _ := chroma.Tokenise(Go, nil, "// W")
        expected := []chroma.Token{
                { chroma.CommentSingle, "// W" },
        }
        assert.Equal(t, expected, tokens)
})
```
</td>
</tr>
</table>
Mikhail Sorochan 2024-07-22 20:19:08 +04:00 committed by GitHub
parent 40e5e9989e
commit 3044bf5f32
3 changed files with 6 additions and 6 deletions

View File

@@ -13,7 +13,6 @@ var Go = Register(MustNewLexer(
 Aliases: []string{"go", "golang"},
 Filenames: []string{"*.go"},
 MimeTypes: []string{"text/x-gosrc"},
-EnsureNL: true,
 },
 goRules,
 ).SetAnalyser(func(text string) float32 {
@@ -32,7 +31,7 @@ func goRules() Rules {
 {`\n`, Text, nil},
 {`\s+`, Text, nil},
 {`\\\n`, Text, nil},
-{`//(.*?)\n`, CommentSingle, nil},
+{`//[^\n\r]*`, CommentSingle, nil},
 {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil},
 {`(import|package)\b`, KeywordNamespace, nil},
 {`(var|func|struct|map|chan|type|interface|const)\b`, KeywordDeclaration, nil},

View File

@@ -19,4 +19,4 @@ func hello(a int) {
 type Int interface {
 ~int | ~int8 | ~int16 | ~int32 | ~int64
-}
+} // The very last comment w/o LF

View File

@@ -84,8 +84,8 @@
 {"type":"Text","value":"\n"},
 {"type":"Punctuation","value":"}"},
 {"type":"Text","value":" "},
-{"type":"CommentSingle","value":"// One last thing\n"},
-{"type":"Text","value":"\n"},
+{"type":"CommentSingle","value":"// One last thing"},
+{"type":"Text","value":"\n\n"},
 {"type":"KeywordDeclaration","value":"type"},
 {"type":"Text","value":" "},
 {"type":"NameOther","value":"Int"},
@@ -118,5 +118,6 @@
 {"type":"KeywordType","value":"int64"},
 {"type":"Text","value":"\n"},
 {"type":"Punctuation","value":"}"},
-{"type":"Text","value":"\n"}
+{"type":"Text","value":" "},
+{"type":"CommentSingle","value":"// The very last comment w/o LF"}
 ]