Mirror of https://github.com/alecthomas/chroma.git (synced 2025-03-17 20:58:08 +02:00)

Commit 5da831672d (parent 2a1e1a1bf4)

Fix a few bugs, including sub-lexers adding additional newlines when EnsureNL is true.
@@ -17,6 +17,20 @@ var c = chroma.MustParseColour

var ttyTables = map[int]*ttyTable{
	8: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
			c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
			c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
		},
		background: map[chroma.Colour]string{
			c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
			c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
			c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
			c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
		},
	},
	16: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
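The table above maps chroma.Colour values to ANSI SGR escape sequences: \033[30m to \033[37m for foreground, \033[40m to \033[47m for background, with a \033[1m (bold) prefix standing in for the bright variants in the 8-colour table. The doc comments later in this diff say the real lookup maps RGB values through the Lab colour space to the closest entry; the sketch below only illustrates the idea with plain RGB distance, and nearestEscape is an invented name, not chroma API.

package main

import (
	"fmt"
	"math"

	"github.com/alecthomas/chroma"
)

// nearestEscape picks the table entry whose colour is closest to the
// requested one. chroma itself matches in the Lab colour space; squared
// RGB distance is used here only to keep the sketch short.
func nearestEscape(table map[chroma.Colour]string, want chroma.Colour) string {
	best, bestDist := "", math.MaxFloat64
	for col, esc := range table {
		dr := float64(col.Red()) - float64(want.Red())
		dg := float64(col.Green()) - float64(want.Green())
		db := float64(col.Blue()) - float64(want.Blue())
		if d := dr*dr + dg*dg + db*db; d < bestDist {
			best, bestDist = esc, d
		}
	}
	return best
}

func main() {
	table := map[chroma.Colour]string{
		chroma.MustParseColour("#7f0000"): "\033[31m",
		chroma.MustParseColour("#007f00"): "\033[32m",
	}
	// #800000 is nearest to #7f0000, so the red escape is chosen.
	fmt.Printf("%q\n", nearestEscape(table, chroma.MustParseColour("#800000")))
}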
@@ -227,15 +241,11 @@ type indexedTTYFormatter struct {
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
	theme := styleToEscapeSequence(c.table, style)
	for token := it(); token != chroma.EOF; token = it() {
		// TODO: Cache token lookups?
		clr, ok := theme[token.Type]
		if !ok {
			clr, ok = theme[token.Type.SubCategory()]
			if !ok {
				clr = theme[token.Type.Category()]
				// if !ok {
				// clr = theme[chroma.InheritStyle]
				// }
			}
		}
		if clr != "" {
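To follow the lookup in Format above: an escape sequence is resolved from the exact token type first, then from the token's sub-category, then from its category, so a theme that only styles Keyword still covers KeywordReserved. The commented-out branch suggests a further fallback to an inherited style was considered. Below is a minimal sketch of that three-level fallback; lookupEscape and the toy theme map are illustrative, not part of chroma.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

// lookupEscape mirrors the fallback order used in Format: exact type,
// then sub-category, then category. An empty result means "no colour".
func lookupEscape(theme map[chroma.TokenType]string, t chroma.TokenType) string {
	if clr, ok := theme[t]; ok {
		return clr
	}
	if clr, ok := theme[t.SubCategory()]; ok {
		return clr
	}
	return theme[t.Category()]
}

func main() {
	theme := map[chroma.TokenType]string{chroma.Keyword: "\033[31m"}
	// KeywordReserved has no exact entry, so it resolves via its Keyword sub-category.
	fmt.Printf("%q\n", lookupEscape(theme, chroma.KeywordReserved))
}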
@@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
	return nil
}

// TTY is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})

// TTY8 is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})

// TTY16 is a 16-colour terminal formatter.
//
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})

// TTY256 is a 256-colour terminal formatter.
//
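Read together with the earlier table change, this hunk leaves "terminal" registered as the 8-colour formatter (now exported as TTY), registers TTY8 under the explicit name "terminal8", and adds a 16-colour "terminal16" formatter. A hedged usage sketch via the quick helper follows; the lexer and style names are arbitrary choices for the example.

package main

import (
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := "package main\n\nfunc main() {}\n"
	// "terminal" is the 8-colour default; "terminal8" and "terminal16" are the
	// explicitly named variants registered in this diff.
	if err := quick.Highlight(os.Stdout, src, "go", "terminal16", "monokai"); err != nil {
		panic(err)
	}
}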
@@ -4,7 +4,7 @@ import "strings"

// An Iterator across tokens.
//
// nil will be returned at the end of the Token stream.
// EOF will be returned at the end of the Token stream.
//
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token
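The doc fix reflects the current contract: an Iterator ends with the EOF sentinel token rather than nil, and an error inside an iterator may surface as a panic that formatters are expected to recover from. A small sketch of a consumer honouring both halves of that contract; drain and the hand-rolled iterator are illustrative, not part of chroma.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

// drain consumes an Iterator until the EOF sentinel and converts a panic
// raised inside the iterator into an ordinary error.
func drain(it chroma.Iterator) (tokens []chroma.Token, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("iterator panicked: %v", r)
		}
	}()
	for t := it(); t != chroma.EOF; t = it() {
		tokens = append(tokens, t)
	}
	return tokens, nil
}

func main() {
	toks := []chroma.Token{{Type: chroma.Keyword, Value: "func"}}
	i := 0
	var it chroma.Iterator = func() chroma.Token {
		if i >= len(toks) {
			return chroma.EOF
		}
		t := toks[i]
		i++
		return t
	}
	out, err := drain(it)
	fmt.Println(out, err)
}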
lexers/testdata/arduino.expected (vendored, 3 changed lines)
@@ -541,6 +541,5 @@
{"type":"Text","value":" \n "},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
{"type":"Punctuation","value":"}"}
]
lexers/testdata/promql.expected (vendored, 6 changed lines)
@@ -15,7 +15,6 @@
{"type":"LiteralString","value":"alertmanager"},
{"type":"Punctuation","value":"\"}"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Aggregation operators"},
{"type":"TextWhitespace","value":"\n"},
{"type":"Keyword","value":"sum"},
@@ -47,7 +46,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"LiteralNumberInteger","value":"1024"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Metric with multiple lables and whitespaces"},
{"type":"TextWhitespace","value":"\n"},
{"type":"NameVariable","value":"go_gc_duration_seconds"},
@@ -67,7 +65,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"Punctuation","value":"}"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Expression and comment"},
{"type":"TextWhitespace","value":"\n"},
{"type":"NameVariable","value":"go_gc_duration_seconds"},
@@ -80,7 +77,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"CommentSingle","value":"# single comment"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Delta function"},
{"type":"TextWhitespace","value":"\n"},
{"type":"KeywordReserved","value":"delta"},
@@ -96,7 +92,6 @@
{"type":"Punctuation","value":"]"},
{"type":"Operator","value":")"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Sum with arguments"},
{"type":"TextWhitespace","value":"\n"},
{"type":"Keyword","value":"sum"},
@@ -114,7 +109,6 @@
{"type":"NameVariable","value":"instance_memory_usage_bytes"},
{"type":"Operator","value":")"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Multi-line with offset"},
{"type":"TextWhitespace","value":"\n"},
{"type":"KeywordReserved","value":"label_replace"},
lexers/testdata/stylus.expected (vendored, 34 changed lines)
@@ -1,19 +1,19 @@
[
{"type":"NameVariable", "value":"$white"},
{"type":"Text", "value":" "},
{"type":"Operator", "value":"?="},
{"type":"Text", "value":" "},
{"type":"LiteralNumberHex", "value":"#fff"},
{"type":"Text", "value":"\n"},
{"type":"NameVariable","value":"$white"},
{"type":"Text","value":" "},
{"type":"Operator","value":"?="},
{"type":"Text","value":" "},
{"type":"LiteralNumberHex","value":"#fff"},
{"type":"Text","value":"\n"},
{"type":"NameTag","value":"body"},
{"type":"Text", "value":" "},
{"type":"Punctuation", "value":"{"},
{"type":"Text", "value":"\n  "},
{"type":"NameProperty", "value":"color"},
{"type":"Punctuation", "value":":"},
{"type":"Text", "value":" "},
{"type":"NameVariable", "value":"$white"},
{"type":"Punctuation", "value":";"},
{"type":"Text", "value":"\n"},
{"type":"Punctuation", "value":"}"}
]
{"type":"Text","value":" "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n  "},
{"type":"NameProperty","value":"color"},
{"type":"Punctuation","value":":"},
{"type":"Text","value":" "},
{"type":"NameVariable","value":"$white"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"}
]
lexers/testdata/tsx.expected (vendored, 3 changed lines)
@@ -60,6 +60,5 @@
{"type":"LiteralStringSingle","value":"'root'"},
{"type":"Punctuation","value":"),"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n"}
{"type":"Punctuation","value":")"}
]
regexp.go (10 changed lines)
@@ -264,6 +264,7 @@ type LexerState struct {
	MutatorContext map[interface{}]interface{}
	iteratorStack []Iterator
	options *TokeniseOptions
	newlineAdded bool
}

// Set mutator context.
@@ -278,7 +279,11 @@ func (l *LexerState) Get(key interface{}) interface{} {

// Iterator returns the next Token from the lexer.
func (l *LexerState) Iterator() Token { // nolint: gocognit
	for l.Pos < len(l.Text) && len(l.Stack) > 0 {
	end := len(l.Text)
	if l.newlineAdded {
		end--
	}
	for l.Pos < end && len(l.Stack) > 0 {
		// Exhaust the iterator stack, if any.
		for len(l.iteratorStack) > 0 {
			n := len(l.iteratorStack) - 1
@@ -432,10 +437,13 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
	if options.EnsureLF {
		text = ensureLF(text)
	}
	newlineAdded := false
	if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") {
		text += "\n"
		newlineAdded = true
	}
	state := &LexerState{
		newlineAdded: newlineAdded,
		options: options,
		Lexer: r,
		Text: []rune(text),
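The new newlineAdded field records that Tokenise appended a synthetic trailing "\n" to satisfy EnsureNL; Iterator then stops its main scan one rune early, which is what keeps nested sub-lexer runs from emitting extra newline tokens, the bug named in the commit message. A rough way to observe the end result from the public API, assuming the chosen lexer's config enables EnsureNL (an assumption for this sketch):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// The source deliberately lacks a trailing newline; EnsureNL appends one.
	// After this fix the token stream should end with a single "\n" token.
	it, err := lexers.Get("go").Tokenise(nil, "package main")
	if err != nil {
		panic(err)
	}
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}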