diff --git a/formatters/tty_indexed.go b/formatters/tty_indexed.go index eb90ea7..47fbb1a 100644 --- a/formatters/tty_indexed.go +++ b/formatters/tty_indexed.go @@ -17,6 +17,20 @@ var c = chroma.MustParseColour var ttyTables = map[int]*ttyTable{ 8: { + foreground: map[chroma.Colour]string{ + c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m", + c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m", + c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m", + c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m", + }, + background: map[chroma.Colour]string{ + c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m", + c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m", + c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m", + c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m", + }, + }, + 16: { foreground: map[chroma.Colour]string{ c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m", c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m", @@ -227,15 +241,11 @@ type indexedTTYFormatter struct { func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) { theme := styleToEscapeSequence(c.table, style) for token := it(); token != chroma.EOF; token = it() { - // TODO: Cache token lookups? 
clr, ok := theme[token.Type] if !ok { clr, ok = theme[token.Type.SubCategory()] if !ok { clr = theme[token.Type.Category()] - // if !ok { - // clr = theme[chroma.InheritStyle] - // } } } if clr != "" { @@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma return nil } +// TTY is an 8-colour terminal formatter. +// +// The Lab colour space is used to map RGB values to the most appropriate index colour. +var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]}) + // TTY8 is an 8-colour terminal formatter. // // The Lab colour space is used to map RGB values to the most appropriate index colour. -var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]}) +var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]}) + +// TTY16 is a 16-colour terminal formatter. +// +// It uses \033[3Xm for normal colours and \033[9Xm for bright colours. +// +// The Lab colour space is used to map RGB values to the most appropriate index colour. +var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]}) // TTY256 is a 256-colour terminal formatter. // diff --git a/iterator.go b/iterator.go index c8845a1..d5175de 100644 --- a/iterator.go +++ b/iterator.go @@ -4,7 +4,7 @@ import "strings" // An Iterator across tokens. // -// nil will be returned at the end of the Token stream. +// EOF will be returned at the end of the Token stream. // // If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token diff --git a/lexers/testdata/arduino.expected b/lexers/testdata/arduino.expected index c59ed2e..72661c8 100644 --- a/lexers/testdata/arduino.expected +++ b/lexers/testdata/arduino.expected @@ -541,6 +541,5 @@ {"type":"Text","value":" \n "}, {"type":"Punctuation","value":"}"}, {"type":"Text","value":"\n"}, - {"type":"Punctuation","value":"}"}, - {"type":"Text","value":"\n"} + {"type":"Punctuation","value":"}"} ] diff --git a/lexers/testdata/promql.expected b/lexers/testdata/promql.expected index 2560137..dd02289 100644 --- a/lexers/testdata/promql.expected +++ b/lexers/testdata/promql.expected @@ -15,7 +15,6 @@ {"type":"LiteralString","value":"alertmanager"}, {"type":"Punctuation","value":"\"}"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Aggregation operators"}, {"type":"TextWhitespace","value":"\n"}, {"type":"Keyword","value":"sum"}, @@ -47,7 +46,6 @@ {"type":"TextWhitespace","value":" "}, {"type":"LiteralNumberInteger","value":"1024"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Metric with multiple lables and whitespaces"}, {"type":"TextWhitespace","value":"\n"}, {"type":"NameVariable","value":"go_gc_duration_seconds"}, @@ -67,7 +65,6 @@ {"type":"TextWhitespace","value":" "}, {"type":"Punctuation","value":"}"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Expression and comment"}, {"type":"TextWhitespace","value":"\n"}, {"type":"NameVariable","value":"go_gc_duration_seconds"}, @@ -80,7 +77,6 @@ {"type":"TextWhitespace","value":" "}, {"type":"CommentSingle","value":"# single comment"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Delta function"}, {"type":"TextWhitespace","value":"\n"}, {"type":"KeywordReserved","value":"delta"}, @@ -96,7 +92,6 @@ {"type":"Punctuation","value":"]"}, {"type":"Operator","value":")"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Sum 
with arguments"}, {"type":"TextWhitespace","value":"\n"}, {"type":"Keyword","value":"sum"}, @@ -114,7 +109,6 @@ {"type":"NameVariable","value":"instance_memory_usage_bytes"}, {"type":"Operator","value":")"}, {"type":"TextWhitespace","value":"\n\n"}, - {"type":"CommentSingle","value":"# Multi-line with offset"}, {"type":"TextWhitespace","value":"\n"}, {"type":"KeywordReserved","value":"label_replace"}, diff --git a/lexers/testdata/stylus.expected b/lexers/testdata/stylus.expected index 5dbcc85..2e8ecef 100644 --- a/lexers/testdata/stylus.expected +++ b/lexers/testdata/stylus.expected @@ -1,19 +1,19 @@ [ - {"type":"NameVariable", "value":"$white"}, - {"type":"Text", "value":" "}, - {"type":"Operator", "value":"?="}, - {"type":"Text", "value":" "}, - {"type":"LiteralNumberHex", "value":"#fff"}, - {"type":"Text", "value":"\n"}, + {"type":"NameVariable","value":"$white"}, + {"type":"Text","value":" "}, + {"type":"Operator","value":"?="}, + {"type":"Text","value":" "}, + {"type":"LiteralNumberHex","value":"#fff"}, + {"type":"Text","value":"\n"}, {"type":"NameTag","value":"body"}, - {"type":"Text", "value":" "}, - {"type":"Punctuation", "value":"{"}, - {"type":"Text", "value":"\n "}, - {"type":"NameProperty", "value":"color"}, - {"type":"Punctuation", "value":":"}, - {"type":"Text", "value":" "}, - {"type":"NameVariable", "value":"$white"}, - {"type":"Punctuation", "value":";"}, - {"type":"Text", "value":"\n"}, - {"type":"Punctuation", "value":"}"} -] \ No newline at end of file + {"type":"Text","value":" "}, + {"type":"Punctuation","value":"{"}, + {"type":"Text","value":"\n "}, + {"type":"NameProperty","value":"color"}, + {"type":"Punctuation","value":":"}, + {"type":"Text","value":" "}, + {"type":"NameVariable","value":"$white"}, + {"type":"Punctuation","value":";"}, + {"type":"Text","value":"\n"}, + {"type":"Punctuation","value":"}"} +] diff --git a/lexers/testdata/tsx.expected b/lexers/testdata/tsx.expected index c498ff3..c7ddc32 100644 --- 
a/lexers/testdata/tsx.expected +++ b/lexers/testdata/tsx.expected @@ -60,6 +60,5 @@ {"type":"LiteralStringSingle","value":"'root'"}, {"type":"Punctuation","value":"),"}, {"type":"Text","value":"\n"}, - {"type":"Punctuation","value":")"}, - {"type":"Text","value":"\n"} + {"type":"Punctuation","value":")"} ] diff --git a/regexp.go b/regexp.go index a3f4eb1..7c60efa 100644 --- a/regexp.go +++ b/regexp.go @@ -264,6 +264,7 @@ type LexerState struct { MutatorContext map[interface{}]interface{} iteratorStack []Iterator options *TokeniseOptions + newlineAdded bool } // Set mutator context. @@ -278,7 +279,11 @@ func (l *LexerState) Get(key interface{}) interface{} { // Iterator returns the next Token from the lexer. func (l *LexerState) Iterator() Token { // nolint: gocognit - for l.Pos < len(l.Text) && len(l.Stack) > 0 { + end := len(l.Text) + if l.newlineAdded { + end-- + } + for l.Pos < end && len(l.Stack) > 0 { // Exhaust the iterator stack, if any. for len(l.iteratorStack) > 0 { n := len(l.iteratorStack) - 1 @@ -432,10 +437,13 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, if options.EnsureLF { text = ensureLF(text) } + newlineAdded := false if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") { text += "\n" + newlineAdded = true } state := &LexerState{ + newlineAdded: newlineAdded, options: options, Lexer: r, Text: []rune(text),