Mirror of https://github.com/alecthomas/chroma.git, synced 2025-07-15 01:14:21 +02:00
Add support for line numbers.
@@ -10,6 +10,7 @@ import (
 	"runtime"
 	"runtime/pprof"
 	"sort"
+	"strconv"
 	"strings"
 
 	"github.com/mattn/go-colorable"
@@ -38,6 +39,9 @@ var (
 	htmlOnlyFlag        = kingpin.Flag("html-only", "Output HTML fragment.").Bool()
 	htmlInlineStyleFlag = kingpin.Flag("html-inline-styles", "Output HTML with inline styles (no classes).").Bool()
 	htmlTabWidthFlag    = kingpin.Flag("html-tab-width", "Set the HTML tab width.").Default("8").Int()
+	htmlLinesFlag       = kingpin.Flag("html-line-numbers", "Include line numbers in output.").Bool()
+	htmlHighlightStyleFlag = kingpin.Flag("html-highlight-style", "Style used for highlighting lines.").Default("bg:#yellow").String()
+	htmlHighlightFlag   = kingpin.Flag("html-highlight", "Highlight these ranges (N:M).").Strings()
 
 	filesArgs = kingpin.Arg("files", "Files to highlight.").ExistingFiles()
 )
@@ -108,6 +112,24 @@ command, for Go.
 		if !*htmlOnlyFlag {
 			options = append(options, html.Standalone())
 		}
+		if *htmlLinesFlag {
+			options = append(options, html.WithLineNumbers())
+		}
+		if len(*htmlHighlightFlag) > 0 {
+			ranges := [][2]int{}
+			for _, span := range *htmlHighlightFlag {
+				parts := strings.Split(span, ":")
+				if len(parts) != 2 {
+					kingpin.Fatalf("range should be N:M, not %q", span)
+				}
+				start, err := strconv.ParseInt(parts[0], 10, 64)
+				kingpin.FatalIfError(err, "min value of range should be integer not %q", parts[0])
+				end, err := strconv.ParseInt(parts[1], 10, 64)
+				kingpin.FatalIfError(err, "max value of range should be integer not %q", parts[1])
+				ranges = append(ranges, [2]int{int(start), int(end)})
+			}
+			options = append(options, html.HighlightLines(*htmlHighlightStyleFlag, ranges))
+		}
 		formatters.Register("html", html.New(options...))
 	}
 	writer := getWriter(w)
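The new N:M parsing is the only real logic added to the command here. The sketch below is not part of the diff: it hard-codes two flag values in place of kingpin and elides the error handling, just to show the ranges value that ends up being passed to html.HighlightLines.

	package main

	import (
		"fmt"
		"strconv"
		"strings"
	)

	func main() {
		// Stand-ins for repeated --html-highlight=N:M flag values.
		spans := []string{"3:5", "10:10"}
		ranges := [][2]int{}
		for _, span := range spans {
			parts := strings.Split(span, ":")
			start, _ := strconv.ParseInt(parts[0], 10, 64) // the real command uses kingpin.FatalIfError
			end, _ := strconv.ParseInt(parts[1], 10, 64)
			ranges = append(ranges, [2]int{int(start), int(end)})
		}
		fmt.Println(ranges) // [[3 5] [10 10]]
	}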
@@ -39,6 +39,7 @@ func HighlightLines(style string, ranges [][2]int) Option {
 	return func(f *Formatter) {
 		f.highlightStyle = style
 		f.highlightRanges = ranges
+		sort.Sort(f.highlightRanges)
 	}
 }
 
@@ -59,14 +60,34 @@ type Formatter struct {
 	tabWidth        int
+	lineNumbers     bool
 	highlightStyle  string
-	highlightRanges [][2]int
+	highlightRanges highlightRanges
 }
 
+type highlightRanges [][2]int
+
+func (h highlightRanges) Len() int           { return len(h) }
+func (h highlightRanges) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
+func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
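Because HighlightLines now sorts its ranges, later code can assume they are ordered by start line. A self-contained illustration of that ordering, with the type and methods copied from the diff and only the main function added for demonstration:

	package main

	import (
		"fmt"
		"sort"
	)

	type highlightRanges [][2]int

	func (h highlightRanges) Len() int           { return len(h) }
	func (h highlightRanges) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
	func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }

	func main() {
		r := highlightRanges{{10, 12}, {3, 5}}
		sort.Sort(r)
		fmt.Println(r) // [[3 5] [10 12]]
	}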
 func (f *Formatter) Format(w io.Writer, style *chroma.Style) (func(*chroma.Token), error) {
-	styles := f.typeStyles(style)
+	tokens := []*chroma.Token{}
+	return func(token *chroma.Token) {
+		tokens = append(tokens, token)
+		if token.Type == chroma.EOF {
+			f.writeHTML(w, style, tokens)
+			return
+		}
+	}, nil
+}
+
+func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma.Token) error {
 	// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked).
 	//
 	// OTOH we need to be super careful about correct escaping...
+	css := f.styleToCSS(style)
 	if !f.classes {
-		for t, style := range styles {
-			styles[t] = compressStyle(style)
+		for t, style := range css {
+			css[t] = compressStyle(style)
 		}
 	}
 	if f.standalone {
@@ -74,34 +95,47 @@ func (f *Formatter) Format(w io.Writer, style *chroma.Style) (func(*chroma.Token), error) {
 		if f.classes {
 			fmt.Fprint(w, "<style type=\"text/css\">\n")
 			f.WriteCSS(w, style)
-			fmt.Fprintf(w, "body { %s; }\n", styles[chroma.Background])
+			fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background])
 			fmt.Fprint(w, "</style>")
 		}
-		fmt.Fprintf(w, "<body%s>\n", f.styleAttr(styles, chroma.Background))
+		fmt.Fprintf(w, "<body%s>\n", f.styleAttr(css, chroma.Background))
 	}
-	fmt.Fprintf(w, "<pre%s>\n", f.styleAttr(styles, chroma.Background))
-	return func(token *chroma.Token) {
-		if token.Type == chroma.EOF {

+	fmt.Fprintf(w, "<pre%s>\n", f.styleAttr(css, chroma.Background))
+	lines := splitTokensIntoLines(tokens)
+	lineDigits := len(fmt.Sprintf("%d", len(lines)))
+	for line, tokens := range lines {
+		if f.lineNumbers {
+			fmt.Fprintf(w, "<span class=\"ln\">%*d</span>", lineDigits, line+1)
+		}

+		for _, token := range tokens {
+			html := html.EscapeString(token.String())
+			attr := f.styleAttr(css, token.Type)
+			if attr != "" {
+				html = fmt.Sprintf("<span%s>%s</span>", attr, html)
+			}
+			fmt.Fprint(w, html)
+		}
+	}

 	fmt.Fprint(w, "</pre>\n")
 	if f.standalone {
 		fmt.Fprint(w, "</body>\n")
 		fmt.Fprint(w, "</html>\n")
 	}
-			return
-		}
-		html := html.EscapeString(token.String())
-		attr := f.styleAttr(styles, token.Type)
-		if attr == "" {
-			fmt.Fprint(w, html)
-		} else {
-			fmt.Fprintf(w, "<span%s>%s</span>", attr, html)
-		}
-	}, nil

+	return nil
 }

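In the line-number branch above, %*d pads each number to the width of the largest line number so the "ln" column stays aligned. A tiny standalone check of that formatting (the values are made up):

	package main

	import "fmt"

	func main() {
		lines := 120
		lineDigits := len(fmt.Sprintf("%d", lines)) // 3
		fmt.Printf("<span class=\"ln\">%*d</span>\n", lineDigits, 7)
		// Output: <span class="ln">  7</span>
	}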
 func (f *Formatter) class(tt chroma.TokenType) string {
-	if tt == chroma.Background {
+	switch tt {
+	case chroma.Background:
 		return "chroma"
+	case chroma.LineNumbers:
+		return "ln"
+	case chroma.Highlight:
+		return "hl"
 	}
 	if tt < 0 {
 		return fmt.Sprintf("%sss%x", f.prefix, -int(tt))
@@ -132,6 +166,75 @@ func (f *Formatter) tabWidthStyle() string {
 	return ""
 }
 
+// WriteCSS writes CSS style definitions (without any surrounding HTML).
+func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
+	css := f.styleToCSS(style)
+	// Special-case background as it is mapped to the outer ".chroma" class.
+	if _, err := fmt.Fprintf(w, "/* %s */ .chroma { %s }\n", chroma.Background, css[chroma.Background]); err != nil {
+		return err
+	}
+	// No line-numbers, add a default.
+	if _, ok := css[chroma.LineNumbers]; !ok {
+		css[chroma.LineNumbers] = "color: #888"
+	}
+	css[chroma.LineNumbers] += "; margin-right: 0.5em"
+	tts := []int{}
+	for tt := range css {
+		tts = append(tts, int(tt))
+	}
+	sort.Ints(tts)
+	for _, ti := range tts {
+		tt := chroma.TokenType(ti)
+		if tt == chroma.Background {
+			continue
+		}
+		styles := css[tt]
+		if _, err := fmt.Fprintf(w, "/* %s */ .chroma .%s { %s }\n", tt, f.class(tt), styles); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string {
+	bg := style.Get(chroma.Background)
+	classes := map[chroma.TokenType]string{}
+	// Insert highlight colour if needed.
+	if len(f.highlightRanges) > 0 {
+		highlight := chroma.ParseStyleEntry(bg, f.highlightStyle).Sub(bg)
+		classes[chroma.Highlight] = StyleEntryToCSS(highlight)
+	}
+	// Convert the style.
+	for t := range style.Entries {
+		e := style.Entries[t]
+		if t != chroma.Background {
+			e = e.Sub(bg)
+		}
+		classes[t] = StyleEntryToCSS(e)
+	}
+	classes[chroma.Background] += f.tabWidthStyle()
+	return classes
+}
+
+// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes.
+func StyleEntryToCSS(e *chroma.StyleEntry) string {
+	styles := []string{}
+	if e.Colour.IsSet() {
+		styles = append(styles, "color: "+e.Colour.String())
+	}
+	if e.Background.IsSet() {
+		styles = append(styles, "background-color: "+e.Background.String())
+	}
+	if e.Bold {
+		styles = append(styles, "font-weight: bold")
+	}
+	if e.Italic {
+		styles = append(styles, "font-style: italic")
+	}
+	return strings.Join(styles, "; ")
+}

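StyleEntryToCSS is now exported, so other packages can reuse the style-to-CSS conversion. A rough, hypothetical usage sketch written against the API exactly as it appears in this diff (the formatters/html import path is assumed, and the output in the comment simply follows the branches above):

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
		"github.com/alecthomas/chroma/formatters/html"
	)

	func main() {
		entry := &chroma.StyleEntry{
			Colour: chroma.ParseColour("#ff0000"),
			Bold:   true,
		}
		fmt.Println(html.StyleEntryToCSS(entry))
		// Expected (per the branches above): color: #ff0000; font-weight: bold
	}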
 // Compress CSS attributes - remove spaces, transform 6-digit colours to 3.
 func compressStyle(s string) string {
 	s = strings.Replace(s, " ", "", -1)
 	parts := strings.Split(s, ";")
@@ -148,57 +251,25 @@ func compressStyle(s string) string {
 	return strings.Join(out, ";")
 }
 
-// WriteCSS writes CSS style definitions (without any surrounding HTML).
-func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
-	classes := f.typeStyles(style)
-	if _, err := fmt.Fprintf(w, "/* %s */ .chroma { %s }\n", chroma.Background, classes[chroma.Background]); err != nil {
-		return err
-	}
-	tts := []int{}
-	for tt := range classes {
-		tts = append(tts, int(tt))
-	}
-	sort.Ints(tts)
-	for _, ti := range tts {
-		tt := chroma.TokenType(ti)
-		styles := classes[tt]
-		if tt < 0 {
-			continue
-		}
-		if _, err := fmt.Fprintf(w, "/* %s */ .chroma .%ss%x { %s }\n", tt, f.prefix, int(tt), styles); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-func (f *Formatter) typeStyles(style *chroma.Style) map[chroma.TokenType]string {
-	bg := style.Get(chroma.Background)
-	classes := map[chroma.TokenType]string{}
-	for t := range style.Entries {
-		e := style.Entries[t]
-		if t != chroma.Background {
-			e = e.Sub(bg)
-		}
-		classes[t] = f.styleEntryToCSS(e)
-	}
-	classes[chroma.Background] += f.tabWidthStyle()
-	return classes
-}
-
-func (f *Formatter) styleEntryToCSS(e *chroma.StyleEntry) string {
-	styles := []string{}
-	if e.Colour.IsSet() {
-		styles = append(styles, "color: "+e.Colour.String())
-	}
-	if e.Background.IsSet() {
-		styles = append(styles, "background-color: "+e.Background.String())
-	}
-	if e.Bold {
-		styles = append(styles, "font-weight: bold")
-	}
-	if e.Italic {
-		styles = append(styles, "font-style: italic")
-	}
-	return strings.Join(styles, "; ")
-}
+func splitTokensIntoLines(tokens []*chroma.Token) (out [][]*chroma.Token) {
+	line := []*chroma.Token{}
+	for _, token := range tokens {
+		for strings.Contains(token.Value, "\n") {
+			parts := strings.SplitAfterN(token.Value, "\n", 2)
+			// Token becomes the tail.
+			token.Value = parts[1]

+			// Append the head to the line and flush the line.
+			clone := token.Clone()
+			clone.Value = parts[0]
+			line = append(line, clone)
+			out = append(out, line)
+			line = nil
+		}
+		line = append(line, token)
+	}
+	if len(line) > 0 {
+		out = append(out, line)
+	}
+	return
+}

@@ -6,6 +6,7 @@ import (
 
 	"github.com/stretchr/testify/assert"
 
+	"github.com/alecthomas/chroma"
 	"github.com/alecthomas/chroma/lexers"
 	"github.com/alecthomas/chroma/styles"
 )
@@ -27,3 +28,26 @@ func BenchmarkHTMLFormatter(b *testing.B) {
 		assert.NoError(b, err)
 	}
 }
+
+func TestSplitTokensIntoLines(t *testing.T) {
+	in := []*chroma.Token{
+		{Value: "hello", Type: chroma.NameKeyword},
+		{Value: " world\nwhat?\n", Type: chroma.NameKeyword},
+		{Type: chroma.EOF},
+	}
+	expected := [][]*chroma.Token{
+		[]*chroma.Token{
+			{Type: chroma.NameKeyword, Value: "hello"},
+			{Type: chroma.NameKeyword, Value: " world\n"},
+		},
+		[]*chroma.Token{
+			{Type: chroma.NameKeyword, Value: "what?\n"},
+		},
+		[]*chroma.Token{
+			{Type: chroma.NameKeyword},
+			{Type: chroma.EOF},
+		},
+	}
+	actual := splitTokensIntoLines(in)
+	assert.Equal(t, expected, actual)
+}
lexer.go
@@ -66,6 +66,12 @@ type Token struct {
 func (t *Token) String() string   { return t.Value }
 func (t *Token) GoString() string { return fmt.Sprintf("Token{%s, %q}", t.Type, t.Value) }
 
+func (t *Token) Clone() *Token {
+	clone := &Token{}
+	*clone = *t
+	return clone
+}
+
 type TokeniseOptions struct {
 	// State to start tokenisation in. Defaults to "root".
 	State string
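Token.Clone gives splitTokensIntoLines an independent copy for the head of a split token, so truncating the head does not disturb the original token's remaining tail. A small hypothetical demonstration against the Token API as it appears in this diff:

	package main

	import (
		"fmt"

		"github.com/alecthomas/chroma"
	)

	func main() {
		t := &chroma.Token{Type: chroma.NameKeyword, Value: "hello\nworld"}
		head := t.Clone()
		head.Value = "hello\n" // truncate the copy only
		fmt.Printf("%q %q\n", t.Value, head.Value)
		// "hello\nworld" "hello\n"
	}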
style.go
@@ -180,7 +180,7 @@ func ParseStyleEntry(parent *StyleEntry, entry string) *StyleEntry { // nolint:
 		case strings.HasPrefix(part, "#"):
 			out.Colour = ParseColour(part)
 		default:
-			panic("unsupported style entry " + part)
+			// Here lies an error, but we ignore it in the interests of convenience.
 		}
 	}
 	return out
@@ -4,100 +4,102 @@ package chroma

import "fmt"

const _TokenType_name = "EOFNoneOtherErrorEscapeBackgroundKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
const _TokenType_name = "EOFNoneOtherErrorEscapeHighlightLineNumbersBackgroundKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"

var _TokenType_map = map[TokenType]string{
-	-6: _TokenType_name[0:3],
-	-5: _TokenType_name[3:7],
-	-4: _TokenType_name[7:12],
-	-3: _TokenType_name[12:17],
-	-2: _TokenType_name[17:23],
-	-1: _TokenType_name[23:33],
-	1000: _TokenType_name[33:40],
-	1001: _TokenType_name[40:55],
-	1002: _TokenType_name[55:73],
-	1003: _TokenType_name[73:89],
-	1004: _TokenType_name[89:102],
-	1005: _TokenType_name[102:117],
-	1006: _TokenType_name[117:128],
-	2000: _TokenType_name[128:132],
-	2001: _TokenType_name[132:145],
-	2002: _TokenType_name[145:156],
-	2003: _TokenType_name[156:173],
-	2004: _TokenType_name[173:182],
-	2005: _TokenType_name[182:194],
-	2006: _TokenType_name[194:207],
-	2007: _TokenType_name[207:217],
-	2008: _TokenType_name[217:230],
-	2009: _TokenType_name[230:242],
-	2010: _TokenType_name[242:259],
-	2011: _TokenType_name[259:270],
-	2012: _TokenType_name[270:279],
-	2013: _TokenType_name[279:292],
-	2014: _TokenType_name[292:304],
-	2015: _TokenType_name[304:313],
-	2016: _TokenType_name[313:323],
-	2017: _TokenType_name[323:335],
-	2018: _TokenType_name[335:342],
-	2019: _TokenType_name[342:354],
-	2020: _TokenType_name[354:375],
-	2021: _TokenType_name[375:392],
-	2022: _TokenType_name[392:410],
-	2023: _TokenType_name[410:430],
-	2024: _TokenType_name[430:447],
-	3000: _TokenType_name[447:454],
-	3001: _TokenType_name[454:465],
-	3002: _TokenType_name[465:477],
-	3100: _TokenType_name[477:490],
-	3101: _TokenType_name[490:508],
-	3102: _TokenType_name[508:525],
-	3103: _TokenType_name[525:546],
-	3104: _TokenType_name[546:566],
-	3105: _TokenType_name[566:583],
-	3106: _TokenType_name[583:605],
-	3107: _TokenType_name[605:621],
-	3108: _TokenType_name[621:640],
-	3109: _TokenType_name[640:659],
-	3110: _TokenType_name[659:679],
-	3111: _TokenType_name[679:700],
-	3112: _TokenType_name[700:717],
-	3113: _TokenType_name[717:735],
-	3114: _TokenType_name[735:753],
-	3115: _TokenType_name[753:772],
-	3116: _TokenType_name[772:791],
-	3200: _TokenType_name[791:804],
-	3201: _TokenType_name[804:820],
-	3202: _TokenType_name[820:838],
-	3203: _TokenType_name[838:854],
-	3204: _TokenType_name[854:874],
-	3205: _TokenType_name[874:898],
-	3206: _TokenType_name[898:914],
-	4000: _TokenType_name[914:922],
-	4001: _TokenType_name[922:934],
-	5000: _TokenType_name[934:945],
-	6000: _TokenType_name[945:952],
-	6001: _TokenType_name[952:967],
-	6002: _TokenType_name[967:983],
-	6003: _TokenType_name[983:996],
-	6004: _TokenType_name[996:1010],
-	6100: _TokenType_name[1010:1024],
-	6101: _TokenType_name[1024:1042],
-	7000: _TokenType_name[1042:1049],
-	7001: _TokenType_name[1049:1063],
-	7002: _TokenType_name[1063:1074],
-	7003: _TokenType_name[1074:1086],
-	7004: _TokenType_name[1086:1100],
-	7005: _TokenType_name[1100:1115],
-	7006: _TokenType_name[1115:1128],
-	7007: _TokenType_name[1128:1141],
-	7008: _TokenType_name[1141:1154],
-	7009: _TokenType_name[1154:1171],
-	7010: _TokenType_name[1171:1187],
-	7011: _TokenType_name[1187:1203],
-	8000: _TokenType_name[1203:1207],
-	8001: _TokenType_name[1207:1221],
-	8002: _TokenType_name[1221:1231],
-	8003: _TokenType_name[1231:1246],
+	-8: _TokenType_name[0:3],
+	-7: _TokenType_name[3:7],
+	-6: _TokenType_name[7:12],
+	-5: _TokenType_name[12:17],
+	-4: _TokenType_name[17:23],
+	-3: _TokenType_name[23:32],
+	-2: _TokenType_name[32:43],
+	-1: _TokenType_name[43:53],
+	1000: _TokenType_name[53:60],
+	1001: _TokenType_name[60:75],
+	1002: _TokenType_name[75:93],
+	1003: _TokenType_name[93:109],
+	1004: _TokenType_name[109:122],
+	1005: _TokenType_name[122:137],
+	1006: _TokenType_name[137:148],
+	2000: _TokenType_name[148:152],
+	2001: _TokenType_name[152:165],
+	2002: _TokenType_name[165:176],
+	2003: _TokenType_name[176:193],
+	2004: _TokenType_name[193:202],
+	2005: _TokenType_name[202:214],
+	2006: _TokenType_name[214:227],
+	2007: _TokenType_name[227:237],
+	2008: _TokenType_name[237:250],
+	2009: _TokenType_name[250:262],
+	2010: _TokenType_name[262:279],
+	2011: _TokenType_name[279:290],
+	2012: _TokenType_name[290:299],
+	2013: _TokenType_name[299:312],
+	2014: _TokenType_name[312:324],
+	2015: _TokenType_name[324:333],
+	2016: _TokenType_name[333:343],
+	2017: _TokenType_name[343:355],
+	2018: _TokenType_name[355:362],
+	2019: _TokenType_name[362:374],
+	2020: _TokenType_name[374:395],
+	2021: _TokenType_name[395:412],
+	2022: _TokenType_name[412:430],
+	2023: _TokenType_name[430:450],
+	2024: _TokenType_name[450:467],
+	3000: _TokenType_name[467:474],
+	3001: _TokenType_name[474:485],
+	3002: _TokenType_name[485:497],
+	3100: _TokenType_name[497:510],
+	3101: _TokenType_name[510:528],
+	3102: _TokenType_name[528:545],
+	3103: _TokenType_name[545:566],
+	3104: _TokenType_name[566:586],
+	3105: _TokenType_name[586:603],
+	3106: _TokenType_name[603:625],
+	3107: _TokenType_name[625:641],
+	3108: _TokenType_name[641:660],
+	3109: _TokenType_name[660:679],
+	3110: _TokenType_name[679:699],
+	3111: _TokenType_name[699:720],
+	3112: _TokenType_name[720:737],
+	3113: _TokenType_name[737:755],
+	3114: _TokenType_name[755:773],
+	3115: _TokenType_name[773:792],
+	3116: _TokenType_name[792:811],
+	3200: _TokenType_name[811:824],
+	3201: _TokenType_name[824:840],
+	3202: _TokenType_name[840:858],
+	3203: _TokenType_name[858:874],
+	3204: _TokenType_name[874:894],
+	3205: _TokenType_name[894:918],
+	3206: _TokenType_name[918:934],
+	4000: _TokenType_name[934:942],
+	4001: _TokenType_name[942:954],
+	5000: _TokenType_name[954:965],
+	6000: _TokenType_name[965:972],
+	6001: _TokenType_name[972:987],
+	6002: _TokenType_name[987:1003],
+	6003: _TokenType_name[1003:1016],
+	6004: _TokenType_name[1016:1030],
+	6100: _TokenType_name[1030:1044],
+	6101: _TokenType_name[1044:1062],
+	7000: _TokenType_name[1062:1069],
+	7001: _TokenType_name[1069:1083],
+	7002: _TokenType_name[1083:1094],
+	7003: _TokenType_name[1094:1106],
+	7004: _TokenType_name[1106:1120],
+	7005: _TokenType_name[1120:1135],
+	7006: _TokenType_name[1135:1148],
+	7007: _TokenType_name[1148:1161],
+	7008: _TokenType_name[1161:1174],
+	7009: _TokenType_name[1174:1191],
+	7010: _TokenType_name[1191:1207],
+	7011: _TokenType_name[1207:1223],
+	8000: _TokenType_name[1223:1227],
+	8001: _TokenType_name[1227:1241],
+	8002: _TokenType_name[1241:1251],
+	8003: _TokenType_name[1251:1266],
}

func (i TokenType) String() string {