// Code generated by "enumer -text -type TokenType"; DO NOT EDIT.

package chroma

import (
	"fmt"
)

const _TokenTypeName = "NoneOtherErrorCodeLineLineLinkLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
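
// Each entry in the maps below is a sub-slice of _TokenTypeName, so every
// per-type name shares the single backing string above rather than being a
// separately allocated string.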
var _TokenTypeMap = map[TokenType]string{
	-13:  _TokenTypeName[0:4],
	-12:  _TokenTypeName[4:9],
	-11:  _TokenTypeName[9:14],
	-10:  _TokenTypeName[14:22],
	-9:   _TokenTypeName[22:30],
	-8:   _TokenTypeName[30:41],
	-7:   _TokenTypeName[41:50],
	-6:   _TokenTypeName[50:63],
	-5:   _TokenTypeName[63:79],
	-4:   _TokenTypeName[79:90],
	-3:   _TokenTypeName[90:94],
	-2:   _TokenTypeName[94:104],
	-1:   _TokenTypeName[104:114],
	0:    _TokenTypeName[114:121],
	1000: _TokenTypeName[121:128],
	1001: _TokenTypeName[128:143],
	1002: _TokenTypeName[143:161],
	1003: _TokenTypeName[161:177],
	1004: _TokenTypeName[177:190],
	1005: _TokenTypeName[190:205],
	1006: _TokenTypeName[205:216],
	2000: _TokenTypeName[216:220],
	2001: _TokenTypeName[220:233],
	2002: _TokenTypeName[233:244],
	2003: _TokenTypeName[244:261],
	2004: _TokenTypeName[261:270],
	2005: _TokenTypeName[270:282],
	2006: _TokenTypeName[282:295],
	2007: _TokenTypeName[295:305],
	2008: _TokenTypeName[305:318],
	2009: _TokenTypeName[318:330],
	2010: _TokenTypeName[330:347],
	2011: _TokenTypeName[347:358],
	2012: _TokenTypeName[358:367],
	2013: _TokenTypeName[367:380],
	2014: _TokenTypeName[380:392],
	2015: _TokenTypeName[392:401],
	2016: _TokenTypeName[401:411],
	2017: _TokenTypeName[411:423],
	2018: _TokenTypeName[423:430],
	2019: _TokenTypeName[430:442],
	2020: _TokenTypeName[442:463],
	2021: _TokenTypeName[463:480],
	2022: _TokenTypeName[480:498],
	2023: _TokenTypeName[498:518],
	2024: _TokenTypeName[518:535],
	3000: _TokenTypeName[535:542],
	3001: _TokenTypeName[542:553],
	3002: _TokenTypeName[553:565],
	3100: _TokenTypeName[565:578],
	3101: _TokenTypeName[578:596],
	3102: _TokenTypeName[596:613],
	3103: _TokenTypeName[613:634],
	3104: _TokenTypeName[634:654],
	3105: _TokenTypeName[654:671],
	3106: _TokenTypeName[671:693],
	3107: _TokenTypeName[693:709],
	3108: _TokenTypeName[709:728],
	3109: _TokenTypeName[728:747],
	3110: _TokenTypeName[747:767],
	3111: _TokenTypeName[767:788],
	3112: _TokenTypeName[788:805],
	3113: _TokenTypeName[805:823],
	3114: _TokenTypeName[823:841],
	3115: _TokenTypeName[841:860],
	3116: _TokenTypeName[860:879],
	3200: _TokenTypeName[879:892],
	3201: _TokenTypeName[892:908],
	3202: _TokenTypeName[908:926],
	3203: _TokenTypeName[926:942],
	3204: _TokenTypeName[942:962],
	3205: _TokenTypeName[962:986],
	3206: _TokenTypeName[986:1002],
	4000: _TokenTypeName[1002:1010],
	4001: _TokenTypeName[1010:1022],
	5000: _TokenTypeName[1022:1033],
	6000: _TokenTypeName[1033:1040],
	6001: _TokenTypeName[1040:1055],
	6002: _TokenTypeName[1055:1071],
	6003: _TokenTypeName[1071:1084],
	6004: _TokenTypeName[1084:1098],
	6100: _TokenTypeName[1098:1112],
	6101: _TokenTypeName[1112:1130],
	7000: _TokenTypeName[1130:1137],
	7001: _TokenTypeName[1137:1151],
	7002: _TokenTypeName[1151:1162],
	7003: _TokenTypeName[1162:1174],
	7004: _TokenTypeName[1174:1188],
	7005: _TokenTypeName[1188:1203],
	7006: _TokenTypeName[1203:1216],
	7007: _TokenTypeName[1216:1229],
	7008: _TokenTypeName[1229:1242],
	7009: _TokenTypeName[1242:1259],
	7010: _TokenTypeName[1259:1275],
	7011: _TokenTypeName[1275:1291],
	8000: _TokenTypeName[1291:1295],
	8001: _TokenTypeName[1295:1309],
	8002: _TokenTypeName[1309:1319],
	8003: _TokenTypeName[1319:1334],
}

func (i TokenType) String() string {
	if str, ok := _TokenTypeMap[i]; ok {
		return str
	}
	return fmt.Sprintf("TokenType(%d)", i)
}

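// _TokenTypeValues lists every defined TokenType in ascending numeric order
// and is returned unchanged by TokenTypeValues below.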
var _TokenTypeValues = []TokenType{-13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 3000, 3001, 3002, 3100, 3101, 3102, 3103, 3104, 3105, 3106, 3107, 3108, 3109, 3110, 3111, 3112, 3113, 3114, 3115, 3116, 3200, 3201, 3202, 3203, 3204, 3205, 3206, 4000, 4001, 5000, 6000, 6001, 6002, 6003, 6004, 6100, 6101, 7000, 7001, 7002, 7003, 7004, 7005, 7006, 7007, 7008, 7009, 7010, 7011, 8000, 8001, 8002, 8003}

var _TokenTypeNameToValueMap = map[string]TokenType{
	_TokenTypeName[0:4]:       -13,
	_TokenTypeName[4:9]:       -12,
	_TokenTypeName[9:14]:      -11,
	_TokenTypeName[14:22]:     -10,
	_TokenTypeName[22:30]:     -9,
	_TokenTypeName[30:41]:     -8,
	_TokenTypeName[41:50]:     -7,
	_TokenTypeName[50:63]:     -6,
	_TokenTypeName[63:79]:     -5,
	_TokenTypeName[79:90]:     -4,
	_TokenTypeName[90:94]:     -3,
	_TokenTypeName[94:104]:    -2,
	_TokenTypeName[104:114]:   -1,
	_TokenTypeName[114:121]:   0,
	_TokenTypeName[121:128]:   1000,
	_TokenTypeName[128:143]:   1001,
	_TokenTypeName[143:161]:   1002,
	_TokenTypeName[161:177]:   1003,
	_TokenTypeName[177:190]:   1004,
	_TokenTypeName[190:205]:   1005,
	_TokenTypeName[205:216]:   1006,
	_TokenTypeName[216:220]:   2000,
	_TokenTypeName[220:233]:   2001,
	_TokenTypeName[233:244]:   2002,
	_TokenTypeName[244:261]:   2003,
	_TokenTypeName[261:270]:   2004,
	_TokenTypeName[270:282]:   2005,
	_TokenTypeName[282:295]:   2006,
	_TokenTypeName[295:305]:   2007,
	_TokenTypeName[305:318]:   2008,
	_TokenTypeName[318:330]:   2009,
	_TokenTypeName[330:347]:   2010,
	_TokenTypeName[347:358]:   2011,
	_TokenTypeName[358:367]:   2012,
	_TokenTypeName[367:380]:   2013,
	_TokenTypeName[380:392]:   2014,
	_TokenTypeName[392:401]:   2015,
	_TokenTypeName[401:411]:   2016,
	_TokenTypeName[411:423]:   2017,
	_TokenTypeName[423:430]:   2018,
	_TokenTypeName[430:442]:   2019,
	_TokenTypeName[442:463]:   2020,
	_TokenTypeName[463:480]:   2021,
	_TokenTypeName[480:498]:   2022,
	_TokenTypeName[498:518]:   2023,
	_TokenTypeName[518:535]:   2024,
	_TokenTypeName[535:542]:   3000,
	_TokenTypeName[542:553]:   3001,
	_TokenTypeName[553:565]:   3002,
	_TokenTypeName[565:578]:   3100,
	_TokenTypeName[578:596]:   3101,
	_TokenTypeName[596:613]:   3102,
	_TokenTypeName[613:634]:   3103,
	_TokenTypeName[634:654]:   3104,
	_TokenTypeName[654:671]:   3105,
	_TokenTypeName[671:693]:   3106,
	_TokenTypeName[693:709]:   3107,
	_TokenTypeName[709:728]:   3108,
	_TokenTypeName[728:747]:   3109,
	_TokenTypeName[747:767]:   3110,
	_TokenTypeName[767:788]:   3111,
	_TokenTypeName[788:805]:   3112,
	_TokenTypeName[805:823]:   3113,
	_TokenTypeName[823:841]:   3114,
	_TokenTypeName[841:860]:   3115,
	_TokenTypeName[860:879]:   3116,
	_TokenTypeName[879:892]:   3200,
	_TokenTypeName[892:908]:   3201,
	_TokenTypeName[908:926]:   3202,
	_TokenTypeName[926:942]:   3203,
	_TokenTypeName[942:962]:   3204,
	_TokenTypeName[962:986]:   3205,
	_TokenTypeName[986:1002]:  3206,
	_TokenTypeName[1002:1010]: 4000,
	_TokenTypeName[1010:1022]: 4001,
	_TokenTypeName[1022:1033]: 5000,
	_TokenTypeName[1033:1040]: 6000,
	_TokenTypeName[1040:1055]: 6001,
	_TokenTypeName[1055:1071]: 6002,
	_TokenTypeName[1071:1084]: 6003,
	_TokenTypeName[1084:1098]: 6004,
	_TokenTypeName[1098:1112]: 6100,
	_TokenTypeName[1112:1130]: 6101,
	_TokenTypeName[1130:1137]: 7000,
	_TokenTypeName[1137:1151]: 7001,
	_TokenTypeName[1151:1162]: 7002,
	_TokenTypeName[1162:1174]: 7003,
	_TokenTypeName[1174:1188]: 7004,
	_TokenTypeName[1188:1203]: 7005,
	_TokenTypeName[1203:1216]: 7006,
	_TokenTypeName[1216:1229]: 7007,
	_TokenTypeName[1229:1242]: 7008,
	_TokenTypeName[1242:1259]: 7009,
	_TokenTypeName[1259:1275]: 7010,
	_TokenTypeName[1275:1291]: 7011,
	_TokenTypeName[1291:1295]: 8000,
	_TokenTypeName[1295:1309]: 8001,
	_TokenTypeName[1309:1319]: 8002,
	_TokenTypeName[1319:1334]: 8003,
}

// TokenTypeString retrieves an enum value from the enum constants string name.
// Throws an error if the param is not part of the enum.
func TokenTypeString(s string) (TokenType, error) {
	if val, ok := _TokenTypeNameToValueMap[s]; ok {
		return val, nil
	}
	return 0, fmt.Errorf("%s does not belong to TokenType values", s)
}

// TokenTypeValues returns all values of the enum
func TokenTypeValues() []TokenType {
	return _TokenTypeValues
}

// IsATokenType returns "true" if the value is listed in the enum definition. "false" otherwise
func (i TokenType) IsATokenType() bool {
	_, ok := _TokenTypeMap[i]
	return ok
}

// MarshalText implements the encoding.TextMarshaler interface for TokenType
func (i TokenType) MarshalText() ([]byte, error) {
	return []byte(i.String()), nil
}

// UnmarshalText implements the encoding.TextUnmarshaler interface for TokenType
func (i *TokenType) UnmarshalText(text []byte) error {
	var err error
	*i, err = TokenTypeString(string(text))
	return err
}
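
// The sketch below is illustrative only and is not part of the enumer output:
// it shows one way the generated helpers above could be used to round-trip a
// token type through its textual name. The name "KeywordConstant" is taken
// from the _TokenTypeName table; any name present there behaves the same way.
func exampleTokenTypeRoundTrip() (TokenType, error) {
	// Resolve a name to its value; unknown names yield an error from
	// TokenTypeString rather than a zero value.
	tt, err := TokenTypeString("KeywordConstant")
	if err != nil {
		return 0, err
	}

	// MarshalText delegates to String(), so for every value present in
	// _TokenTypeMap the textual form is the canonical name.
	text, err := tt.MarshalText()
	if err != nil {
		return 0, err
	}

	// UnmarshalText delegates to TokenTypeString, completing the round trip.
	var back TokenType
	if err := back.UnmarshalText(text); err != nil {
		return 0, err
	}
	return back, nil // back == tt
}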