commit 35126f9a94
parent 5964ce913c
Author: Alec Thomas
Date:   2018-02-07 22:11:40 +11:00

    Fixes #111.

5 changed files with 174 additions and 52 deletions

File diff suppressed because one or more lines are too long

lexers/jsx.go (new file, 41 lines)

@@ -0,0 +1,41 @@
package lexers

import (
	. "github.com/alecthomas/chroma" // nolint
)

// JSXRules extends the JavaScript rules with states for JSX tags and attributes.
var JSXRules = func() Rules {
	rules := JavascriptRules.Clone()
	rules["jsx"] = []Rule{
		// Opening tag name, e.g. "<App"; attributes are handled in "tag".
		{`(<)([\w_\-]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
		// Closing tag, e.g. "</App>".
		{`(<)(/)(\s*)([\w_\-]+)(\s*)(>)`, ByGroups(Punctuation, Punctuation, Text, NameTag, Text, Punctuation), nil},
	}
	rules["tag"] = []Rule{
		{`\s+`, Text, nil},
		{`([\w]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")},
		{`[{}]+`, Punctuation, nil},
		{`[\w\.]+`, NameAttribute, nil},
		// "/>" or ">" terminates the tag.
		{`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)},
	}
	rules["attr"] = []Rule{
		{`\s+`, Text, nil},
		{`".*?"`, String, Pop(1)},
		{`'.*?'`, String, Pop(1)},
		{`[^\s>]+`, String, Pop(1)},
	}
	// Try JSX before the stock JavaScript root rules.
	rules["root"] = append([]Rule{Include("jsx")}, rules["root"]...)
	return rules
}()

// JSX lexer.
var JSX = Register(MustNewLexer(
	&Config{
		Name:      "JSX",
		Aliases:   []string{"react"},
		Filenames: []string{"*.jsx", "*.react"},
		MimeTypes: []string{"text/jsx", "text/typescript-jsx"},
		DotAll:    true,
	},
	JSXRules,
))
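For reference, here is a minimal sketch of driving the newly registered lexer through the package-level Tokenise helper this commit also touches (final hunk below). It is an illustration, not code from the commit; lexers.Get resolving the "jsx" name (or the "react" alias) is an assumption about the surrounding API.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// "react" would also work, via the Aliases entry above.
	lexer := lexers.Get("jsx")
	tokens, err := chroma.Tokenise(lexer, nil, `<App />`)
	if err != nil {
		panic(err)
	}
	for _, token := range tokens {
		fmt.Printf("%-12s %q\n", token.Type, token.Value)
	}
}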

lexers/testdata/jsx.actual (new vendored file, 10 lines)

@@ -0,0 +1,10 @@
import React from 'react';
import ReactDOM from 'react-dom';
import App from './component/App';
import './index.css';
import 'github-fork-ribbon-css/gh-fork-ribbon.css';
ReactDOM.render(
<App />,
document.getElementById('root')
);

lexers/testdata/jsx.expected (new vendored file, 60 lines)

@@ -0,0 +1,60 @@
[
{"type":"Keyword","value":"import"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"React"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"from"},
{"type":"Text","value":" "},
{"type":"LiteralStringSingle","value":"'react'"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"import"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"ReactDOM"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"from"},
{"type":"Text","value":" "},
{"type":"LiteralStringSingle","value":"'react-dom'"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"import"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"App"},
{"type":"Text","value":" "},
{"type":"NameOther","value":"from"},
{"type":"Text","value":" "},
{"type":"LiteralStringSingle","value":"'./component/App'"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"import"},
{"type":"Text","value":" "},
{"type":"LiteralStringSingle","value":"'./index.css'"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"import"},
{"type":"Text","value":" "},
{"type":"LiteralStringSingle","value":"'github-fork-ribbon-css/gh-fork-ribbon.css'"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n\n"},
{"type":"NameOther","value":"ReactDOM"},
{"type":"Punctuation","value":"."},
{"type":"NameOther","value":"render"},
{"type":"Punctuation","value":"("},
{"type":"Text","value":"\n "},
{"type":"Punctuation","value":"\u003c"},
{"type":"NameTag","value":"App"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"/"},
{"type":"Text","value":""},
{"type":"Punctuation","value":"\u003e,"},
{"type":"Text","value":"\n "},
{"type":"NameBuiltin","value":"document"},
{"type":"Punctuation","value":"."},
{"type":"NameOther","value":"getElementById"},
{"type":"Punctuation","value":"("},
{"type":"LiteralStringSingle","value":"'root'"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":");"},
{"type":"Text","value":"\n"}
]
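The .actual/.expected pair is presumably consumed by a round-trip harness: tokenise jsx.actual with the JSX lexer and compare the resulting stream against the JSON above. The commit does not show that harness, so the following is only a hedged sketch; the test name, the testify dependency, and the field-by-field comparison are all assumptions.

package lexers_test

import (
	"encoding/json"
	"io/ioutil"
	"testing"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
	"github.com/stretchr/testify/assert"
)

// token mirrors the lowercase type/value shape of jsx.expected.
type token struct {
	Type  string `json:"type"`
	Value string `json:"value"`
}

func TestJSXTestdata(t *testing.T) {
	source, err := ioutil.ReadFile("testdata/jsx.actual")
	assert.NoError(t, err)

	raw, err := ioutil.ReadFile("testdata/jsx.expected")
	assert.NoError(t, err)
	var expected []token
	assert.NoError(t, json.Unmarshal(raw, &expected))

	tokens, err := chroma.Tokenise(lexers.Get("jsx"), nil, string(source))
	assert.NoError(t, err)

	assert.Equal(t, len(expected), len(tokens))
	for i := 0; i < len(expected) && i < len(tokens); i++ {
		assert.Equal(t, expected[i].Type, tokens[i].Type.String())
		assert.Equal(t, expected[i].Value, tokens[i].Value)
	}
}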

@@ -87,6 +87,15 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, error) {
// Rules maps from state to a sequence of Rules.
type Rules map[string][]Rule

// Clone returns a copy of the Rules, duplicating each state's rule slice so
// the copy can be modified without mutating the original.
func (r Rules) Clone() Rules {
	out := map[string][]Rule{}
	for key, rules := range r {
		out[key] = make([]Rule, len(rules))
		copy(out[key], rules)
	}
	return out
}
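Because Clone allocates a fresh map and duplicates each state's slice, a derived rule set such as JSXRules can add and reshuffle states without leaking changes back into JavascriptRules. A small runnable illustration of that property (the states here are invented for the example):

package main

import (
	"fmt"

	. "github.com/alecthomas/chroma" // nolint
)

func main() {
	base := Rules{
		"root": {{`\s+`, Text, nil}},
	}

	derived := base.Clone()
	// Adding a state and reshuffling "root" in the clone must not be visible
	// through base; this is what JSXRules relies on above.
	derived["jsx"] = []Rule{{`<`, Punctuation, nil}}
	derived["root"] = append([]Rule{Include("jsx")}, derived["root"]...)

	fmt.Println(len(base), len(derived))                 // prints: 1 2
	fmt.Println(len(base["root"]), len(derived["root"])) // prints: 1 2
}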
// MustNewLexer creates a new Lexer or panics.
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
	lexer, err := NewLexer(config, rules)