mirror of
https://github.com/alecthomas/chroma.git
synced 2025-07-15 01:14:21 +02:00
Implement rudimentary JSX lexer based on https://github.com/fcurella/jsx-lexer/blob/master/jsx/lexer.py
Fixes #111.
This commit is contained in:
@ -4,16 +4,7 @@ import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
)
|
||||
|
||||
// Javascript lexer.
|
||||
var Javascript = Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "JavaScript",
|
||||
Aliases: []string{"js", "javascript"},
|
||||
Filenames: []string{"*.js", "*.jsm"},
|
||||
MimeTypes: []string{"application/javascript", "application/x-javascript", "text/x-javascript", "text/javascript"},
|
||||
DotAll: true,
|
||||
},
|
||||
Rules{
|
||||
var JavascriptRules = Rules{
|
||||
"commentsandwhitespace": {
|
||||
{`\s+`, Text, nil},
|
||||
{`<!--`, Comment, nil},
|
||||
@ -64,5 +55,16 @@ var Javascript = Register(MustNewLexer(
|
||||
{`\}`, LiteralStringInterpol, Pop(1)},
|
||||
Include("root"),
|
||||
},
|
||||
}
|
||||
|
||||
// Javascript lexer.
//
// Registers the JavaScript lexer using the shared JavascriptRules table,
// which is declared separately so that derived lexers (e.g. JSX) can
// Clone() and extend it.
var Javascript = Register(MustNewLexer(
	&Config{
		Name:      "JavaScript",
		Aliases:   []string{"js", "javascript"},
		Filenames: []string{"*.js", "*.jsm"},
		MimeTypes: []string{"application/javascript", "application/x-javascript", "text/x-javascript", "text/javascript"},
		// DotAll lets `.` in the rule regexes match newlines (needed for
		// multi-line constructs such as template literals and comments).
		DotAll: true,
	},
	JavascriptRules,
))
|
||||
|
41
lexers/jsx.go
Normal file
41
lexers/jsx.go
Normal file
@ -0,0 +1,41 @@
|
||||
package lexers
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
)
|
||||
|
||||
var JSXRules = func() Rules {
|
||||
rules := JavascriptRules.Clone()
|
||||
rules["jsx"] = []Rule{
|
||||
{`(<)([\w_\-]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
|
||||
{`(<)(/)(\s*)([\w_\-]+)(\s*)(>)`, ByGroups(Punctuation, Punctuation, Text, NameTag, Text, Punctuation), nil},
|
||||
}
|
||||
rules["tag"] = []Rule{
|
||||
{`\s+`, Text, nil},
|
||||
{`([\w]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")},
|
||||
{`[{}]+`, Punctuation, nil},
|
||||
{`[\w\.]+`, NameAttribute, nil},
|
||||
{`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)},
|
||||
}
|
||||
rules["attr"] = []Rule{
|
||||
{`\s+`, Text, nil},
|
||||
{`".*?"`, String, Pop(1)},
|
||||
{`'.*?'`, String, Pop(1)},
|
||||
{`[^\s>]+`, String, Pop(1)},
|
||||
}
|
||||
|
||||
rules["root"] = append([]Rule{Include("jsx")}, rules["root"]...)
|
||||
return rules
|
||||
}()
|
||||
|
||||
// JSX lexer.
//
// Registers the JSX (React) lexer, built from JSXRules — a clone of the
// JavaScript rules extended with JSX element/attribute states.
var JSX = Register(MustNewLexer(
	&Config{
		Name:      "JSX",
		Aliases:   []string{"react"},
		Filenames: []string{"*.jsx", "*.react"},
		MimeTypes: []string{"text/jsx", "text/typescript-jsx"},
		// DotAll matches the base JavaScript lexer's configuration so
		// multi-line constructs tokenise identically.
		DotAll: true,
	},
	JSXRules,
))
|
10
lexers/testdata/jsx.actual
vendored
Normal file
10
lexers/testdata/jsx.actual
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
import React from 'react';
|
||||
import ReactDOM from 'react-dom';
|
||||
import App from './component/App';
|
||||
import './index.css';
|
||||
import 'github-fork-ribbon-css/gh-fork-ribbon.css';
|
||||
|
||||
ReactDOM.render(
|
||||
<App />,
|
||||
document.getElementById('root')
|
||||
);
|
60
lexers/testdata/jsx.expected
vendored
Normal file
60
lexers/testdata/jsx.expected
vendored
Normal file
@ -0,0 +1,60 @@
|
||||
[
|
||||
{"type":"Keyword","value":"import"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"React"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"from"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"LiteralStringSingle","value":"'react'"},
|
||||
{"type":"Punctuation","value":";"},
|
||||
{"type":"Text","value":"\n"},
|
||||
{"type":"Keyword","value":"import"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"ReactDOM"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"from"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"LiteralStringSingle","value":"'react-dom'"},
|
||||
{"type":"Punctuation","value":";"},
|
||||
{"type":"Text","value":"\n"},
|
||||
{"type":"Keyword","value":"import"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"App"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"NameOther","value":"from"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"LiteralStringSingle","value":"'./component/App'"},
|
||||
{"type":"Punctuation","value":";"},
|
||||
{"type":"Text","value":"\n"},
|
||||
{"type":"Keyword","value":"import"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"LiteralStringSingle","value":"'./index.css'"},
|
||||
{"type":"Punctuation","value":";"},
|
||||
{"type":"Text","value":"\n"},
|
||||
{"type":"Keyword","value":"import"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"LiteralStringSingle","value":"'github-fork-ribbon-css/gh-fork-ribbon.css'"},
|
||||
{"type":"Punctuation","value":";"},
|
||||
{"type":"Text","value":"\n\n"},
|
||||
{"type":"NameOther","value":"ReactDOM"},
|
||||
{"type":"Punctuation","value":"."},
|
||||
{"type":"NameOther","value":"render"},
|
||||
{"type":"Punctuation","value":"("},
|
||||
{"type":"Text","value":"\n "},
|
||||
{"type":"Punctuation","value":"\u003c"},
|
||||
{"type":"NameTag","value":"App"},
|
||||
{"type":"Text","value":" "},
|
||||
{"type":"Punctuation","value":"/"},
|
||||
{"type":"Text","value":""},
|
||||
{"type":"Punctuation","value":"\u003e,"},
|
||||
{"type":"Text","value":"\n "},
|
||||
{"type":"NameBuiltin","value":"document"},
|
||||
{"type":"Punctuation","value":"."},
|
||||
{"type":"NameOther","value":"getElementById"},
|
||||
{"type":"Punctuation","value":"("},
|
||||
{"type":"LiteralStringSingle","value":"'root'"},
|
||||
{"type":"Punctuation","value":")"},
|
||||
{"type":"Text","value":"\n"},
|
||||
{"type":"Punctuation","value":");"},
|
||||
{"type":"Text","value":"\n"}
|
||||
]
|
@ -87,6 +87,15 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, err
|
||||
// Rules maps from state to a sequence of Rules.
|
||||
type Rules map[string][]Rule
|
||||
|
||||
func (r Rules) Clone() Rules {
|
||||
out := map[string][]Rule{}
|
||||
for key, rules := range r {
|
||||
out[key] = make([]Rule, len(rules))
|
||||
copy(out[key], rules)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// MustNewLexer creates a new Lexer or panics.
|
||||
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
|
||||
lexer, err := NewLexer(config, rules)
|
||||
|
Reference in New Issue
Block a user