1
0
mirror of https://github.com/alecthomas/chroma.git synced 2025-02-09 13:23:51 +02:00

190 lines
4.4 KiB
Go
Raw Normal View History

2019-10-06 19:58:36 +11:00
// Package internal contains common API functions and structures shared between lexer packages.
package internal
2017-06-02 00:17:21 +10:00
import (
	"path/filepath"
	"sort"
	"strings"

	"github.com/alecthomas/chroma"
	"github.com/danwakefield/fnmatch"
)
var (
backupSuffixes = [...]string{
// Editor backups
"~", ".bak", ".old", ".orig",
// Debian and derivatives apt/dpkg
".dpkg-dist", ".dpkg-old",
// Red Hat and derivatives rpm
".rpmnew", ".rpmorig", ".rpmsave",
}
)
// Registry of Lexers.
var Registry = struct {
Lexers chroma.Lexers
byName map[string]chroma.Lexer
byAlias map[string]chroma.Lexer
}{
byName: map[string]chroma.Lexer{},
byAlias: map[string]chroma.Lexer{},
2017-06-02 00:17:21 +10:00
}
// Names of all lexers, optionally including aliases.
func Names(withAliases bool) []string {
2017-06-02 00:17:21 +10:00
out := []string{}
for _, lexer := range Registry.Lexers {
2017-06-02 00:17:21 +10:00
config := lexer.Config()
out = append(out, config.Name)
if withAliases {
out = append(out, config.Aliases...)
}
}
2017-09-19 10:47:22 +10:00
sort.Strings(out)
2017-06-02 00:17:21 +10:00
return out
}
// Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer {
if lexer := Registry.byName[name]; lexer != nil {
return lexer
}
if lexer := Registry.byAlias[name]; lexer != nil {
return lexer
}
if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
return lexer
}
if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
return lexer
}
candidates := chroma.PrioritisedLexers{}
// Try file extension.
if lexer := Match("filename." + name); lexer != nil {
candidates = append(candidates, lexer)
}
// Try exact filename.
if lexer := Match(name); lexer != nil {
candidates = append(candidates, lexer)
}
if len(candidates) == 0 {
return nil
}
sort.Sort(candidates)
return candidates[0]
2017-06-02 00:17:21 +10:00
}
2017-09-19 11:52:23 +10:00
// MatchMimeType attempts to find a lexer for the given MIME type.
func MatchMimeType(mimeType string) chroma.Lexer {
matched := chroma.PrioritisedLexers{}
2017-09-19 11:52:23 +10:00
for _, l := range Registry.Lexers {
for _, lmt := range l.Config().MimeTypes {
if mimeType == lmt {
matched = append(matched, l)
2017-09-19 11:52:23 +10:00
}
}
}
if len(matched) != 0 {
sort.Sort(matched)
return matched[0]
}
2017-09-19 11:52:23 +10:00
return nil
}
// Match returns the first lexer matching filename.
2017-06-07 10:27:10 +10:00
func Match(filename string) chroma.Lexer {
filename = filepath.Base(filename)
matched := chroma.PrioritisedLexers{}
// First, try primary filename matches.
for _, lexer := range Registry.Lexers {
2017-06-02 00:17:21 +10:00
config := lexer.Config()
for _, glob := range config.Filenames {
if fnmatch.Match(glob, filename, 0) {
matched = append(matched, lexer)
} else {
for _, suf := range &backupSuffixes {
if fnmatch.Match(glob+suf, filename, 0) {
matched = append(matched, lexer)
break
}
}
2017-06-02 00:17:21 +10:00
}
}
}
if len(matched) > 0 {
sort.Sort(matched)
return matched[0]
}
matched = nil
// Next, try filename aliases.
for _, lexer := range Registry.Lexers {
config := lexer.Config()
for _, glob := range config.AliasFilenames {
if fnmatch.Match(glob, filename, 0) {
matched = append(matched, lexer)
} else {
for _, suf := range &backupSuffixes {
if fnmatch.Match(glob+suf, filename, 0) {
matched = append(matched, lexer)
break
}
}
}
}
}
if len(matched) > 0 {
sort.Sort(matched)
return matched[0]
}
2017-06-07 10:27:10 +10:00
return nil
2017-06-02 00:17:21 +10:00
}
2017-06-07 10:27:10 +10:00
// Analyse text content and return the "best" lexer..
func Analyse(text string) chroma.Lexer {
var picked chroma.Lexer
highest := float32(0.0)
for _, lexer := range Registry.Lexers {
if analyser, ok := lexer.(chroma.Analyser); ok {
weight := analyser.AnalyseText(text)
if weight > highest {
picked = lexer
highest = weight
}
}
2017-06-02 00:17:21 +10:00
}
2017-06-07 10:27:10 +10:00
return picked
}
// Register a Lexer with the global registry.
func Register(lexer chroma.Lexer) chroma.Lexer {
2017-06-02 00:17:21 +10:00
config := lexer.Config()
Registry.byName[config.Name] = lexer
Registry.byName[strings.ToLower(config.Name)] = lexer
2017-06-02 00:17:21 +10:00
for _, alias := range config.Aliases {
Registry.byAlias[alias] = lexer
Registry.byAlias[strings.ToLower(alias)] = lexer
2017-06-02 00:17:21 +10:00
}
Registry.Lexers = append(Registry.Lexers, lexer)
return lexer
}
// PlaintextRules is used for the fallback lexer as well as the explicit
// plaintext lexer.
func PlaintextRules() chroma.Rules {
return chroma.Rules{
"root": []chroma.Rule{
{`.+`, chroma.Text, nil},
{`\n`, chroma.Text, nil},
},
}
2018-04-18 23:47:59 +02:00
}
// Fallback lexer if no other is found.
var Fallback chroma.Lexer = chroma.MustNewLazyLexer(&chroma.Config{
2018-04-18 23:47:59 +02:00
Name: "fallback",
Filenames: []string{"*"},
}, PlaintextRules)