package internal

import (
	"path/filepath"
	"sort"
	"strings"

	"github.com/danwakefield/fnmatch"

	"github.com/alecthomas/chroma"
)

// Registry of Lexers.
var Registry = struct {
	Lexers  chroma.Lexers
	byName  map[string]chroma.Lexer
	byAlias map[string]chroma.Lexer
}{
	byName:  map[string]chroma.Lexer{},
	byAlias: map[string]chroma.Lexer{},
}

// Names of all lexers, optionally including aliases.
func Names(withAliases bool) []string {
	out := []string{}
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		out = append(out, config.Name)
		if withAliases {
			out = append(out, config.Aliases...)
		}
	}
	sort.Strings(out)
	return out
}

// Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer {
	candidates := chroma.PrioritisedLexers{}
	if lexer := Registry.byName[name]; lexer != nil {
		candidates = append(candidates, lexer)
	}
	if lexer := Registry.byAlias[name]; lexer != nil {
		candidates = append(candidates, lexer)
	}
	if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
		candidates = append(candidates, lexer)
	}
	if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
		candidates = append(candidates, lexer)
	}
	// Try file extension.
	if lexer := Match("filename." + name); lexer != nil {
		candidates = append(candidates, lexer)
	}
	// Try exact filename.
	if lexer := Match(name); lexer != nil {
		candidates = append(candidates, lexer)
	}
	if len(candidates) == 0 {
		return nil
	}
	sort.Sort(candidates)
	return candidates[0]
}
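
// Illustrative note: Get accepts a lexer name, an alias, a bare file
// extension or an exact filename. For a hypothetical lexer registered under
// the name "Foo" with the filename glob "*.foo", Get("Foo"), Get("foo") and
// Get("main.foo") would all resolve to it via the probes above.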

// MatchMimeType attempts to find a lexer for the given MIME type.
func MatchMimeType(mimeType string) chroma.Lexer {
	matched := chroma.PrioritisedLexers{}
	for _, l := range Registry.Lexers {
		for _, lmt := range l.Config().MimeTypes {
			if mimeType == lmt {
				matched = append(matched, l)
			}
		}
	}
	if len(matched) != 0 {
		sort.Sort(matched)
		return matched[0]
	}
	return nil
}
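
// Illustrative note: MIME type matching is an exact string comparison against
// each lexer's Config().MimeTypes, so MatchMimeType("text/x-python") only
// matches a lexer that lists exactly that string; parameters such as
// "; charset=utf-8" are not stripped.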

// Match returns the first lexer matching filename.
func Match(filename string) chroma.Lexer {
	filename = filepath.Base(filename)
	matched := chroma.PrioritisedLexers{}
	// First, try primary filename matches.
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		for _, glob := range config.Filenames {
			if fnmatch.Match(glob, filename, 0) {
				matched = append(matched, lexer)
			}
		}
	}
	if len(matched) > 0 {
		sort.Sort(matched)
		return matched[0]
	}
	matched = nil
	// Next, try filename aliases.
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		for _, glob := range config.AliasFilenames {
			if fnmatch.Match(glob, filename, 0) {
				matched = append(matched, lexer)
			}
		}
	}
	if len(matched) > 0 {
		sort.Sort(matched)
		return matched[0]
	}
	return nil
}
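
// Illustrative note: Match("/src/project/main.foo") is reduced to "main.foo"
// by filepath.Base and compared against each registered lexer's Filenames
// globs (e.g. a glob such as "*.foo") before the AliasFilenames globs are
// consulted as a second pass.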

// Analyse text content and return the "best" lexer.
func Analyse(text string) chroma.Lexer {
	var picked chroma.Lexer
	highest := float32(0.0)
	for _, lexer := range Registry.Lexers {
		if analyser, ok := lexer.(chroma.Analyser); ok {
			weight := analyser.AnalyseText(text)
			if weight > highest {
				picked = lexer
				highest = weight
			}
		}
	}
	return picked
}
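
// Note: only lexers implementing the optional chroma.Analyser interface take
// part in content analysis; if no analyser reports a positive score, Analyse
// returns nil and callers typically fall back to the Fallback lexer below.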

// Register a Lexer with the global registry.
func Register(lexer chroma.Lexer) chroma.Lexer {
	config := lexer.Config()
	Registry.byName[config.Name] = lexer
	Registry.byName[strings.ToLower(config.Name)] = lexer
	for _, alias := range config.Aliases {
		Registry.byAlias[alias] = lexer
		Registry.byAlias[strings.ToLower(alias)] = lexer
	}
	Registry.Lexers = append(Registry.Lexers, lexer)
	return lexer
}
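
// Illustrative example (hypothetical "Foo" lexer): individual lexer files
// typically register themselves at package init time, e.g.
//
//	var Foo = internal.Register(chroma.MustNewLexer(&chroma.Config{
//		Name:      "Foo",
//		Aliases:   []string{"foo"},
//		Filenames: []string{"*.foo"},
//	}, chroma.Rules{
//		"root": {{`.+`, chroma.Text, nil}},
//	}))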

// Fallback lexer if no other is found.
var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{
	Name:      "fallback",
	Filenames: []string{"*"},
}, chroma.Rules{
	"root": []chroma.Rule{
		{`.+`, chroma.Text, nil},
		{`\n`, chroma.Text, nil},
	},
})
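
// Putting it together (illustrative sketch): a caller resolving a lexer would
// typically try Get, then Match, then Analyse, and finally the Fallback lexer:
//
//	lexer := Get("foo")
//	if lexer == nil {
//		lexer = Match("main.foo")
//	}
//	if lexer == nil {
//		lexer = Analyse(sourceText)
//	}
//	if lexer == nil {
//		lexer = Fallback
//	}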