chroma-markdown/vendor/github.com/alecthomas/chroma/v2/remap.go

95 lines
2.1 KiB
Go
Raw Normal View History

2017-10-22 05:37:38 +02:00
package chroma
type remappingLexer struct {
lexer Lexer
mapper func(Token) []Token
2017-10-22 05:37:38 +02:00
}
// RemappingLexer remaps a token to a set of, potentially empty, tokens.
func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
2017-10-22 05:37:38 +02:00
return &remappingLexer{lexer, mapper}
}
2023-11-26 06:32:49 +01:00
func (r *remappingLexer) AnalyseText(text string) float32 {
return r.lexer.AnalyseText(text)
}
// SetAnalyser installs a custom analyser on the wrapped lexer and
// returns the remapping lexer itself for call chaining.
func (r *remappingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
	r.lexer.SetAnalyser(analyser)
	return r
}
// SetRegistry forwards the registry to the wrapped lexer and returns
// the remapping lexer itself for call chaining.
func (r *remappingLexer) SetRegistry(registry *LexerRegistry) Lexer {
	r.lexer.SetRegistry(registry)
	return r
}
2017-10-22 05:37:38 +02:00
func (r *remappingLexer) Config() *Config {
return r.lexer.Config()
}
func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
it, err := r.lexer.Tokenise(options, text)
if err != nil {
return nil, err
}
var buffer []Token
return func() Token {
2017-10-22 05:37:38 +02:00
for {
if len(buffer) > 0 {
t := buffer[0]
buffer = buffer[1:]
return t
}
t := it()
if t == EOF {
2017-10-22 05:37:38 +02:00
return t
}
buffer = r.mapper(t)
}
}, nil
}
2019-07-01 05:14:09 +02:00
// TypeMapping defines type maps for the TypeRemappingLexer.
2017-10-22 05:37:38 +02:00
type TypeMapping []struct {
From, To TokenType
Words []string
}
// TypeRemappingLexer remaps types of tokens coming from a parent Lexer.
//
// eg. Map "defvaralias" tokens of type NameVariable to NameFunction:
//
2023-11-26 06:32:49 +01:00
// mapping := TypeMapping{
// {NameVariable, NameFunction, []string{"defvaralias"},
// }
// lexer = TypeRemappingLexer(lexer, mapping)
2017-10-22 05:37:38 +02:00
func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
// Lookup table for fast remapping.
lut := map[TokenType]map[string]TokenType{}
for _, rt := range mapping {
km, ok := lut[rt.From]
if !ok {
km = map[string]TokenType{}
lut[rt.From] = km
}
if len(rt.Words) == 0 {
km[""] = rt.To
} else {
for _, k := range rt.Words {
km[k] = rt.To
}
2017-10-22 05:37:38 +02:00
}
}
return RemappingLexer(lexer, func(t Token) []Token {
2017-10-22 05:37:38 +02:00
if k, ok := lut[t.Type]; ok {
if tt, ok := k[t.Value]; ok {
t.Type = tt
} else if tt, ok := k[""]; ok {
t.Type = tt
2017-10-22 05:37:38 +02:00
}
}
return []Token{t}
2017-10-22 05:37:38 +02:00
})
}