chroma-markdown/vendor/github.com/alecthomas/chroma/v2/lexers/http.go

package lexers

import (
	"strings"

	. "github.com/alecthomas/chroma/v2" // nolint
)

// HTTP lexer.
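//
// A minimal usage sketch (an illustration, not part of this file: Tokenise
// and Iterator.Tokens come from the dot-imported chroma/v2 package):
//
//	it, _ := HTTP.Tokenise(nil, "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
//	for _, tok := range it.Tokens() {
//		fmt.Printf("%s %q\n", tok.Type, tok.Value)
//	}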
var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	httpRules,
)))
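
// httpRules defines three states: "root" matches an HTTP request line or
// status line, "headers" matches header fields (including folded
// continuation lines), and "content" consumes the message body.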
func httpRules() Rules {
	return Rules{
		"root": {
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([123](?:\.[01])?)(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			{`(HTTP)(/)([123](?:\.[01])?)( +)(\d{3})( *)([^\r\n]*)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	}
}
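
// httpContentBlock emits the whole message body as a single Generic token,
// which httpBodyContentTyper may later re-tokenise based on the Content-Type
// header.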
func httpContentBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Generic, groups[0]},
	}
	return Literator(tokens...)
}
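
// httpHeaderBlock emits the tokens of a single "Name: value" header line.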
func httpHeaderBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Name, groups[1]},
		{Text, groups[2]},
		{Operator, groups[3]},
		{Text, groups[4]},
		{Literal, groups[5]},
		{Text, groups[6]},
	}
	return Literator(tokens...)
}
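
// httpContinuousHeaderBlock emits the tokens of a folded header continuation
// line (leading whitespace followed by the remainder of the value).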
func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Text, groups[1]},
		{Literal, groups[2]},
		{Text, groups[3]},
	}
	return Literator(tokens...)
}
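
// httpBodyContentTypeLexer wraps the base HTTP lexer so the message body can
// be highlighted with a lexer chosen from the Content-Type header.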
func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }

type httpBodyContentTyper struct{ Lexer }
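
// Tokenise delegates to the wrapped lexer and watches the token stream for a
// Content-Type header; when the body arrives as a Generic token, it is
// re-tokenised with the lexer registered for that MIME type, if any.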
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	var contentType string
	var isContentType bool
	var subIterator Iterator

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

		if token == EOF {
			// Once the wrapped stream is exhausted, serve the body tokens
			// from the sub-lexer, if one was selected.
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			// The next Literal token is the Content-Type header value.
			isContentType = true

		case token.Type == Literal && isContentType:
			isContentType = false
			contentType = strings.TrimSpace(token.Value)
			pos := strings.Index(contentType, ";")
			if pos > 0 {
				// Drop media-type parameters such as "; charset=utf-8".
				contentType = strings.TrimSpace(contentType[:pos])
			}

		case token.Type == Generic && contentType != "":
			lexer := MatchMimeType(contentType)

			// application/calendar+xml can be treated as application/xml
			// if there's not a better match.
			if lexer == nil && strings.Contains(contentType, "+") {
				slashPos := strings.Index(contentType, "/")
				plusPos := strings.LastIndex(contentType, "+")
				contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
				lexer = MatchMimeType(contentType)
			}

			if lexer == nil {
				// No matching lexer; emit the body as plain text.
				token.Type = Text
			} else {
				// Re-tokenise the body with the matched lexer; the EOF
				// branch above serves subIterator on subsequent calls.
				subIterator, err = lexer.Tokenise(nil, token.Value)
				if err != nil {
					panic(err)
				}
				return EOF
			}
		}

		return token
	}, nil
}