Gopkg.toml,vendor: update alecthomas/chroma

This commit is contained in:
Kevin Burke 2020-05-09 13:49:03 -07:00
parent 324e3676af
commit b6814fd76b
No known key found for this signature in database
GPG Key ID: D1D71AA4DED6C5C4
62 changed files with 2079 additions and 259 deletions

4
Gopkg.lock generated
View File

@ -2,7 +2,7 @@
[[projects]]
digest = "1:edded8bfb3a265c0e051d1d1a4459445a30543c7c46007eb2246c0e89603b2fe"
digest = "1:d0038638a197fe6bbc26587366f10a35d9545a4e763bc7783cb36b84726f323c"
name = "github.com/alecthomas/chroma"
packages = [
".",
@ -37,7 +37,7 @@
"styles",
]
pruneopts = "UT"
revision = "v0.6.3"
revision = "v0.7.3"
[[projects]]
branch = "master"

View File

@ -4,7 +4,7 @@
[[constraint]]
name = "github.com/alecthomas/chroma"
revision = "v0.6.3"
revision = "v0.7.3"
[[constraint]]
name = "golang.org/x/sys"

View File

@ -14,6 +14,12 @@ linters:
- lll
- gocyclo
- dupl
- gochecknoglobals
- funlen
- godox
- wsl
- gomnd
- gocognit
linters-settings:
govet:
@ -42,3 +48,8 @@ issues:
- 'Potential file inclusion via variable'
- 'should have comment or be unexported'
- 'comment on exported var .* should be of the form'
- 'at least one file in a package should have a package comment'
- 'string literal contains the Unicode'
- 'methods on the same type should have the same receiver name'
- '_TokenType_name should be _TokenTypeName'
- '`_TokenType_map` should be `_TokenTypeMap`'

View File

@ -3,28 +3,30 @@ release:
github:
owner: alecthomas
name: chroma
brew:
install: bin.install "chroma"
brews:
-
install: bin.install "chroma"
builds:
- goos:
- linux
- darwin
- windows
- linux
- darwin
- windows
goarch:
- amd64
- "386"
- amd64
- "386"
goarm:
- "6"
- "6"
main: ./cmd/chroma/main.go
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
binary: chroma
archive:
format: tar.gz
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
archives:
-
format: tar.gz
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
.Arm }}{{ end }}'
files:
- COPYING
- README*
files:
- COPYING
- README*
snapshot:
name_template: SNAPSHOT-{{ .Commit }}
checksum:

View File

@ -1,10 +1,12 @@
sudo: false
language: go
go:
- "1.13.x"
script:
- go test -v ./...
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.10.2
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
- ./bin/golangci-lint run
- git clean -fdx .
after_success:
go get github.com/goreleaser/goreleaser && goreleaser
curl -sL https://git.io/goreleaser | bash && goreleaser

19
vendor/github.com/alecthomas/chroma/Makefile generated vendored Normal file
View File

@ -0,0 +1,19 @@
.PHONY: chromad upload all
all: README.md tokentype_string.go
README.md: lexers/*/*.go
./table.py
tokentype_string.go: types.go
go generate
chromad:
(cd ./cmd/chromad && go get github.com/GeertJohan/go.rice/rice@master && go install github.com/GeertJohan/go.rice/rice)
rm -f chromad
(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -o ../../chromad .)
rice append -i ./cmd/chromad --exec=./chromad
upload: chromad
scp chromad root@swapoff.org: && \
ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'

View File

@ -8,57 +8,68 @@ highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translators for Pygments lexers and styles.
<a id="markdown-table-of-contents" name="table-of-contents"></a>
## Table of Contents
<!-- MarkdownTOC -->
<!-- TOC -->
1. [Supported languages](#supported-languages)
1. [Using the library](#using-the-library)
1. [Table of Contents](#table-of-contents)
2. [Supported languages](#supported-languages)
3. [Try it](#try-it)
4. [Using the library](#using-the-library)
1. [Quick start](#quick-start)
1. [Identifying the language](#identifying-the-language)
1. [Formatting the output](#formatting-the-output)
1. [The HTML formatter](#the-html-formatter)
1. [More detail](#more-detail)
2. [Identifying the language](#identifying-the-language)
3. [Formatting the output](#formatting-the-output)
4. [The HTML formatter](#the-html-formatter)
5. [More detail](#more-detail)
1. [Lexers](#lexers)
1. [Formatters](#formatters)
1. [Styles](#styles)
1. [Command-line interface](#command-line-interface)
1. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
2. [Formatters](#formatters)
3. [Styles](#styles)
6. [Command-line interface](#command-line-interface)
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
<!-- /MarkdownTOC -->
<!-- /TOC -->
<a id="markdown-supported-languages" name="supported-languages"></a>
## Supported languages
Prefix | Language
:----: | --------
A | ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Cassandra CQL, CFEngine3, cfstatement/ColdFusion, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython
D | Dart, Diff, Django/Jinja, Docker, DTD
C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD
E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
H | Handlebars, Haskell, Haxe, Hexdump, HTML, HTTP, Hy
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy
I | Idris, INI, Io
J | Java, JavaScript, JSON, Jsx, Julia, Jungle
J | J, Java, JavaScript, JSON, Julia, Jungle
K | Kotlin
L | Lighttpd configuration file, LLVM, Lua
M | Mako, Markdown, Mason, Mathematica, MiniZinc, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, Pig, PkgConfig, Plaintext, PL/pgSQL, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
Q | QBasic
R | R, Racket, Ragel, reg, reStructuredText, Rexx, Ruby, Rust
S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, systemd, Systemverilog
T | TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | verilog, VHDL, VimL
R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust
S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue
W | WDTE
X | XML, Xorg
Y | YAML
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
<a id="markdown-try-it" name="try-it"></a>
## Try it
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
<a id="markdown-using-the-library" name="using-the-library"></a>
## Using the library
Chroma, like Pygments, has the concepts of
@ -79,6 +90,7 @@ In all cases, if a lexer, formatter or style can not be determined, `nil` will
be returned. In this situation you may want to default to the `Fallback`
value in each respective package, which provides sane defaults.
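A minimal sketch of that fallback pattern, assuming the `lexers` sub-package is imported:

```go
lexer := lexers.Get("go")
if lexer == nil {
    lexer = lexers.Fallback // plain-text lexer
}
```

The same pattern applies to `formatters.Fallback` and `styles.Fallback`.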
<a id="markdown-quick-start" name="quick-start"></a>
### Quick start
A convenience function exists that can be used to simply format some source
@ -88,6 +100,7 @@ text, without any effort:
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
```
<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
### Identifying the language
To highlight code, you'll first have to identify what language the code is
@ -127,6 +140,7 @@ token types into a single token:
lexer = chroma.Coalesce(lexer)
```
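The other identification helpers follow the same shape; a hedged sketch, again assuming the `lexers` sub-package is imported:

```go
// By filename:
lexer := lexers.Match("main.go")
// Or, failing that, by analysing the content itself:
if lexer == nil {
    lexer = lexers.Analyse("package main\n\nfunc main() {}\n")
}
```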
<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
### Formatting the output
Once a language is identified you will need to pick a formatter and a style (theme).
@ -155,6 +169,7 @@ And finally, format the tokens from the iterator:
err := formatter.Format(w, style, iterator)
```
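Putting the pieces together, a sketch of the whole pipeline, assuming `lexers`, `formatters` and `styles` are imported, `lexer` was obtained as above, and `w` is an `io.Writer`:

```go
style := styles.Get("monokai")
if style == nil {
    style = styles.Fallback
}
formatter := formatters.Get("html")
if formatter == nil {
    formatter = formatters.Fallback
}
iterator, err := lexer.Tokenise(nil, someSourceCode)
if err != nil {
    return err
}
err = formatter.Format(w, style, iterator)
```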
<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
### The HTML formatter
By default the `html` registered formatter generates standalone HTML with
@ -168,6 +183,7 @@ following constructor options:
- `ClassPrefix(prefix)` - prefix each generated CSS class.
- `TabWidth(width)` - Set the rendered tab width, in characters.
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
- `LinkableLineNumbers()` - Make the line numbers linkable.
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
@ -178,8 +194,10 @@ formatter := html.New(html.WithClasses())
err := formatter.WriteCSS(w, style)
```
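A sketch combining several of the options above, using the boolean-style option signatures from this chroma release:

```go
formatter := html.New(
    html.WithClasses(true),
    html.WithLineNumbers(true),
    html.LinkableLineNumbers(true, "line-"),
    html.HighlightLines([][2]int{{3, 5}}),
    html.TabWidth(4),
)
// With classes enabled, the CSS can still be written out separately:
err := formatter.WriteCSS(w, style)
```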
<a id="markdown-more-detail" name="more-detail"></a>
## More detail
<a id="markdown-lexers" name="lexers"></a>
### Lexers
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
@ -200,6 +218,7 @@ python3 ~/Projects/chroma/_tools/pygments2chroma.py \
See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a>
### Formatters
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
@ -207,13 +226,14 @@ Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour,
A `noop` formatter is included that outputs the token text only, and a `tokens`
formatter outputs raw tokens. The latter is useful for debugging lexers.
<a id="markdown-styles" name="styles"></a>
### Styles
Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments.
All Pygments styles have been converted to Chroma using the `_tools/style.py` script.
When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color.
When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color.
For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background:
@ -225,6 +245,7 @@ Also, token types in a style file are hierarchical. For instance, when `CommentS
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
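A sketch of what such a style can look like when defined in Go; the entries and colours below are illustrative, not taken from any shipped style:

```go
myStyle := chroma.MustNewStyle("mystyle", chroma.StyleEntries{
    chroma.Background:     "#f8f8f8 bg:#000000", // default foreground and background
    chroma.Comment:        "italic #888888",
    chroma.CommentSpecial: "bold", // colour is inherited from chroma.Comment
    chroma.Keyword:        "#66d9ef",
})
```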
<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included. It can be installed with:
@ -233,6 +254,7 @@ A command-line interface to Chroma is included. It can be installed with:
go get -u github.com/alecthomas/chroma/cmd/chroma
```
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
## What's missing compared to Pygments?
- Quite a few lexers, for various reasons (pull-requests welcome):

View File

@ -34,7 +34,7 @@ type insertion struct {
tokens []Token
}
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
tokens, err := Tokenise(Coalesce(d.language), options, text)
if err != nil {
return nil, err

View File

@ -14,32 +14,59 @@ import (
type Option func(f *Formatter)
// Standalone configures the HTML formatter for generating a standalone HTML document.
func Standalone() Option { return func(f *Formatter) { f.standalone = true } }
func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } }
// ClassPrefix sets the CSS class prefix.
func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } }
// WithClasses emits HTML using CSS classes, rather than inline styles.
func WithClasses() Option { return func(f *Formatter) { f.Classes = true } }
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
// WithAllClasses disables an optimisation that omits redundant CSS classes.
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
// TabWidth sets the number of characters for a tab. Defaults to 8.
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
// PreventSurroundingPre prevents the surrounding pre tags around the generated code
func PreventSurroundingPre() Option { return func(f *Formatter) { f.preventSurroundingPre = true } }
// PreventSurroundingPre prevents the surrounding pre tags around the generated code.
func PreventSurroundingPre(b bool) Option {
return func(f *Formatter) {
if b {
f.preWrapper = nopPreWrapper
} else {
f.preWrapper = defaultPreWrapper
}
}
}
// WithPreWrapper allows control of the surrounding pre tags.
func WithPreWrapper(wrapper PreWrapper) Option {
return func(f *Formatter) {
f.preWrapper = wrapper
}
}
// WithLineNumbers formats output with line numbers.
func WithLineNumbers() Option {
func WithLineNumbers(b bool) Option {
return func(f *Formatter) {
f.lineNumbers = true
f.lineNumbers = b
}
}
// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers
// and code in table td's, which make them copy-and-paste friendly.
func LineNumbersInTable() Option {
func LineNumbersInTable(b bool) Option {
return func(f *Formatter) {
f.lineNumbersInTable = true
f.lineNumbersInTable = b
}
}
// LinkableLineNumbers decorates the line numbers HTML elements with an "id"
// attribute so they can be linked.
func LinkableLineNumbers(b bool, prefix string) Option {
return func(f *Formatter) {
f.linkableLineNumbers = b
f.lineNumbersIDPrefix = prefix
}
}
@ -64,6 +91,7 @@ func BaseLineNumber(n int) Option {
func New(options ...Option) *Formatter {
f := &Formatter{
baseLineNumber: 1,
preWrapper: defaultPreWrapper,
}
for _, option := range options {
option(f)
@ -71,17 +99,60 @@ func New(options ...Option) *Formatter {
return f
}
// PreWrapper defines the operations supported in WithPreWrapper.
type PreWrapper interface {
// Start is called to write a start <pre> element.
// The code flag tells whether this block surrounds
// highlighted code. This will be false when surrounding
// line numbers.
Start(code bool, styleAttr string) string
// End is called to write the end </pre> element.
End(code bool) string
}
type preWrapper struct {
start func(code bool, styleAttr string) string
end func(code bool) string
}
func (p preWrapper) Start(code bool, styleAttr string) string {
return p.start(code, styleAttr)
}
func (p preWrapper) End(code bool) string {
return p.end(code)
}
var (
nopPreWrapper = preWrapper{
start: func(code bool, styleAttr string) string { return "" },
end: func(code bool) string { return "" },
}
defaultPreWrapper = preWrapper{
start: func(code bool, styleAttr string) string {
return fmt.Sprintf("<pre%s>", styleAttr)
},
end: func(code bool) string {
return "</pre>"
},
}
)
// Formatter that generates HTML.
type Formatter struct {
standalone bool
prefix string
Classes bool // Exported field to detect when classes are being used
preventSurroundingPre bool
tabWidth int
lineNumbers bool
lineNumbersInTable bool
highlightRanges highlightRanges
baseLineNumber int
standalone bool
prefix string
Classes bool // Exported field to detect when classes are being used
allClasses bool
preWrapper PreWrapper
tabWidth int
lineNumbers bool
lineNumbersInTable bool
linkableLineNumbers bool
lineNumbersIDPrefix string
highlightRanges highlightRanges
baseLineNumber int
}
type highlightRanges [][2]int
@ -91,11 +162,6 @@ func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
defer func() {
if perr := recover(); perr != nil {
err = perr.(error)
}
}()
return f.writeHTML(w, style, iterator.Tokens())
}
@ -126,7 +192,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
wrapInTable := f.lineNumbers && f.lineNumbersInTable
lines := chroma.SplitTokensIntoLines(tokens)
lineDigits := len(fmt.Sprintf("%d", len(lines)))
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
highlightIndex := 0
if wrapInTable {
@ -134,9 +200,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.Background))
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
if !f.preventSurroundingPre {
fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
}
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.Background)))
for index := range lines {
line := f.baseLineNumber + index
highlight, next := f.shouldHighlight(highlightIndex, line)
@ -147,22 +211,19 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
}
fmt.Fprintf(w, "<span%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), lineDigits, line)
fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line)
if highlight {
fmt.Fprintf(w, "</span>")
}
}
if !f.preventSurroundingPre {
fmt.Fprint(w, "</pre>")
}
fmt.Fprint(w, f.preWrapper.End(false))
fmt.Fprint(w, "</td>\n")
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
}
if !f.preventSurroundingPre {
fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
}
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.Background)))
highlightIndex = 0
for index, tokens := range lines {
// 1-based line number.
@ -176,7 +237,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
}
if f.lineNumbers && !wrapInTable {
fmt.Fprintf(w, "<span%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), lineDigits, line)
fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line)
}
for _, token := range tokens {
@ -192,9 +253,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
}
}
if !f.preventSurroundingPre {
fmt.Fprint(w, "</pre>")
}
fmt.Fprintf(w, f.preWrapper.End(true))
if wrapInTable {
fmt.Fprint(w, "</td></tr></table>\n")
@ -209,6 +268,13 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
return nil
}
func (f *Formatter) lineIDAttribute(line int) string {
if !f.linkableLineNumbers {
return ""
}
return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line)
}
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
next := false
for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] {
@ -240,7 +306,7 @@ func (f *Formatter) class(t chroma.TokenType) string {
return ""
}
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType) string {
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string {
if f.Classes {
cls := f.class(tt)
if cls == "" {
@ -257,7 +323,9 @@ func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.Toke
}
}
}
return fmt.Sprintf(` style="%s"`, styles[tt])
css := []string{styles[tt]}
css = append(css, extraCSS...)
return fmt.Sprintf(` style="%s"`, strings.Join(css, ";"))
}
func (f *Formatter) tabWidthStyle() string {
@ -281,6 +349,13 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
return err
}
}
// Special-case line number highlighting when targeted.
if f.lineNumbers || f.lineNumbersInTable {
targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight))
for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} {
fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS)
}
}
tts := []int{}
for tt := range css {
tts = append(tts, int(tt))
@ -291,8 +366,12 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
if tt == chroma.Background {
continue
}
class := f.class(tt)
if class == "" {
continue
}
styles := css[tt]
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, f.class(tt), styles); err != nil {
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
return err
}
}
@ -308,7 +387,7 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
if t != chroma.Background {
entry = entry.Sub(bg)
}
if entry.IsZero() {
if !f.allClasses && entry.IsZero() {
continue
}
classes[t] = StyleEntryToCSS(entry)
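For context, a minimal sketch (not part of the vendored code) showing how the new `PreWrapper` hook and linkable line numbers introduced above are used from the consuming side:

```go
package main

import (
	"fmt"
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

// divPreWrapper wraps the highlighted code (code == true) in a <div>; the
// line-number column (code == false) keeps a bare <pre>.
type divPreWrapper struct{}

func (divPreWrapper) Start(code bool, styleAttr string) string {
	if code {
		return fmt.Sprintf(`<div class="snippet"><pre%s>`, styleAttr)
	}
	return fmt.Sprintf("<pre%s>", styleAttr)
}

func (divPreWrapper) End(code bool) string {
	if code {
		return "</pre></div>"
	}
	return "</pre>"
}

func main() {
	formatter := html.New(
		html.WithPreWrapper(divPreWrapper{}),
		html.WithLineNumbers(true),
		html.LinkableLineNumbers(true, "line-"), // emits id="line-1", id="line-2", ...
	)
	lexer := lexers.Get("go")
	iterator, err := lexer.Tokenise(nil, "package main\n\nfunc main() {}\n")
	if err != nil {
		panic(err)
	}
	if err := formatter.Format(os.Stdout, styles.Fallback, iterator); err != nil {
		panic(err)
	}
}
```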

View File

@ -1,14 +1,18 @@
module github.com/alecthomas/chroma
go 1.13
require (
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
github.com/alecthomas/kong v0.1.15
github.com/alecthomas/kong v0.2.4
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/dlclark/regexp2 v1.1.6
github.com/mattn/go-colorable v0.0.9
github.com/mattn/go-isatty v0.0.4
github.com/dlclark/regexp2 v1.2.0
github.com/mattn/go-colorable v0.1.6
github.com/mattn/go-isatty v0.0.12
github.com/pkg/errors v0.9.1 // indirect
github.com/sergi/go-diff v1.0.0 // indirect
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 // indirect
github.com/stretchr/testify v1.3.0 // indirect
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
)

View File

@ -2,25 +2,35 @@ github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.1.15 h1:IWBg+KrLvoHBicD50OzMI8fKjrtAa1okMR9g38HVM/s=
github.com/alecthomas/kong v0.1.15/go.mod h1:0m2VYms8rH0qbCqVB2gvGHk74bqLIq0HXjCs5bNbNQU=
github.com/alecthomas/kong v0.2.4 h1:Y0ZBCHAvHhTHw7FFJ2FzCAAG4pkbTgA45nc7BpMhDNk=
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

View File

@ -6,7 +6,8 @@ import (
var (
defaultOptions = &TokeniseOptions{
State: "root",
State: "root",
EnsureLF: true,
}
)
@ -80,6 +81,10 @@ type TokeniseOptions struct {
State string
// Nested tokenisation.
Nested bool
// If true, all EOLs are converted into LF
// by replacing CRLF and CR
EnsureLF bool
}
// A Lexer for tokenising source code.
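A short fragment (not upstream code) showing the new option passed explicitly, assuming the usual chroma imports and a `lexer` obtained from the registry:

```go
it, err := lexer.Tokenise(&chroma.TokeniseOptions{
    State:    "root",
    EnsureLF: true, // normalise CRLF/CR to LF before lexing; also set in defaultOptions
}, "package main\r\n\r\nfunc main() {}\r\n")
if err != nil {
    panic(err)
}
_ = formatters.Fallback.Format(os.Stdout, styles.Fallback, it)
```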

View File

@ -7,13 +7,31 @@ that its output matches `<name>.exported`.
Run the tests as normal:
```go
go run ./lexers
go test ./lexers
```
## Updating the existing tests
## Update existing tests
When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer.
You can regenerate all the test outputs
To regenerate all tests, type in your terminal:
```go
RECORD=true go test ./lexers
```
This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project.
(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.)
### Windows users
Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell.
Instead we have to perform both steps separately:
- Set the `RECORD` environment variable to `true`.
+ In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more.
+ In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more.
+ You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how.
- When the environment variable is set, run `go test ./lexers`.

Chroma will now regenerate the test files and print its results to the console window.

56
vendor/github.com/alecthomas/chroma/lexers/a/abap.go generated vendored Normal file
View File

@ -0,0 +1,56 @@
package a
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// ABAP lexer.
var Abap = internal.Register(MustNewLexer(
&Config{
Name: "ABAP",
Aliases: []string{"abap"},
Filenames: []string{"*.abap", "*.ABAP"},
MimeTypes: []string{"text/x-abap"},
CaseInsensitive: true,
},
Rules{
"common": {
{`\s+`, Text, nil},
{`^\*.*$`, CommentSingle, nil},
{`\".*?\n`, CommentSingle, nil},
{`##\w+`, CommentSpecial, nil},
},
"variable-names": {
{`<\S+>`, NameVariable, nil},
{`\w[\w~]*(?:(\[\])|->\*)?`, NameVariable, nil},
},
"root": {
Include("common"),
{`CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)`, Keyword, nil},
{`(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b`, Keyword, nil},
{`(FORM|PERFORM)(\s+)(\w+)`, ByGroups(Keyword, Text, NameFunction), nil},
{`(PERFORM)(\s+)(\()(\w+)(\))`, ByGroups(Keyword, Text, Punctuation, NameVariable, Punctuation), nil},
{`(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)`, ByGroups(Keyword, Text, NameFunction, Text, Keyword), nil},
{`(METHOD)(\s+)([\w~]+)`, ByGroups(Keyword, Text, NameFunction), nil},
{`(\s+)([\w\-]+)([=\-]>)([\w\-~]+)`, ByGroups(Text, NameVariable, Operator, NameFunction), nil},
{`(?<=(=|-)>)([\w\-~]+)(?=\()`, NameFunction, nil},
{`(TEXT)(-)(\d{3})`, ByGroups(Keyword, Punctuation, LiteralNumberInteger), nil},
{`(TEXT)(-)(\w{3})`, ByGroups(Keyword, Punctuation, NameVariable), nil},
{`(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b`, Keyword, nil},
{`(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b`, Keyword, nil},
{`(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b`, Keyword, nil},
{`(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b`, ByGroups(NameBuiltin, Punctuation), nil},
{`&[0-9]`, Name, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b`, OperatorWord, nil},
Include("variable-names"),
{`[?*<>=\-+&]`, Operator, nil},
{`'(''|[^'])*'`, LiteralStringSingle, nil},
{"`([^`])*`", LiteralStringSingle, nil},
{`([|}])([^{}|]*?)([|{])`, ByGroups(Punctuation, LiteralStringSingle, Punctuation), nil},
{`[/;:()\[\],.]`, Punctuation, nil},
{`(!)(\w+)`, ByGroups(Operator, Name), nil},
},
},
))

View File

@ -31,7 +31,7 @@ var Applescript = internal.Register(MustNewLexer(
{`\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b`, ByGroups(Keyword, NameClass), nil},
{`\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b`, NameConstant, nil},
{`\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b`, NameBuiltin, nil},
{`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|whith|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil},
{`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|with|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil},
{`\b(global|local|prop(erty)?|set|get)\b`, Keyword, nil},
{`\b(but|put|returning|the)\b`, NameBuiltin, nil},
{`\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b`, NameBuiltin, nil},

View File

@ -25,7 +25,7 @@ var Ballerina = internal.Register(MustNewLexer(
{`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil},
{`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil},
{`(true|false|null)\b`, KeywordConstant, nil},
{`import(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
{`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil},
{`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil},

View File

@ -53,7 +53,7 @@ var Bash = internal.Register(MustNewLexer(
{`&`, Punctuation, nil},
{`\|`, Punctuation, nil},
{`\s+`, Text, nil},
{`\d+\b`, LiteralNumber, nil},
{`\d+(?= |$)`, LiteralNumber, nil},
{"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil},
{`<`, Text, nil},
},

76
vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go generated vendored Normal file
View File

@ -0,0 +1,76 @@
package b
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Bibtex lexer.
var Bibtex = internal.Register(MustNewLexer(
&Config{
Name: "BibTeX",
Aliases: []string{"bib", "bibtex"},
Filenames: []string{"*.bib"},
MimeTypes: []string{"text/x-bibtex"},
NotMultiline: true,
CaseInsensitive: true,
},
Rules{
"root": {
Include("whitespace"),
{`@comment`, Comment, nil},
{`@preamble`, NameClass, Push("closing-brace", "value", "opening-brace")},
{`@string`, NameClass, Push("closing-brace", "field", "opening-brace")},
{"@[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameClass, Push("closing-brace", "command-body", "opening-brace")},
{`.+`, Comment, nil},
},
"opening-brace": {
Include("whitespace"),
{`[{(]`, Punctuation, Pop(1)},
},
"closing-brace": {
Include("whitespace"),
{`[})]`, Punctuation, Pop(1)},
},
"command-body": {
Include("whitespace"),
{`[^\s\,\}]+`, NameLabel, Push("#pop", "fields")},
},
"fields": {
Include("whitespace"),
{`,`, Punctuation, Push("field")},
Default(Pop(1)),
},
"field": {
Include("whitespace"),
{"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameAttribute, Push("value", "=")},
Default(Pop(1)),
},
"=": {
Include("whitespace"),
{`=`, Punctuation, Pop(1)},
},
"value": {
Include("whitespace"),
{"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameVariable, nil},
{`"`, LiteralString, Push("quoted-string")},
{`\{`, LiteralString, Push("braced-string")},
{`[\d]+`, LiteralNumber, nil},
{`#`, Punctuation, nil},
Default(Pop(1)),
},
"quoted-string": {
{`\{`, LiteralString, Push("braced-string")},
{`"`, LiteralString, Pop(1)},
{`[^\{\"]+`, LiteralString, nil},
},
"braced-string": {
{`\{`, LiteralString, Push()},
{`\}`, LiteralString, Pop(1)},
{`[^\{\}]+`, LiteralString, nil},
},
"whitespace": {
{`\s+`, Text, nil},
},
},
))

View File

@ -5,8 +5,232 @@ import (
"github.com/alecthomas/chroma/lexers/internal"
)
var (
clBuiltinFunctions = []string{
"<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+",
"abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin",
"adjustable-array-p", "adjust-array", "allocate-instance",
"alpha-char-p", "alphanumericp", "append", "apply", "apropos",
"apropos-list", "aref", "arithmetic-error-operands",
"arithmetic-error-operation", "array-dimension", "array-dimensions",
"array-displacement", "array-element-type", "array-has-fill-pointer-p",
"array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index",
"array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if",
"assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1",
"bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not",
"bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole",
"both-case-p", "boundp", "break", "broadcast-stream-streams",
"butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr",
"caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr",
"cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car",
"cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar",
"cddaar", "cddadr", "cddar", "cdddar", "cddddr", "cdddr", "cddr", "cdr",
"ceiling", "cell-error-name", "cerror", "change-class", "char", "char<",
"char<=", "char=", "char>", "char>=", "char/=", "character",
"characterp", "char-code", "char-downcase", "char-equal",
"char-greaterp", "char-int", "char-lessp", "char-name",
"char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase",
"cis", "class-name", "class-of", "clear-input", "clear-output",
"close", "clrhash", "code-char", "coerce", "compile",
"compiled-function-p", "compile-file", "compile-file-pathname",
"compiler-macro-function", "complement", "complex", "complexp",
"compute-applicable-methods", "compute-restarts", "concatenate",
"concatenated-stream-streams", "conjugate", "cons", "consp",
"constantly", "constantp", "continue", "copy-alist", "copy-list",
"copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure",
"copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if",
"count-if-not", "decode-float", "decode-universal-time", "delete",
"delete-duplicates", "delete-file", "delete-if", "delete-if-not",
"delete-package", "denominator", "deposit-field", "describe",
"describe-object", "digit-char", "digit-char-p", "directory",
"directory-namestring", "disassemble", "documentation", "dpb",
"dribble", "echo-stream-input-stream", "echo-stream-output-stream",
"ed", "eighth", "elt", "encode-universal-time", "endp",
"enough-namestring", "ensure-directories-exist",
"ensure-generic-function", "eq", "eql", "equal", "equalp", "error",
"eval", "evenp", "every", "exp", "export", "expt", "fboundp",
"fceiling", "fdefinition", "ffloor", "fifth", "file-author",
"file-error-pathname", "file-length", "file-namestring",
"file-position", "file-string-length", "file-write-date",
"fill", "fill-pointer", "find", "find-all-symbols", "find-class",
"find-if", "find-if-not", "find-method", "find-package", "find-restart",
"find-symbol", "finish-output", "first", "float", "float-digits",
"floatp", "float-precision", "float-radix", "float-sign", "floor",
"fmakunbound", "force-output", "format", "fourth", "fresh-line",
"fround", "ftruncate", "funcall", "function-keywords",
"function-lambda-expression", "functionp", "gcd", "gensym", "gentemp",
"get", "get-decoded-time", "get-dispatch-macro-character", "getf",
"gethash", "get-internal-real-time", "get-internal-run-time",
"get-macro-character", "get-output-stream-string", "get-properties",
"get-setf-expansion", "get-universal-time", "graphic-char-p",
"hash-table-count", "hash-table-p", "hash-table-rehash-size",
"hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
"host-namestring", "identity", "imagpart", "import",
"initialize-instance", "input-stream-p", "inspect",
"integer-decode-float", "integer-length", "integerp",
"interactive-stream-p", "intern", "intersection",
"invalid-method-error", "invoke-debugger", "invoke-restart",
"invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm",
"ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type",
"lisp-implementation-version", "list", "list*", "list-all-packages",
"listen", "list-length", "listp", "load",
"load-logical-pathname-translations", "log", "logand", "logandc1",
"logandc2", "logbitp", "logcount", "logeqv", "logical-pathname",
"logical-pathname-translations", "logior", "lognand", "lognor",
"lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name",
"lower-case-p", "machine-instance", "machine-type", "machine-version",
"macroexpand", "macroexpand-1", "macro-function", "make-array",
"make-broadcast-stream", "make-concatenated-stream", "make-condition",
"make-dispatch-macro-character", "make-echo-stream", "make-hash-table",
"make-instance", "make-instances-obsolete", "make-list",
"make-load-form", "make-load-form-saving-slots", "make-package",
"make-pathname", "make-random-state", "make-sequence", "make-string",
"make-string-input-stream", "make-string-output-stream", "make-symbol",
"make-synonym-stream", "make-two-way-stream", "makunbound", "map",
"mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl",
"maplist", "mask-field", "max", "member", "member-if", "member-if-not",
"merge", "merge-pathnames", "method-combination-error",
"method-qualifiers", "min", "minusp", "mismatch", "mod",
"muffle-warning", "name-char", "namestring", "nbutlast", "nconc",
"next-method-p", "nintersection", "ninth", "no-applicable-method",
"no-next-method", "not", "notany", "notevery", "nreconc", "nreverse",
"nset-difference", "nset-exclusive-or", "nstring-capitalize",
"nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if",
"nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not",
"nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp",
"open", "open-stream-p", "output-stream-p", "package-error-package",
"package-name", "package-nicknames", "packagep",
"package-shadowing-symbols", "package-used-by-list", "package-use-list",
"pairlis", "parse-integer", "parse-namestring", "pathname",
"pathname-device", "pathname-directory", "pathname-host",
"pathname-match-p", "pathname-name", "pathnamep", "pathname-type",
"pathname-version", "peek-char", "phase", "plusp", "position",
"position-if", "position-if-not", "pprint", "pprint-dispatch",
"pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline",
"pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ",
"princ-to-string", "print", "print-object", "probe-file", "proclaim",
"provide", "random", "random-state-p", "rassoc", "rassoc-if",
"rassoc-if-not", "rational", "rationalize", "rationalp", "read",
"read-byte", "read-char", "read-char-no-hang", "read-delimited-list",
"read-from-string", "read-line", "read-preserving-whitespace",
"read-sequence", "readtable-case", "readtablep", "realp", "realpart",
"reduce", "reinitialize-instance", "rem", "remhash", "remove",
"remove-duplicates", "remove-if", "remove-if-not", "remove-method",
"remprop", "rename-file", "rename-package", "replace", "require",
"rest", "restart-name", "revappend", "reverse", "room", "round",
"row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar",
"search", "second", "set", "set-difference",
"set-dispatch-macro-character", "set-exclusive-or",
"set-macro-character", "set-pprint-dispatch", "set-syntax-from-char",
"seventh", "shadow", "shadowing-import", "shared-initialize",
"short-site-name", "signal", "signum", "simple-bit-vector-p",
"simple-condition-format-arguments", "simple-condition-format-control",
"simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep",
"slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing",
"slot-unbound", "slot-value", "software-type", "software-version",
"some", "sort", "special-operator-p", "sqrt", "stable-sort",
"standard-char-p", "store-value", "stream-element-type",
"stream-error-stream", "stream-external-format", "streamp", "string",
"string<", "string<=", "string=", "string>", "string>=", "string/=",
"string-capitalize", "string-downcase", "string-equal",
"string-greaterp", "string-left-trim", "string-lessp",
"string-not-equal", "string-not-greaterp", "string-not-lessp",
"stringp", "string-right-trim", "string-trim", "string-upcase",
"sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not",
"substitute", "substitute-if", "substitute-if-not", "subtypep", "svref",
"sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package",
"symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:",
"tailp", "tan", "tanh", "tenth", "terpri", "third",
"translate-logical-pathname", "translate-pathname", "tree-equal",
"truename", "truncate", "two-way-stream-input-stream",
"two-way-stream-output-stream", "type-error-datum",
"type-error-expected-type", "type-of", "typep", "unbound-slot-instance",
"unexport", "unintern", "union", "unread-char", "unuse-package",
"update-instance-for-different-class",
"update-instance-for-redefined-class", "upgraded-array-element-type",
"upgraded-complex-part-type", "upper-case-p", "use-package",
"user-homedir-pathname", "use-value", "values", "values-list", "vector",
"vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn",
"wild-pathname-p", "write", "write-byte", "write-char", "write-line",
"write-sequence", "write-string", "write-to-string", "yes-or-no-p",
"y-or-n-p", "zerop",
}
clSpecialForms = []string{
"block", "catch", "declare", "eval-when", "flet", "function", "go", "if",
"labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet",
"multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote",
"return-from", "setq", "symbol-macrolet", "tagbody", "the", "throw",
"unwind-protect",
}
clMacros = []string{
"and", "assert", "call-method", "case", "ccase", "check-type", "cond",
"ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric",
"define-compiler-macro", "define-condition", "define-method-combination",
"define-modify-macro", "define-setf-expander", "define-symbol-macro",
"defmacro", "defmethod", "defpackage", "defparameter", "defsetf",
"defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do",
"do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols",
"dotimes", "ecase", "etypecase", "formatter", "handler-bind",
"handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop",
"loop-finish", "make-method", "multiple-value-bind", "multiple-value-list",
"multiple-value-setq", "nth-value", "or", "pop",
"pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop",
"print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf",
"psetq", "push", "pushnew", "remf", "restart-bind", "restart-case",
"return", "rotatef", "setf", "shiftf", "step", "time", "trace", "typecase",
"unless", "untrace", "when", "with-accessors", "with-compilation-unit",
"with-condition-restarts", "with-hash-table-iterator",
"with-input-from-string", "with-open-file", "with-open-stream",
"with-output-to-string", "with-package-iterator", "with-simple-restart",
"with-slots", "with-standard-io-syntax",
}
clLambdaListKeywords = []string{
"&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
"&rest", "&whole",
}
clDeclarations = []string{
"dynamic-extent", "ignore", "optimize", "ftype", "inline", "special",
"ignorable", "notinline", "type",
}
clBuiltinTypes = []string{
"atom", "boolean", "base-char", "base-string", "bignum", "bit",
"compiled-function", "extended-char", "fixnum", "keyword", "nil",
"signed-byte", "short-float", "single-float", "double-float", "long-float",
"simple-array", "simple-base-string", "simple-bit-vector", "simple-string",
"simple-vector", "standard-char", "unsigned-byte",
// Condition Types
"arithmetic-error", "cell-error", "condition", "control-error",
"division-by-zero", "end-of-file", "error", "file-error",
"floating-point-inexact", "floating-point-overflow",
"floating-point-underflow", "floating-point-invalid-operation",
"parse-error", "package-error", "print-not-readable", "program-error",
"reader-error", "serious-condition", "simple-condition", "simple-error",
"simple-type-error", "simple-warning", "stream-error", "storage-condition",
"style-warning", "type-error", "unbound-variable", "unbound-slot",
"undefined-function", "warning",
}
clBuiltinClasses = []string{
"array", "broadcast-stream", "bit-vector", "built-in-class", "character",
"class", "complex", "concatenated-stream", "cons", "echo-stream",
"file-stream", "float", "function", "generic-function", "hash-table",
"integer", "list", "logical-pathname", "method-combination", "method",
"null", "number", "package", "pathname", "ratio", "rational", "readtable",
"real", "random-state", "restart", "sequence", "standard-class",
"standard-generic-function", "standard-method", "standard-object",
"string-stream", "stream", "string", "structure-class", "structure-object",
"symbol", "synonym-stream", "t", "two-way-stream", "vector",
}
)
// Common Lisp lexer.
var CommonLisp = internal.Register(MustNewLexer(
var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
&Config{
Name: "Common Lisp",
Aliases: []string{"common-lisp", "cl", "lisp"},
@ -71,4 +295,12 @@ var CommonLisp = internal.Register(MustNewLexer(
{`\)`, Punctuation, Pop(1)},
},
},
))
), TypeMapping{
{NameVariable, NameFunction, clBuiltinFunctions},
{NameVariable, Keyword, clSpecialForms},
{NameVariable, NameBuiltin, clMacros},
{NameVariable, Keyword, clLambdaListKeywords},
{NameVariable, Keyword, clDeclarations},
{NameVariable, KeywordType, clBuiltinTypes},
{NameVariable, NameClass, clBuiltinClasses},
}))
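For context, a sketch (not part of the vendored file) of what the remapping means when tokenising; it assumes the top-level `chroma` and `lexers` packages are imported:

```go
it, err := lexers.Get("common-lisp").Tokenise(nil, "(defun add (a b) (+ a b))")
if err != nil {
	panic(err)
}
for _, tok := range it.Tokens() {
	// Symbols such as "defun" that the base lexer classifies as NameVariable
	// are remapped to NameBuiltin because they appear in clMacros above.
	fmt.Printf("%-14s %q\n", tok.Type, tok.Value)
}
```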

View File

@ -16,26 +16,28 @@ var CPP = internal.Register(MustNewLexer(
},
Rules{
"statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
{`char(16_t|32_t)\b`, KeywordType, nil},
{`(class)\b`, ByGroups(Keyword, Text), Push("classname")},
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil},
{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")},
{`(class|struct|enum|union)\b(\s*)`, ByGroups(Keyword, Text), Push("classname")},
{`\[\[.+\]\]`, NameAttribute, nil},
{`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil},
{`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
{`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil},
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil},
{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil},
{`0[0-7]+[LlUu]*`, LiteralNumberOct, nil},
{`\d+[LlUu]*`, LiteralNumberInteger, nil},
{`0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*`, LiteralNumberHex, nil},
{`0('?[0-7]+)+[LlUu]*`, LiteralNumberOct, nil},
{`0[Bb][01]('?[01]+)*[LlUu]*`, LiteralNumberBin, nil},
{`[0-9]('?[0-9]+)*[LlUu]*`, LiteralNumberInteger, nil},
{`\*/`, Error, nil},
{`[~!%^&*+=|?:<>/-]`, Operator, nil},
{`[()\[\],.]`, Punctuation, nil},
{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
{`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil},
{`(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil},
{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
{`(true|false|NULL)\b`, NameBuiltin, nil},
{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil},
{`[a-zA-Z_]\w*`, Name, nil},
@ -49,8 +51,9 @@ var CPP = internal.Register(MustNewLexer(
{`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil},
},
"classname": {
{`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil},
{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
{`\s*(?=>)`, Text, Pop(1)},
{`\s*(?=[>{])`, Text, Pop(1)},
},
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
@ -67,8 +70,8 @@ var CPP = internal.Register(MustNewLexer(
"statement": {
Include("whitespace"),
Include("statements"),
{`[{}]`, Punctuation, nil},
{`;`, Punctuation, Pop(1)},
{`[{]`, Punctuation, Push("root")},
{`[;}]`, Punctuation, Pop(1)},
},
"function": {
Include("whitespace"),

69
vendor/github.com/alecthomas/chroma/lexers/d/d.go generated vendored Normal file
View File

@ -0,0 +1,69 @@
package d
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// D lexer. https://dlang.org/spec/lex.html
var D = internal.Register(MustNewLexer(
&Config{
Name: "D",
Aliases: []string{"d"},
Filenames: []string{"*.d", "*.di"},
MimeTypes: []string{"text/x-d"},
EnsureNL: true,
},
Rules{
"root": {
{`[^\S\n]+`, Text, nil},
// https://dlang.org/spec/lex.html#comment
{`//.*?\n`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`/\+.*?\+/`, CommentMultiline, nil},
// https://dlang.org/spec/lex.html#keywords
{`(asm|assert|body|break|case|cast|catch|continue|default|debug|delete|deprecated|do|else|finally|for|foreach|foreach_reverse|goto|if|in|invariant|is|macro|mixin|new|out|pragma|return|super|switch|this|throw|try|version|while|with)\b`, Keyword, nil},
{`__(FILE|FILE_FULL_PATH|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__\b`, NameBuiltin, nil},
{`__(traits|vector|parameters)\b`, NameBuiltin, nil},
{`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
// https://dlang.org/spec/attribute.html#uda
{`@[\w.]*`, NameDecorator, nil},
{`(abstract|auto|alias|align|const|delegate|enum|export|final|function|inout|lazy|nothrow|override|package|private|protected|public|pure|static|synchronized|template|volatile|__gshared)\b`, KeywordDeclaration, nil},
// https://dlang.org/spec/type.html#basic-data-types
{`(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar|string|wstring|dstring)\b`, KeywordType, nil},
{`(module)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
{`(true|false|null)\b`, KeywordConstant, nil},
{`(class|interface|struct|template|union)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")},
{`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
// https://dlang.org/spec/lex.html#string_literals
// TODO support delimited strings
{`[qr]?"(\\\\|\\"|[^"])*"[cwd]?`, LiteralString, nil},
{"(`)([^`]*)(`)[cwd]?", LiteralString, nil},
{`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil},
{`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil},
{`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil},
// https://dlang.org/spec/lex.html#floatliteral
{`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?`, LiteralNumberFloat, nil},
// https://dlang.org/spec/lex.html#integerliteral
{`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil},
{`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil},
{`0[0-7_]+[lL]?`, LiteralNumberOct, nil},
{`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil},
{`([~^*!%&\[\](){}<>|+=:;,./?-]|q{)`, Operator, nil},
{`([^\W\d]|\$)[\w$]*`, Name, nil},
{`\n`, Text, nil},
},
"class": {
{`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)},
},
"import": {
{`[\w.]+\*?`, NameNamespace, Pop(1)},
},
},
))
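A rough usage sketch, not part of the vendored file: the new D lexer registers the "d" alias and the *.d/*.di filename globs, so it can be resolved either way once this update is vendored. The example filename is illustrative.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Lookup by the alias declared in the Config above.
	if l := lexers.Get("d"); l != nil {
		fmt.Println("by alias:   ", l.Config().Name)
	}
	// Lookup by filename, matched against the "*.d"/"*.di" globs above.
	if l := lexers.Match("hello.d"); l != nil {
		fmt.Println("by filename:", l.Config().Name)
	}
}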

View File

@ -2,8 +2,9 @@ package d
import (
. "github.com/alecthomas/chroma" // nolint
. "github.com/alecthomas/chroma/lexers/b"
"github.com/alecthomas/chroma/lexers/b"
"github.com/alecthomas/chroma/lexers/internal"
"github.com/alecthomas/chroma/lexers/j"
)
// Docker lexer.
@ -17,11 +18,14 @@ var Docker = internal.Register(MustNewLexer(
},
Rules{
"root": {
{`^(ONBUILD)(\s+)((?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|VOLUME|WORKDIR))\b`, ByGroups(NameKeyword, TextWhitespace, Keyword), nil},
{`^((?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|VOLUME|WORKDIR))\b(.*)`, ByGroups(Keyword, LiteralString), nil},
{`#.*`, Comment, nil},
{`RUN`, Keyword, nil},
{`(.*\\\n)*.+`, Using(Bash), nil},
{`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil},
{`(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil},
{`(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])`, ByGroups(Keyword, Using(b.Bash), Using(j.JSON)), nil},
{`(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil},
{`((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)`, ByGroups(Keyword, LiteralString), nil},
{`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil},
{`(.*\\\n)*.+`, Using(b.Bash), nil},
},
},
))
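The rewritten Docker rules delegate sub-spans to other lexers via Using: shell fragments go through b.Bash and the JSON-array forms of VOLUME/ENTRYPOINT/CMD/SHELL go through j.JSON. A hedged sketch of exercising that, outside this diff; the "docker" alias is assumed from the lexer's Config block, which this hunk does not show, and the Dockerfile content, formatter, and style are arbitrary choices.

package main

import (
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	dockerfile := "FROM alpine:3.11\n" +
		"RUN apk add --no-cache ca-certificates\n" +
		"CMD [\"/bin/sh\", \"-c\", \"echo hello\"]\n"
	// quick.Highlight(writer, source, lexer, formatter, style)
	if err := quick.Highlight(os.Stdout, dockerfile, "docker", "terminal", "monokai"); err != nil {
		panic(err)
	}
}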

View File

@ -10,7 +10,7 @@ var Forth = internal.Register(MustNewLexer(
&Config{
Name: "Forth",
Aliases: []string{"forth"},
Filenames: []string{"*.frt", "*.fs"},
Filenames: []string{"*.frt", "*.fth", "*.fs"},
MimeTypes: []string{"application/x-forth"},
CaseInsensitive: true,
},

118
vendor/github.com/alecthomas/chroma/lexers/g/gherkin.go generated vendored Normal file
View File

@ -0,0 +1,118 @@
package g
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var stepKeywords = `^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )`
var featureKeywords = `^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$`
var featureElementKeywords = `^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$`
var examplesKeywords = `^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$`
// Gherkin lexer.
var Gherkin = internal.Register(MustNewLexer(
&Config{
Name: "Gherkin",
Aliases: []string{"cucumber", "Cucumber", "gherkin", "Gherkin"},
Filenames: []string{"*.feature", "*.FEATURE"},
MimeTypes: []string{"text/x-gherkin"},
},
Rules{
"comments": {
{`\s*#.*$`, Comment, nil},
},
"featureElements": {
{stepKeywords, Keyword, Push("stepContentStack")},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"featureElementsOnStack": {
{stepKeywords, Keyword, Pop(2)},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"examplesTable": {
{`\s+\|`, Keyword, Push("examplesTableHeader")},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"examplesTableHeader": {
{`\s+\|\s*$`, Keyword, Pop(2)},
Include("comments"),
{`\\\|`, NameVariable, nil},
{`\s*\|`, Keyword, nil},
{`[^|]`, NameVariable, nil},
},
"scenarioSectionsOnStack": {
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElementsOnStack")},
},
"narrative": {
Include("scenarioSectionsOnStack"),
{`(\s|.)`, NameFunction, nil},
},
"tableVars": {
{`(<[^>]+>)`, NameVariable, nil},
},
"numbers": {
{`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralString, nil},
},
"string": {
Include("tableVars"),
{`(\s|.)`, LiteralString, nil},
},
"pyString": {
{`"""`, Keyword, Pop(1)},
Include("string"),
},
"stepContentRoot": {
{`$`, Keyword, Pop(1)},
Include("stepContent"),
},
"stepContentStack": {
{`$`, Keyword, Pop(2)},
Include("stepContent"),
},
"stepContent": {
{`"`, NameFunction, Push("doubleString")},
Include("tableVars"),
Include("numbers"),
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"tableContent": {
{`\s+\|\s*$`, Keyword, Pop(1)},
Include("comments"),
{`\\\|`, LiteralString, nil},
{`\s*\|`, Keyword, nil},
{`"`, LiteralString, Push("doubleStringTable")},
Include("string"),
},
"doubleString": {
{`"`, NameFunction, Pop(1)},
Include("string"),
},
"doubleStringTable": {
{`"`, LiteralString, Pop(1)},
Include("string"),
},
"root": {
{`\n`, NameFunction, nil},
Include("comments"),
{`"""`, Keyword, Push("pyString")},
{`\s+\|`, Keyword, Push("tableContent")},
{`"`, NameFunction, Push("doubleString")},
Include("tableVars"),
Include("numbers"),
{`(\s*)(@[^@\r\n\t ]+)`, ByGroups(NameFunction, NameTag), nil},
{stepKeywords, ByGroups(NameFunction, Keyword), Push("stepContentRoot")},
{featureKeywords, ByGroups(Keyword, Keyword, NameFunction), Push("narrative")},
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElements")},
{examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")},
{`(\s|.)`, NameFunction, nil},
},
},
))

View File

@ -619,13 +619,14 @@ func haxePreProcMutator(state *LexerState) error {
}
proc := state.Groups[2]
if proc == "if" {
switch proc {
case "if":
stack = append(stack, state.Stack)
} else if proc == "else" || proc == "elseif" {
case "else", "elseif":
if len(stack) > 0 {
state.Stack = stack[len(stack)-1]
}
} else if proc == "end" {
case "end":
stack = stack[:len(stack)-1]
}

69
vendor/github.com/alecthomas/chroma/lexers/h/hcl.go generated vendored Normal file
View File

@ -0,0 +1,69 @@
package h
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// HCL lexer.
var HCL = internal.Register(MustNewLexer(
&Config{
Name: "HCL",
Aliases: []string{"hcl"},
Filenames: []string{"*.hcl"},
MimeTypes: []string{"application/x-hcl"},
},
Rules{
"root": {
Include("string"),
Include("punctuation"),
Include("curly"),
Include("basic"),
Include("whitespace"),
{`[0-9]+`, LiteralNumber, nil},
},
"basic": {
{Words(`\b`, `\b`, `true`, `false`), KeywordType, nil},
{`\s*/\*`, CommentMultiline, Push("comment")},
{`\s*#.*\n`, CommentSingle, nil},
{`(.*?)(\s*)(=)`, ByGroups(Name, Text, Operator), nil},
{`\d+`, Number, nil},
{`\b\w+\b`, Keyword, nil},
{`\$\{`, LiteralStringInterpol, Push("var_builtin")},
},
"function": {
{`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil},
Include("punctuation"),
Include("curly"),
},
"var_builtin": {
{`\$\{`, LiteralStringInterpol, Push()},
{Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil},
Include("string"),
Include("punctuation"),
{`\s+`, Text, nil},
{`\}`, LiteralStringInterpol, Pop(1)},
},
"string": {
{`(".*")`, ByGroups(LiteralStringDouble), nil},
},
"punctuation": {
{`[\[\](),.]`, Punctuation, nil},
},
"curly": {
{`\{`, TextPunctuation, nil},
{`\}`, TextPunctuation, nil},
},
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"whitespace": {
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
},
},
))

View File

@ -38,7 +38,6 @@ func httpContentBlock(groups []string, lexer Lexer) Iterator {
{Generic, groups[0]},
}
return Literator(tokens...)
}
func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
@ -66,7 +65,7 @@ func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{
type httpBodyContentTyper struct{ Lexer }
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
var contentType string
var isContentType bool
var subIterator Iterator
@ -123,9 +122,7 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
return EOF
}
}
}
return token
}, nil
}

54
vendor/github.com/alecthomas/chroma/lexers/hlb.go generated vendored Normal file
View File

@ -0,0 +1,54 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// HLB lexer.
var HLB = internal.Register(MustNewLexer(
&Config{
Name: "HLB",
Aliases: []string{"hlb"},
Filenames: []string{"*.hlb"},
MimeTypes: []string{},
},
Rules{
"root": {
{`(#.*)`, ByGroups(CommentSingle), nil},
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
{`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")},
{`(\{)`, ByGroups(Punctuation), Push("block")},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\"`, LiteralString, nil},
{`[^\\"]+`, LiteralString, nil},
},
"block": {
{`(\})`, ByGroups(Punctuation), Pop(1)},
{`(#.*)`, ByGroups(CommentSingle), nil},
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
{`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil},
{`"`, LiteralString, Push("string")},
{`(with)`, ByGroups(KeywordReserved), nil},
{`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")},
{`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
"params": {
{`(\))`, ByGroups(Punctuation), Pop(1)},
{`(variadic)`, ByGroups(Keyword), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
},
))

32
vendor/github.com/alecthomas/chroma/lexers/i/igor.go generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,3 +1,4 @@
// Package internal contains common API functions and structures shared between lexer packages.
package internal
import (
@ -36,19 +37,20 @@ func Names(withAliases bool) []string {
// Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer {
candidates := chroma.PrioritisedLexers{}
if lexer := Registry.byName[name]; lexer != nil {
candidates = append(candidates, lexer)
return lexer
}
if lexer := Registry.byAlias[name]; lexer != nil {
candidates = append(candidates, lexer)
return lexer
}
if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer)
return lexer
}
if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer)
return lexer
}
candidates := chroma.PrioritisedLexers{}
// Try file extension.
if lexer := Match("filename." + name); lexer != nil {
candidates = append(candidates, lexer)
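The change to Get above makes an exact name or alias hit return immediately; the candidate list and file-extension fallback now only run when there is no direct match. A hedged sketch of the resulting lookup order, using names that appear elsewhere in this diff; the assumption that "ijs" is only a *.ijs filename glob (on the J lexer below) and not an alias is not something this hunk shows directly.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	show := func(key string) {
		if l := lexers.Get(key); l != nil {
			fmt.Printf("%-12s -> %s\n", key, l.Config().Name)
		}
	}
	show("Common Lisp") // exact registered name: returned immediately
	show("lisp")        // registered alias: returned immediately
	show("ijs")         // no name/alias hit; falls back to matching "filename.ijs"
}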

73
vendor/github.com/alecthomas/chroma/lexers/j/j.go generated vendored Normal file
View File

@ -0,0 +1,73 @@
package j
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// J lexer.
var J = internal.Register(MustNewLexer(
&Config{
Name: "J",
Aliases: []string{"j"},
Filenames: []string{"*.ijs"},
MimeTypes: []string{"text/x-j"},
},
Rules{
"root": {
{`#!.*$`, CommentPreproc, nil},
{`NB\..*`, CommentSingle, nil},
{`\n+\s*Note`, CommentMultiline, Push("comment")},
{`\s*Note.*`, CommentSingle, nil},
{`\s+`, Text, nil},
{`'`, LiteralString, Push("singlequote")},
{`0\s+:\s*0|noun\s+define\s*$`, NameEntity, Push("nounDefinition")},
{`(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b`, NameFunction, Push("explicitDefinition")},
{Words(``, `\b[a-zA-Z]\w*\.`, `for_`, `goto_`, `label_`), NameLabel, nil},
{Words(``, `\.`, `assert`, `break`, `case`, `catch`, `catchd`, `catcht`, `continue`, `do`, `else`, `elseif`, `end`, `fcase`, `for`, `if`, `return`, `select`, `throw`, `try`, `while`, `whilst`), NameLabel, nil},
{`\b[a-zA-Z]\w*`, NameVariable, nil},
{Words(``, ``, `ARGV`, `CR`, `CRLF`, `DEL`, `Debug`, `EAV`, `EMPTY`, `FF`, `JVERSION`, `LF`, `LF2`, `Note`, `TAB`, `alpha17`, `alpha27`, `apply`, `bind`, `boxopen`, `boxxopen`, `bx`, `clear`, `cutLF`, `cutopen`, `datatype`, `def`, `dfh`, `drop`, `each`, `echo`, `empty`, `erase`, `every`, `evtloop`, `exit`, `expand`, `fetch`, `file2url`, `fixdotdot`, `fliprgb`, `getargs`, `getenv`, `hfd`, `inv`, `inverse`, `iospath`, `isatty`, `isutf8`, `items`, `leaf`, `list`, `nameclass`, `namelist`, `names`, `nc`, `nl`, `on`, `pick`, `rows`, `script`, `scriptd`, `sign`, `sminfo`, `smoutput`, `sort`, `split`, `stderr`, `stdin`, `stdout`, `table`, `take`, `timespacex`, `timex`, `tmoutput`, `toCRLF`, `toHOST`, `toJ`, `tolower`, `toupper`, `type`, `ucp`, `ucpcount`, `usleep`, `utf8`, `uucp`), NameFunction, nil},
{`=[.:]`, Operator, nil},
{"[-=+*#$%@!~`^&\";:.,<>{}\\[\\]\\\\|/]", Operator, nil},
{`[abCdDeEfHiIjLMoprtT]\.`, KeywordReserved, nil},
{`[aDiLpqsStux]\:`, KeywordReserved, nil},
{`(_[0-9])\:`, KeywordConstant, nil},
{`\(`, Punctuation, Push("parentheses")},
Include("numbers"),
},
"comment": {
{`[^)]`, CommentMultiline, nil},
{`^\)`, CommentMultiline, Pop(1)},
{`[)]`, CommentMultiline, nil},
},
"explicitDefinition": {
{`\b[nmuvxy]\b`, NameDecorator, nil},
Include("root"),
{`[^)]`, Name, nil},
{`^\)`, NameLabel, Pop(1)},
{`[)]`, Name, nil},
},
"numbers": {
{`\b_{1,2}\b`, LiteralNumber, nil},
{`_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?`, LiteralNumber, nil},
{`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil},
{`_?\d+x`, LiteralNumberIntegerLong, nil},
{`_?\d+`, LiteralNumberInteger, nil},
},
"nounDefinition": {
{`[^)]`, LiteralString, nil},
{`^\)`, NameLabel, Pop(1)},
{`[)]`, LiteralString, nil},
},
"parentheses": {
{`\)`, Punctuation, Pop(1)},
Include("explicitDefinition"),
Include("root"),
},
"singlequote": {
{`[^']`, LiteralString, nil},
{`''`, LiteralString, nil},
{`'`, LiteralString, Pop(1)},
},
},
))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -21,12 +21,13 @@ var Julia = internal.Register(MustNewLexer(
{`#.*$`, Comment, nil},
{`[\[\]{}(),;]`, Punctuation, nil},
{`in\b`, KeywordPseudo, nil},
{`isa\b`, KeywordPseudo, nil},
{`(true|false)\b`, KeywordConstant, nil},
{`(local|global|const)\b`, KeywordDeclaration, nil},
{Words(``, `\b`, `function`, `type`, `typealias`, `abstract`, `immutable`, `mutable`, `struct`, `baremodule`, `begin`, `bitstype`, `break`, `catch`, `ccall`, `continue`, `do`, `else`, `elseif`, `end`, `export`, `finally`, `for`, `if`, `import`, `importall`, `let`, `macro`, `module`, `quote`, `return`, `try`, `using`, `while`), Keyword, nil},
{Words(``, `\b`, `ANY`, `ASCIIString`, `AbstractArray`, `AbstractChannel`, `AbstractFloat`, `AbstractMatrix`, `AbstractRNG`, `AbstractSparseArray`, `AbstractSparseMatrix`, `AbstractSparseVector`, `AbstractString`, `AbstractVecOrMat`, `AbstractVector`, `Any`, `ArgumentError`, `Array`, `AssertionError`, `Associative`, `Base64DecodePipe`, `Base64EncodePipe`, `Bidiagonal`, `BigFloat`, `BigInt`, `BitArray`, `BitMatrix`, `BitVector`, `Bool`, `BoundsError`, `Box`, `BufferStream`, `CapturedException`, `CartesianIndex`, `CartesianRange`, `Cchar`, `Cdouble`, `Cfloat`, `Channel`, `Char`, `Cint`, `Cintmax_t`, `Clong`, `Clonglong`, `ClusterManager`, `Cmd`, `Coff_t`, `Colon`, `Complex`, `Complex128`, `Complex32`, `Complex64`, `CompositeException`, `Condition`, `Cptrdiff_t`, `Cshort`, `Csize_t`, `Cssize_t`, `Cstring`, `Cuchar`, `Cuint`, `Cuintmax_t`, `Culong`, `Culonglong`, `Cushort`, `Cwchar_t`, `Cwstring`, `DataType`, `Date`, `DateTime`, `DenseArray`, `DenseMatrix`, `DenseVecOrMat`, `DenseVector`, `Diagonal`, `Dict`, `DimensionMismatch`, `Dims`, `DirectIndexString`, `Display`, `DivideError`, `DomainError`, `EOFError`, `EachLine`, `Enum`, `Enumerate`, `ErrorException`, `Exception`, `Expr`, `Factorization`, `FileMonitor`, `FileOffset`, `Filter`, `Float16`, `Float32`, `Float64`, `FloatRange`, `Function`, `GenSym`, `GlobalRef`, `GotoNode`, `HTML`, `Hermitian`, `IO`, `IOBuffer`, `IOStream`, `IPv4`, `IPv6`, `InexactError`, `InitError`, `Int`, `Int128`, `Int16`, `Int32`, `Int64`, `Int8`, `IntSet`, `Integer`, `InterruptException`, `IntrinsicFunction`, `InvalidStateException`, `Irrational`, `KeyError`, `LabelNode`, `LambdaStaticData`, `LinSpace`, `LineNumberNode`, `LoadError`, `LocalProcess`, `LowerTriangular`, `MIME`, `Matrix`, `MersenneTwister`, `Method`, `MethodError`, `MethodTable`, `Module`, `NTuple`, `NewvarNode`, `NullException`, `Nullable`, `Number`, `ObjectIdDict`, `OrdinalRange`, `OutOfMemoryError`, `OverflowError`, `Pair`, `ParseError`, `PartialQuickSort`, `Pipe`, `PollingFileWatcher`, `ProcessExitedException`, `ProcessGroup`, `Ptr`, `QuoteNode`, `RandomDevice`, `Range`, `Rational`, `RawFD`, `ReadOnlyMemoryError`, `Real`, `ReentrantLock`, `Ref`, `Regex`, `RegexMatch`, `RemoteException`, `RemoteRef`, `RepString`, `RevString`, `RopeString`, `RoundingMode`, `SegmentationFault`, `SerializationState`, `Set`, `SharedArray`, `SharedMatrix`, `SharedVector`, `Signed`, `SimpleVector`, `SparseMatrixCSC`, `StackOverflowError`, `StatStruct`, `StepRange`, `StridedArray`, `StridedMatrix`, `StridedVecOrMat`, `StridedVector`, `SubArray`, `SubString`, `SymTridiagonal`, `Symbol`, `SymbolNode`, `Symmetric`, `SystemError`, `TCPSocket`, `Task`, `Text`, `TextDisplay`, `Timer`, `TopNode`, `Tridiagonal`, `Tuple`, `Type`, `TypeConstructor`, `TypeError`, `TypeName`, `TypeVar`, `UDPSocket`, `UInt`, `UInt128`, `UInt16`, `UInt32`, `UInt64`, `UInt8`, `UTF16String`, `UTF32String`, `UTF8String`, `UndefRefError`, `UndefVarError`, `UnicodeError`, `UniformScaling`, `Union`, `UnitRange`, `Unsigned`, `UpperTriangular`, `Val`, `Vararg`, `VecOrMat`, `Vector`, `VersionNumber`, `Void`, `WString`, `WeakKeyDict`, `WeakRef`, `WorkerConfig`, `Zip`), KeywordType, nil},
{Words(``, `\b`, `function`, `abstract type`, `primitive type`, `baremodule`, `begin`, `bitstype`, `break`, `catch`, `ccall`, `continue`, `do`, `else`, `elseif`, `end`, `export`, `finally`, `for`, `if`, `import`, `let`, `macro`, `module`, `mutable`, `quote`, `return`, `struct`, `try`, `using`, `while`), Keyword, nil},
{Words(``, `\b`, `ASCIIString`, `AbstractArray`, `AbstractChannel`, `AbstractDict`, `AbstractFloat`, `AbstractMatrix`, `AbstractRNG`, `AbstractSparseArray`, `AbstractSparseMatrix`, `AbstractSparseVector`, `AbstractString`, `AbstractVecOrMat`, `AbstractVector`, `Any`, `ArgumentError`, `Array`, `AssertionError`, `Base64DecodePipe`, `Base64EncodePipe`, `Bidiagonal`, `BigFloat`, `BigInt`, `BitArray`, `BitMatrix`, `BitVector`, `Bool`, `BoundsError`, `Box`, `BufferStream`, `CapturedException`, `CartesianIndex`, `CartesianRange`, `Cchar`, `Cdouble`, `Cfloat`, `Channel`, `Char`, `Cint`, `Cintmax_t`, `Clong`, `Clonglong`, `ClusterManager`, `Cmd`, `Coff_t`, `Colon`, `Complex`, `Complex128`, `Complex32`, `Complex64`, `CompositeException`, `Condition`, `Cptrdiff_t`, `Cshort`, `Csize_t`, `Cssize_t`, `Cstring`, `Cuchar`, `Cuint`, `Cuintmax_t`, `Culong`, `Culonglong`, `Cushort`, `Cwchar_t`, `Cwstring`, `DataType`, `Date`, `DateTime`, `DenseArray`, `DenseMatrix`, `DenseVecOrMat`, `DenseVector`, `Diagonal`, `Dict`, `DimensionMismatch`, `Dims`, `DirectIndexString`, `Display`, `DivideError`, `DomainError`, `EOFError`, `EachLine`, `Enum`, `Enumerate`, `ErrorException`, `Exception`, `Expr`, `Factorization`, `FileMonitor`, `FileOffset`, `Filter`, `Float16`, `Float32`, `Float64`, `FloatRange`, `Function`, `GenSym`, `GlobalRef`, `GotoNode`, `HTML`, `Hermitian`, `IO`, `IOBuffer`, `IOStream`, `IPv4`, `IPv6`, `InexactError`, `InitError`, `Int`, `Int128`, `Int16`, `Int32`, `Int64`, `Int8`, `IntSet`, `Integer`, `InterruptException`, `IntrinsicFunction`, `InvalidStateException`, `Irrational`, `KeyError`, `LabelNode`, `LambdaStaticData`, `LinSpace`, `LineNumberNode`, `LoadError`, `LocalProcess`, `LowerTriangular`, `MIME`, `Matrix`, `MersenneTwister`, `Method`, `MethodError`, `MethodTable`, `Module`, `NTuple`, `NewvarNode`, `NullException`, `Nullable`, `Number`, `ObjectIdDict`, `OrdinalRange`, `OutOfMemoryError`, `OverflowError`, `Pair`, `ParseError`, `PartialQuickSort`, `Pipe`, `PollingFileWatcher`, `ProcessExitedException`, `ProcessGroup`, `Ptr`, `QuoteNode`, `RandomDevice`, `Range`, `Rational`, `RawFD`, `ReadOnlyMemoryError`, `Real`, `ReentrantLock`, `Ref`, `Regex`, `RegexMatch`, `RemoteException`, `RemoteRef`, `RepString`, `RevString`, `RopeString`, `RoundingMode`, `SegmentationFault`, `SerializationState`, `Set`, `SharedArray`, `SharedMatrix`, `SharedVector`, `Signed`, `SimpleVector`, `SparseMatrixCSC`, `StackOverflowError`, `StatStruct`, `StepRange`, `StridedArray`, `StridedMatrix`, `StridedVecOrMat`, `StridedVector`, `SubArray`, `SubString`, `SymTridiagonal`, `Symbol`, `SymbolNode`, `Symmetric`, `SystemError`, `TCPSocket`, `Task`, `Text`, `TextDisplay`, `Timer`, `TopNode`, `Tridiagonal`, `Tuple`, `Type`, `TypeConstructor`, `TypeError`, `TypeName`, `TypeVar`, `UDPSocket`, `UInt`, `UInt128`, `UInt16`, `UInt32`, `UInt64`, `UInt8`, `UTF16String`, `UTF32String`, `UTF8String`, `UndefRefError`, `UndefVarError`, `UnicodeError`, `UniformScaling`, `Union`, `UnitRange`, `Unsigned`, `UpperTriangular`, `Val`, `Vararg`, `VecOrMat`, `Vector`, `VersionNumber`, `Void`, `WString`, `WeakKeyDict`, `WeakRef`, `WorkerConfig`, `Zip`), KeywordType, nil},
{Words(``, `\b`, `ARGS`, `CPU_CORES`, `C_NULL`, `DevNull`, `ENDIAN_BOM`, `ENV`, `I`, `Inf`, `Inf16`, `Inf32`, `Inf64`, `InsertionSort`, `JULIA_HOME`, `LOAD_PATH`, `MergeSort`, `NaN`, `NaN16`, `NaN32`, `NaN64`, `OS_NAME`, `QuickSort`, `RoundDown`, `RoundFromZero`, `RoundNearest`, `RoundNearestTiesAway`, `RoundNearestTiesUp`, `RoundToZero`, `RoundUp`, `STDERR`, `STDIN`, `STDOUT`, `VERSION`, `WORD_SIZE`, `catalan`, `e`, `eu`, `eulergamma`, `golden`, `im`, `nothing`, `pi`, `γ`, `π`, `φ`), NameBuiltin, nil},
{Words(``, ``, `=`, `:=`, `+=`, `-=`, `*=`, `/=`, `//=`, `.//=`, `.*=`, `./=`, `\=`, `.\=`, `^=`, `.^=`, `÷=`, `.÷=`, `%=`, `.%=`, `|=`, `&=`, `$=`, `=>`, `<<=`, `>>=`, `>>>=`, `~`, `.+=`, `.-=`, `?`, `--`, `-->`, `||`, `&&`, `>`, `<`, `>=`, ``, `<=`, ``, `==`, `===`, ``, `!=`, ``, `!==`, ``, `.>`, `.<`, `.>=`, `.≥`, `.<=`, `.≤`, `.==`, `.!=`, `.≠`, `.=`, `.!`, `<:`, `>:`, ``, ``, ``, ``, ``, ``, ``, ``, ``, `|>`, `<|`, `:`, `+`, `-`, `.+`, `.-`, `|`, ``, `$`, `<<`, `>>`, `>>>`, `.<<`, `.>>`, `.>>>`, `*`, `/`, `./`, `÷`, ``, `%`, ``, `.%`, `.*`, `\`, `.\`, `&`, ``, `//`, `.//`, `^`, `.^`, `::`, `.`, `+`, `-`, `!`, `~`, ``, ``, ``), Operator, nil},
{Words(``, ``, `=`, `:=`, `+=`, `-=`, `*=`, `/=`, `//=`, `.//=`, `.*=`, `./=`, `\=`, `.\=`, `^=`, `.^=`, `÷=`, `.÷=`, `%=`, `.%=`, `|=`, `&=`, `$=`, `=>`, `<<=`, `>>=`, `>>>=`, `~`, `.+=`, `.-=`, `?`, `--`, `-->`, `||`, `&&`, `>`, `<`, `>=`, ``, `<=`, ``, `==`, `===`, ``, `!=`, ``, `!==`, ``, `.>`, `.<`, `.>=`, `.≥`, `.<=`, `.≤`, `.==`, `.!=`, `.≠`, `.=`, `.!`, `<:`, `>:`, ``, ``, ``, ``, ``, ``, ``, ``, ``, `|>`, `<|`, `:`, `+`, `-`, `.+`, `.-`, `|`, ``, `$`, `<<`, `>>`, `>>>`, `.<<`, `.>>`, `.>>>`, `*`, `/`, `./`, `÷`, ``, `%`, ``, `.%`, `.*`, `\`, `.\`, `&`, ``, `//`, `.//`, `^`, `.^`, `::`, `.`, `+`, `-`, `!`, ``, ``, ``), Operator, nil},
{`'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'`, LiteralStringChar, nil},
{`(?<=[.\w)\]])\'+`, Operator, nil},
{`"""`, LiteralString, Push("tqstring")},
@ -34,8 +35,8 @@ var Julia = internal.Register(MustNewLexer(
{`r"""`, LiteralStringRegex, Push("tqregex")},
{`r"`, LiteralStringRegex, Push("regex")},
{"`", LiteralStringBacktick, Push("command")},
{`(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, Name, nil},
{`@(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, NameDecorator, nil},
{`((?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*)(')?`, ByGroups(Name, Operator), nil},
{`(@(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*)(')?`, ByGroups(NameDecorator, Operator), nil},
{`(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil},
{`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil},
{`\d+(_\d+)+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil},

View File

@ -2,11 +2,12 @@ package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal"
)
// Markdown lexer.
var Markdown = internal.Register(MustNewLexer(
var Markdown = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{
Name: "markdown",
Aliases: []string{"md", "mkd"},
@ -40,8 +41,8 @@ var Markdown = internal.Register(MustNewLexer(
{"`[^`]+`", LiteralStringBacktick, nil},
{`[@#][\w/:]+`, NameEntity, nil},
{`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil},
{`[^\\\s]+`, Text, nil},
{`.|\n`, Text, nil},
{`[^\\\s]+`, Other, nil},
{`.|\n`, Other, nil},
},
},
))
)))
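Markdown is now registered as a DelegatingLexer over the HTML lexer: the two rule changes above emit body text as Other, and those Other spans are re-lexed as HTML before formatting. A sketch of driving it through the usual lexer/style/formatter pipeline; the sample input and the choice of HTML formatter and "github" style are assumptions, not taken from this diff.

package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	src := "# Heading\n\nSome *markdown* with <strong>inline HTML</strong>.\n"
	lexer := lexers.Get("markdown")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	it, err := lexer.Tokenise(nil, src)
	if err != nil {
		panic(err)
	}
	// By the time tokens reach the formatter, spans the markdown rules marked
	// as Other have already been re-lexed by the delegated HTML lexer.
	if err := formatters.Get("html").Format(os.Stdout, styles.Get("github"), it); err != nil {
		panic(err)
	}
}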

43
vendor/github.com/alecthomas/chroma/lexers/m/mlir.go generated vendored Normal file
View File

@ -0,0 +1,43 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// MLIR lexer.
var Mlir = internal.Register(MustNewLexer(
&Config{
Name: "MLIR",
Aliases: []string{"mlir"},
Filenames: []string{"*.mlir"},
MimeTypes: []string{"text/x-mlir"},
},
Rules{
"root": {
Include("whitespace"),
{`c?"[^"]*?"`, LiteralString, nil},
{`\^([-a-zA-Z$._][\w\-$.0-9]*)\s*`, NameLabel, nil},
{`([\w\d_$.]+)\s*=`, NameLabel, nil},
Include("keyword"),
{`->`, Punctuation, nil},
{`@([\w_][\w\d_$.]*)`, NameFunction, nil},
{`[%#][\w\d_$.]+`, NameVariable, nil},
{`([1-9?][\d?]*\s*x)+`, LiteralNumber, nil},
{`0[xX][a-fA-F0-9]+`, LiteralNumber, nil},
{`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil},
{`[=<>{}\[\]()*.,!:]|x\b`, Punctuation, nil},
{`[\w\d]+`, Text, nil},
},
"whitespace": {
{`(\n|\s)+`, Text, nil},
{`//.*?\n`, Comment, nil},
},
"keyword": {
{Words(``, ``, `constant`, `return`), KeywordType, nil},
{Words(``, ``, `func`, `loc`, `memref`, `tensor`, `vector`), KeywordType, nil},
{`bf16|f16|f32|f64|index`, Keyword, nil},
{`i[1-9]\d*`, Keyword, nil},
},
},
))

View File

@ -26,7 +26,7 @@ var MySQL = internal.Register(MustNewLexer(
{`((?:_[a-z0-9]+)?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("double-string")},
{"[+*/<>=~!@#%^&|`?-]", Operator, nil},
{`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, Text, Punctuation), nil},
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil},
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|identified|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|privileges|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|user|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil},
{`\b(auto_increment|engine|charset|tables)\b`, KeywordPseudo, nil},
{`(true|false|null)`, NameConstant, nil},
{`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil},

View File

@ -16,6 +16,7 @@ var Nim = internal.Register(MustNewLexer(
},
Rules{
"root": {
{`#\[[\s\S]*?\]#`, CommentMultiline, nil},
{`##.*$`, LiteralStringDoc, nil},
{`#.*$`, Comment, nil},
{`[*=><+\-/@$~&%!?|\\\[\]]`, Operator, nil},

View File

@ -11,6 +11,7 @@ var Plaintext = internal.Register(MustNewLexer(
Aliases: []string{"text", "plain", "no-highlight"},
Filenames: []string{"*.txt"},
MimeTypes: []string{"text/plain"},
Priority: 0.1,
},
internal.PlaintextRules,
))

View File

@ -22,6 +22,7 @@ var Powershell = internal.Register(MustNewLexer(
{`^(\s*#[#\s]*)(\.(?:component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis))([^\n]*$)`, ByGroups(Comment, LiteralStringDoc, Comment), nil},
{`#[^\n]*?$`, Comment, nil},
{`(&lt;|<)#`, CommentMultiline, Push("multline")},
{`(?i)([A-Z]:)`, Name, nil},
{`@"\n`, LiteralStringHeredoc, Push("heredoc-double")},
{`@'\n.*?\n'@`, LiteralStringHeredoc, nil},
{"`[\\'\"$@-]", Punctuation, nil},
@ -30,7 +31,8 @@ var Powershell = internal.Register(MustNewLexer(
{`(\$|@@|@)((global|script|private|env):)?\w+`, NameVariable, nil},
{`(while|validateset|validaterange|validatepattern|validatelength|validatecount|until|trap|switch|return|ref|process|param|parameter|in|if|global:|function|foreach|for|finally|filter|end|elseif|else|dynamicparam|do|default|continue|cmdletbinding|break|begin|alias|\?|%|#script|#private|#local|#global|mandatory|parametersetname|position|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|helpmessage|try|catch|throw)\b`, Keyword, nil},
{`-(and|as|band|bnot|bor|bxor|casesensitive|ccontains|ceq|cge|cgt|cle|clike|clt|cmatch|cne|cnotcontains|cnotlike|cnotmatch|contains|creplace|eq|exact|f|file|ge|gt|icontains|ieq|ige|igt|ile|ilike|ilt|imatch|ine|inotcontains|inotlike|inotmatch|ireplace|is|isnot|le|like|lt|match|ne|not|notcontains|notlike|notmatch|or|regex|replace|wildcard)\b`, Operator, nil},
{`(write|where|wait|use|update|unregister|undo|trace|test|tee|take|suspend|stop|start|split|sort|skip|show|set|send|select|scroll|resume|restore|restart|resolve|resize|reset|rename|remove|register|receive|read|push|pop|ping|out|new|move|measure|limit|join|invoke|import|group|get|format|foreach|export|expand|exit|enter|enable|disconnect|disable|debug|cxnew|copy|convertto|convertfrom|convert|connect|complete|compare|clear|checkpoint|aggregate|add)-[a-z_]\w*\b`, NameBuiltin, nil},
{`(write|where|watch|wait|use|update|unregister|unpublish|unprotect|unlock|uninstall|undo|unblock|trace|test|tee|take|sync|switch|suspend|submit|stop|step|start|split|sort|skip|show|set|send|select|search|scroll|save|revoke|resume|restore|restart|resolve|resize|reset|request|repair|rename|remove|register|redo|receive|read|push|publish|protect|pop|ping|out|optimize|open|new|move|mount|merge|measure|lock|limit|join|invoke|install|initialize|import|hide|group|grant|get|format|foreach|find|export|expand|exit|enter|enable|edit|dismount|disconnect|disable|deny|debug|cxnew|copy|convertto|convertfrom|convert|connect|confirm|compress|complete|compare|close|clear|checkpoint|block|backup|assert|approve|aggregate|add)-[a-z_]\w*\b`, NameBuiltin, nil},
{`(ac|asnp|cat|cd|cfs|chdir|clc|clear|clhy|cli|clp|cls|clv|cnsn|compare|copy|cp|cpi|cpp|curl|cvpa|dbp|del|diff|dir|dnsn|ebp|echo|epal|epcsv|epsn|erase|etsn|exsn|fc|fhx|fl|foreach|ft|fw|gal|gbp|gc|gci|gcm|gcs|gdr|ghy|gi|gjb|gl|gm|gmo|gp|gps|gpv|group|gsn|gsnp|gsv|gu|gv|gwmi|h|history|icm|iex|ihy|ii|ipal|ipcsv|ipmo|ipsn|irm|ise|iwmi|iwr|kill|lp|ls|man|md|measure|mi|mount|move|mp|mv|nal|ndr|ni|nmo|npssc|nsn|nv|ogv|oh|popd|ps|pushd|pwd|r|rbp|rcjb|rcsn|rd|rdr|ren|ri|rjb|rm|rmdir|rmo|rni|rnp|rp|rsn|rsnp|rujb|rv|rvpa|rwmi|sajb|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls|sort|sp|spjb|spps|spsv|start|sujb|sv|swmi|tee|trcm|type|wget|where|wjb|write)\s`, NameBuiltin, nil},
{"\\[[a-z_\\[][\\w. `,\\[\\]]*\\]", NameConstant, nil},
{`-[a-z_]\w*`, Name, nil},
{`\w+`, Name, nil},

View File

@ -15,7 +15,6 @@ var Prolog = internal.Register(MustNewLexer(
},
Rules{
"root": {
{`^#.*`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("nested-comment")},
{`%.*`, CommentSingle, nil},
{`0\'.`, LiteralStringChar, nil},

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,67 @@
package r
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Reasonml lexer.
var Reasonml = internal.Register(MustNewLexer(
&Config{
Name: "ReasonML",
Aliases: []string{"reason", "reasonml"},
Filenames: []string{"*.re", "*.rei"},
MimeTypes: []string{"text/x-reasonml"},
},
Rules{
"escape-sequence": {
{`\\[\\"\'ntbr]`, LiteralStringEscape, nil},
{`\\[0-9]{3}`, LiteralStringEscape, nil},
{`\\x[0-9a-fA-F]{2}`, LiteralStringEscape, nil},
},
"root": {
{`\s+`, Text, nil},
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
{`\b([A-Z][\w\']*)(?=\s*\.)`, NameNamespace, Push("dotted")},
{`\b([A-Z][\w\']*)`, NameClass, nil},
{`//.*?\n`, CommentSingle, nil},
{`\/\*(?![\/])`, CommentMultiline, Push("comment")},
{`\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|esfun|function|functor|if|in|include|inherit|initializer|lazy|let|switch|module|pub|mutable|new|nonrec|object|of|open|pri|rec|sig|struct|then|to|true|try|type|val|virtual|when|while|with)\b`, Keyword, nil},
{"(~|\\}|\\|]|\\||\\|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.\\.|\\.\\.|\\.|=>|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", OperatorWord, nil},
{`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil},
{`\b(and|asr|land|lor|lsl|lsr|lxor|mod|or)\b`, OperatorWord, nil},
{`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil},
{`[^\W\d][\w']*`, Name, nil},
{`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil},
{`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil},
{`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil},
{`0[bB][01][01_]*`, LiteralNumberBin, nil},
{`\d[\d_]*`, LiteralNumberInteger, nil},
{`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil},
{`'.'`, LiteralStringChar, nil},
{`'`, Keyword, nil},
{`"`, LiteralStringDouble, Push("string")},
{`[~?][a-z][\w\']*:`, NameVariable, nil},
},
"comment": {
{`[^\/*]+`, CommentMultiline, nil},
{`\/\*`, CommentMultiline, Push()},
{`\*\/`, CommentMultiline, Pop(1)},
{`[\*]`, CommentMultiline, nil},
},
"string": {
{`[^\\"]+`, LiteralStringDouble, nil},
Include("escape-sequence"),
{`\\\n`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
"dotted": {
{`\s+`, Text, nil},
{`\.`, Punctuation, nil},
{`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil},
{`[A-Z][\w\']*`, NameClass, Pop(1)},
{`[a-z_][\w\']*`, Name, Pop(1)},
Default(Pop(1)),
},
},
))

View File

@ -58,7 +58,7 @@ var Rust = internal.Register(MustNewLexer(
{`'[a-zA-Z_]\w*`, NameAttribute, nil},
{`[{}()\[\],.;]`, Punctuation, nil},
{`[+\-*/%&|<>^!~@=:?]`, Operator, nil},
{`[a-zA-Z_]\w*`, Name, nil},
{`(r#)?[a-zA-Z_]\w*`, Name, nil},
{`#!?\[`, CommentPreproc, Push("attribute[")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")},

94
vendor/github.com/alecthomas/chroma/lexers/s/sas.go generated vendored Normal file
View File

@ -0,0 +1,94 @@
package s
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Sas lexer.
var Sas = internal.Register(MustNewLexer(
&Config{
Name: "SAS",
Aliases: []string{"sas"},
Filenames: []string{"*.SAS", "*.sas"},
MimeTypes: []string{"text/x-sas", "text/sas", "application/x-sas"},
CaseInsensitive: true,
},
Rules{
"root": {
Include("comments"),
Include("proc-data"),
Include("cards-datalines"),
Include("logs"),
Include("general"),
{`.`, Text, nil},
{`\\\n`, Text, nil},
{`\n`, Text, nil},
},
"comments": {
{`^\s*\*.*?;`, Comment, nil},
{`/\*.*?\*/`, Comment, nil},
{`^\s*\*(.|\n)*?;`, CommentMultiline, nil},
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
},
"proc-data": {
{`(^|;)\s*(proc \w+|data|run|quit)[\s;]`, KeywordReserved, nil},
},
"cards-datalines": {
{`^\s*(datalines|cards)\s*;\s*$`, Keyword, Push("data")},
},
"data": {
{`(.|\n)*^\s*;\s*$`, Other, Pop(1)},
},
"logs": {
{`\n?^\s*%?put `, Keyword, Push("log-messages")},
},
"log-messages": {
{`NOTE(:|-).*`, Generic, Pop(1)},
{`WARNING(:|-).*`, GenericEmph, Pop(1)},
{`ERROR(:|-).*`, GenericError, Pop(1)},
Include("general"),
},
"general": {
Include("keywords"),
Include("vars-strings"),
Include("special"),
Include("numbers"),
},
"keywords": {
{Words(`\b`, `\b`, `abort`, `array`, `attrib`, `by`, `call`, `cards`, `cards4`, `catname`, `continue`, `datalines`, `datalines4`, `delete`, `delim`, `delimiter`, `display`, `dm`, `drop`, `endsas`, `error`, `file`, `filename`, `footnote`, `format`, `goto`, `in`, `infile`, `informat`, `input`, `keep`, `label`, `leave`, `length`, `libname`, `link`, `list`, `lostcard`, `merge`, `missing`, `modify`, `options`, `output`, `out`, `page`, `put`, `redirect`, `remove`, `rename`, `replace`, `retain`, `return`, `select`, `set`, `skip`, `startsas`, `stop`, `title`, `update`, `waitsas`, `where`, `window`, `x`, `systask`), Keyword, nil},
{Words(`\b`, `\b`, `add`, `and`, `alter`, `as`, `cascade`, `check`, `create`, `delete`, `describe`, `distinct`, `drop`, `foreign`, `from`, `group`, `having`, `index`, `insert`, `into`, `in`, `key`, `like`, `message`, `modify`, `msgtype`, `not`, `null`, `on`, `or`, `order`, `primary`, `references`, `reset`, `restrict`, `select`, `set`, `table`, `unique`, `update`, `validate`, `view`, `where`), Keyword, nil},
{Words(`\b`, `\b`, `do`, `if`, `then`, `else`, `end`, `until`, `while`), Keyword, nil},
{Words(`%`, `\b`, `bquote`, `nrbquote`, `cmpres`, `qcmpres`, `compstor`, `datatyp`, `display`, `do`, `else`, `end`, `eval`, `global`, `goto`, `if`, `index`, `input`, `keydef`, `label`, `left`, `length`, `let`, `local`, `lowcase`, `macro`, `mend`, `nrquote`, `nrstr`, `put`, `qleft`, `qlowcase`, `qscan`, `qsubstr`, `qsysfunc`, `qtrim`, `quote`, `qupcase`, `scan`, `str`, `substr`, `superq`, `syscall`, `sysevalf`, `sysexec`, `sysfunc`, `sysget`, `syslput`, `sysprod`, `sysrc`, `sysrput`, `then`, `to`, `trim`, `unquote`, `until`, `upcase`, `verify`, `while`, `window`), NameBuiltin, nil},
{Words(`\b`, `\(`, `abs`, `addr`, `airy`, `arcos`, `arsin`, `atan`, `attrc`, `attrn`, `band`, `betainv`, `blshift`, `bnot`, `bor`, `brshift`, `bxor`, `byte`, `cdf`, `ceil`, `cexist`, `cinv`, `close`, `cnonct`, `collate`, `compbl`, `compound`, `compress`, `cos`, `cosh`, `css`, `curobs`, `cv`, `daccdb`, `daccdbsl`, `daccsl`, `daccsyd`, `dacctab`, `dairy`, `date`, `datejul`, `datepart`, `datetime`, `day`, `dclose`, `depdb`, `depdbsl`, `depsl`, `depsyd`, `deptab`, `dequote`, `dhms`, `dif`, `digamma`, `dim`, `dinfo`, `dnum`, `dopen`, `doptname`, `doptnum`, `dread`, `dropnote`, `dsname`, `erf`, `erfc`, `exist`, `exp`, `fappend`, `fclose`, `fcol`, `fdelete`, `fetch`, `fetchobs`, `fexist`, `fget`, `fileexist`, `filename`, `fileref`, `finfo`, `finv`, `fipname`, `fipnamel`, `fipstate`, `floor`, `fnonct`, `fnote`, `fopen`, `foptname`, `foptnum`, `fpoint`, `fpos`, `fput`, `fread`, `frewind`, `frlen`, `fsep`, `fuzz`, `fwrite`, `gaminv`, `gamma`, `getoption`, `getvarc`, `getvarn`, `hbound`, `hms`, `hosthelp`, `hour`, `ibessel`, `index`, `indexc`, `indexw`, `input`, `inputc`, `inputn`, `int`, `intck`, `intnx`, `intrr`, `irr`, `jbessel`, `juldate`, `kurtosis`, `lag`, `lbound`, `left`, `length`, `lgamma`, `libname`, `libref`, `log`, `log10`, `log2`, `logpdf`, `logpmf`, `logsdf`, `lowcase`, `max`, `mdy`, `mean`, `min`, `minute`, `mod`, `month`, `mopen`, `mort`, `n`, `netpv`, `nmiss`, `normal`, `note`, `npv`, `open`, `ordinal`, `pathname`, `pdf`, `peek`, `peekc`, `pmf`, `point`, `poisson`, `poke`, `probbeta`, `probbnml`, `probchi`, `probf`, `probgam`, `probhypr`, `probit`, `probnegb`, `probnorm`, `probt`, `put`, `putc`, `putn`, `qtr`, `quote`, `ranbin`, `rancau`, `ranexp`, `rangam`, `range`, `rank`, `rannor`, `ranpoi`, `rantbl`, `rantri`, `ranuni`, `repeat`, `resolve`, `reverse`, `rewind`, `right`, `round`, `saving`, `scan`, `sdf`, `second`, `sign`, `sin`, `sinh`, `skewness`, `soundex`, `spedis`, `sqrt`, `std`, `stderr`, `stfips`, `stname`, `stnamel`, `substr`, `sum`, `symget`, `sysget`, `sysmsg`, `sysprod`, `sysrc`, `system`, `tan`, `tanh`, `time`, `timepart`, `tinv`, `tnonct`, `today`, `translate`, `tranwrd`, `trigamma`, `trim`, `trimn`, `trunc`, `uniform`, `upcase`, `uss`, `var`, `varfmt`, `varinfmt`, `varlabel`, `varlen`, `varname`, `varnum`, `varray`, `varrayx`, `vartype`, `verify`, `vformat`, `vformatd`, `vformatdx`, `vformatn`, `vformatnx`, `vformatw`, `vformatwx`, `vformatx`, `vinarray`, `vinarrayx`, `vinformat`, `vinformatd`, `vinformatdx`, `vinformatn`, `vinformatnx`, `vinformatw`, `vinformatwx`, `vinformatx`, `vlabel`, `vlabelx`, `vlength`, `vlengthx`, `vname`, `vnamex`, `vtype`, `vtypex`, `weekday`, `year`, `yyq`, `zipfips`, `zipname`, `zipnamel`, `zipstate`), NameBuiltin, nil},
},
"vars-strings": {
{`&[a-z_]\w{0,31}\.?`, NameVariable, nil},
{`%[a-z_]\w{0,31}`, NameFunction, nil},
{`\'`, LiteralString, Push("string_squote")},
{`"`, LiteralString, Push("string_dquote")},
},
"string_squote": {
{`'`, LiteralString, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
{`[^$\'\\]+`, LiteralString, nil},
{`[$\'\\]`, LiteralString, nil},
},
"string_dquote": {
{`"`, LiteralString, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
{`&`, NameVariable, Push("validvar")},
{`[^$&"\\]+`, LiteralString, nil},
{`[$"\\]`, LiteralString, nil},
},
"validvar": {
{`[a-z_]\w{0,31}\.?`, NameVariable, Pop(1)},
},
"numbers": {
{`\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b`, LiteralNumber, nil},
},
"special": {
{`(null|missing|_all_|_automatic_|_character_|_n_|_infile_|_name_|_null_|_numeric_|_user_|_webout_)`, KeywordConstant, nil},
},
},
))

View File

@ -80,7 +80,7 @@ var Scss = internal.Register(MustNewLexer(
{`[\w-]+`, NameTag, nil},
{`#\{`, LiteralStringInterpol, Push("interpolation")},
{`&`, Keyword, nil},
{`[~^*!&\[\]()<>|+=@:;,./?-]`, Operator, nil},
{`[~^*!&\[\]()<>|+=@:,./?-]`, Operator, nil},
{`"`, LiteralStringDouble, Push("string-double")},
{`'`, LiteralStringSingle, Push("string-single")},
{`\n`, Text, nil},
@ -92,9 +92,9 @@ var Scss = internal.Register(MustNewLexer(
{`"`, LiteralStringDouble, Pop(1)},
},
"string-single": {
{`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringDouble, nil},
{`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringSingle, nil},
{`#\{`, LiteralStringInterpol, Push("interpolation")},
{`'`, LiteralStringDouble, Pop(1)},
{`'`, LiteralStringSingle, Pop(1)},
},
"string-url": {
{`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil},

200
vendor/github.com/alecthomas/chroma/lexers/s/sml.go generated vendored Normal file
View File

@ -0,0 +1,200 @@
package s
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Standard ML lexer.
var StandardML = internal.Register(MustNewLexer(
&Config{
Name: "Standard ML",
Aliases: []string{"sml"},
Filenames: []string{"*.sml", "*.sig", "*.fun"},
MimeTypes: []string{"text/x-standardml", "application/x-standardml"},
},
Rules{
"whitespace": {
{`\s+`, Text, nil},
{`\(\*`, CommentMultiline, Push("comment")},
},
"delimiters": {
{`\(|\[|\{`, Punctuation, Push("main")},
{`\)|\]|\}`, Punctuation, Pop(1)},
{`\b(let|if|local)\b(?!\')`, KeywordReserved, Push("main", "main")},
{`\b(struct|sig|while)\b(?!\')`, KeywordReserved, Push("main")},
{`\b(do|else|end|in|then)\b(?!\')`, KeywordReserved, Pop(1)},
},
"core": {
{`(_|\}|\{|\)|;|,|\[|\(|\]|\.\.\.)`, Punctuation, nil},
{`#"`, LiteralStringChar, Push("char")},
{`"`, LiteralStringDouble, Push("string")},
{`~?0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`0wx[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`0w\d+`, LiteralNumberInteger, nil},
{`~?\d+\.\d+[eE]~?\d+`, LiteralNumberFloat, nil},
{`~?\d+\.\d+`, LiteralNumberFloat, nil},
{`~?\d+[eE]~?\d+`, LiteralNumberFloat, nil},
{`~?\d+`, LiteralNumberInteger, nil},
{`#\s*[1-9][0-9]*`, NameLabel, nil},
{`#\s*([a-zA-Z][\w']*)`, NameLabel, nil},
{"#\\s+([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameLabel, nil},
{`\b(datatype|abstype)\b(?!\')`, KeywordReserved, Push("dname")},
{`(?=\b(exception)\b(?!\'))`, Text, Push("ename")},
{`\b(functor|include|open|signature|structure)\b(?!\')`, KeywordReserved, Push("sname")},
{`\b(type|eqtype)\b(?!\')`, KeywordReserved, Push("tname")},
{`\'[\w\']*`, NameDecorator, nil},
{`([a-zA-Z][\w']*)(\.)`, NameNamespace, Push("dotted")},
{`\b(abstype|and|andalso|as|case|datatype|do|else|end|exception|fn|fun|handle|if|in|infix|infixr|let|local|nonfix|of|op|open|orelse|raise|rec|then|type|val|with|withtype|while|eqtype|functor|include|sharing|sig|signature|struct|structure|where)\b`, KeywordReserved, nil},
{`([a-zA-Z][\w']*)`, Name, nil},
{`\b(:|\|,=|=>|->|#|:>)\b`, KeywordReserved, nil},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Name, nil},
},
"dotted": {
{`([a-zA-Z][\w']*)(\.)`, NameNamespace, nil},
// ignoring reserved words
{`([a-zA-Z][\w']*)`, Name, Pop(1)},
// ignoring reserved words
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Name, Pop(1)},
{`\s+`, Error, nil},
{`\S+`, Error, nil},
},
"root": {
Default(Push("main")),
},
"main": {
Include("whitespace"),
{`\b(val|and)\b(?!\')`, KeywordReserved, Push("vname")},
{`\b(fun)\b(?!\')`, KeywordReserved, Push("#pop", "main-fun", "fname")},
Include("delimiters"),
Include("core"),
{`\S+`, Error, nil},
},
"main-fun": {
Include("whitespace"),
{`\s`, Text, nil},
{`\(\*`, CommentMultiline, Push("comment")},
{`\b(fun|and)\b(?!\')`, KeywordReserved, Push("fname")},
{`\b(val)\b(?!\')`, KeywordReserved, Push("#pop", "main", "vname")},
{`\|`, Punctuation, Push("fname")},
{`\b(case|handle)\b(?!\')`, KeywordReserved, Push("#pop", "main")},
Include("delimiters"),
Include("core"),
{`\S+`, Error, nil},
},
"char": {
{`[^"\\]`, LiteralStringChar, nil},
{`\\[\\"abtnvfr]`, LiteralStringEscape, nil},
{`\\\^[\x40-\x5e]`, LiteralStringEscape, nil},
{`\\[0-9]{3}`, LiteralStringEscape, nil},
{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil},
{`\\\s+\\`, LiteralStringInterpol, nil},
{`"`, LiteralStringChar, Pop(1)},
},
"string": {
{`[^"\\]`, LiteralStringDouble, nil},
{`\\[\\"abtnvfr]`, LiteralStringEscape, nil},
{`\\\^[\x40-\x5e]`, LiteralStringEscape, nil},
{`\\[0-9]{3}`, LiteralStringEscape, nil},
{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil},
{`\\\s+\\`, LiteralStringInterpol, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
"breakout": {
{`(?=\b(where|do|handle|if|sig|op|while|case|as|else|signature|andalso|struct|infixr|functor|in|structure|then|local|rec|end|fun|of|orelse|val|include|fn|with|exception|let|and|infix|sharing|datatype|type|abstype|withtype|eqtype|nonfix|raise|open)\b(?!\'))`, Text, Pop(1)},
},
"sname": {
Include("whitespace"),
Include("breakout"),
{`([a-zA-Z][\w']*)`, NameNamespace, nil},
Default(Pop(1)),
},
"fname": {
Include("whitespace"),
{`\'[\w\']*`, NameDecorator, nil},
{`\(`, Punctuation, Push("tyvarseq")},
{`([a-zA-Z][\w']*)`, NameFunction, Pop(1)},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameFunction, Pop(1)},
Default(Pop(1)),
},
"vname": {
Include("whitespace"),
{`\'[\w\']*`, NameDecorator, nil},
{`\(`, Punctuation, Push("tyvarseq")},
{"([a-zA-Z][\\w']*)(\\s*)(=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+))", ByGroups(NameVariable, Text, Punctuation), Pop(1)},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)(\\s*)(=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+))", ByGroups(NameVariable, Text, Punctuation), Pop(1)},
{`([a-zA-Z][\w']*)`, NameVariable, Pop(1)},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameVariable, Pop(1)},
Default(Pop(1)),
},
"tname": {
Include("whitespace"),
Include("breakout"),
{`\'[\w\']*`, NameDecorator, nil},
{`\(`, Punctuation, Push("tyvarseq")},
{"=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Punctuation, Push("#pop", "typbind")},
{`([a-zA-Z][\w']*)`, KeywordType, nil},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", KeywordType, nil},
{`\S+`, Error, Pop(1)},
},
"typbind": {
Include("whitespace"),
{`\b(and)\b(?!\')`, KeywordReserved, Push("#pop", "tname")},
Include("breakout"),
Include("core"),
{`\S+`, Error, Pop(1)},
},
"dname": {
Include("whitespace"),
Include("breakout"),
{`\'[\w\']*`, NameDecorator, nil},
{`\(`, Punctuation, Push("tyvarseq")},
{`(=)(\s*)(datatype)`, ByGroups(Punctuation, Text, KeywordReserved), Pop(1)},
{"=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Punctuation, Push("#pop", "datbind", "datcon")},
{`([a-zA-Z][\w']*)`, KeywordType, nil},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", KeywordType, nil},
{`\S+`, Error, Pop(1)},
},
"datbind": {
Include("whitespace"),
{`\b(and)\b(?!\')`, KeywordReserved, Push("#pop", "dname")},
{`\b(withtype)\b(?!\')`, KeywordReserved, Push("#pop", "tname")},
{`\b(of)\b(?!\')`, KeywordReserved, nil},
{`(\|)(\s*)([a-zA-Z][\w']*)`, ByGroups(Punctuation, Text, NameClass), nil},
{"(\\|)(\\s+)([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", ByGroups(Punctuation, Text, NameClass), nil},
Include("breakout"),
Include("core"),
{`\S+`, Error, nil},
},
"ename": {
Include("whitespace"),
{`(exception|and)\b(\s+)([a-zA-Z][\w']*)`, ByGroups(KeywordReserved, Text, NameClass), nil},
{"(exception|and)\\b(\\s*)([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", ByGroups(KeywordReserved, Text, NameClass), nil},
{`\b(of)\b(?!\')`, KeywordReserved, nil},
Include("breakout"),
Include("core"),
{`\S+`, Error, nil},
},
"datcon": {
Include("whitespace"),
{`([a-zA-Z][\w']*)`, NameClass, Pop(1)},
{"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameClass, Pop(1)},
{`\S+`, Error, Pop(1)},
},
"tyvarseq": {
{`\s`, Text, nil},
{`\(\*`, CommentMultiline, Push("comment")},
{`\'[\w\']*`, NameDecorator, nil},
{`[a-zA-Z][\w']*`, Name, nil},
{`,`, Punctuation, nil},
{`\)`, Punctuation, Pop(1)},
{"[!%&$#+\\-/:<=>?@\\\\~`^|*]+", Name, nil},
},
"comment": {
{`[^(*)]`, CommentMultiline, nil},
{`\(\*`, CommentMultiline, Push()},
{`\*\)`, CommentMultiline, Pop(1)},
{`[(*)]`, CommentMultiline, nil},
},
},
))
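
As a quick sanity check, here is a minimal sketch of driving the newly vendored Standard ML lexer from application code. The "sml" alias and the import paths come from the registration above; the rest of the snippet is illustrative only.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// "sml" is one of the aliases registered by the Standard ML lexer above.
	lexer := lexers.Get("sml")
	it, err := lexer.Tokenise(nil, "val x = 42\n")
	if err != nil {
		panic(err)
	}
	// Iterate until the lexer returns the EOF token.
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}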

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,42 @@
package t
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// TableGen lexer.
var Tablegen = internal.Register(MustNewLexer(
&Config{
Name: "TableGen",
Aliases: []string{"tablegen"},
Filenames: []string{"*.td"},
MimeTypes: []string{"text/x-tablegen"},
},
Rules{
"root": {
Include("macro"),
Include("whitespace"),
{`c?"[^"]*?"`, LiteralString, nil},
Include("keyword"),
{`\$[_a-zA-Z][_\w]*`, NameVariable, nil},
{`\d*[_a-zA-Z][_\w]*`, NameVariable, nil},
{`\[\{[\w\W]*?\}\]`, LiteralString, nil},
{`[+-]?\d+|0x[\da-fA-F]+|0b[01]+`, LiteralNumber, nil},
{`[=<>{}\[\]()*.,!:;]`, Punctuation, nil},
},
"macro": {
{`(#include\s+)("[^"]*")`, ByGroups(CommentPreproc, LiteralString), nil},
{`^\s*#(ifdef|ifndef)\s+[_\w][_\w\d]*`, CommentPreproc, nil},
{`^\s*#define\s+[_\w][_\w\d]*`, CommentPreproc, nil},
{`^\s*#endif`, CommentPreproc, nil},
},
"whitespace": {
{`(\n|\s)+`, Text, nil},
{`//.*?\n`, Comment, nil},
},
"keyword": {
{Words(``, `\b`, `bit`, `bits`, `class`, `code`, `dag`, `def`, `defm`, `field`, `foreach`, `in`, `int`, `let`, `list`, `multiclass`, `string`), Keyword, nil},
},
},
))

View File

@ -15,55 +15,46 @@ var Terraform = internal.Register(MustNewLexer(
},
Rules{
"root": {
Include("string"),
Include("punctuation"),
Include("curly"),
Include("basic"),
Include("whitespace"),
{`[0-9]+`, LiteralNumber, nil},
{`[\[\](),.{}]`, Punctuation, nil},
{`-?[0-9]+`, LiteralNumber, nil},
{`=>`, Punctuation, nil},
{Words(``, `\b`, `true`, `false`), KeywordConstant, nil},
{`/(?s)\*(((?!\*/).)*)\*/`, CommentMultiline, nil},
{`\s*(#|//).*\n`, CommentSingle, nil},
{`([a-zA-Z]\w*)(\s*)(=(?!>))`, ByGroups(NameAttribute, Text, Text), nil},
{Words(`^\s*`, `\b`, `variable`, `data`, `resource`, `provider`, `provisioner`, `module`, `output`), KeywordReserved, nil},
{Words(``, `\b`, `for`, `in`), Keyword, nil},
{Words(``, ``, `count`, `data`, `var`, `module`, `each`), NameBuiltin, nil},
{Words(``, `\b`, `abs`, `ceil`, `floor`, `log`, `max`, `min`, `parseint`, `pow`, `signum`), NameBuiltin, nil},
{Words(``, `\b`, `chomp`, `format`, `formatlist`, `indent`, `join`, `lower`, `regex`, `regexall`, `replace`, `split`, `strrev`, `substr`, `title`, `trim`, `trimprefix`, `trimsuffix`, `trimspace`, `upper`), NameBuiltin, nil},
{Words(`[^.]`, `\b`, `chunklist`, `coalesce`, `coalescelist`, `compact`, `concat`, `contains`, `distinct`, `element`, `flatten`, `index`, `keys`, `length`, `list`, `lookup`, `map`, `matchkeys`, `merge`, `range`, `reverse`, `setintersection`, `setproduct`, `setsubtract`, `setunion`, `slice`, `sort`, `transpose`, `values`, `zipmap`), NameBuiltin, nil},
{Words(`[^.]`, `\b`, `base64decode`, `base64encode`, `base64gzip`, `csvdecode`, `jsondecode`, `jsonencode`, `urlencode`, `yamldecode`, `yamlencode`), NameBuiltin, nil},
{Words(``, `\b`, `abspath`, `dirname`, `pathexpand`, `basename`, `file`, `fileexists`, `fileset`, `filebase64`, `templatefile`), NameBuiltin, nil},
{Words(``, `\b`, `formatdate`, `timeadd`, `timestamp`), NameBuiltin, nil},
{Words(``, `\b`, `base64sha256`, `base64sha512`, `bcrypt`, `filebase64sha256`, `filebase64sha512`, `filemd5`, `filesha1`, `filesha256`, `filesha512`, `md5`, `rsadecrypt`, `sha1`, `sha256`, `sha512`, `uuid`, `uuidv5`), NameBuiltin, nil},
{Words(``, `\b`, `cidrhost`, `cidrnetmask`, `cidrsubnet`), NameBuiltin, nil},
{Words(``, `\b`, `can`, `tobool`, `tolist`, `tomap`, `tonumber`, `toset`, `tostring`, `try`), NameBuiltin, nil},
{`=(?!>)|\+|-|\*|\/|:|!|%|>|<(?!<)|>=|<=|==|!=|&&|\||\?`, Operator, nil},
{`\n|\s+|\\\n`, Text, nil},
{`[a-zA-Z]\w*`, NameOther, nil},
{`"`, LiteralStringDouble, Push("string")},
{`(?s)(<<-?)(\w+)(\n\s*(?:(?!\2).)*\s*\n\s*)(\2)`, ByGroups(Operator, Operator, String, Operator), nil},
},
"basic": {
{Words(`\b`, `\b`, `true`, `false`), KeywordType, nil},
{`\s*/\*`, CommentMultiline, Push("comment")},
{`\s*#.*\n`, CommentSingle, nil},
{`(.*?)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil},
{Words(`\b`, `\b`, `variable`, `resource`, `provider`, `provisioner`, `module`), KeywordReserved, Push("function")},
{Words(`\b`, `\b`, `ingress`, `egress`, `listener`, `default`, `connection`, `alias`), KeywordDeclaration, nil},
{`\$\{`, LiteralStringInterpol, Push("var_builtin")},
},
"function": {
{`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil},
Include("punctuation"),
Include("curly"),
},
"var_builtin": {
{`\$\{`, LiteralStringInterpol, Push()},
{Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil},
Include("string"),
Include("punctuation"),
{`\s+`, Text, nil},
{`\}`, LiteralStringInterpol, Pop(1)},
"declaration": {
{`(\s*)("(?:\\\\|\\"|[^"])*")(\s*)`, ByGroups(Text, NameVariable, Text), nil},
{`\{`, Punctuation, Pop(1)},
},
"string": {
{`(".*")`, ByGroups(LiteralStringDouble), nil},
{`"`, LiteralStringDouble, Pop(1)},
{`\\\\`, LiteralStringDouble, nil},
{`\\\\"`, LiteralStringDouble, nil},
{`\$\{`, LiteralStringInterpol, Push("interp-inside")},
{`\$`, LiteralStringDouble, nil},
{`[^"\\\\$]+`, LiteralStringDouble, nil},
},
"punctuation": {
{`[\[\](),.]`, Punctuation, nil},
},
"curly": {
{`\{`, TextPunctuation, nil},
{`\}`, TextPunctuation, nil},
},
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"whitespace": {
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
"interp-inside": {
{`\}`, LiteralStringInterpol, Pop(1)},
Include("root"),
},
},
))

View File

@ -5,7 +5,7 @@ import (
"github.com/alecthomas/chroma/lexers/internal"
)
// TradingView lexer.
// TradingView lexer
var TradingView = internal.Register(MustNewLexer(
&Config{
Name: "TradingView",
@ -26,10 +26,12 @@ var TradingView = internal.Register(MustNewLexer(
{`'\\.'|'[^\\]'`, LiteralString, nil},
{`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?`, LiteralNumber, nil},
{`#[a-fA-F0-9]{8}|#[a-fA-F0-9]{6}|#[a-fA-F0-9]{3}`, LiteralStringOther, nil},
{`(abs|acos|alertcondition|alma|asin|atan|atr|avg|barcolor|barssince|bgcolor|cci|ceil|change|cog|correlation|cos|crossover|crossunder|cum|dev|ema|exp|falling|fill|fixnan|floor|heikinashi|highest|highestbars|hline|iff|input|kagi|linebreak|linreg|log|log10|lowest|lowestbars|macd|max|min|mom|nz|percentile_(linear_interpolation|nearest_rank)|percentrank|pivothigh|pivotlow|plot|plotarrow|plotbar|plotcandle|plotchar|plotshape|pointfigure|pow|renko|rising|rma|roc|round|rsi|sar|security|sign|sin|sma|sqrt|stdev|stoch|study|sum|swma|tan|timestamp|tostring|tsi|valuewhen|variance|vwma|wma|strategy\.(cancel|cancel_all|close|close_all|entry|exit|order|risk\.(allow_entry_in|max_cons_loss_days|max_drawdown|max_intraday_filled_orders|max_intraday_loss|max_position_size)))\b`, NameFunction, nil},
{`\b(cross|dayofmonth|dayofweek|hour|minute|month|na|offset|second|strategy|tickerid|time|tr|vwap|weekofyear|year)(\()`, ByGroups(NameFunction, Text), nil}, // functions that can also be variable
{`(accdist|adjustment\.(dividends|none|splits)|aqua|area|areabr|black|blue|bool|circles|close|columns|currency\.(AUD|CAD|CHF|EUR|GBP|HKD|JPY|NOK|NONE|NZD|RUB|SEK|SGD|TRY|USD|ZAR)|dashed|dotted|float|friday|fuchsia|gray|green|high|histogram|hl2|hlc3|integer|interval|isdaily|isdwm|isintraday|ismonthly|isweekly|lime|line|linebr|location\.(abovebar|absolute|belowbar|bottom|top)|low|maroon|monday|n|navy|ohlc4|olive|open|orange|period|purple|red|resolution|saturday|scale\.(left|none|right)|session|session\.(extended|regular)|silver|size\.(auto|huge|large|normal|small|tiny)|solid|source|stepline|string|sunday|symbol|syminfo\.(mintick|pointvalue|prefix|root|session|timezone)|teal|thursday|ticker|timenow|tuesday|volume|wednesday|white|yellow|strategy\.(cash|closedtrades|commission\.(cash_per_contract|cash_per_order|percent)|direction\.(all|long|short)|equity|eventrades|fixed|grossloss|grossprofit|initial_capital|long|losstrades|max_contracts_held_(all|long|short)|max_drawdown|netprofit|oca\.(cancel|none|reduce)|openprofit|opentrades|percent_of_equity|position_avg_price|position_entry_name|position_size|short|wintrades)|shape\.(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|square|triangledown|triangleup|xcross)|barstate\.is(first|history|last|new|realtime)|barmerge\.(gaps_on|gaps_off|lookahead_on|lookahead_off))\b`, NameVariable, nil},
{`(abs|acos|alertcondition|alma|asin|atan|atr|avg|barcolor|barssince|bgcolor|cci|ceil|change|cog|color\.new|correlation|cos|crossover|crossunder|cum|dev|ema|exp|falling|fill|fixnan|floor|heikinashi|highest|highestbars|hline|iff|kagi|label\.(delete|get_text|get_x|get_y|new|set_color|set_size|set_style|set_text|set_textcolor|set_x|set_xloc|set_xy|set_y|set_yloc)|line\.(new|delete|get_x1|get_x2|get_y1|get_y2|set_color|set_width|set_style|set_extend|set_xy1|set_xy2|set_x1|set_x2|set_y1|set_y2|set_xloc)|linebreak|linreg|log|log10|lowest|lowestbars|macd|max|max_bars_back|min|mom|nz|percentile_(linear_interpolation|nearest_rank)|percentrank|pivothigh|pivotlow|plot|plotarrow|plotbar|plotcandle|plotchar|plotshape|pointfigure|pow|renko|rising|rma|roc|round|rsi|sar|security|sign|sin|sma|sqrt|stdev|stoch|study|sum|swma|tan|timestamp|tostring|tsi|valuewhen|variance|vwma|wma|strategy\.(cancel|cancel_all|close|close_all|entry|exit|order|risk\.(allow_entry_in|max_cons_loss_days|max_drawdown|max_intraday_filled_orders|max_intraday_loss|max_position_size)))\b`, NameFunction, nil},
{`\b(bool|color|cross|dayofmonth|dayofweek|float|hour|input|int|label|line|minute|month|na|offset|second|strategy|string|tickerid|time|tr|vwap|weekofyear|year)(\()`, ByGroups(NameFunction, Text), nil}, // functions that can also be variable
{`(accdist|adjustment\.(dividends|none|splits)|aqua|area|areabr|bar_index|black|blue|bool|circles|close|columns|currency\.(AUD|CAD|CHF|EUR|GBP|HKD|JPY|NOK|NONE|NZD|RUB|SEK|SGD|TRY|USD|ZAR)|color\.(aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow)|dashed|dotted|dayofweek\.(monday|tuesday|wednesday|thursday|friday|saturday|sunday)|extend\.(both|left|right|none)|float|format\.(inherit|price|volume)|friday|fuchsia|gray|green|high|histogram|hl2|hlc3|hline\.style_(dotted|solid|dashed)|input\.(bool|float|integer|resolution|session|source|string|symbol)|integer|interval|isdaily|isdwm|isintraday|ismonthly|isweekly|label\.style_(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|none|square|triangledown|triangleup|xcross)|lime|line\.style_(dashed|dotted|solid|arrow_both|arrow_left|arrow_right)|linebr|location\.(abovebar|absolute|belowbar|bottom|top)|low|maroon|monday|n|navy|ohlc4|olive|open|orange|period|plot\.style_(area|areabr|circles|columns|cross|histogram|line|linebr|stepline)|purple|red|resolution|saturday|scale\.(left|none|right)|session|session\.(extended|regular)|silver|size\.(auto|huge|large|normal|small|tiny)|solid|source|stepline|string|sunday|symbol|syminfo\.(mintick|pointvalue|prefix|root|session|ticker|tickerid|timezone)|teal|thursday|ticker|timeframe\.(isdaily|isdwm|isintraday|ismonthly|isweekly|multiplier|period)|timenow|tuesday|volume|wednesday|white|yellow|strategy\.(cash|closedtrades|commission\.(cash_per_contract|cash_per_order|percent)|direction\.(all|long|short)|equity|eventrades|fixed|grossloss|grossprofit|initial_capital|long|losstrades|max_contracts_held_(all|long|short)|max_drawdown|netprofit|oca\.(cancel|none|reduce)|openprofit|opentrades|percent_of_equity|position_avg_price|position_entry_name|position_size|short|wintrades)|shape\.(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|square|triangledown|triangleup|xcross)|barstate\.is(first|history|last|new|realtime)|barmerge\.(gaps_on|gaps_off|lookahead_on|lookahead_off)|xloc\.bar_(index|time)|yloc\.(abovebar|belowbar|price))\b`, NameVariable, nil},
{`(cross|dayofmonth|dayofweek|hour|minute|month|na|second|tickerid|time|tr|vwap|weekofyear|year)(\b[^\(])`, ByGroups(NameVariable, Text), nil}, // variables that can also be function
{`(int|float|bool|color|string|label|line)(\b[^\(=.])`, ByGroups(KeywordType, Text), nil}, // types that can also be a function
{`(var)\b`, KeywordType, nil},
{`(true|false)\b`, KeywordConstant, nil},
{`(and|or|not|if|else|for|to)\b`, OperatorWord, nil},
{`@?[_a-zA-Z]\w*`, Text, nil},

View File

@ -9,10 +9,11 @@ import (
var TypeScript = internal.Register(MustNewLexer(
&Config{
Name: "TypeScript",
Aliases: []string{"ts", "typescript"},
Aliases: []string{"ts", "tsx", "typescript"},
Filenames: []string{"*.ts", "*.tsx"},
MimeTypes: []string{"text/x-typescript"},
DotAll: true,
EnsureNL: true,
},
Rules{
"commentsandwhitespace": {
@ -31,6 +32,7 @@ var TypeScript = internal.Register(MustNewLexer(
{`\n`, Text, Pop(1)},
},
"root": {
Include("jsx"),
{`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"),
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
@ -68,5 +70,28 @@ var TypeScript = internal.Register(MustNewLexer(
{`\}`, LiteralStringInterpol, Pop(1)},
Include("root"),
},
"jsx": {
{`(<)(/?)(>)`, ByGroups(Punctuation, Punctuation, Punctuation), nil},
{`(<)([\w\.]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
{`(<)(/)([\w\.]*)(>)`, ByGroups(Punctuation, Punctuation, NameTag, Punctuation), nil},
},
"tag": {
{`\s+`, Text, nil},
{`([\w]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")},
{`[{}]+`, Punctuation, nil},
{`[\w\.]+`, NameAttribute, nil},
{`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)},
},
"attr": {
{`{`, Punctuation, Push("expression")},
{`".*?"`, LiteralString, Pop(1)},
{`'.*?'`, LiteralString, Pop(1)},
Default(Pop(1)),
},
"expression": {
{`{`, Punctuation, Push()},
{`}`, Punctuation, Pop(1)},
Include("root"),
},
},
))
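
Since the hunk above adds "tsx" as an alias and introduces the JSX tag states, here is a small sketch of resolving that alias and tokenising a TSX fragment; the sample source is illustrative only.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// With the alias added above, "tsx" should now resolve to the TypeScript lexer.
	lexer := lexers.Get("tsx")
	if lexer == nil {
		panic("tsx alias not registered")
	}
	it, _ := lexer.Tokenise(nil, "const el = <div className=\"x\">hi</div>;\n")
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}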

View File

@ -10,9 +10,10 @@ var Typoscript = internal.Register(MustNewLexer(
&Config{
Name: "TypoScript",
Aliases: []string{"typoscript"},
Filenames: []string{"*.ts", "*.txt"},
Filenames: []string{"*.ts"},
MimeTypes: []string{"text/x-typoscript"},
DotAll: true,
Priority: 0.1,
},
Rules{
"root": {

107
vendor/github.com/alecthomas/chroma/lexers/v/vue.go generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -15,12 +15,15 @@ var YAML = internal.Register(MustNewLexer(
Rules{
"root": {
Include("whitespace"),
{`#.*`, Comment, nil},
{`^---`, Text, nil},
{`[\n?]?\s*- `, Text, nil},
{`#.*$`, Comment, nil},
{`!![^\s]+`, CommentPreproc, nil},
{`&[^\s]+`, CommentPreproc, nil},
{`\*[^\s]+`, CommentPreproc, nil},
{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
Include("key"),
Include("value"),
{`[?:,\[\]]`, Punctuation, nil},
{`.`, Text, nil},
@ -33,8 +36,15 @@ var YAML = internal.Register(MustNewLexer(
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
{`\b[\w]+\b`, Text, nil},
},
"key": {
{`"[^"\n].*": `, Keyword, nil},
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil},
},
"whitespace": {
{`\s+`, Whitespace, nil},
{`\n+`, Whitespace, nil},
},
},
))

67
vendor/github.com/alecthomas/chroma/lexers/y/yang.go generated vendored Normal file
View File

@ -0,0 +1,67 @@
package y
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var YANG = internal.Register(MustNewLexer(
&Config{
Name: "YANG",
Aliases: []string{"yang"},
Filenames: []string{"*.yang"},
MimeTypes: []string{"application/yang"},
},
Rules{
"root": {
{`\s+`, Whitespace, nil},
{`[\{\}\;]+`, Punctuation, nil},
{`(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])`, Operator, nil},
{`"(?:\\"|[^"])*?"`, StringDouble, nil},
{`'(?:\\'|[^'])*?'`, StringSingle, nil},
{`/\*`, CommentMultiline, Push("comments")},
{`//.*?$`, CommentSingle, nil},
//match BNF stmt for `node-identifier` with [ prefix ":"]
{`(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])`, ByGroups(KeywordNamespace, Punctuation, Text), nil},
//match BNF stmt `date-arg-str`
{`([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s\{\}\;])`, LiteralDate, nil},
{`([0-9]+\.[0-9]+)(?=[\s\{\}\;])`, NumberFloat, nil},
{`([0-9]+)(?=[\s\{\}\;])`, NumberInteger, nil},
//TOP_STMTS_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `module`, `submodule`), Keyword, nil},
//MODULE_HEADER_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `belongs-to`, `namespace`, `prefix`, `yang-version`), Keyword, nil},
//META_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `contact`, `description`, `organization`, `reference`, `revision`), Keyword, nil},
//LINKAGE_STMTS_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `import`, `include`, `revision-date`), Keyword, nil},
//BODY_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `action`, `argument`, `augment`, `deviation`, `extension`, `feature`, `grouping`, `identity`, `if-feature`, `input`, `notification`, `output`, `rpc`, `typedef`), Keyword, nil},
//DATA_DEF_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `anydata`, `anyxml`, `case`, `choice`, `config`, `container`, `deviate`, `leaf`, `leaf-list`, `list`, `must`, `presence`, `refine`, `uses`, `when`), Keyword, nil},
//TYPE_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `base`, `bit`, `default`, `enum`, `error-app-tag`, `error-message`, `fraction-digits`, `length`, `max-elements`, `min-elements`, `modifier`, `ordered-by`, `path`, `pattern`, `position`, `range`, `require-instance`, `status`, `type`, `units`, `value`, `yin-element`), Keyword, nil},
//LIST_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `key`, `mandatory`, `unique`), Keyword, nil},
//CONSTANTS_KEYWORDS - RFC7950 other keywords
{Words(``, `(?=[^\w\-\:])`, `add`, `current`, `delete`, `deprecated`, `false`, `invert-match`, `max`, `min`, `not-supported`, `obsolete`, `replace`, `true`, `unbounded`, `user`), NameClass, nil},
//RFC7950 Built-In Types
{Words(``, `(?=[^\w\-\:])`, `binary`, `bits`, `boolean`, `decimal64`, `empty`, `enumeration`, `identityref`, `instance-identifier`, `int16`, `int32`, `int64`, `int8`, `leafref`, `string`, `uint16`, `uint32`, `uint64`, `uint8`, `union`), NameClass, nil},
{`[^;{}\s\'\"]+`, Text, nil},
},
"comments": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push("comment")},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
},
))

View File

@ -34,9 +34,13 @@ func (e EmitterFunc) Emit(groups []string, lexer Lexer) Iterator { return e(grou
func ByGroups(emitters ...Emitter) Emitter {
return EmitterFunc(func(groups []string, lexer Lexer) Iterator {
iterators := make([]Iterator, 0, len(groups)-1)
// NOTE: If this panics, there is a mismatch with groups
for i, group := range groups[1:] {
iterators = append(iterators, emitters[i].Emit([]string{group}, lexer))
if len(emitters) != len(groups)-1 {
iterators = append(iterators, Error.Emit(groups, lexer))
// panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
} else {
for i, group := range groups[1:] {
iterators = append(iterators, emitters[i].Emit([]string{group}, lexer))
}
}
return Concaterator(iterators...)
})
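
The change above makes ByGroups emit an Error token instead of panicking when the number of emitters does not match the number of capture groups. A minimal sketch of a rule where the two counts line up, using only constructors that appear in this vendor tree; the lexer itself is hypothetical.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

// Three capture groups map to three emitters. With the change above, a
// mismatch between the two counts now yields an Error token for the whole
// match rather than a panic.
var kv = chroma.MustNewLexer(&chroma.Config{Name: "kv"}, chroma.Rules{
	"root": {
		{`(\w+)(\s*)(=)`, chroma.ByGroups(chroma.NameAttribute, chroma.Text, chroma.Operator), nil},
		{`.|\n`, chroma.Text, nil},
	},
})

func main() {
	it, _ := kv.Tokenise(nil, "answer =")
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-15s %q\n", tok.Type, tok.Value)
	}
}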
@ -241,6 +245,7 @@ type LexerState struct {
// Custom context for mutators.
MutatorContext map[interface{}]interface{}
iteratorStack []Iterator
options *TokeniseOptions
}
// Set mutator context.
@ -254,7 +259,7 @@ func (l *LexerState) Get(key interface{}) interface{} {
}
// Iterator returns the next Token from the lexer.
func (l *LexerState) Iterator() Token {
func (l *LexerState) Iterator() Token { // nolint: gocognit
for l.Pos < len(l.Text) && len(l.Stack) > 0 {
// Exhaust the iterator stack, if any.
for len(l.iteratorStack) > 0 {
@ -275,9 +280,19 @@ func (l *LexerState) Iterator() Token {
if !ok {
panic("unknown state " + l.State)
}
ruleIndex, rule, groups := matchRules(l.Text[l.Pos:], selectedRule)
ruleIndex, rule, groups := matchRules(l.Text, l.Pos, selectedRule)
// No match.
if groups == nil {
// From Pygments :\
//
// If the RegexLexer encounters a newline that is flagged as an error token, the stack is
// emptied and the lexer continues scanning in the 'root' state. This can help producing
// error-tolerant highlighting for erroneous input, e.g. when a single-line string is not
// closed.
if l.Text[l.Pos] == '\n' && l.State != l.options.State {
l.Stack = []string{l.options.State}
continue
}
l.Pos++
return Token{Error, string(l.Text[l.Pos-1 : l.Pos])}
}
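
A minimal sketch of the recovery path added above: the hypothetical "str" state has no rule for a newline, so hitting one resets the stack to "root" and the following line is lexed normally instead of being swallowed by the string state (previously the newline produced an Error token and lexing stayed in "str").

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

// A tiny lexer whose string state cannot match "\n", to exercise the
// error-tolerant recovery described in the comment above.
var demo = chroma.MustNewLexer(&chroma.Config{Name: "demo"}, chroma.Rules{
	"root": {
		{`"`, chroma.LiteralString, chroma.Push("str")},
		{`\w+`, chroma.Name, nil},
		{`\s+`, chroma.Text, nil},
	},
	"str": {
		{`[^"\n]+`, chroma.LiteralString, nil},
		{`"`, chroma.LiteralString, chroma.Pop(1)},
	},
})

func main() {
	// The unterminated string ends at the newline; "next line" is lexed in "root".
	it, _ := demo.Tokenise(nil, "\"unterminated\nnext line\n")
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}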
@ -352,7 +367,12 @@ func (r *RegexLexer) maybeCompile() (err error) {
for state, rules := range r.rules {
for i, rule := range rules {
if rule.Regexp == nil {
rule.Regexp, err = regexp2.Compile("^(?"+rule.flags+")(?:"+rule.Pattern+")", 0)
pattern := "(?:" + rule.Pattern + ")"
if rule.flags != "" {
pattern = "(?" + rule.flags + ")" + pattern
}
pattern = `\G` + pattern
rule.Regexp, err = regexp2.Compile(pattern, 0)
if err != nil {
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
}
@ -390,10 +410,14 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
if options == nil {
options = defaultOptions
}
if options.EnsureLF {
text = ensureLF(text)
}
if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") {
text += "\n"
}
state := &LexerState{
options: options,
Lexer: r,
Text: []rune(text),
Stack: []string{options.State},
@ -403,10 +427,10 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
return state.Iterator, nil
}
func matchRules(text []rune, rules []*CompiledRule) (int, *CompiledRule, []string) {
func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string) {
for i, rule := range rules {
match, err := rule.Regexp.FindRunesMatch(text)
if match != nil && err == nil {
match, err := rule.Regexp.FindRunesMatchStartingAt(text, pos)
if match != nil && err == nil && match.Index == pos {
groups := []string{}
for _, g := range match.Groups() {
groups = append(groups, g.String())
@ -416,3 +440,22 @@ func matchRules(text []rune, rules []*CompiledRule) (int, *CompiledRule, []strin
}
return 0, &CompiledRule{}, nil
}
// replace \r and \r\n with \n
// same as strings.ReplaceAll but more efficient
func ensureLF(text string) string {
buf := make([]byte, len(text))
var j int
for i := 0; i < len(text); i++ {
c := text[i]
if c == '\r' {
if i < len(text)-1 && text[i+1] == '\n' {
continue
}
c = '\n'
}
buf[j] = c
j++
}
return string(buf[:j])
}
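
A short sketch of the new line-ending handling; the EnsureLF option name is taken from the hunk above, and the YAML input is illustrative. With the flag set, "\r\n" and bare "\r" are rewritten to "\n" before matching, so carriage returns never reach the token values.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	opts := &chroma.TokeniseOptions{State: "root", EnsureLF: true}
	it, _ := lexers.Get("yaml").Tokenise(opts, "name: chroma\r\nversion: 0.7.3\r\n")
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-15s %q\n", tok.Type, tok.Value)
	}
}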

View File

@ -66,7 +66,6 @@ func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
km[k] = rt.To
}
}
}
return RemappingLexer(lexer, func(t Token) []Token {
if k, ok := lut[t.Type]; ok {

View File

@ -5,7 +5,7 @@ import (
)
// Pygments default theme.
var Pygments = Register(chroma.MustNewStyle("pygments", map[chroma.TokenType]string{
var Pygments = Register(chroma.MustNewStyle("pygments", chroma.StyleEntries{
chroma.Whitespace: "#bbbbbb",
chroma.Comment: "italic #408080",
chroma.CommentPreproc: "noitalic #BC7A00",
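
The style files now pass chroma.StyleEntries rather than a raw map[chroma.TokenType]string. A hypothetical custom style registered the same way, for comparison; the name and colours are illustrative only.

package styles

import "github.com/alecthomas/chroma"

// Example mirrors the pattern used by pygments.go and swapoff.go above,
// using the chroma.StyleEntries alias introduced by this update.
var Example = Register(chroma.MustNewStyle("example", chroma.StyleEntries{
	chroma.Comment:       "italic #888888",
	chroma.Keyword:       "bold #0000ff",
	chroma.LiteralString: "#a31515",
}))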

View File

@ -5,7 +5,7 @@ import (
)
// SwapOff theme.
var SwapOff = Register(chroma.MustNewStyle("swapoff", map[chroma.TokenType]string{
var SwapOff = Register(chroma.MustNewStyle("swapoff", chroma.StyleEntries{
chroma.Background: "#lightgray bg:#black",
chroma.Number: "bold #ansiyellow",
chroma.Comment: "#ansiteal",

15
vendor/github.com/alecthomas/chroma/table.py generated vendored Executable file
View File

@ -0,0 +1,15 @@
#!/usr/bin/env python3
from collections import defaultdict
from subprocess import check_output
lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode('utf-8').splitlines()
lines = [line.strip() for line in lines if line.startswith(" ") and not line.startswith(" ")]
lines = sorted(lines, key=lambda l: l.lower())
table = defaultdict(list)
for line in lines:
table[line[0].upper()].append(line)
for key, value in table.items():
print("{} | {}".format(key, ', '.join(value)))

View File

@ -4,6 +4,107 @@ package chroma
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Background - -1]
_ = x[LineNumbers - -2]
_ = x[LineNumbersTable - -3]
_ = x[LineHighlight - -4]
_ = x[LineTable - -5]
_ = x[LineTableTD - -6]
_ = x[Error - -7]
_ = x[Other - -8]
_ = x[None - -9]
_ = x[EOFType-0]
_ = x[Keyword-1000]
_ = x[KeywordConstant-1001]
_ = x[KeywordDeclaration-1002]
_ = x[KeywordNamespace-1003]
_ = x[KeywordPseudo-1004]
_ = x[KeywordReserved-1005]
_ = x[KeywordType-1006]
_ = x[Name-2000]
_ = x[NameAttribute-2001]
_ = x[NameBuiltin-2002]
_ = x[NameBuiltinPseudo-2003]
_ = x[NameClass-2004]
_ = x[NameConstant-2005]
_ = x[NameDecorator-2006]
_ = x[NameEntity-2007]
_ = x[NameException-2008]
_ = x[NameFunction-2009]
_ = x[NameFunctionMagic-2010]
_ = x[NameKeyword-2011]
_ = x[NameLabel-2012]
_ = x[NameNamespace-2013]
_ = x[NameOperator-2014]
_ = x[NameOther-2015]
_ = x[NamePseudo-2016]
_ = x[NameProperty-2017]
_ = x[NameTag-2018]
_ = x[NameVariable-2019]
_ = x[NameVariableAnonymous-2020]
_ = x[NameVariableClass-2021]
_ = x[NameVariableGlobal-2022]
_ = x[NameVariableInstance-2023]
_ = x[NameVariableMagic-2024]
_ = x[Literal-3000]
_ = x[LiteralDate-3001]
_ = x[LiteralOther-3002]
_ = x[LiteralString-3100]
_ = x[LiteralStringAffix-3101]
_ = x[LiteralStringAtom-3102]
_ = x[LiteralStringBacktick-3103]
_ = x[LiteralStringBoolean-3104]
_ = x[LiteralStringChar-3105]
_ = x[LiteralStringDelimiter-3106]
_ = x[LiteralStringDoc-3107]
_ = x[LiteralStringDouble-3108]
_ = x[LiteralStringEscape-3109]
_ = x[LiteralStringHeredoc-3110]
_ = x[LiteralStringInterpol-3111]
_ = x[LiteralStringName-3112]
_ = x[LiteralStringOther-3113]
_ = x[LiteralStringRegex-3114]
_ = x[LiteralStringSingle-3115]
_ = x[LiteralStringSymbol-3116]
_ = x[LiteralNumber-3200]
_ = x[LiteralNumberBin-3201]
_ = x[LiteralNumberFloat-3202]
_ = x[LiteralNumberHex-3203]
_ = x[LiteralNumberInteger-3204]
_ = x[LiteralNumberIntegerLong-3205]
_ = x[LiteralNumberOct-3206]
_ = x[Operator-4000]
_ = x[OperatorWord-4001]
_ = x[Punctuation-5000]
_ = x[Comment-6000]
_ = x[CommentHashbang-6001]
_ = x[CommentMultiline-6002]
_ = x[CommentSingle-6003]
_ = x[CommentSpecial-6004]
_ = x[CommentPreproc-6100]
_ = x[CommentPreprocFile-6101]
_ = x[Generic-7000]
_ = x[GenericDeleted-7001]
_ = x[GenericEmph-7002]
_ = x[GenericError-7003]
_ = x[GenericHeading-7004]
_ = x[GenericInserted-7005]
_ = x[GenericOutput-7006]
_ = x[GenericPrompt-7007]
_ = x[GenericStrong-7008]
_ = x[GenericSubheading-7009]
_ = x[GenericTraceback-7010]
_ = x[GenericUnderline-7011]
_ = x[Text-8000]
_ = x[TextWhitespace-8001]
_ = x[TextSymbol-8002]
_ = x[TextPunctuation-8003]
}
const _TokenType_name = "NoneOtherErrorLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
var _TokenType_map = map[TokenType]string{