Update vendored chroma version

Kevin Burke 2018-01-06 14:02:24 -08:00
parent 9022c437d7
commit d441569025
No known key found for this signature in database
GPG Key ID: 24B0EF06511BA263
1923 changed files with 3526 additions and 56 deletions

Gopkg.lock generated
View File

@@ -2,10 +2,14 @@
[[projects]]
branch = "master"
name = "github.com/alecthomas/chroma"
packages = [".","formatters/html","lexers","styles"]
revision = "03b0c0d6bb7b9f7f8fd58fca6f1c6a2caffb9ca8"
packages = [
".",
"formatters/html",
"lexers",
"styles"
]
revision = "v0.2.1"
[[projects]]
branch = "master"
@@ -15,13 +19,16 @@
[[projects]]
name = "github.com/dlclark/regexp2"
packages = [".","syntax"]
packages = [
".",
"syntax"
]
revision = "487489b64fb796de2e55f4e8a4ad1e145f80e957"
version = "v1.1.6"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f31184a55e50f445537c2f983f07a94468ddd049e1c6bc5c3dd81fcc0f954e14"
inputs-digest = "d000af53c3e4361e32b9cefa257a3a9e965a5b2a08fab400e6b4825e78da5919"
solver-name = "gps-cdcl"
solver-version = 1

View File

@@ -1,3 +1,3 @@
[[constraint]]
name = "github.com/alecthomas/chroma"
branch = "master"
revision = "v0.2.1"

View File

@@ -3,6 +3,7 @@
*.dll
*.so
*.dylib
/cmd/chroma/chroma
# Test binary, build with `go test -c`
*.test
@@ -14,3 +15,4 @@
.glide/
_models/

View File

@@ -6,7 +6,7 @@ Chroma takes source code and other structured text and converts it into syntax
highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translaters for Pygments lexers and styles.
translators for Pygments lexers and styles.
## Table of Contents
@@ -29,7 +29,7 @@ translaters for Pygments lexers and styles.
## Supported languages
ABNF, ANTLR, APL, ActionScript, ActionScript 3, Ada, Angular2, ApacheConf, AppleScript, Awk, BNF, Base Makefile, Bash, Batchfile, BlitzBasic, Brainfuck, C, C#, C++, CFEngine3, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython, DTD, Dart, Diff, Django/Jinja, Docker, EBNF, Elixir, Elm, EmacsLisp, Erlang, FSharp, Factor, Fish, Forth, Fortran, GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Groovy, HTML, Handlebars, Haskell, Haxe, Hexdump, Hy, INI, Idris, Io, JSON, Java, JavaScript, Julia, Kotlin, LLVM, Lighttpd configuration file, Lua, Mako, Mason, Mathematica, MiniZinc, Modula-2, MySQL, Myghty, NASM, Newspeak, Nginx configuration file, Nim, OCaml, Octave, PHP, PL/pgSQL, POVRay, PacmanConf, Perl, Pig, PkgConfig, PostScript, PostgreSQL SQL dialect, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3, QBasic, R, Racket, Ragel, Rexx, Ruby, Rust, SPARQL, SQL, Sass, Scala, Scheme, Scilab, Smalltalk, Smarty, Snobol, SquidConf, Swift, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, Thrift, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData, VHDL, VimL, XML, Xorg, YAML, cfstatement, markdown, reStructuredText, reg, systemverilog, verilog
ABNF, ANTLR, APL, ActionScript, ActionScript 3, Ada, Angular2, ApacheConf, AppleScript, Awk, BNF, Base Makefile, Bash, Batchfile, BlitzBasic, Brainfuck, C, C#, C++, CFEngine3, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython, DTD, Dart, Diff, Django/Jinja, Docker, EBNF, Elixir, Elm, EmacsLisp, Erlang, FSharp, Factor, Fish, Forth, Fortran, GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Groovy, HTML, Handlebars, Haskell, Haxe, Hexdump, Hy, INI, Idris, Io, JSON, Java, JavaScript, Julia, Kotlin, LLVM, Lighttpd configuration file, Lua, Mako, Mason, Mathematica, MiniZinc, Modula-2, MySQL, Myghty, NASM, Newspeak, Nginx configuration file, Nim, OCaml, Octave, PHP, PL/pgSQL, POVRay, PacmanConf, Perl, Pig, PkgConfig, PostScript, PostgreSQL SQL dialect, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3, QBasic, R, Racket, Ragel, Rexx, Ruby, Rust, SPARQL, SQL, Sass, Scala, Scheme, Scilab, Smalltalk, Smarty, Snobol, SquidConf, SVG, Swift, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, Thrift, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData, VHDL, VimL, XML, Xorg, YAML, cfstatement, markdown, reStructuredText, reg, systemverilog, verilog
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
@@ -86,7 +86,7 @@ written in. There are three primary ways to do that:
lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n")
```
In all cases, `nil` will be returned if the langauge can not be identified.
In all cases, `nil` will be returned if the language can not be identified.
```go
if lexer == nil {
	lexer = lexers.Fallback
}
```
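
For context, here is the detect-and-fall-back pipeline end to end — a minimal sketch using only the `lexers`, `styles` and `formatters` packages that appear elsewhere in this diff:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	source := "package main\n\nfunc main() {}\n"
	// Identify the language, falling back to a plain-text lexer.
	lexer := lexers.Analyse(source)
	if lexer == nil {
		lexer = lexers.Fallback
	}
	it, err := lexer.Tokenise(nil, source)
	if err != nil {
		panic(err)
	}
	// Render with the fallback formatter and style.
	if err := formatters.Fallback.Format(os.Stdout, styles.Fallback, it); err != nil {
		panic(err)
	}
}
```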

View File

@@ -0,0 +1,136 @@
package main
import (
"io/ioutil"
"os"
"strings"
"text/template"
"github.com/aymerick/douceur/css"
"github.com/aymerick/douceur/parser"
"gopkg.in/alecthomas/kingpin.v3-unstable"
"github.com/alecthomas/chroma"
)
const (
outputTemplate = `package styles
import (
"github.com/alecthomas/chroma"
)
// {{.Name}} style.
var {{.Name}} = Register(chroma.MustNewStyle("{{.Name|Lower}}", chroma.StyleEntries{
{{- range .Rules}}
{{- if .Prelude|TokenType}}
chroma.{{.Prelude|TokenType}}: "{{.Declarations|TranslateDecls}}",
{{- end}}
{{- end}}
}))
`
)
var (
typeByClass = map[string]chroma.TokenType{
".hll": chroma.Background,
}
cssNamedColours = map[string]string{
"black": "#000000", "silver": "#c0c0c0", "gray": "#808080", "white": "#ffffff",
"maroon": "#800000", "red": "#ff0000", "purple": "#800080", "fuchsia": "#ff00ff",
"green": "#008000", "lime": "#00ff00", "olive": "#808000", "yellow": "#ffff00",
"navy": "#000080", "blue": "#0000ff", "teal": "#008080", "aqua": "#00ffff",
"orange": "#ffa500", "aliceblue": "#f0f8ff", "antiquewhite": "#faebd7", "aquamarine": "#7fffd4",
"azure": "#f0ffff", "beige": "#f5f5dc", "bisque": "#ffe4c4", "blanchedalmond": "#ffebcd",
"blueviolet": "#8a2be2", "brown": "#a52a2a", "burlywood": "#deb887", "cadetblue": "#5f9ea0",
"chartreuse": "#7fff00", "chocolate": "#d2691e", "coral": "#ff7f50", "cornflowerblue": "#6495ed",
"cornsilk": "#fff8dc", "crimson": "#dc143c", "cyan": "#00ffff", "darkblue": "#00008b",
"darkcyan": "#008b8b", "darkgoldenrod": "#b8860b", "darkgray": "#a9a9a9", "darkgreen": "#006400",
"darkgrey": "#a9a9a9", "darkkhaki": "#bdb76b", "darkmagenta": "#8b008b", "darkolivegreen": "#556b2f",
"darkorange": "#ff8c00", "darkorchid": "#9932cc", "darkred": "#8b0000", "darksalmon": "#e9967a",
"darkseagreen": "#8fbc8f", "darkslateblue": "#483d8b", "darkslategray": "#2f4f4f", "darkslategrey": "#2f4f4f",
"darkturquoise": "#00ced1", "darkviolet": "#9400d3", "deeppink": "#ff1493", "deepskyblue": "#00bfff",
"dimgray": "#696969", "dimgrey": "#696969", "dodgerblue": "#1e90ff", "firebrick": "#b22222",
"floralwhite": "#fffaf0", "forestgreen": "#228b22", "gainsboro": "#dcdcdc", "ghostwhite": "#f8f8ff",
"gold": "#ffd700", "goldenrod": "#daa520", "greenyellow": "#adff2f", "grey": "#808080",
"honeydew": "#f0fff0", "hotpink": "#ff69b4", "indianred": "#cd5c5c", "indigo": "#4b0082",
"ivory": "#fffff0", "khaki": "#f0e68c", "lavender": "#e6e6fa", "lavenderblush": "#fff0f5",
"lawngreen": "#7cfc00", "lemonchiffon": "#fffacd", "lightblue": "#add8e6", "lightcoral": "#f08080",
"lightcyan": "#e0ffff", "lightgoldenrodyellow": "#fafad2", "lightgray": "#d3d3d3", "lightgreen": "#90ee90",
"lightgrey": "#d3d3d3", "lightpink": "#ffb6c1", "lightsalmon": "#ffa07a", "lightseagreen": "#20b2aa",
"lightskyblue": "#87cefa", "lightslategray": "#778899", "lightslategrey": "#778899", "lightsteelblue": "#b0c4de",
"lightyellow": "#ffffe0", "limegreen": "#32cd32", "linen": "#faf0e6", "magenta": "#ff00ff",
"mediumaquamarine": "#66cdaa", "mediumblue": "#0000cd", "mediumorchid": "#ba55d3", "mediumpurple": "#9370db",
"mediumseagreen": "#3cb371", "mediumslateblue": "#7b68ee", "mediumspringgreen": "#00fa9a", "mediumturquoise": "#48d1cc",
"mediumvioletred": "#c71585", "midnightblue": "#191970", "mintcream": "#f5fffa", "mistyrose": "#ffe4e1",
"moccasin": "#ffe4b5", "navajowhite": "#ffdead", "oldlace": "#fdf5e6", "olivedrab": "#6b8e23",
"orangered": "#ff4500", "orchid": "#da70d6", "palegoldenrod": "#eee8aa", "palegreen": "#98fb98",
"paleturquoise": "#afeeee", "palevioletred": "#db7093", "papayawhip": "#ffefd5", "peachpuff": "#ffdab9",
"peru": "#cd853f", "pink": "#ffc0cb", "plum": "#dda0dd", "powderblue": "#b0e0e6",
"rosybrown": "#bc8f8f", "royalblue": "#4169e1", "saddlebrown": "#8b4513", "salmon": "#fa8072",
"sandybrown": "#f4a460", "seagreen": "#2e8b57", "seashell": "#fff5ee", "sienna": "#a0522d",
"skyblue": "#87ceeb", "slateblue": "#6a5acd", "slategray": "#708090", "slategrey": "#708090",
"snow": "#fffafa", "springgreen": "#00ff7f", "steelblue": "#4682b4", "tan": "#d2b48c",
"thistle": "#d8bfd8", "tomato": "#ff6347", "turquoise": "#40e0d0", "violet": "#ee82ee",
"wheat": "#f5deb3", "whitesmoke": "#f5f5f5", "yellowgreen": "#9acd32", "rebeccapurple": "#663399",
}
nameArg = kingpin.Arg("name", "Name of output style.").Required().String()
fileArg = kingpin.Arg("stylesheets", ".css file to import").Required().ExistingFile()
)
func init() {
for tt, str := range chroma.StandardTypes {
typeByClass["."+str] = tt
}
}
func translateDecls(decls []*css.Declaration) string {
out := []string{}
for _, decl := range decls {
switch decl.Property {
case "color":
clr := decl.Value
if c, ok := cssNamedColours[clr]; ok {
clr = c
}
out = append(out, clr)
case "background-color":
out = append(out, "bg:"+decl.Value)
case "font-style":
if strings.Contains(decl.Value, "italic") {
out = append(out, "italic")
}
case "font-weight":
if strings.Contains(decl.Value, "bold") {
out = append(out, "bold")
}
case "text-decoration":
if strings.Contains(decl.Value, "underline") {
out = append(out, "underline")
}
}
}
return strings.Join(out, " ")
}
func main() {
kingpin.Parse()
source, err := ioutil.ReadFile(*fileArg)
kingpin.FatalIfError(err, "")
css, err := parser.Parse(string(source))
kingpin.FatalIfError(err, "")
context := map[string]interface{}{
"Name": *nameArg,
"Rules": css.Rules,
}
tmpl := template.Must(template.New("style").Funcs(template.FuncMap{
"Lower": strings.ToLower,
"TranslateDecls": translateDecls,
"TokenType": func(s string) chroma.TokenType { return typeByClass[s] },
}).Parse(outputTemplate))
err = tmpl.Execute(os.Stdout, context)
kingpin.FatalIfError(err, "")
}

View File

@@ -0,0 +1,38 @@
package main
import (
"fmt"
"io/ioutil"
"os"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
"gopkg.in/alecthomas/kingpin.v3-unstable"
)
var (
filesArgs = kingpin.Arg("file", "Files to use to exercise lexers.").Required().ExistingFiles()
)
func main() {
kingpin.CommandLine.Help = "Exercise lexers against a list of files."
kingpin.Parse()
for _, file := range *filesArgs {
lexer := lexers.Match(file)
if lexer == nil {
fmt.Printf("warning: could not find lexer for %q\n", file)
continue
}
fmt.Printf("%s: ", file)
os.Stdout.Sync()
text, err := ioutil.ReadFile(file)
kingpin.FatalIfError(err, "")
it, err := lexer.Tokenise(nil, string(text))
kingpin.FatalIfError(err, "%s failed to tokenise %q", lexer.Config().Name, file)
err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
kingpin.FatalIfError(err, "%s failed to format %q", lexer.Config().Name, file)
fmt.Printf("ok\n")
}
}

View File

@@ -0,0 +1,196 @@
import functools
import importlib
import json
import os
import re
import sys
import types
import pystache
from pygments import lexer as pygments_lexer
from pygments.token import _TokenType
TEMPLATE = r'''
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
)
// {{upper_name}} lexer.
var {{upper_name}} = Register(MustNewLexer(
&Config{
Name: "{{name}}",
Aliases: []string{ {{#aliases}}"{{.}}", {{/aliases}} },
Filenames: []string{ {{#filenames}}"{{.}}", {{/filenames}} },
MimeTypes: []string{ {{#mimetypes}}"{{.}}", {{/mimetypes}} },
{{#re_not_multiline}}
NotMultiline: true,
{{/re_not_multiline}}
{{#re_dotall}}
DotAll: true,
{{/re_dotall}}
{{#re_ignorecase}}
CaseInsensitive: true,
{{/re_ignorecase}}
},
Rules{
{{#tokens}}
"{{state}}": {
{{#rules}}
{{{.}}},
{{/rules}}
},
{{/tokens}}
},
))
'''
def go_regex(s):
return go_string(s)
def go_string(s):
if '`' not in s:
return '`' + s + '`'
return json.dumps(s)
def to_camel_case(snake_str):
components = snake_str.split('_')
return ''.join(x.title() for x in components)
def warning(message):
print('warning: ' + message, file=sys.stderr)
def resolve_emitter(emitter):
if isinstance(emitter, types.FunctionType):
if repr(emitter).startswith('<function bygroups.'):
args = emitter.__closure__[0].cell_contents
emitter = 'ByGroups(%s)' % ', '.join(resolve_emitter(e) for e in args)
elif repr(emitter).startswith('<function using.'):
args = emitter.__closure__[0].cell_contents
if isinstance(args, dict):
state = 'root'
if 'stack' in args:
state = args['stack'][1]
args.pop('stack')
assert args == {}, args
emitter = 'UsingSelf("%s")' % state
elif issubclass(args, pygments_lexer.Lexer):
name = args.__name__
if name.endswith('Lexer'):
name = name[:-5]
emitter = 'Using(%s, nil)' % name
else:
raise ValueError('only support "using" with lexer classes, not %r' % args)
else:
warning('unsupported emitter function %r' % emitter)
emitter = '?? %r ??' % emitter
elif isinstance(emitter, _TokenType):
emitter = str(emitter).replace('.', '')[5:]
elif emitter is None:
# This generally only occurs when a lookahead/behind assertion is used, so we just allow it
# through.
return 'None'
else:
raise ValueError('unsupported emitter type %r' % emitter)
assert isinstance(emitter, str)
return emitter
def process_state_action(action):
if isinstance(action, tuple):
return functools.reduce(lambda a, b: a + b, (process_state_action(a) for a in action))
if action.startswith('#'):
action = action[1:]
if action == 'pop':
action = 'Pop(1)'
elif action.startswith('pop:'):
action = 'Pop(%s)' % action[4:]
elif action == 'push':
action = 'Push()'
elif action.startswith('push:'):
action = 'Push("%s")' % action[5:]
else:
raise ValueError('unsupported action %r' % (action,))
else:
action = 'Push("%s")' % action
return (action,)
def translate_rules(rules):
out = []
for rule in rules:
if isinstance(rule, tuple):
regex = rule[0]
if isinstance(regex, str):
regex = go_regex(regex)
elif isinstance(regex, pygments_lexer.words):
regex = 'Words(%s, %s, %s)' % (go_string(regex.prefix),
go_string(regex.suffix),
', '.join(go_string(w) for w in regex.words))
else:
raise ValueError('expected regex string but got %r' % regex)
emitter = resolve_emitter(rule[1])
if len(rule) == 2:
modifier = 'nil'
elif type(rule[2]) is str:
modifier = process_state_action(rule[2])[0]
elif isinstance(rule[2], pygments_lexer.combined):
modifier = 'Combined("%s")' % '", "'.join(rule[2])
elif type(rule[2]) is tuple:
modifier = 'Push("%s")' % '", "'.join(rule[2])
else:
raise ValueError('unsupported modifier %r' % (rule[2],))
out.append('{{ {}, {}, {} }}'.format(regex, emitter, modifier))
elif isinstance(rule, pygments_lexer.include):
out.append('Include("{}")'.format(rule))
elif isinstance(rule, pygments_lexer.default):
out.append('Default({})'.format(', '.join(process_state_action(rule.state))))
else:
raise ValueError('unsupported rule %r' % (rule,))
return out
class TemplateView(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def re_not_multiline(self):
return not (self.regex_flags & re.MULTILINE)
def re_dotall(self):
return self.regex_flags & re.DOTALL
def re_ignorecase(self):
return self.regex_flags & re.IGNORECASE
def main():
package_name, symbol_name = sys.argv[1].rsplit(sep=".", maxsplit=1)
package = importlib.import_module(package_name)
lexer_cls = getattr(package, symbol_name)
assert issubclass(lexer_cls, pygments_lexer.RegexLexer), 'can only translate from RegexLexer'
print(pystache.render(TEMPLATE, TemplateView(
name=lexer_cls.name,
regex_flags=lexer_cls.flags,
upper_name=to_camel_case(lexer_cls.name),
aliases=lexer_cls.aliases,
filenames=lexer_cls.filenames,
mimetypes=lexer_cls.mimetypes,
tokens=[{'state': state, 'rules': translate_rules(rules)} for (state, rules) in lexer_cls.get_tokendefs().items()],
)))
if __name__ == '__main__':
main()

vendor/github.com/alecthomas/chroma/_tools/style.py generated vendored Normal file
View File

@@ -0,0 +1,62 @@
import importlib
import sys
import pystache
from pygments.style import Style
from pygments.token import Token
TEMPLATE = r'''
package styles
import (
"github.com/alecthomas/chroma"
)
// {{upper_name}} style.
var {{upper_name}} = Register(chroma.MustNewStyle("{{name}}", chroma.StyleEntries{
{{#styles}}
chroma.{{type}}: "{{style}}",
{{/styles}}
}))
'''
def to_camel_case(snake_str):
components = snake_str.split('_')
return ''.join(x.title() for x in components)
def translate_token_type(t):
if t == Token:
t = Token.Background
return "".join(map(str, t))
def main():
name = sys.argv[1]
package_name, symbol_name = sys.argv[2].rsplit(sep=".", maxsplit=1)
package = importlib.import_module(package_name)
style_cls = getattr(package, symbol_name)
assert issubclass(style_cls, Style), 'can only translate from Style subclass'
styles = dict(style_cls.styles)
bg = "bg:" + style_cls.background_color
if Token in styles:
styles[Token] += " " + bg
else:
styles[Token] = bg
context = {
'upper_name': style_cls.__name__[:-5],
'name': name,
'styles': [{'type': translate_token_type(t), 'style': s}
for t, s in styles.items() if s],
}
print(pystache.render(TEMPLATE, context))
if __name__ == '__main__':
main()

vendor/github.com/alecthomas/chroma/cmd/chroma/main.go generated vendored Normal file
View File

@@ -0,0 +1,269 @@
package main
import (
"bufio"
"fmt"
"io"
"io/ioutil"
"os"
"os/signal"
"runtime"
"runtime/pprof"
"sort"
"strconv"
"strings"
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
"gopkg.in/alecthomas/kingpin.v3-unstable"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/formatters/html"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
var (
// Populated by goreleaser.
version = "?"
commit = "?"
date = "?"
profileFlag = kingpin.Flag("profile", "Enable profiling to file.").Hidden().String()
listFlag = kingpin.Flag("list", "List lexers, styles and formatters.").Bool()
unbufferedFlag = kingpin.Flag("unbuffered", "Do not buffer output.").Bool()
traceFlag = kingpin.Flag("trace", "Trace lexer states as they are traversed.").Bool()
checkFlag = kingpin.Flag("check", "Do not format, check for tokenization errors instead.").Bool()
filenameFlag = kingpin.Flag("filename", "Filename to use for selecting a lexer when reading from stdin.").String()
lexerFlag = kingpin.Flag("lexer", "Lexer to use when formatting.").PlaceHolder("autodetect").Short('l').Enum(lexers.Names(true)...)
styleFlag = kingpin.Flag("style", "Style to use for formatting.").Short('s').Default("swapoff").Enum(styles.Names()...)
formatterFlag = kingpin.Flag("formatter", "Formatter to use.").Default("terminal").Short('f').Enum(formatters.Names()...)
jsonFlag = kingpin.Flag("json", "Output JSON representation of tokens.").Bool()
htmlFlag = kingpin.Flag("html", "Enable HTML mode (equivalent to '--formatter html').").Bool()
htmlPrefixFlag = kingpin.Flag("html-prefix", "HTML CSS class prefix.").PlaceHolder("PREFIX").String()
htmlStylesFlag = kingpin.Flag("html-styles", "Output HTML CSS styles.").Bool()
htmlOnlyFlag = kingpin.Flag("html-only", "Output HTML fragment.").Bool()
htmlInlineStyleFlag = kingpin.Flag("html-inline-styles", "Output HTML with inline styles (no classes).").Bool()
htmlTabWidthFlag = kingpin.Flag("html-tab-width", "Set the HTML tab width.").Default("8").Int()
htmlLinesFlag = kingpin.Flag("html-lines", "Include line numbers in output.").Bool()
htmlLinesTableFlag = kingpin.Flag("html-lines-table", "Split line numbers and code in a HTML table").Bool()
htmlLinesStyleFlag = kingpin.Flag("html-lines-style", "Style for line numbers.").String()
htmlHighlightFlag = kingpin.Flag("html-highlight", "Highlight these lines.").PlaceHolder("N[:M][,...]").String()
htmlHighlightStyleFlag = kingpin.Flag("html-highlight-style", "Style used for highlighting lines.").String()
htmlBaseLineFlag = kingpin.Flag("html-base-line", "Base line number.").Default("1").Int()
filesArgs = kingpin.Arg("files", "Files to highlight.").ExistingFiles()
)
type flushableWriter interface {
io.Writer
Flush() error
}
type nopFlushableWriter struct{ io.Writer }
func (n *nopFlushableWriter) Flush() error { return nil }
func main() {
kingpin.CommandLine.Version(fmt.Sprintf("%s-%s-%s", version, commit, date))
kingpin.CommandLine.Help = `
Chroma is a general purpose syntax highlighting library and corresponding
command, for Go.
`
kingpin.Parse()
if *listFlag {
listAll()
return
}
if *profileFlag != "" {
f, err := os.Create(*profileFlag)
kingpin.FatalIfError(err, "")
pprof.StartCPUProfile(f)
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
go func() {
<-signals
pprof.StopCPUProfile()
os.Exit(128 + 3)
}()
defer pprof.StopCPUProfile()
}
var out io.Writer = os.Stdout
if runtime.GOOS == "windows" && isatty.IsTerminal(os.Stdout.Fd()) {
out = colorable.NewColorableStdout()
}
var w flushableWriter
if *unbufferedFlag {
w = &nopFlushableWriter{out}
} else {
w = bufio.NewWriterSize(out, 16384)
}
defer w.Flush()
if *jsonFlag {
*formatterFlag = "json"
}
if *htmlFlag {
*formatterFlag = "html"
}
// Retrieve user-specified style, clone it, and add some overrides.
builder := styles.Get(*styleFlag).Builder()
if *htmlHighlightStyleFlag != "" {
builder.Add(chroma.LineHighlight, *htmlHighlightStyleFlag)
}
if *htmlLinesStyleFlag != "" {
builder.Add(chroma.LineNumbers, *htmlLinesStyleFlag)
}
style, err := builder.Build()
kingpin.FatalIfError(err, "")
if *formatterFlag == "html" {
options := []html.Option{
html.TabWidth(*htmlTabWidthFlag),
html.BaseLineNumber(*htmlBaseLineFlag),
}
if *htmlPrefixFlag != "" {
options = append(options, html.ClassPrefix(*htmlPrefixFlag))
}
// Dump styles.
if *htmlStylesFlag {
formatter := html.New(html.WithClasses())
formatter.WriteCSS(w, style)
return
}
if !*htmlInlineStyleFlag {
options = append(options, html.WithClasses())
}
if !*htmlOnlyFlag {
options = append(options, html.Standalone())
}
if *htmlLinesFlag {
options = append(options, html.WithLineNumbers())
}
if *htmlLinesTableFlag {
options = append(options, html.LineNumbersInTable())
}
if len(*htmlHighlightFlag) > 0 {
ranges := [][2]int{}
for _, span := range strings.Split(*htmlHighlightFlag, ",") {
parts := strings.Split(span, ":")
if len(parts) > 2 {
kingpin.Fatalf("range should be N[:M], not %q", span)
}
start, err := strconv.ParseInt(parts[0], 10, 64)
kingpin.FatalIfError(err, "min value of range should be integer not %q", parts[0])
end := start
if len(parts) == 2 {
end, err = strconv.ParseInt(parts[1], 10, 64)
kingpin.FatalIfError(err, "max value of range should be integer not %q", parts[1])
}
ranges = append(ranges, [2]int{int(start), int(end)})
}
options = append(options, html.HighlightLines(ranges))
}
formatters.Register("html", html.New(options...))
}
if len(*filesArgs) == 0 {
contents, err := ioutil.ReadAll(os.Stdin)
kingpin.FatalIfError(err, "")
format(w, style, lex(*filenameFlag, string(contents)))
} else {
for _, filename := range *filesArgs {
contents, err := ioutil.ReadFile(filename)
kingpin.FatalIfError(err, "")
if *checkFlag {
check(filename, lex(filename, string(contents)))
} else {
format(w, style, lex(filename, string(contents)))
}
}
}
}
func listAll() {
fmt.Println("lexers:")
sort.Sort(lexers.Registry.Lexers)
for _, l := range lexers.Registry.Lexers {
config := l.Config()
fmt.Printf(" %s\n", config.Name)
filenames := []string{}
filenames = append(filenames, config.Filenames...)
filenames = append(filenames, config.AliasFilenames...)
if len(config.Aliases) > 0 {
fmt.Printf(" aliases: %s\n", strings.Join(config.Aliases, " "))
}
if len(filenames) > 0 {
fmt.Printf(" filenames: %s\n", strings.Join(filenames, " "))
}
if len(config.MimeTypes) > 0 {
fmt.Printf(" mimetypes: %s\n", strings.Join(config.MimeTypes, " "))
}
}
fmt.Println()
fmt.Printf("styles:")
for _, name := range styles.Names() {
fmt.Printf(" %s", name)
}
fmt.Println()
fmt.Printf("formatters:")
for _, name := range formatters.Names() {
fmt.Printf(" %s", name)
}
fmt.Println()
}
func lex(path string, contents string) chroma.Iterator {
lexer := selexer(path, contents)
if lexer == nil {
lexer = lexers.Fallback
}
if rel, ok := lexer.(*chroma.RegexLexer); ok {
rel.Trace(*traceFlag)
}
lexer = chroma.Coalesce(lexer)
it, err := lexer.Tokenise(nil, string(contents))
kingpin.FatalIfError(err, "")
return it
}
func selexer(path, contents string) (lexer chroma.Lexer) {
if *lexerFlag != "" {
return lexers.Get(*lexerFlag)
}
if path != "" {
lexer := lexers.Match(path)
if lexer != nil {
return lexer
}
}
return lexers.Analyse(contents)
}
func format(w io.Writer, style *chroma.Style, it chroma.Iterator) {
formatter := formatters.Get(*formatterFlag)
err := formatter.Format(w, style, it)
kingpin.FatalIfError(err, "")
}
func check(filename string, it chroma.Iterator) {
line, col := 1, 0
for token := it(); token != nil; token = it() {
if token.Type == chroma.Error {
fmt.Printf("%s:%d:%d %q\n", filename, line, col, token.String())
}
for _, c := range token.String() {
col++
if c == '\n' {
line, col = line+1, 0
}
}
}
}

View File

@@ -1,7 +1,7 @@
// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-
// coloured text, etc.
//
// Chroma is based heavily on Pygments, and includes translaters for Pygments lexers and styles.
// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
//
// For more information, go here: https://github.com/alecthomas/chroma
package chroma

View File

@@ -317,12 +317,13 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
classes[t] = StyleEntryToCSS(entry)
}
classes[chroma.Background] += f.tabWidthStyle()
lineNumbersStyle := "; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;"
classes[chroma.LineNumbers] += lineNumbersStyle
classes[chroma.LineNumbersTable] += lineNumbersStyle + " display: block;"
classes[chroma.LineHighlight] += "; display: block; width: 100%"
classes[chroma.LineTable] += "; border-spacing: 0; padding: 0; margin: 0; border: 0; width: 100%; overflow: auto; display: block;"
classes[chroma.LineTableTD] += "; vertical-align: top; padding: 0; margin: 0; border: 0;"
lineNumbersStyle := "margin-right: 0.4em; padding: 0 0.4em 0 0.4em;"
// all rules begin with default rules followed by user provided rules
classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
classes[chroma.LineNumbersTable] = lineNumbersStyle + " display: block;" + classes[chroma.LineNumbersTable]
classes[chroma.LineHighlight] = "display: block; width: 100%;" + classes[chroma.LineHighlight]
classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0; width: 100%; overflow: auto; display: block;" + classes[chroma.LineTable]
classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
return classes
}

View File

@@ -3,10 +3,10 @@ package html
import (
"errors"
"io/ioutil"
"strings"
"testing"
"github.com/alecthomas/assert"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
@@ -58,3 +58,20 @@ func TestIteratorPanicRecovery(t *testing.T) {
err := New().Format(ioutil.Discard, styles.Fallback, it)
assert.Error(t, err)
}
func TestFormatter_styleToCSS(t *testing.T) {
builder := styles.Get("github").Builder()
builder.Add(chroma.LineHighlight, "bg:#ffffcc")
builder.Add(chroma.LineNumbers, "bold")
style, err := builder.Build()
if err != nil {
t.Error(err)
}
formatter := New(WithClasses())
css := formatter.styleToCSS(style)
for _, s := range css {
if strings.HasPrefix(strings.TrimSpace(s), ";") {
t.Errorf("rule starts with semicolon - expected valid css rule without semicolon: %v", s)
}
}
}

vendor/github.com/alecthomas/chroma/formatters/json.go generated vendored Normal file
View File

@@ -0,0 +1,31 @@
package formatters
import (
"encoding/json"
"fmt"
"io"
"github.com/alecthomas/chroma"
)
// JSON formatter outputs the raw token structures as JSON.
var JSON = Register("json", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
fmt.Fprintln(w, "[")
i := 0
for t := it(); t != nil; t = it() {
if i > 0 {
fmt.Fprintln(w, ",")
}
i++
bytes, err := json.Marshal(t)
if err != nil {
return err
}
if _, err := fmt.Fprint(w, " "+string(bytes)); err != nil {
return err
}
}
fmt.Fprintln(w)
fmt.Fprintln(w, "]")
return nil
}))
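
A short usage sketch for this formatter, fetched by its registered name just as the CLI's `--json` flag does below; the `"go"` lexer name is an assumption here:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	// Tokenise a one-line program and dump the raw token stream as JSON.
	it, err := lexers.Get("go").Tokenise(nil, "package main\n")
	if err != nil {
		panic(err)
	}
	// The JSON formatter ignores colours, but Format still takes a style.
	if err := formatters.Get("json").Format(os.Stdout, styles.Fallback, it); err != nil {
		panic(err)
	}
}
```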

View File

@@ -50,12 +50,17 @@ type Config struct {
// If given and greater than 0, expand tabs in the input.
// TabSize int
// Priority of lexer.
//
// If this is 0 it will be treated as a default of 1.
Priority float32
}
// Token output to formatter.
type Token struct {
Type TokenType
Value string
Type TokenType `json:"type"`
Value string `json:"value"`
}
func (t *Token) String() string { return t.Value }
@@ -80,32 +85,30 @@ type Lexer interface {
Tokenise(options *TokeniseOptions, text string) (Iterator, error)
}
// Lexers is a slice of lexers sortable by name.
type Lexers []Lexer
// Pick attempts to pick the best Lexer for a piece of source code. May return nil.
func (l Lexers) Pick(text string) Lexer {
if len(l) == 0 {
return nil
}
var picked Lexer
highest := float32(-1)
for _, lexer := range l {
if analyser, ok := lexer.(Analyser); ok {
score := analyser.AnalyseText(text)
if score > highest {
highest = score
picked = lexer
continue
}
}
}
return picked
}
func (l Lexers) Len() int { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l Lexers) Less(i, j int) bool { return l[i].Config().Name < l[j].Config().Name }
// PrioritisedLexers is a slice of lexers sortable by priority.
type PrioritisedLexers []Lexer
func (l PrioritisedLexers) Len() int { return len(l) }
func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l PrioritisedLexers) Less(i, j int) bool {
ip := l[i].Config().Priority
if ip == 0 {
ip = 1
}
jp := l[j].Config().Priority
if jp == 0 {
jp = 1
}
return ip > jp
}
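
This ordering is what lets two lexers claim the same filename glob without the tie being arbitrary. A sketch of the effect, assuming the exported `lexers.Mason` and `lexers.ObjectiveC` from elsewhere in this diff — both claim `*.m`, and Mason registers `Priority: 0.1`:

```go
package main

import (
	"fmt"
	"sort"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Objective-C leaves Priority at 0, which Less treats as 1, so it
	// sorts ahead of Mason's explicit 0.1 and wins the "*.m" tie.
	candidates := chroma.PrioritisedLexers{lexers.Mason, lexers.ObjectiveC}
	sort.Sort(candidates)
	fmt.Println(candidates[0].Config().Name) // Objective-C
}
```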
// Analyser determines how appropriate this lexer is for the given text.
type Analyser interface {
AnalyseText(text string) float32

View File

@@ -3,6 +3,7 @@ package lexers
import (
"path/filepath"
"sort"
"strings"
"github.com/danwakefield/fnmatch"
@@ -33,47 +34,84 @@ func Names(withAliases bool) []string {
return out
}
// Get a Lexer by name.
// Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer {
candidates := chroma.PrioritisedLexers{}
if lexer := Registry.byName[name]; lexer != nil {
return lexer
candidates = append(candidates, lexer)
}
return Registry.byAlias[name]
if lexer := Registry.byAlias[name]; lexer != nil {
candidates = append(candidates, lexer)
}
if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer)
}
if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer)
}
// Try file extension.
if lexer := Match("filename." + name); lexer != nil {
candidates = append(candidates, lexer)
}
// Try exact filename.
if lexer := Match(name); lexer != nil {
candidates = append(candidates, lexer)
}
if len(candidates) == 0 {
return nil
}
sort.Sort(candidates)
return candidates[0]
}
// MatchMimeType attempts to find a lexer for the given MIME type.
func MatchMimeType(mimeType string) chroma.Lexer {
matched := chroma.PrioritisedLexers{}
for _, l := range Registry.Lexers {
for _, lmt := range l.Config().MimeTypes {
if mimeType == lmt {
return l
matched = append(matched, l)
}
}
}
if len(matched) != 0 {
sort.Sort(matched)
return matched[0]
}
return nil
}
// Match returns the first lexer matching filename.
func Match(filename string) chroma.Lexer {
filename = filepath.Base(filename)
matched := chroma.PrioritisedLexers{}
// First, try primary filename matches.
for _, lexer := range Registry.Lexers {
config := lexer.Config()
for _, glob := range config.Filenames {
if fnmatch.Match(glob, filename, 0) {
return lexer
matched = append(matched, lexer)
}
}
}
if len(matched) > 0 {
sort.Sort(matched)
return matched[0]
}
matched = nil
// Next, try filename aliases.
for _, lexer := range Registry.Lexers {
config := lexer.Config()
for _, glob := range config.AliasFilenames {
if fnmatch.Match(glob, filename, 0) {
return lexer
matched = append(matched, lexer)
}
}
}
if len(matched) > 0 {
sort.Sort(matched)
return matched[0]
}
return nil
}
@@ -97,8 +135,10 @@ func Analyse(text string) chroma.Lexer {
func Register(lexer chroma.Lexer) chroma.Lexer {
config := lexer.Config()
Registry.byName[config.Name] = lexer
Registry.byName[strings.ToLower(config.Name)] = lexer
for _, alias := range config.Aliases {
Registry.byAlias[alias] = lexer
Registry.byAlias[strings.ToLower(alias)] = lexer
}
Registry.Lexers = append(Registry.Lexers, lexer)
return lexer
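
Combined with the lowercased registration in Register above, Get now resolves names case-insensitively and falls through to aliases, extensions and filenames. A small sketch of the lookups this enables (the `svg` case is exercised by `TestGet` below):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// By name (case-folded), by alias, and by file extension respectively.
	for _, name := range []string{"XML", "as", "svg"} {
		if l := lexers.Get(name); l != nil {
			fmt.Printf("%s -> %s\n", name, l.Config().Name)
		}
	}
}
```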

View File

@@ -19,3 +19,15 @@ func TestCompileAllRegexes(t *testing.T) {
assert.NoError(t, err, "%s failed", lexer.Config().Name)
}
}
func TestGet(t *testing.T) {
t.Run("ByName", func(t *testing.T) {
assert.Equal(t, lexers.Get("xml"), lexers.XML)
})
t.Run("ByAlias", func(t *testing.T) {
assert.Equal(t, lexers.Get("as"), lexers.Actionscript)
})
t.Run("ViaFilename", func(t *testing.T) {
assert.Equal(t, lexers.Get("svg"), lexers.XML)
})
}

View File

@@ -6,7 +6,7 @@ import (
. "github.com/alecthomas/chroma" // nolint
)
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:bash|zsh|sh|ksh)`)
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
// Bash lexer.
var Bash = Register(MustNewLexer(
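
The widened analyser regex also recognises `env`-style shebangs. A standalone check of the old gap, independent of the lexer itself:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
	fmt.Println(re.MatchString("#!/bin/bash\n"))         // true, as before
	fmt.Println(re.MatchString("#!/usr/bin/env bash\n")) // true, newly matched
}
```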

View File

@@ -34,6 +34,7 @@ var CSharp = Register(MustNewLexer(
{`(global)(::)`, ByGroups(Keyword, Punctuation), nil},
{`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil},
{`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("class")},
{`\b([_a-zA-Z]\w*)(\.)`, ByGroups(NameClass, Punctuation), nil},
{`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")},
{`@?[_a-zA-Z]\w*`, Name, nil},
},

File diff suppressed because one or more lines are too long

View File

@@ -9,6 +9,7 @@ var Diff = Register(MustNewLexer(
&Config{
Name: "Diff",
Aliases: []string{"diff", "udiff"},
EnsureNL: true,
Filenames: []string{"*.diff", "*.patch"},
MimeTypes: []string{"text/x-diff", "text/x-patch"},
},

View File

@@ -0,0 +1,20 @@
package lexers
import (
"testing"
"github.com/alecthomas/assert"
"github.com/alecthomas/chroma"
)
func TestDiffLexerWithoutTrailingNewLine(t *testing.T) {
diffLexer := Get("diff")
it, err := diffLexer.Tokenise(nil, "-foo\n+bar")
assert.NoError(t, err)
actual := it.Tokens()
expected := []*chroma.Token{
&chroma.Token{chroma.GenericDeleted, "-foo\n"},
&chroma.Token{chroma.GenericInserted, "+bar\n"},
}
assert.Equal(t, expected, actual)
}

vendor/github.com/alecthomas/chroma/lexers/http.go generated vendored Normal file
View File

@@ -0,0 +1,129 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
"strings"
)
// HTTP lexer.
var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
&Config{
Name: "HTTP",
Aliases: []string{"http"},
Filenames: []string{},
MimeTypes: []string{},
NotMultiline: true,
DotAll: true,
},
Rules{
"root": {
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
},
"headers": {
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
{`\r?\n`, Text, Push("content")},
},
"content": {
{`.+`, EmitterFunc(httpContentBlock), nil},
},
},
)))
func httpContentBlock(groups []string, lexer Lexer) Iterator {
tokens := []*Token{
{Generic, groups[0]},
}
return Literator(tokens...)
}
func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
tokens := []*Token{
{Name, groups[1]},
{Text, groups[2]},
{Operator, groups[3]},
{Text, groups[4]},
{Literal, groups[5]},
{Text, groups[6]},
}
return Literator(tokens...)
}
func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator {
tokens := []*Token{
{Text, groups[1]},
{Literal, groups[2]},
{Text, groups[3]},
}
return Literator(tokens...)
}
func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }
type httpBodyContentTyper struct{ Lexer }
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
var contentType string
var isContentType bool
var subIterator Iterator
it, err := d.Lexer.Tokenise(options, text)
if err != nil {
return nil, err
}
return func() *Token {
for token := it(); token != nil; token = it() {
switch {
case token.Type == Name && strings.ToLower(token.Value) == "content-type":
{
isContentType = true
}
case token.Type == Literal && isContentType:
{
contentType = strings.TrimSpace(token.Value)
pos := strings.Index(contentType, ";")
if pos > 0 {
contentType = strings.TrimSpace(contentType[:pos])
}
}
case token.Type == Generic && contentType != "":
{
lexer := MatchMimeType(contentType)
// application/calendar+xml can be treated as application/xml
// if there's not a better match.
if lexer == nil && strings.Contains(contentType, "+") {
slashPos := strings.Index(contentType, "/")
plusPos := strings.LastIndex(contentType, "+")
contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
lexer = MatchMimeType(contentType)
}
if lexer == nil {
token.Type = Text
} else {
subIterator, err = lexer.Tokenise(nil, token.Value)
if err != nil {
panic(err)
}
return nil
}
}
}
return token
}
if subIterator != nil {
for token := subIterator(); token != nil; token = subIterator() {
return token
}
}
return nil
}, nil
}
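
The wrapper above re-tokenises the body with a lexer picked from the Content-Type header, including the `+` suffix fallback noted in the comment. A hedged sketch — it assumes the JSON lexer registers the `application/json` MIME type:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	resp := "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n{\"ok\": true}\n"
	it, err := lexers.Get("http").Tokenise(nil, resp)
	if err != nil {
		panic(err)
	}
	// Headers come from the HTTP rules; body tokens come from the JSON lexer.
	if err := formatters.NoOp.Format(os.Stdout, styles.Fallback, it); err != nil {
		panic(err)
	}
}
```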

View File

@@ -0,0 +1,63 @@
package lexers
import (
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alecthomas/chroma"
)
// Test source files are in the form <name>.actual and expected output is in the form <name>.expected.
func TestLexers(t *testing.T) {
files, err := ioutil.ReadDir("testdata")
require.NoError(t, err)
for _, file := range files {
ext := filepath.Ext(file.Name())[1:]
if ext != "actual" {
continue
}
lexer := Get(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())))
if !assert.NotNil(t, lexer) {
continue
}
filename := filepath.Join("testdata", file.Name())
expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
lexer = chroma.Coalesce(lexer)
t.Run(lexer.Config().Name, func(t *testing.T) {
// Read and tokenise source text.
actualText, err := ioutil.ReadFile(filename)
if !assert.NoError(t, err) {
return
}
actual, err := chroma.Tokenise(lexer, nil, string(actualText))
if !assert.NoError(t, err) {
return
}
// Read expected JSON into token slice.
expected := []*chroma.Token{}
r, err := os.Open(expectedFilename)
if !assert.NoError(t, err) {
return
}
err = json.NewDecoder(r).Decode(&expected)
if !assert.NoError(t, err) {
return
}
// Equal?
assert.Equal(t, expected, actual)
})
}
}

View File

@@ -8,8 +8,8 @@ import (
var Markdown = Register(MustNewLexer(
&Config{
Name: "markdown",
Aliases: []string{"md"},
Filenames: []string{"*.md"},
Aliases: []string{"md", "mkd"},
Filenames: []string{"*.md", "*.mkd", "*.markdown"},
MimeTypes: []string{"text/x-markdown"},
},
Rules{

View File

@@ -11,6 +11,7 @@ var Mason = Register(MustNewLexer(
Aliases: []string{"mason"},
Filenames: []string{"*.m", "*.mhtml", "*.mc", "*.mi", "autohandler", "dhandler"},
MimeTypes: []string{"application/x-mason"},
Priority: 0.1,
},
Rules{
"root": {

View File

@@ -21,9 +21,8 @@ var MySQL = Register(MustNewLexer(
{`/\*`, CommentMultiline, Push("multiline-comments")},
{`[0-9]+`, LiteralNumberInteger, nil},
{`[0-9]*\.[0-9]+(e[+-][0-9]+)`, LiteralNumberFloat, nil},
{`'(\\\\|\\'|''|[^'])*'`, LiteralStringSingle, nil},
{`"(\\\\|\\"|""|[^"])*"`, LiteralStringDouble, nil},
{"`(\\\\\\\\|\\\\`|``|[^`])*`", LiteralStringSymbol, nil},
{`((?:_[a-z0-9]+)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")},
{`((?:_[a-z0-9]+)?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("double-string")},
{"[+*/<>=~!@#%^&|`?-]", Operator, nil},
{`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, Text, Punctuation), nil},
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil},
@@ -40,5 +39,15 @@ var MySQL = Register(MustNewLexer(
{`[^/*]+`, CommentMultiline, nil},
{`[/*]`, CommentMultiline, nil},
},
"string": {
{`[^']+`, LiteralStringSingle, nil},
{`''`, LiteralStringSingle, nil},
{`'`, LiteralStringSingle, Pop(1)},
},
"double-string": {
{`[^"]+`, LiteralStringDouble, nil},
{`""`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
},
))
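
The dedicated string states mean a doubled quote is consumed as an escape inside the literal rather than terminating it. A sketch, assuming the lexer is registered under the alias `mysql`:

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Every piece of 'it''s', including the doubled-quote escape, is
	// emitted as LiteralStringSingle instead of ending the string early.
	it, err := lexers.Get("mysql").Tokenise(nil, "select 'it''s'\n")
	if err != nil {
		panic(err)
	}
	for t := it(); t != nil; t = it() {
		fmt.Printf("%v %q\n", t.Type, t.Value)
	}
}
```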

View File

@@ -0,0 +1,164 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
)
// Objective-C lexer.
var ObjectiveC = Register(MustNewLexer(
&Config{
Name: "Objective-C",
Aliases: []string{"objective-c", "objectivec", "obj-c", "objc"},
Filenames: []string{"*.m", "*.h"},
MimeTypes: []string{"text/x-objective-c"},
},
Rules{
"statements": {
{`@"`, LiteralString, Push("string")},
{`@(YES|NO)`, LiteralNumber, nil},
{`@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
{`@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil},
{`@(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
{`@0x[0-9a-fA-F]+[Ll]?`, LiteralNumberHex, nil},
{`@0[0-7]+[Ll]?`, LiteralNumberOct, nil},
{`@\d+[Ll]?`, LiteralNumberInteger, nil},
{`@\(`, Literal, Push("literal_number")},
{`@\[`, Literal, Push("literal_array")},
{`@\{`, Literal, Push("literal_dictionary")},
{Words(``, `\b`, `@selector`, `@private`, `@protected`, `@public`, `@encode`, `@synchronized`, `@try`, `@throw`, `@catch`, `@finally`, `@end`, `@property`, `@synthesize`, `__bridge`, `__bridge_transfer`, `__autoreleasing`, `__block`, `__weak`, `__strong`, `weak`, `strong`, `copy`, `retain`, `assign`, `unsafe_unretained`, `atomic`, `nonatomic`, `readonly`, `readwrite`, `setter`, `getter`, `typeof`, `in`, `out`, `inout`, `release`, `class`, `@dynamic`, `@optional`, `@required`, `@autoreleasepool`), Keyword, nil},
{Words(``, `\b`, `id`, `instancetype`, `Class`, `IMP`, `SEL`, `BOOL`, `IBOutlet`, `IBAction`, `unichar`), KeywordType, nil},
{`@(true|false|YES|NO)\n`, NameBuiltin, nil},
{`(YES|NO|nil|self|super)\b`, NameBuiltin, nil},
{`(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b`, KeywordType, nil},
{`(TRUE|FALSE)\b`, NameBuiltin, nil},
{`(@interface|@implementation)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_classname")},
{`(@class|@protocol)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_forward_classname")},
{`@`, Punctuation, nil},
{`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil},
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil},
{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil},
{`0[0-7]+[LlUu]*`, LiteralNumberOct, nil},
{`\d+[LlUu]*`, LiteralNumberInteger, nil},
{`\*/`, Error, nil},
{`[~!%^&*+=|?:<>/-]`, Operator, nil},
{`[()\[\],.]`, Punctuation, nil},
{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
{`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil},
{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
{`(true|false|NULL)\b`, NameBuiltin, nil},
{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil},
{`[a-zA-Z_]\w*`, Name, nil},
},
"oc_classname": {
{`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)`, ByGroups(NameClass, Text, NameClass, Text, Punctuation), Push("#pop", "oc_ivars")},
{`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?`, ByGroups(NameClass, Text, NameClass), Pop(1)},
{`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)`, ByGroups(NameClass, Text, NameLabel, Text, Punctuation), Push("#pop", "oc_ivars")},
{`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))`, ByGroups(NameClass, Text, NameLabel), Pop(1)},
{`([a-zA-Z$_][\w$]*)(\s*)(\{)`, ByGroups(NameClass, Text, Punctuation), Push("#pop", "oc_ivars")},
{`([a-zA-Z$_][\w$]*)`, NameClass, Pop(1)},
},
"oc_forward_classname": {
{`([a-zA-Z$_][\w$]*)(\s*,\s*)`, ByGroups(NameClass, Text), Push("oc_forward_classname")},
{`([a-zA-Z$_][\w$]*)(\s*;?)`, ByGroups(NameClass, Text), Pop(1)},
},
"oc_ivars": {
Include("whitespace"),
Include("statements"),
{`;`, Punctuation, nil},
{`\{`, Punctuation, Push()},
{`\}`, Punctuation, Pop(1)},
},
"root": {
{`^([-+])(\s*)(\(.*?\))?(\s*)([a-zA-Z$_][\w$]*:?)`, ByGroups(Punctuation, Text, UsingSelf("root"), Text, NameFunction), Push("method")},
Include("whitespace"),
{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")},
{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil},
Default(Push("statement")),
},
"method": {
Include("whitespace"),
{`,`, Punctuation, nil},
{`\.\.\.`, Punctuation, nil},
{`(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)`, ByGroups(UsingSelf("root"), Text, NameVariable), nil},
{`[a-zA-Z$_][\w$]*:`, NameFunction, nil},
{`;`, Punctuation, Pop(1)},
{`\{`, Punctuation, Push("function")},
Default(Pop(1)),
},
"literal_number": {
{`\(`, Punctuation, Push("literal_number_inner")},
{`\)`, Literal, Pop(1)},
Include("statement"),
},
"literal_number_inner": {
{`\(`, Punctuation, Push()},
{`\)`, Punctuation, Pop(1)},
Include("statement"),
},
"literal_array": {
{`\[`, Punctuation, Push("literal_array_inner")},
{`\]`, Literal, Pop(1)},
Include("statement"),
},
"literal_array_inner": {
{`\[`, Punctuation, Push()},
{`\]`, Punctuation, Pop(1)},
Include("statement"),
},
"literal_dictionary": {
{`\}`, Literal, Pop(1)},
Include("statement"),
},
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")},
{`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")},
{`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")},
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
{`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil},
{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil},
{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil},
},
"statement": {
Include("whitespace"),
Include("statements"),
{`[{}]`, Punctuation, nil},
{`;`, Punctuation, Pop(1)},
},
"function": {
Include("whitespace"),
Include("statements"),
{`;`, Punctuation, nil},
{`\{`, Punctuation, Push()},
{`\}`, Punctuation, Pop(1)},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`\\\n`, LiteralString, nil},
{`\\`, LiteralString, nil},
},
"macro": {
{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil},
{`[^/\n]+`, CommentPreproc, nil},
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
{`//.*?\n`, CommentSingle, Pop(1)},
{`/`, CommentPreproc, nil},
{`(?<=\\)\n`, CommentPreproc, nil},
{`\n`, CommentPreproc, Pop(1)},
},
"if0": {
{`^\s*#if.*?(?<!\\)\n`, CommentPreproc, Push()},
{`^\s*#el(?:se|if).*\n`, CommentPreproc, Pop(1)},
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
},
))

vendor/github.com/alecthomas/chroma/lexers/scss.go generated vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,33 @@
# Lexer tests
This directory contains input source and expected output lexer tokens.
Input filenames for lexers are in the form `<name>.actual`. Expected output filenames are in the form `<name>.expected`.
Each input filename is parsed by the corresponding lexer and checked against the expected JSON-encoded token list.
To add/update tests do the following:
1. `export LEXER=csharp`
2. Create/edit a file `lexers/testdata/${LEXER}.actual` (eg. `csharp.actual`).
3. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected`.
4. Run `go test -v -run TestLexers ./lexers`.
eg.
```bash
$ export LEXER=csharp
$ go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected
$ go test -v -run TestLexers ./lexers
=== RUN TestLexers
=== RUN TestLexers/C#
=== RUN TestLexers/CSS
--- PASS: TestLexers (0.01s)
--- PASS: TestLexers/C# (0.00s)
--- PASS: TestLexers/CSS (0.00s)
PASS
ok github.com/alecthomas/chroma/lexers 0.032s
```

View File

@@ -0,0 +1,11 @@
DriveInfo[] drives = DriveInfo.GetDrives();
foreach (DriveInfo drive in drives)
{
IEnumerable<string> driveFolders =
Directory.EnumerateDirectories(drive.RootDirectory.ToString());
foreach (string dir in driveFolders)
{
Console.WriteLine(dir);
}
}

View File

@@ -0,0 +1,73 @@
[
{"type":"Name","value":"DriveInfo"},
{"type":"NameAttribute","value":"[]"},
{"type":"Text","value":" "},
{"type":"Name","value":"drives"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"="},
{"type":"Text","value":" "},
{"type":"NameClass","value":"DriveInfo"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"GetDrives"},
{"type":"Punctuation","value":"();"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"foreach"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"("},
{"type":"Name","value":"DriveInfo"},
{"type":"Text","value":" "},
{"type":"Name","value":"drive"},
{"type":"Text","value":" "},
{"type":"Keyword","value":"in"},
{"type":"Text","value":" "},
{"type":"Name","value":"drives"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"Name","value":"IEnumerable"},
{"type":"Punctuation","value":"\u003c"},
{"type":"KeywordType","value":"string"},
{"type":"Punctuation","value":"\u003e"},
{"type":"Text","value":" "},
{"type":"Name","value":"driveFolders"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"="},
{"type":"Text","value":"\n "},
{"type":"NameClass","value":"Directory"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"EnumerateDirectories"},
{"type":"Punctuation","value":"("},
{"type":"NameClass","value":"drive"},
{"type":"Punctuation","value":"."},
{"type":"NameClass","value":"RootDirectory"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"ToString"},
{"type":"Punctuation","value":"());"},
{"type":"Text","value":"\n\n "},
{"type":"Keyword","value":"foreach"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"("},
{"type":"KeywordType","value":"string"},
{"type":"Text","value":" "},
{"type":"Name","value":"dir"},
{"type":"Text","value":" "},
{"type":"Keyword","value":"in"},
{"type":"Text","value":" "},
{"type":"Name","value":"driveFolders"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"NameClass","value":"Console"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"WriteLine"},
{"type":"Punctuation","value":"("},
{"type":"Name","value":"dir"},
{"type":"Punctuation","value":");"},
{"type":"Text","value":"\n "},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
]

View File

@@ -0,0 +1,3 @@
:root {
--variable-name: #fff;
}

View File

@@ -0,0 +1,16 @@
[
{"type":"Punctuation","value":":"},
{"type":"NameDecorator","value":"root"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"NameVariable","value":"--variable-name"},
{"type":"Text","value":""},
{"type":"Punctuation","value":":"},
{"type":"Text","value":" "},
{"type":"LiteralNumberHex","value":"#fff"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
]

vendor/github.com/alecthomas/chroma/lexers/tex.go generated vendored Normal file
View File

@@ -0,0 +1,55 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
)
// Tex lexer.
var TeX = Register(MustNewLexer(
&Config{
Name: "TeX",
Aliases: []string{"tex", "latex"},
Filenames: []string{"*.tex", "*.aux", "*.toc"},
MimeTypes: []string{"text/x-tex", "text/x-latex"},
},
Rules{
"general": {
{`%.*?\n`, Comment, nil},
{`[{}]`, NameBuiltin, nil},
{`[&_^]`, NameBuiltin, nil},
},
"root": {
{`\\\[`, LiteralStringBacktick, Push("displaymath")},
{`\\\(`, LiteralString, Push("inlinemath")},
{`\$\$`, LiteralStringBacktick, Push("displaymath")},
{`\$`, LiteralString, Push("inlinemath")},
{`\\([a-zA-Z]+|.)`, Keyword, Push("command")},
{`\\$`, Keyword, nil},
Include("general"),
{`[^\\$%&_^{}]+`, Text, nil},
},
"math": {
{`\\([a-zA-Z]+|.)`, NameVariable, nil},
Include("general"),
{`[0-9]+`, LiteralNumber, nil},
{`[-=!+*/()\[\]]`, Operator, nil},
{`[^=!+*/()\[\]\\$%&_^{}0-9-]+`, NameBuiltin, nil},
},
"inlinemath": {
{`\\\)`, LiteralString, Pop(1)},
{`\$`, LiteralString, Pop(1)},
Include("math"),
},
"displaymath": {
{`\\\]`, LiteralString, Pop(1)},
{`\$\$`, LiteralString, Pop(1)},
{`\$`, NameBuiltin, nil},
Include("math"),
},
"command": {
{`\[.*?\]`, NameAttribute, nil},
{`\*`, Keyword, nil},
Default(Pop(1)),
},
},
))

View File

@@ -18,6 +18,8 @@ var TransactSQL = Register(MustNewLexer(
{`\s+`, TextWhitespace, nil},
{`--(?m).*?$\n?`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("multiline-comments")},
{`'`, LiteralStringSingle, Push("string")},
{`"`, LiteralStringName, Push("quoted-ident")},
{Words(``, ``, `!<`, `!=`, `!>`, `<`, `<=`, `<>`, `=`, `>`, `>=`, `+`, `+=`, `-`, `-=`, `*`, `*=`, `/`, `/=`, `%`, `%=`, `&`, `&=`, `|`, `|=`, `^`, `^=`, `~`, `::`), Operator, nil},
{Words(``, `\b`, `all`, `and`, `any`, `between`, `except`, `exists`, `in`, `intersect`, `like`, `not`, `or`, `some`, `union`), OperatorWord, nil},
{Words(``, `\b`, `bigint`, `binary`, `bit`, `char`, `cursor`, `date`, `datetime`, `datetime2`, `datetimeoffset`, `decimal`, `float`, `hierarchyid`, `image`, `int`, `money`, `nchar`, `ntext`, `numeric`, `nvarchar`, `real`, `smalldatetime`, `smallint`, `smallmoney`, `sql_variant`, `table`, `text`, `time`, `timestamp`, `tinyint`, `uniqueidentifier`, `varbinary`, `varchar`, `xml`), NameClass, nil},
@@ -30,8 +32,6 @@ var TransactSQL = Register(MustNewLexer(
{`\.[0-9]+(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
{`[0-9]+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`'(''|[^'])*'`, LiteralStringSingle, nil},
{`"(""|[^"])*"`, LiteralStringSymbol, nil},
{`[;(),.]`, Punctuation, nil},
{`@@\w+`, NameBuiltin, nil},
{`@\w+`, NameVariable, nil},
@@ -45,5 +45,15 @@ var TransactSQL = Register(MustNewLexer(
{`[^/*]+`, CommentMultiline, nil},
{`[/*]`, CommentMultiline, nil},
},
"string": {
{`[^']+`, LiteralStringSingle, nil},
{`''`, LiteralStringSingle, nil},
{`'`, LiteralStringSingle, Pop(1)},
},
"quoted-ident": {
{`[^"]+`, LiteralStringName, nil},
{`""`, LiteralStringName, nil},
{`"`, LiteralStringName, Pop(1)},
},
},
))

View File

@@ -9,7 +9,7 @@ var XML = Register(MustNewLexer(
&Config{
Name: "XML",
Aliases: []string{"xml"},
Filenames: []string{"*.xml", "*.xsl", "*.rss", "*.xslt", "*.xsd", "*.wsdl", "*.wsf"},
Filenames: []string{"*.xml", "*.xsl", "*.rss", "*.xslt", "*.xsd", "*.wsdl", "*.wsf", "*.svg"},
MimeTypes: []string{"text/xml", "application/xml", "image/svg+xml", "application/rss+xml", "application/atom+xml"},
DotAll: true,
},

View File

@@ -0,0 +1,19 @@
package quick_test
import (
"log"
"os"
"github.com/alecthomas/chroma/quick"
)
func Example() {
code := `package main
func main() { }
`
err := quick.Highlight(os.Stdout, code, "go", "html", "monokai")
if err != nil {
log.Fatal(err)
}
}

vendor/github.com/alecthomas/chroma/quick/quick.go generated vendored Normal file
View File

@@ -0,0 +1,44 @@
// Package quick provides simple, no-configuration source code highlighting.
package quick
import (
"io"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
// Highlight some text.
//
// Lexer, formatter and style may be empty, in which case a best-effort is made.
func Highlight(w io.Writer, source, lexer, formatter, style string) error {
// Determine lexer.
l := lexers.Get(lexer)
if l == nil {
l = lexers.Analyse(source)
}
if l == nil {
l = lexers.Fallback
}
l = chroma.Coalesce(l)
// Determine formatter.
f := formatters.Get(formatter)
if f == nil {
f = formatters.Fallback
}
// Determine style.
s := styles.Get(style)
if s == nil {
s = styles.Fallback
}
it, err := l.Tokenise(nil, source)
if err != nil {
return err
}
return f.Format(w, s, it)
}

View File

@@ -1,5 +1,10 @@
package chroma
import (
"encoding/json"
"fmt"
)
//go:generate stringer -type TokenType
// TokenType is the type of token to highlight.
@@ -7,6 +12,22 @@ package chroma
// It is also an Emitter, emitting a single token of itself
type TokenType int
func (t *TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) }
func (t *TokenType) UnmarshalJSON(data []byte) error {
key := ""
err := json.Unmarshal(data, &key)
if err != nil {
return err
}
for tt, text := range _TokenType_map {
if text == key {
*t = tt
return nil
}
}
return fmt.Errorf("unknown TokenType %q", data)
}
// Set of TokenTypes.
//
// Categories of types are grouped in ranges of 1000, while sub-categories are in ranges of 100. For
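
Together with the `json` struct tags added to Token earlier in this diff, this marshaller pair round-trips token types through their string names — the same shape the `.expected` testdata files use. A small sketch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	tok := &chroma.Token{Type: chroma.Keyword, Value: "foreach"}
	data, err := json.Marshal(tok)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data)) // {"type":"Keyword","value":"foreach"}

	var back chroma.Token
	if err := json.Unmarshal(data, &back); err != nil {
		panic(err)
	}
	fmt.Println(back.Type == chroma.Keyword) // true
}
```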

View File

@@ -0,0 +1 @@
[?.5x1fACc8E3'

View File

@@ -0,0 +1 @@
\b\b\b\b\b\b\b\b\b

View File

@@ -0,0 +1 @@
(?#)(?#)](?#)]?#)

View File

@@ -0,0 +1 @@
\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\11(\1

View File

@@ -0,0 +1 @@
(?-)(?-)

View File

@@ -0,0 +1 @@
(?-----------------

View File

@@ -0,0 +1 @@
((?'256'abc)\d+)?(?'16')(.*)

View File

@@ -0,0 +1 @@
((((((){2147483647}((){2147483647}(){2147483647})){2147483647}))))

View File

@@ -0,0 +1 @@
[\b\b\b\b\b\b

View File

@@ -0,0 +1 @@
\D\D\D\D

View File

@@ -0,0 +1 @@
(?I)іііііΉііΉіΉ

View File

@@ -0,0 +1 @@
(){6,1}

View File

@@ -0,0 +1,2 @@
((')'()'()'(')'()
)

View File

@@ -0,0 +1 @@
((){976})

View File

@@ -0,0 +1 @@
[ケ-ケ-[[-ケ-[ケ]]][ケ-ケ-[[-ケ-[ケ]]]

View File

@@ -0,0 +1 @@
\8090820312

View File

@@ -0,0 +1 @@
(?=)((?=)(?=)(?=)(?=)(?=)(?=)(?=))(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)

View File

@@ -0,0 +1 @@
[cA2sx5fl7Uv_10)][cA2sx5fl7Uv_10]

View File

@@ -0,0 +1 @@
("?e*"?e*

View File

@@ -0,0 +1 @@
((()?)?)?(()?)?(()?)?(((()?)?)?)?(()?)?(((()?)?((()?)?)?(((()?)?)?(()?)?)?)?)?(()?)?((((()?)?)?)?)?

View File

@@ -0,0 +1 @@
[\w\W]?]

View File

@@ -0,0 +1 @@
(?'𠜎𠜎𠹝𠹝

View File

@@ -0,0 +1 @@
(A|9)(A|9)(A|A||A|9)(A|9)(A|A||A(A|9)(A|A||A|9)(A|{Î)(A|A||A|9)|9)

View File

@@ -0,0 +1 @@
((?'256'bc)\d+)?(?'16')(.)

View File

@@ -0,0 +1 @@
{'{(

View File

@@ -0,0 +1 @@
(?'-U'(?'-U'(?'-U'(?'-U'(?'U

View File

@@ -0,0 +1 @@
['-Q'-?'-Q'-?-''-Q'-?-n\n-''-/'-6-''-Q'-?-n\n-''-/'-6

View File

@@ -0,0 +1 @@
[\u8333\u8f3a\u8f3a\u833a\u833a\u833a\u833a\u833a\u8f3a\u8333\u833a\u8f33

View File

@@ -0,0 +1 @@
(?'U-6'(?'U-6'(?'U-6'(?'U-6'(?'6'(?'U-

View File

@@ -0,0 +1 @@
(?n)()()(()(()()))()((())

View File

@@ -0,0 +1 @@
(?I)[[-Ƹ][[-Ƹ][[-Ƹ]+[[-Ƹ]+[[-Ƹ][[-Ƹ]+

View File

@@ -0,0 +1 @@
(?n)((@$)(@$))

View File

@@ -0,0 +1 @@
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$

View File

@@ -0,0 +1 @@
(?)(?)(?)

View File

@@ -0,0 +1 @@
(?I)(A9A7450580596923828125)

View File

@@ -0,0 +1 @@
(?I)(.*\3826658AA)

View File

@@ -0,0 +1 @@
((8((((((((((((((9(((((((((((((((((((((?'251(((((((((

View File

@@ -0,0 +1 @@
\A\A\A\A\A\A\A(\A\A\A\A

View File

@@ -0,0 +1 @@
[<5B>-<2D>-[<5B>-<2D>-[<5B>]]<5D>

View File

@@ -0,0 +1 @@
(?#))(?#))(?#)((?#))(?#))(?#

View File

@@ -0,0 +1 @@
(?!(?!(?k(?!(?!(?!

View File

@@ -0,0 +1 @@
(c?]?]??]??`?]?`?]??]??`?]?`?]?)

View File

@@ -0,0 +1 @@
(?(?<=(?(?<=(?(?<=(?(?<=

View File

@@ -0,0 +1 @@
[+](?#)([+](?#)

View File

@@ -0,0 +1 @@
((?'6')+)?(?'6'.)

View File

@@ -0,0 +1 @@
[\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\pp\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p\p

View File

@@ -0,0 +1 @@
(\16(.)

View File

@@ -0,0 +1 @@
(?I)'''''invalid group name: group names must begin with a word character and have a matching terminator'

View File

@@ -0,0 +1 @@
(?I)[RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][LOKNGH][ROQNGH][ONGAXBWH][RLOKQNGAXBWH][LOKNGAXBWH][LOKNGH][ROQNGH][ONGAXBWH][RLOKQNGAXBWH][LOKNGH][ROQNGAXBWH]

View File

@@ -0,0 +1 @@
(?M)(^^^^^^^

View File

@@ -0,0 +1 @@
(?n:(?I:(?I:(?I:(?I:(?I:(?I:(?I:(?I:

View File

@@ -0,0 +1 @@
(()(())(())(())(()))()(())()((()(())(())(())(()))(()(())()(())(())(()()())(()))()(()())()()()(())

View File

@@ -0,0 +1 @@
(?'e69(?'Call'(?'e69(?'Call

Some files were not shown because too many files have changed in this diff.