Use better dep version

This version of dep is better at pruning unused source code from
dependencies, so we don't need to carry around such a large vendor
directory.
This commit is contained in:
Kevin Burke 2018-02-07 08:45:30 -08:00
parent d441569025
commit 5b9795d676
No known key found for this signature in database
GPG Key ID: 24B0EF06511BA263
1921 changed files with 4 additions and 6964 deletions

View File

@ -1,3 +1,7 @@
# Prune unused packages and Go test files from vendored dependencies to
# keep the vendor directory small.
[prune]
  unused-packages = true
  go-tests = true

[[constraint]]
  name = "github.com/alecthomas/chroma"
  # NOTE(review): dep normally uses `version` for semver tags and
  # `revision` for commit hashes; confirm "v0.2.1" is intended here.
  revision = "v0.2.1"

View File

@ -1,136 +0,0 @@
package main
import (
"io/ioutil"
"os"
"strings"
"text/template"
"github.com/aymerick/douceur/css"
"github.com/aymerick/douceur/parser"
"gopkg.in/alecthomas/kingpin.v3-unstable"
"github.com/alecthomas/chroma"
)
const (
	// outputTemplate is the Go source template rendered to stdout by main.
	// It is executed with a context containing "Name" and "Rules", and the
	// helper funcs Lower, TokenType and TranslateDecls (registered in main).
	outputTemplate = `package styles
import (
"github.com/alecthomas/chroma"
)
// {{.Name}} style.
var {{.Name}} = Register(chroma.MustNewStyle("{{.Name|Lower}}", chroma.StyleEntries{
{{- range .Rules}}
{{- if .Prelude|TokenType}}
chroma.{{.Prelude|TokenType}}: "{{.Declarations|TranslateDecls}}",
{{- end}}
{{- end}}
}))
`
)
var (
	// typeByClass maps CSS class selectors (e.g. ".hll") to chroma token
	// types. It is extended with chroma.StandardTypes in init.
	typeByClass = map[string]chroma.TokenType{
		".hll": chroma.Background,
	}

	// cssNamedColours maps CSS named colours to their hex values, so that
	// named colours in the input stylesheet can be emitted as hex.
	cssNamedColours = map[string]string{
		"black": "#000000", "silver": "#c0c0c0", "gray": "#808080", "white": "#ffffff",
		"maroon": "#800000", "red": "#ff0000", "purple": "#800080", "fuchsia": "#ff00ff",
		"green": "#008000", "lime": "#00ff00", "olive": "#808000", "yellow": "#ffff00",
		"navy": "#000080", "blue": "#0000ff", "teal": "#008080", "aqua": "#00ffff",
		"orange": "#ffa500", "aliceblue": "#f0f8ff", "antiquewhite": "#faebd7", "aquamarine": "#7fffd4",
		"azure": "#f0ffff", "beige": "#f5f5dc", "bisque": "#ffe4c4", "blanchedalmond": "#ffebcd",
		"blueviolet": "#8a2be2", "brown": "#a52a2a", "burlywood": "#deb887", "cadetblue": "#5f9ea0",
		"chartreuse": "#7fff00", "chocolate": "#d2691e", "coral": "#ff7f50", "cornflowerblue": "#6495ed",
		"cornsilk": "#fff8dc", "crimson": "#dc143c", "cyan": "#00ffff", "darkblue": "#00008b",
		"darkcyan": "#008b8b", "darkgoldenrod": "#b8860b", "darkgray": "#a9a9a9", "darkgreen": "#006400",
		"darkgrey": "#a9a9a9", "darkkhaki": "#bdb76b", "darkmagenta": "#8b008b", "darkolivegreen": "#556b2f",
		"darkorange": "#ff8c00", "darkorchid": "#9932cc", "darkred": "#8b0000", "darksalmon": "#e9967a",
		"darkseagreen": "#8fbc8f", "darkslateblue": "#483d8b", "darkslategray": "#2f4f4f", "darkslategrey": "#2f4f4f",
		"darkturquoise": "#00ced1", "darkviolet": "#9400d3", "deeppink": "#ff1493", "deepskyblue": "#00bfff",
		"dimgray": "#696969", "dimgrey": "#696969", "dodgerblue": "#1e90ff", "firebrick": "#b22222",
		"floralwhite": "#fffaf0", "forestgreen": "#228b22", "gainsboro": "#dcdcdc", "ghostwhite": "#f8f8ff",
		"gold": "#ffd700", "goldenrod": "#daa520", "greenyellow": "#adff2f", "grey": "#808080",
		"honeydew": "#f0fff0", "hotpink": "#ff69b4", "indianred": "#cd5c5c", "indigo": "#4b0082",
		"ivory": "#fffff0", "khaki": "#f0e68c", "lavender": "#e6e6fa", "lavenderblush": "#fff0f5",
		"lawngreen": "#7cfc00", "lemonchiffon": "#fffacd", "lightblue": "#add8e6", "lightcoral": "#f08080",
		"lightcyan": "#e0ffff", "lightgoldenrodyellow": "#fafad2", "lightgray": "#d3d3d3", "lightgreen": "#90ee90",
		"lightgrey": "#d3d3d3", "lightpink": "#ffb6c1", "lightsalmon": "#ffa07a", "lightseagreen": "#20b2aa",
		"lightskyblue": "#87cefa", "lightslategray": "#778899", "lightslategrey": "#778899", "lightsteelblue": "#b0c4de",
		"lightyellow": "#ffffe0", "limegreen": "#32cd32", "linen": "#faf0e6", "magenta": "#ff00ff",
		"mediumaquamarine": "#66cdaa", "mediumblue": "#0000cd", "mediumorchid": "#ba55d3", "mediumpurple": "#9370db",
		"mediumseagreen": "#3cb371", "mediumslateblue": "#7b68ee", "mediumspringgreen": "#00fa9a", "mediumturquoise": "#48d1cc",
		"mediumvioletred": "#c71585", "midnightblue": "#191970", "mintcream": "#f5fffa", "mistyrose": "#ffe4e1",
		"moccasin": "#ffe4b5", "navajowhite": "#ffdead", "oldlace": "#fdf5e6", "olivedrab": "#6b8e23",
		"orangered": "#ff4500", "orchid": "#da70d6", "palegoldenrod": "#eee8aa", "palegreen": "#98fb98",
		"paleturquoise": "#afeeee", "palevioletred": "#db7093", "papayawhip": "#ffefd5", "peachpuff": "#ffdab9",
		"peru": "#cd853f", "pink": "#ffc0cb", "plum": "#dda0dd", "powderblue": "#b0e0e6",
		"rosybrown": "#bc8f8f", "royalblue": "#4169e1", "saddlebrown": "#8b4513", "salmon": "#fa8072",
		"sandybrown": "#f4a460", "seagreen": "#2e8b57", "seashell": "#fff5ee", "sienna": "#a0522d",
		"skyblue": "#87ceeb", "slateblue": "#6a5acd", "slategray": "#708090", "slategrey": "#708090",
		"snow": "#fffafa", "springgreen": "#00ff7f", "steelblue": "#4682b4", "tan": "#d2b48c",
		"thistle": "#d8bfd8", "tomato": "#ff6347", "turquoise": "#40e0d0", "violet": "#ee82ee",
		"wheat": "#f5deb3", "whitesmoke": "#f5f5f5", "yellowgreen": "#9acd32", "rebeccapurple": "#663399",
	}

	// Command-line arguments: the name of the generated style, and the CSS
	// file to import.
	nameArg = kingpin.Arg("name", "Name of output style.").Required().String()
	fileArg = kingpin.Arg("stylesheets", ".css file to import").Required().ExistingFile()
)
// init extends typeByClass with a ".<shortname>" selector entry for every
// standard chroma token type.
func init() {
	for tt, str := range chroma.StandardTypes {
		typeByClass["."+str] = tt
	}
}
// translateDecls converts a set of CSS declarations into a single
// space-separated chroma style entry string (colour, "bg:" background,
// and the italic/bold/underline attributes).
func translateDecls(decls []*css.Declaration) string {
	entries := []string{}
	for _, decl := range decls {
		switch decl.Property {
		case "color":
			value := decl.Value
			// Named colours must be emitted as hex.
			if hex, ok := cssNamedColours[value]; ok {
				value = hex
			}
			entries = append(entries, value)
		case "background-color":
			entries = append(entries, "bg:"+decl.Value)
		case "font-style":
			if strings.Contains(decl.Value, "italic") {
				entries = append(entries, "italic")
			}
		case "font-weight":
			if strings.Contains(decl.Value, "bold") {
				entries = append(entries, "bold")
			}
		case "text-decoration":
			if strings.Contains(decl.Value, "underline") {
				entries = append(entries, "underline")
			}
		}
	}
	return strings.Join(entries, " ")
}
// main reads the CSS file named on the command line, parses it with douceur,
// and renders a Go chroma style definition for it to stdout using
// outputTemplate.
func main() {
	kingpin.Parse()
	source, err := ioutil.ReadFile(*fileArg)
	kingpin.FatalIfError(err, "")
	css, err := parser.Parse(string(source))
	kingpin.FatalIfError(err, "")
	context := map[string]interface{}{
		"Name":  *nameArg,
		"Rules": css.Rules,
	}
	// Lower, TranslateDecls and TokenType are referenced from outputTemplate.
	tmpl := template.Must(template.New("style").Funcs(template.FuncMap{
		"Lower":          strings.ToLower,
		"TranslateDecls": translateDecls,
		"TokenType":      func(s string) chroma.TokenType { return typeByClass[s] },
	}).Parse(outputTemplate))
	err = tmpl.Execute(os.Stdout, context)
	kingpin.FatalIfError(err, "")
}

View File

@ -1,38 +0,0 @@
package main
import (
"fmt"
"io/ioutil"
"os"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
"gopkg.in/alecthomas/kingpin.v3-unstable"
)
var (
	// filesArgs is the list of existing files to run the lexers over.
	filesArgs = kingpin.Arg("file", "Files to use to exercise lexers.").Required().ExistingFiles()
)
// main tokenises every file given on the command line with its matched
// lexer, warning about files with no lexer and aborting on tokenisation or
// formatting errors. It is used to exercise lexers against a corpus.
func main() {
	// BUG FIX: help text previously said "linters"; this tool exercises
	// lexers (see the filesArgs help string).
	kingpin.CommandLine.Help = "Exercise lexers against a list of files."
	kingpin.Parse()
	for _, file := range *filesArgs {
		lexer := lexers.Match(file)
		if lexer == nil {
			fmt.Printf("warning: could not find lexer for %q\n", file)
			continue
		}
		fmt.Printf("%s: ", file)
		os.Stdout.Sync()
		text, err := ioutil.ReadFile(file)
		kingpin.FatalIfError(err, "")
		it, err := lexer.Tokenise(nil, string(text))
		kingpin.FatalIfError(err, "%s failed to tokenise %q", lexer.Config().Name, file)
		// NoOp formatting walks the full token stream without producing
		// visible output, surfacing any formatting-time errors.
		err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
		kingpin.FatalIfError(err, "%s failed to format %q", lexer.Config().Name, file)
		fmt.Printf("ok\n")
	}
}

View File

@ -1,196 +0,0 @@
import functools
import importlib
import json
import os
import re
import sys
import types
import pystache
from pygments import lexer as pygments_lexer
from pygments.token import _TokenType
# TEMPLATE is the mustache template for the generated Go lexer source. It is
# rendered with a TemplateView, whose attributes and re_* predicate methods
# supply the {{...}} fields below.
TEMPLATE = r'''
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
)
// {{upper_name}} lexer.
var {{upper_name}} = Register(MustNewLexer(
&Config{
Name: "{{name}}",
Aliases: []string{ {{#aliases}}"{{.}}", {{/aliases}} },
Filenames: []string{ {{#filenames}}"{{.}}", {{/filenames}} },
MimeTypes: []string{ {{#mimetypes}}"{{.}}", {{/mimetypes}} },
{{#re_not_multiline}}
NotMultiline: true,
{{/re_not_multiline}}
{{#re_dotall}}
DotAll: true,
{{/re_dotall}}
{{#re_ignorecase}}
CaseInsensitive: true,
{{/re_ignorecase}}
},
Rules{
{{#tokens}}
"{{state}}": {
{{#rules}}
{{{.}}},
{{/rules}}
},
{{/tokens}}
},
))
'''
def go_regex(s):
    """Quote a regex for emission into Go source (same rules as go_string)."""
    return go_string(s)


def go_string(s):
    """Render s as a Go string literal.

    Prefers a raw backtick literal; falls back to a JSON-escaped
    double-quoted string when s itself contains a backtick.
    """
    return '`' + s + '`' if '`' not in s else json.dumps(s)
def to_camel_case(snake_str):
    """Convert a snake_case name to CamelCase."""
    return ''.join(part.title() for part in snake_str.split('_'))
def warning(message):
    """Print a warning message to stderr."""
    print(f'warning: {message}', file=sys.stderr)
def resolve_emitter(emitter):
    """Translate a pygments rule emitter into Go source for a chroma emitter.

    Handles bygroups()/using() closures, plain token types, and None
    (which pygments produces for lookahead/behind-only rules); any other
    emitter raises ValueError.
    """
    if isinstance(emitter, types.FunctionType):
        if repr(emitter).startswith('<function bygroups.'):
            # bygroups(...) stores its arguments in the closure; translate
            # each group emitter recursively.
            args = emitter.__closure__[0].cell_contents
            emitter = 'ByGroups(%s)' % ', '.join(resolve_emitter(e) for e in args)
        elif repr(emitter).startswith('<function using.'):
            args = emitter.__closure__[0].cell_contents
            if isinstance(args, dict):
                # using(this, state=...) form: delegate to our own lexer,
                # optionally starting from a named state.
                state = 'root'
                if 'stack' in args:
                    state = args['stack'][1]
                    args.pop('stack')
                assert args == {}, args
                emitter = 'UsingSelf("%s")' % state
            elif issubclass(args, pygments_lexer.Lexer):
                name = args.__name__
                if name.endswith('Lexer'):
                    name = name[:-5]
                emitter = 'Using(%s, nil)' % name
            else:
                raise ValueError('only support "using" with lexer classes, not %r' % args)
        else:
            warning('unsupported emitter function %r' % emitter)
            emitter = '?? %r ??' % emitter
    elif isinstance(emitter, _TokenType):
        # Token types stringify like "Token.Name.Function"; dropping the dots
        # and the leading "Token" yields the chroma identifier.
        emitter = str(emitter).replace('.', '')[5:]
    elif emitter is None:
        # This generally only occurs when a lookahead/behind assertion is used, so we just allow it
        # through.
        return 'None'
    else:
        raise ValueError('unsupported emitter type %r' % emitter)
    assert isinstance(emitter, str)
    return emitter
def process_state_action(action):
    """Translate a pygments state action into a tuple of Go mutator calls.

    A tuple action is flattened by translating each element; a bare string
    is a state to push; '#pop'/'#pop:N'/'#push'/'#push:state' map to the
    corresponding chroma Pop/Push calls.
    """
    if isinstance(action, tuple):
        result = ()
        for part in action:
            result += process_state_action(part)
        return result
    if not action.startswith('#'):
        return ('Push("%s")' % action,)
    directive = action[1:]
    if directive == 'pop':
        return ('Pop(1)',)
    if directive.startswith('pop:'):
        return ('Pop(%s)' % directive[4:],)
    if directive == 'push':
        return ('Push()',)
    if directive.startswith('push:'):
        return ('Push("%s")' % directive[5:],)
    raise ValueError('unsupported action %r' % (directive,))
def translate_rules(rules):
    """Translate a pygments state's rule list into Go chroma rule sources.

    Each rule becomes one string of Go source: a `{regex, emitter, modifier}`
    literal for (regex, emitter[, modifier]) tuples, or an Include(...) /
    Default(...) call. Unsupported rule shapes raise ValueError.
    """
    out = []
    for rule in rules:
        if isinstance(rule, tuple):
            regex = rule[0]
            if isinstance(regex, str):
                regex = go_regex(regex)
            elif isinstance(regex, pygments_lexer.words):
                # words(...) lists literal alternatives with optional
                # prefix/suffix; chroma has a matching Words() helper.
                regex = 'Words(%s, %s, %s)' % (go_string(regex.prefix),
                                               go_string(regex.suffix),
                                               ', '.join(go_string(w) for w in regex.words))
            else:
                raise ValueError('expected regex string but got %r' % regex)
            emitter = resolve_emitter(rule[1])
            if len(rule) == 2:
                modifier = 'nil'
            elif type(rule[2]) is str:
                modifier = process_state_action(rule[2])[0]
            elif isinstance(rule[2], pygments_lexer.combined):
                modifier = 'Combined("%s")' % '", "'.join(rule[2])
            elif type(rule[2]) is tuple:
                modifier = 'Push("%s")' % '", "'.join(rule[2])
            else:
                raise ValueError('unsupported modifier %r' % (rule[2],))
            out.append('{{ {}, {}, {} }}'.format(regex, emitter, modifier))
        elif isinstance(rule, pygments_lexer.include):
            # include subclasses str, so formatting it yields the state name.
            out.append('Include("{}")'.format(rule))
        elif isinstance(rule, pygments_lexer.default):
            out.append('Default({})'.format(', '.join(process_state_action(rule.state))))
        else:
            raise ValueError('unsupported rule %r' % (rule,))
    return out
class TemplateView(object):
    """Mustache view exposing arbitrary keyword attributes plus regex-flag
    predicates used by TEMPLATE's conditional sections."""

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def re_not_multiline(self):
        return not (self.regex_flags & re.MULTILINE)

    def re_dotall(self):
        return self.regex_flags & re.DOTALL

    def re_ignorecase(self):
        return self.regex_flags & re.IGNORECASE
def main():
    """Import the lexer class named by argv[1] ("package.ClassName") and
    print the equivalent Go chroma lexer source to stdout.

    Only pygments RegexLexer subclasses can be translated.
    """
    package_name, symbol_name = sys.argv[1].rsplit(sep=".", maxsplit=1)
    package = importlib.import_module(package_name)
    lexer_cls = getattr(package, symbol_name)
    assert issubclass(lexer_cls, pygments_lexer.RegexLexer), 'can only translate from RegexLexer'
    print(pystache.render(TEMPLATE, TemplateView(
        name=lexer_cls.name,
        regex_flags=lexer_cls.flags,
        upper_name=to_camel_case(lexer_cls.name),
        aliases=lexer_cls.aliases,
        filenames=lexer_cls.filenames,
        mimetypes=lexer_cls.mimetypes,
        tokens=[{'state': state, 'rules': translate_rules(rules)} for (state, rules) in lexer_cls.get_tokendefs().items()],
    )))


if __name__ == '__main__':
    main()

View File

@ -1,62 +0,0 @@
import importlib
import sys
import pystache
from pygments.style import Style
from pygments.token import Token
# TEMPLATE is the mustache template for the generated Go style source,
# rendered with a context of upper_name, name and styles entries.
TEMPLATE = r'''
package styles
import (
"github.com/alecthomas/chroma"
)
// {{upper_name}} style.
var {{upper_name}} = Register(chroma.MustNewStyle("{{name}}", chroma.StyleEntries{
{{#styles}}
chroma.{{type}}: "{{style}}",
{{/styles}}
}))
'''
def to_camel_case(snake_str):
    """Convert a snake_case name to CamelCase."""
    return ''.join(map(str.title, snake_str.split('_')))
def translate_token_type(t):
    """Render a pygments token type as a chroma token identifier.

    The bare Token root maps to chroma's Background. The join assumes the
    token type iterates as its path components (e.g. Token.Name.Function ->
    "NameFunction") -- verify against pygments' _TokenType if this changes.
    """
    if t == Token:
        t = Token.Background
    return "".join(map(str, t))
def main():
    """Import the pygments Style subclass named by argv[2] ("package.Class")
    and print an equivalent Go chroma style named argv[1] to stdout."""
    name = sys.argv[1]
    package_name, symbol_name = sys.argv[2].rsplit(sep=".", maxsplit=1)
    package = importlib.import_module(package_name)
    style_cls = getattr(package, symbol_name)
    assert issubclass(style_cls, Style), 'can only translate from Style subclass'
    styles = dict(style_cls.styles)
    # Fold the stylesheet-wide background colour into the root Token entry.
    bg = "bg:" + style_cls.background_color
    if Token in styles:
        styles[Token] += " " + bg
    else:
        styles[Token] = bg
    context = {
        # Drop the trailing 5 characters -- presumably the "Style" class-name
        # suffix; confirm against the pygments naming convention.
        'upper_name': style_cls.__name__[:-5],
        'name': name,
        # Empty style strings are skipped.
        'styles': [{'type': translate_token_type(t), 'style': s}
                   for t, s in styles.items() if s],
    }
    print(pystache.render(TEMPLATE, context))


if __name__ == '__main__':
    main()

View File

@ -1,269 +0,0 @@
package main
import (
"bufio"
"fmt"
"io"
"io/ioutil"
"os"
"os/signal"
"runtime"
"runtime/pprof"
"sort"
"strconv"
"strings"
"github.com/mattn/go-colorable"
"github.com/mattn/go-isatty"
"gopkg.in/alecthomas/kingpin.v3-unstable"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/formatters/html"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
// Command-line flags and arguments for the chroma command.
var (
	// Populated by goreleaser.
	version = "?"
	commit  = "?"
	date    = "?"

	profileFlag    = kingpin.Flag("profile", "Enable profiling to file.").Hidden().String()
	listFlag       = kingpin.Flag("list", "List lexers, styles and formatters.").Bool()
	unbufferedFlag = kingpin.Flag("unbuffered", "Do not buffer output.").Bool()
	traceFlag      = kingpin.Flag("trace", "Trace lexer states as they are traversed.").Bool()
	checkFlag      = kingpin.Flag("check", "Do not format, check for tokenization errors instead.").Bool()
	filenameFlag   = kingpin.Flag("filename", "Filename to use for selecting a lexer when reading from stdin.").String()
	lexerFlag      = kingpin.Flag("lexer", "Lexer to use when formatting.").PlaceHolder("autodetect").Short('l').Enum(lexers.Names(true)...)
	styleFlag      = kingpin.Flag("style", "Style to use for formatting.").Short('s').Default("swapoff").Enum(styles.Names()...)
	formatterFlag  = kingpin.Flag("formatter", "Formatter to use.").Default("terminal").Short('f').Enum(formatters.Names()...)
	jsonFlag       = kingpin.Flag("json", "Output JSON representation of tokens.").Bool()
	// HTML-formatter-specific options (see main for how they are applied).
	htmlFlag               = kingpin.Flag("html", "Enable HTML mode (equivalent to '--formatter html').").Bool()
	htmlPrefixFlag         = kingpin.Flag("html-prefix", "HTML CSS class prefix.").PlaceHolder("PREFIX").String()
	htmlStylesFlag         = kingpin.Flag("html-styles", "Output HTML CSS styles.").Bool()
	htmlOnlyFlag           = kingpin.Flag("html-only", "Output HTML fragment.").Bool()
	htmlInlineStyleFlag    = kingpin.Flag("html-inline-styles", "Output HTML with inline styles (no classes).").Bool()
	htmlTabWidthFlag       = kingpin.Flag("html-tab-width", "Set the HTML tab width.").Default("8").Int()
	htmlLinesFlag          = kingpin.Flag("html-lines", "Include line numbers in output.").Bool()
	htmlLinesTableFlag     = kingpin.Flag("html-lines-table", "Split line numbers and code in a HTML table").Bool()
	htmlLinesStyleFlag     = kingpin.Flag("html-lines-style", "Style for line numbers.").String()
	htmlHighlightFlag      = kingpin.Flag("html-highlight", "Highlight these lines.").PlaceHolder("N[:M][,...]").String()
	htmlHighlightStyleFlag = kingpin.Flag("html-highlight-style", "Style used for highlighting lines.").String()
	htmlBaseLineFlag       = kingpin.Flag("html-base-line", "Base line number.").Default("1").Int()

	filesArgs = kingpin.Arg("files", "Files to highlight.").ExistingFiles()
)
// flushableWriter is an io.Writer whose buffered output can be flushed.
type flushableWriter interface {
	io.Writer
	Flush() error
}

// nopFlushableWriter adapts an unbuffered io.Writer to flushableWriter.
type nopFlushableWriter struct{ io.Writer }

// Flush is a no-op; writes go straight to the underlying writer.
func (n *nopFlushableWriter) Flush() error { return nil }
// main is the chroma command entry point: it sets up output buffering and
// Windows colour handling, selects a style and formatter from flags, then
// lexes and formats each input file (or stdin) to stdout.
func main() {
	kingpin.CommandLine.Version(fmt.Sprintf("%s-%s-%s", version, commit, date))
	kingpin.CommandLine.Help = `
Chroma is a general purpose syntax highlighting library and corresponding
command, for Go.
`
	kingpin.Parse()
	if *listFlag {
		listAll()
		return
	}
	// Optional CPU profiling; also stopped cleanly if interrupted.
	if *profileFlag != "" {
		f, err := os.Create(*profileFlag)
		kingpin.FatalIfError(err, "")
		pprof.StartCPUProfile(f)
		signals := make(chan os.Signal, 1)
		signal.Notify(signals, os.Interrupt)
		go func() {
			<-signals
			pprof.StopCPUProfile()
			os.Exit(128 + 3)
		}()
		defer pprof.StopCPUProfile()
	}
	var out io.Writer = os.Stdout
	// Windows terminals need ANSI escape translation.
	if runtime.GOOS == "windows" && isatty.IsTerminal(os.Stdout.Fd()) {
		out = colorable.NewColorableStdout()
	}
	var w flushableWriter
	if *unbufferedFlag {
		w = &nopFlushableWriter{out}
	} else {
		w = bufio.NewWriterSize(out, 16384)
	}
	defer w.Flush()
	// --json and --html are shorthands for the corresponding formatter.
	if *jsonFlag {
		*formatterFlag = "json"
	}
	if *htmlFlag {
		*formatterFlag = "html"
	}
	// Retrieve user-specified style, clone it, and add some overrides.
	builder := styles.Get(*styleFlag).Builder()
	if *htmlHighlightStyleFlag != "" {
		builder.Add(chroma.LineHighlight, *htmlHighlightStyleFlag)
	}
	if *htmlLinesStyleFlag != "" {
		builder.Add(chroma.LineNumbers, *htmlLinesStyleFlag)
	}
	style, err := builder.Build()
	kingpin.FatalIfError(err, "")
	// Build and re-register the HTML formatter from the --html-* flags.
	if *formatterFlag == "html" {
		options := []html.Option{
			html.TabWidth(*htmlTabWidthFlag),
			html.BaseLineNumber(*htmlBaseLineFlag),
		}
		if *htmlPrefixFlag != "" {
			options = append(options, html.ClassPrefix(*htmlPrefixFlag))
		}
		// Dump styles.
		if *htmlStylesFlag {
			formatter := html.New(html.WithClasses())
			formatter.WriteCSS(w, style)
			return
		}
		if !*htmlInlineStyleFlag {
			options = append(options, html.WithClasses())
		}
		if !*htmlOnlyFlag {
			options = append(options, html.Standalone())
		}
		if *htmlLinesFlag {
			options = append(options, html.WithLineNumbers())
		}
		if *htmlLinesTableFlag {
			options = append(options, html.LineNumbersInTable())
		}
		// Parse comma-separated N[:M] line ranges for highlighting.
		if len(*htmlHighlightFlag) > 0 {
			ranges := [][2]int{}
			for _, span := range strings.Split(*htmlHighlightFlag, ",") {
				parts := strings.Split(span, ":")
				if len(parts) > 2 {
					kingpin.Fatalf("range should be N[:M], not %q", span)
				}
				start, err := strconv.ParseInt(parts[0], 10, 64)
				kingpin.FatalIfError(err, "min value of range should be integer not %q", parts[0])
				end := start
				if len(parts) == 2 {
					end, err = strconv.ParseInt(parts[1], 10, 64)
					kingpin.FatalIfError(err, "max value of range should be integer not %q", parts[1])
				}
				ranges = append(ranges, [2]int{int(start), int(end)})
			}
			options = append(options, html.HighlightLines(ranges))
		}
		formatters.Register("html", html.New(options...))
	}
	// With no file arguments, read from stdin; --filename can still select
	// a lexer for the piped content.
	if len(*filesArgs) == 0 {
		contents, err := ioutil.ReadAll(os.Stdin)
		kingpin.FatalIfError(err, "")
		format(w, style, lex(*filenameFlag, string(contents)))
	} else {
		for _, filename := range *filesArgs {
			contents, err := ioutil.ReadFile(filename)
			kingpin.FatalIfError(err, "")
			if *checkFlag {
				check(filename, lex(filename, string(contents)))
			} else {
				format(w, style, lex(filename, string(contents)))
			}
		}
	}
}
// listAll prints every registered lexer (with its aliases, filenames and
// MIME types), followed by all style and formatter names, to stdout.
func listAll() {
	fmt.Println("lexers:")
	sort.Sort(lexers.Registry.Lexers)
	for _, lexer := range lexers.Registry.Lexers {
		config := lexer.Config()
		fmt.Printf(" %s\n", config.Name)
		filenames := append([]string{}, config.Filenames...)
		filenames = append(filenames, config.AliasFilenames...)
		if len(config.Aliases) > 0 {
			fmt.Printf(" aliases: %s\n", strings.Join(config.Aliases, " "))
		}
		if len(filenames) > 0 {
			fmt.Printf(" filenames: %s\n", strings.Join(filenames, " "))
		}
		if len(config.MimeTypes) > 0 {
			fmt.Printf(" mimetypes: %s\n", strings.Join(config.MimeTypes, " "))
		}
	}
	fmt.Println()
	fmt.Printf("styles:")
	for _, name := range styles.Names() {
		fmt.Printf(" %s", name)
	}
	fmt.Println()
	fmt.Printf("formatters:")
	for _, name := range formatters.Names() {
		fmt.Printf(" %s", name)
	}
	fmt.Println()
}
// lex selects a lexer for path (falling back to the plaintext lexer),
// enables state tracing when --trace is set, coalesces adjacent tokens of
// the same type, and returns the token iterator for contents.
func lex(path string, contents string) chroma.Iterator {
	lexer := selexer(path, contents)
	if lexer == nil {
		lexer = lexers.Fallback
	}
	if rel, ok := lexer.(*chroma.RegexLexer); ok {
		rel.Trace(*traceFlag)
	}
	lexer = chroma.Coalesce(lexer)
	// contents is already a string; the previous string(contents)
	// conversion was redundant.
	it, err := lexer.Tokenise(nil, contents)
	kingpin.FatalIfError(err, "")
	return it
}
// selexer chooses a lexer: the explicit --lexer flag wins, then a match on
// the file path, and finally content analysis.
func selexer(path, contents string) (lexer chroma.Lexer) {
	if *lexerFlag != "" {
		return lexers.Get(*lexerFlag)
	}
	if path != "" {
		if matched := lexers.Match(path); matched != nil {
			return matched
		}
	}
	return lexers.Analyse(contents)
}
// format renders the token stream it to w using the formatter selected by
// --formatter and the given style, aborting on error.
func format(w io.Writer, style *chroma.Style, it chroma.Iterator) {
	formatter := formatters.Get(*formatterFlag)
	err := formatter.Format(w, style, it)
	kingpin.FatalIfError(err, "")
}
// check walks the token stream and reports every error token with its
// filename:line:col position on stdout.
func check(filename string, it chroma.Iterator) {
	line, col := 1, 0
	for token := it(); token != nil; token = it() {
		if token.Type == chroma.Error {
			fmt.Printf("%s:%d:%d %q\n", filename, line, col, token.String())
		}
		// Advance the position over the token's text.
		for _, r := range token.String() {
			col++
			if r == '\n' {
				line++
				col = 0
			}
		}
	}
}

View File

@ -1,19 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestCoalesce verifies that Coalesce merges a run of adjacent tokens of
// the same type into a single token.
func TestCoalesce(t *testing.T) {
	lexer := Coalesce(MustNewLexer(nil, Rules{
		"root": []Rule{
			{`[!@#$%^&*()]`, Punctuation, nil},
		},
	}))
	actual, err := Tokenise(lexer, nil, "!@#$")
	assert.NoError(t, err)
	expected := []*Token{{Punctuation, "!@#$"}}
	assert.Equal(t, expected, actual)
}

View File

@ -1,42 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestColourRGB verifies the red/green/blue channel accessors of a colour
// parsed from a hex string.
func TestColourRGB(t *testing.T) {
	colour := ParseColour("#8913af")
	assert.Equal(t, uint8(0x89), colour.Red())
	assert.Equal(t, uint8(0x13), colour.Green())
	assert.Equal(t, uint8(0xaf), colour.Blue())
}
// TestColourString verifies that a colour round-trips through
// ParseColour and String.
func TestColourString(t *testing.T) {
	assert.Equal(t, "#8913af", ParseColour("#8913af").String())
}
// distance returns the absolute difference between a and b.
func distance(a, b uint8) uint8 {
	if a > b {
		return a - b
	}
	return b - a
}
// TestColourBrighten checks Brighten with positive and negative factors on a
// mid grey; exact rounding may vary, so channels only need to be within 2
// of the expected value.
func TestColourBrighten(t *testing.T) {
	actual := NewColour(128, 128, 128).Brighten(0.5)
	// Closeish to what we expect is fine.
	assert.True(t, distance(192, actual.Red()) <= 2)
	assert.True(t, distance(192, actual.Blue()) <= 2)
	assert.True(t, distance(192, actual.Green()) <= 2)
	actual = NewColour(128, 128, 128).Brighten(-0.5)
	assert.True(t, distance(65, actual.Red()) <= 2)
	assert.True(t, distance(65, actual.Blue()) <= 2)
	assert.True(t, distance(65, actual.Green()) <= 2)
}
// TestColourBrightess (name typo for "Brightness", kept for continuity of
// recorded test names) checks Brightness of a mid grey, within a tolerance
// of 2/255.
func TestColourBrightess(t *testing.T) {
	actual := NewColour(128, 128, 128).Brightness()
	assert.True(t, distance(128, uint8(actual*255.0)) <= 2)
}

View File

@ -1,55 +0,0 @@
package formatters
import (
"io"
"sort"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/formatters/html"
)
var (
	// NoOp formatter writes each token's value verbatim with no markup.
	NoOp = Register("noop", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, iterator chroma.Iterator) error {
		for t := iterator(); t != nil; t = iterator() {
			if _, err := io.WriteString(w, t.Value); err != nil {
				return err
			}
		}
		return nil
	}))
	// Default HTML formatter outputs self-contained HTML.
	htmlFull = Register("html", html.New(html.Standalone(), html.WithClasses()))
)
// Fallback formatter, returned by Get when a name is not registered.
var Fallback = NoOp

// Registry of Formatters, keyed by registered name.
var Registry = map[string]chroma.Formatter{}
// Names returns the sorted names of all registered formatters.
func Names() []string {
	// Pre-size to avoid repeated slice growth while collecting keys.
	out := make([]string, 0, len(Registry))
	for name := range Registry {
		out = append(out, name)
	}
	sort.Strings(out)
	return out
}
// Get formatter by name.
//
// If the given formatter is not found, the Fallback formatter will be returned.
func Get(name string) chroma.Formatter {
	f, ok := Registry[name]
	if !ok {
		return Fallback
	}
	return f
}
// Register a named formatter, returning it for convenient use in package
// var initialisers.
func Register(name string, formatter chroma.Formatter) chroma.Formatter {
	Registry[name] = formatter
	return formatter
}

View File

@ -1,77 +0,0 @@
package html
import (
"errors"
"io/ioutil"
"strings"
"testing"
"github.com/alecthomas/assert"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
// TestCompressStyle verifies that compressStyle strips whitespace and
// shortens a 6-digit hex colour to 3 digits where possible (#888888 -> #888)
// while leaving non-collapsible colours untouched.
func TestCompressStyle(t *testing.T) {
	style := "color: #888888; background-color: #faffff"
	actual := compressStyle(style)
	expected := "color:#888;background-color:#faffff"
	assert.Equal(t, expected, actual)
}
// BenchmarkHTMLFormatter measures tokenising and HTML-formatting a small Go
// snippet per iteration; formatter construction is excluded from the timed
// region via ResetTimer.
func BenchmarkHTMLFormatter(b *testing.B) {
	formatter := New()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		it, err := lexers.Go.Tokenise(nil, "package main\nfunc main()\n{\nprintln(`hello world`)\n}\n")
		assert.NoError(b, err)
		err = formatter.Format(ioutil.Discard, styles.Fallback, it)
		assert.NoError(b, err)
	}
}
// TestSplitTokensIntoLines verifies that splitTokensIntoLines breaks token
// values on newlines, keeping each "\n" with its line and emitting a final
// empty token for the trailing line.
func TestSplitTokensIntoLines(t *testing.T) {
	in := []*chroma.Token{
		{Value: "hello", Type: chroma.NameKeyword},
		{Value: " world\nwhat?\n", Type: chroma.NameKeyword},
	}
	expected := [][]*chroma.Token{
		{
			{Type: chroma.NameKeyword, Value: "hello"},
			{Type: chroma.NameKeyword, Value: " world\n"},
		},
		{
			{Type: chroma.NameKeyword, Value: "what?\n"},
		},
		{
			{Type: chroma.NameKeyword},
		},
	}
	actual := splitTokensIntoLines(in)
	assert.Equal(t, expected, actual)
}
// TestIteratorPanicRecovery verifies that a panic raised by the token
// iterator during formatting is surfaced as an error rather than crashing.
func TestIteratorPanicRecovery(t *testing.T) {
	it := func() *chroma.Token {
		panic(errors.New("bad"))
	}
	err := New().Format(ioutil.Discard, styles.Fallback, it)
	assert.Error(t, err)
}
// TestFormatter_styleToCSS verifies that no generated CSS rule starts with
// a stray semicolon, including rules contributed via builder overrides.
func TestFormatter_styleToCSS(t *testing.T) {
	builder := styles.Get("github").Builder()
	builder.Add(chroma.LineHighlight, "bg:#ffffcc")
	builder.Add(chroma.LineNumbers, "bold")
	style, err := builder.Build()
	if err != nil {
		t.Error(err)
	}
	formatter := New(WithClasses())
	css := formatter.styleToCSS(style)
	for _, s := range css {
		if strings.HasPrefix(strings.TrimSpace(s), ";") {
			t.Errorf("rule starts with semicolon - expected valid css rule without semicolon: %v", s)
		}
	}
}

View File

@ -1,31 +0,0 @@
package formatters
import (
"encoding/json"
"fmt"
"io"
"github.com/alecthomas/chroma"
)
// JSON formatter outputs the raw token structures as a JSON array, one
// token object per line. The style argument is ignored.
var JSON = Register("json", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
	// BUG FIX: write errors from Fprintln were previously ignored while
	// Fprint errors were checked; all writes are now checked consistently.
	if _, err := fmt.Fprintln(w, "["); err != nil {
		return err
	}
	i := 0
	for t := it(); t != nil; t = it() {
		// Separate elements with a comma before every token but the first.
		if i > 0 {
			if _, err := fmt.Fprintln(w, ","); err != nil {
				return err
			}
		}
		i++
		bytes, err := json.Marshal(t)
		if err != nil {
			return err
		}
		if _, err := fmt.Fprint(w, " "+string(bytes)); err != nil {
			return err
		}
	}
	if _, err := fmt.Fprintln(w); err != nil {
		return err
	}
	if _, err := fmt.Fprintln(w, "]"); err != nil {
		return err
	}
	return nil
}))

View File

@ -1,18 +0,0 @@
package formatters
import (
"fmt"
"io"
"github.com/alecthomas/chroma"
)
// Tokens formatter outputs the raw token structures.
// Tokens formatter outputs the raw token structures, one Go-syntax token
// representation per line. The style argument is ignored.
var Tokens = Register("tokens", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
	for t := it(); t != nil; t = it() {
		if _, err := fmt.Fprintln(w, t.GoString()); err != nil {
			return err
		}
	}
	return nil
}))

View File

@ -1,250 +0,0 @@
package formatters
import (
"fmt"
"io"
"math"
"github.com/alecthomas/chroma"
)
// ttyTable maps chroma colours to the terminal escape sequences of a
// fixed-size palette, for both foreground and background.
type ttyTable struct {
	foreground map[chroma.Colour]string
	background map[chroma.Colour]string
}

// c is shorthand for building the colour keys in the palette tables below.
var c = chroma.MustParseColour
var ttyTables = map[int]*ttyTable{
8: {
foreground: map[chroma.Colour]string{
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
c("#555555"): "\033[90m", c("#ff0000"): "\033[91m", c("#00ff00"): "\033[92m", c("#ffff00"): "\033[93m",
c("#0000ff"): "\033[94m", c("#ff00ff"): "\033[95m", c("#00ffff"): "\033[96m", c("#ffffff"): "\033[97m",
},
background: map[chroma.Colour]string{
c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
c("#555555"): "\033[100m", c("#ff0000"): "\033[101m", c("#00ff00"): "\033[102m", c("#ffff00"): "\033[103m",
c("#0000ff"): "\033[104m", c("#ff00ff"): "\033[105m", c("#00ffff"): "\033[106m", c("#ffffff"): "\033[107m",
},
},
256: {
foreground: map[chroma.Colour]string{
c("#000000"): "\033[38;5;0m", c("#800000"): "\033[38;5;1m", c("#008000"): "\033[38;5;2m", c("#808000"): "\033[38;5;3m",
c("#000080"): "\033[38;5;4m", c("#800080"): "\033[38;5;5m", c("#008080"): "\033[38;5;6m", c("#c0c0c0"): "\033[38;5;7m",
c("#808080"): "\033[38;5;8m", c("#ff0000"): "\033[38;5;9m", c("#00ff00"): "\033[38;5;10m", c("#ffff00"): "\033[38;5;11m",
c("#0000ff"): "\033[38;5;12m", c("#ff00ff"): "\033[38;5;13m", c("#00ffff"): "\033[38;5;14m", c("#ffffff"): "\033[38;5;15m",
c("#000000"): "\033[38;5;16m", c("#00005f"): "\033[38;5;17m", c("#000087"): "\033[38;5;18m", c("#0000af"): "\033[38;5;19m",
c("#0000d7"): "\033[38;5;20m", c("#0000ff"): "\033[38;5;21m", c("#005f00"): "\033[38;5;22m", c("#005f5f"): "\033[38;5;23m",
c("#005f87"): "\033[38;5;24m", c("#005faf"): "\033[38;5;25m", c("#005fd7"): "\033[38;5;26m", c("#005fff"): "\033[38;5;27m",
c("#008700"): "\033[38;5;28m", c("#00875f"): "\033[38;5;29m", c("#008787"): "\033[38;5;30m", c("#0087af"): "\033[38;5;31m",
c("#0087d7"): "\033[38;5;32m", c("#0087ff"): "\033[38;5;33m", c("#00af00"): "\033[38;5;34m", c("#00af5f"): "\033[38;5;35m",
c("#00af87"): "\033[38;5;36m", c("#00afaf"): "\033[38;5;37m", c("#00afd7"): "\033[38;5;38m", c("#00afff"): "\033[38;5;39m",
c("#00d700"): "\033[38;5;40m", c("#00d75f"): "\033[38;5;41m", c("#00d787"): "\033[38;5;42m", c("#00d7af"): "\033[38;5;43m",
c("#00d7d7"): "\033[38;5;44m", c("#00d7ff"): "\033[38;5;45m", c("#00ff00"): "\033[38;5;46m", c("#00ff5f"): "\033[38;5;47m",
c("#00ff87"): "\033[38;5;48m", c("#00ffaf"): "\033[38;5;49m", c("#00ffd7"): "\033[38;5;50m", c("#00ffff"): "\033[38;5;51m",
c("#5f0000"): "\033[38;5;52m", c("#5f005f"): "\033[38;5;53m", c("#5f0087"): "\033[38;5;54m", c("#5f00af"): "\033[38;5;55m",
c("#5f00d7"): "\033[38;5;56m", c("#5f00ff"): "\033[38;5;57m", c("#5f5f00"): "\033[38;5;58m", c("#5f5f5f"): "\033[38;5;59m",
c("#5f5f87"): "\033[38;5;60m", c("#5f5faf"): "\033[38;5;61m", c("#5f5fd7"): "\033[38;5;62m", c("#5f5fff"): "\033[38;5;63m",
c("#5f8700"): "\033[38;5;64m", c("#5f875f"): "\033[38;5;65m", c("#5f8787"): "\033[38;5;66m", c("#5f87af"): "\033[38;5;67m",
c("#5f87d7"): "\033[38;5;68m", c("#5f87ff"): "\033[38;5;69m", c("#5faf00"): "\033[38;5;70m", c("#5faf5f"): "\033[38;5;71m",
c("#5faf87"): "\033[38;5;72m", c("#5fafaf"): "\033[38;5;73m", c("#5fafd7"): "\033[38;5;74m", c("#5fafff"): "\033[38;5;75m",
c("#5fd700"): "\033[38;5;76m", c("#5fd75f"): "\033[38;5;77m", c("#5fd787"): "\033[38;5;78m", c("#5fd7af"): "\033[38;5;79m",
c("#5fd7d7"): "\033[38;5;80m", c("#5fd7ff"): "\033[38;5;81m", c("#5fff00"): "\033[38;5;82m", c("#5fff5f"): "\033[38;5;83m",
c("#5fff87"): "\033[38;5;84m", c("#5fffaf"): "\033[38;5;85m", c("#5fffd7"): "\033[38;5;86m", c("#5fffff"): "\033[38;5;87m",
c("#870000"): "\033[38;5;88m", c("#87005f"): "\033[38;5;89m", c("#870087"): "\033[38;5;90m", c("#8700af"): "\033[38;5;91m",
c("#8700d7"): "\033[38;5;92m", c("#8700ff"): "\033[38;5;93m", c("#875f00"): "\033[38;5;94m", c("#875f5f"): "\033[38;5;95m",
c("#875f87"): "\033[38;5;96m", c("#875faf"): "\033[38;5;97m", c("#875fd7"): "\033[38;5;98m", c("#875fff"): "\033[38;5;99m",
c("#878700"): "\033[38;5;100m", c("#87875f"): "\033[38;5;101m", c("#878787"): "\033[38;5;102m", c("#8787af"): "\033[38;5;103m",
c("#8787d7"): "\033[38;5;104m", c("#8787ff"): "\033[38;5;105m", c("#87af00"): "\033[38;5;106m", c("#87af5f"): "\033[38;5;107m",
c("#87af87"): "\033[38;5;108m", c("#87afaf"): "\033[38;5;109m", c("#87afd7"): "\033[38;5;110m", c("#87afff"): "\033[38;5;111m",
c("#87d700"): "\033[38;5;112m", c("#87d75f"): "\033[38;5;113m", c("#87d787"): "\033[38;5;114m", c("#87d7af"): "\033[38;5;115m",
c("#87d7d7"): "\033[38;5;116m", c("#87d7ff"): "\033[38;5;117m", c("#87ff00"): "\033[38;5;118m", c("#87ff5f"): "\033[38;5;119m",
c("#87ff87"): "\033[38;5;120m", c("#87ffaf"): "\033[38;5;121m", c("#87ffd7"): "\033[38;5;122m", c("#87ffff"): "\033[38;5;123m",
c("#af0000"): "\033[38;5;124m", c("#af005f"): "\033[38;5;125m", c("#af0087"): "\033[38;5;126m", c("#af00af"): "\033[38;5;127m",
c("#af00d7"): "\033[38;5;128m", c("#af00ff"): "\033[38;5;129m", c("#af5f00"): "\033[38;5;130m", c("#af5f5f"): "\033[38;5;131m",
c("#af5f87"): "\033[38;5;132m", c("#af5faf"): "\033[38;5;133m", c("#af5fd7"): "\033[38;5;134m", c("#af5fff"): "\033[38;5;135m",
c("#af8700"): "\033[38;5;136m", c("#af875f"): "\033[38;5;137m", c("#af8787"): "\033[38;5;138m", c("#af87af"): "\033[38;5;139m",
c("#af87d7"): "\033[38;5;140m", c("#af87ff"): "\033[38;5;141m", c("#afaf00"): "\033[38;5;142m", c("#afaf5f"): "\033[38;5;143m",
c("#afaf87"): "\033[38;5;144m", c("#afafaf"): "\033[38;5;145m", c("#afafd7"): "\033[38;5;146m", c("#afafff"): "\033[38;5;147m",
c("#afd700"): "\033[38;5;148m", c("#afd75f"): "\033[38;5;149m", c("#afd787"): "\033[38;5;150m", c("#afd7af"): "\033[38;5;151m",
c("#afd7d7"): "\033[38;5;152m", c("#afd7ff"): "\033[38;5;153m", c("#afff00"): "\033[38;5;154m", c("#afff5f"): "\033[38;5;155m",
c("#afff87"): "\033[38;5;156m", c("#afffaf"): "\033[38;5;157m", c("#afffd7"): "\033[38;5;158m", c("#afffff"): "\033[38;5;159m",
c("#d70000"): "\033[38;5;160m", c("#d7005f"): "\033[38;5;161m", c("#d70087"): "\033[38;5;162m", c("#d700af"): "\033[38;5;163m",
c("#d700d7"): "\033[38;5;164m", c("#d700ff"): "\033[38;5;165m", c("#d75f00"): "\033[38;5;166m", c("#d75f5f"): "\033[38;5;167m",
c("#d75f87"): "\033[38;5;168m", c("#d75faf"): "\033[38;5;169m", c("#d75fd7"): "\033[38;5;170m", c("#d75fff"): "\033[38;5;171m",
c("#d78700"): "\033[38;5;172m", c("#d7875f"): "\033[38;5;173m", c("#d78787"): "\033[38;5;174m", c("#d787af"): "\033[38;5;175m",
c("#d787d7"): "\033[38;5;176m", c("#d787ff"): "\033[38;5;177m", c("#d7af00"): "\033[38;5;178m", c("#d7af5f"): "\033[38;5;179m",
c("#d7af87"): "\033[38;5;180m", c("#d7afaf"): "\033[38;5;181m", c("#d7afd7"): "\033[38;5;182m", c("#d7afff"): "\033[38;5;183m",
c("#d7d700"): "\033[38;5;184m", c("#d7d75f"): "\033[38;5;185m", c("#d7d787"): "\033[38;5;186m", c("#d7d7af"): "\033[38;5;187m",
c("#d7d7d7"): "\033[38;5;188m", c("#d7d7ff"): "\033[38;5;189m", c("#d7ff00"): "\033[38;5;190m", c("#d7ff5f"): "\033[38;5;191m",
c("#d7ff87"): "\033[38;5;192m", c("#d7ffaf"): "\033[38;5;193m", c("#d7ffd7"): "\033[38;5;194m", c("#d7ffff"): "\033[38;5;195m",
c("#ff0000"): "\033[38;5;196m", c("#ff005f"): "\033[38;5;197m", c("#ff0087"): "\033[38;5;198m", c("#ff00af"): "\033[38;5;199m",
c("#ff00d7"): "\033[38;5;200m", c("#ff00ff"): "\033[38;5;201m", c("#ff5f00"): "\033[38;5;202m", c("#ff5f5f"): "\033[38;5;203m",
c("#ff5f87"): "\033[38;5;204m", c("#ff5faf"): "\033[38;5;205m", c("#ff5fd7"): "\033[38;5;206m", c("#ff5fff"): "\033[38;5;207m",
c("#ff8700"): "\033[38;5;208m", c("#ff875f"): "\033[38;5;209m", c("#ff8787"): "\033[38;5;210m", c("#ff87af"): "\033[38;5;211m",
c("#ff87d7"): "\033[38;5;212m", c("#ff87ff"): "\033[38;5;213m", c("#ffaf00"): "\033[38;5;214m", c("#ffaf5f"): "\033[38;5;215m",
c("#ffaf87"): "\033[38;5;216m", c("#ffafaf"): "\033[38;5;217m", c("#ffafd7"): "\033[38;5;218m", c("#ffafff"): "\033[38;5;219m",
c("#ffd700"): "\033[38;5;220m", c("#ffd75f"): "\033[38;5;221m", c("#ffd787"): "\033[38;5;222m", c("#ffd7af"): "\033[38;5;223m",
c("#ffd7d7"): "\033[38;5;224m", c("#ffd7ff"): "\033[38;5;225m", c("#ffff00"): "\033[38;5;226m", c("#ffff5f"): "\033[38;5;227m",
c("#ffff87"): "\033[38;5;228m", c("#ffffaf"): "\033[38;5;229m", c("#ffffd7"): "\033[38;5;230m", c("#ffffff"): "\033[38;5;231m",
c("#080808"): "\033[38;5;232m", c("#121212"): "\033[38;5;233m", c("#1c1c1c"): "\033[38;5;234m", c("#262626"): "\033[38;5;235m",
c("#303030"): "\033[38;5;236m", c("#3a3a3a"): "\033[38;5;237m", c("#444444"): "\033[38;5;238m", c("#4e4e4e"): "\033[38;5;239m",
c("#585858"): "\033[38;5;240m", c("#626262"): "\033[38;5;241m", c("#6c6c6c"): "\033[38;5;242m", c("#767676"): "\033[38;5;243m",
c("#808080"): "\033[38;5;244m", c("#8a8a8a"): "\033[38;5;245m", c("#949494"): "\033[38;5;246m", c("#9e9e9e"): "\033[38;5;247m",
c("#a8a8a8"): "\033[38;5;248m", c("#b2b2b2"): "\033[38;5;249m", c("#bcbcbc"): "\033[38;5;250m", c("#c6c6c6"): "\033[38;5;251m",
c("#d0d0d0"): "\033[38;5;252m", c("#dadada"): "\033[38;5;253m", c("#e4e4e4"): "\033[38;5;254m", c("#eeeeee"): "\033[38;5;255m",
},
background: map[chroma.Colour]string{
c("#000000"): "\033[48;5;0m", c("#800000"): "\033[48;5;1m", c("#008000"): "\033[48;5;2m", c("#808000"): "\033[48;5;3m",
c("#000080"): "\033[48;5;4m", c("#800080"): "\033[48;5;5m", c("#008080"): "\033[48;5;6m", c("#c0c0c0"): "\033[48;5;7m",
c("#808080"): "\033[48;5;8m", c("#ff0000"): "\033[48;5;9m", c("#00ff00"): "\033[48;5;10m", c("#ffff00"): "\033[48;5;11m",
c("#0000ff"): "\033[48;5;12m", c("#ff00ff"): "\033[48;5;13m", c("#00ffff"): "\033[48;5;14m", c("#ffffff"): "\033[48;5;15m",
c("#000000"): "\033[48;5;16m", c("#00005f"): "\033[48;5;17m", c("#000087"): "\033[48;5;18m", c("#0000af"): "\033[48;5;19m",
c("#0000d7"): "\033[48;5;20m", c("#0000ff"): "\033[48;5;21m", c("#005f00"): "\033[48;5;22m", c("#005f5f"): "\033[48;5;23m",
c("#005f87"): "\033[48;5;24m", c("#005faf"): "\033[48;5;25m", c("#005fd7"): "\033[48;5;26m", c("#005fff"): "\033[48;5;27m",
c("#008700"): "\033[48;5;28m", c("#00875f"): "\033[48;5;29m", c("#008787"): "\033[48;5;30m", c("#0087af"): "\033[48;5;31m",
c("#0087d7"): "\033[48;5;32m", c("#0087ff"): "\033[48;5;33m", c("#00af00"): "\033[48;5;34m", c("#00af5f"): "\033[48;5;35m",
c("#00af87"): "\033[48;5;36m", c("#00afaf"): "\033[48;5;37m", c("#00afd7"): "\033[48;5;38m", c("#00afff"): "\033[48;5;39m",
c("#00d700"): "\033[48;5;40m", c("#00d75f"): "\033[48;5;41m", c("#00d787"): "\033[48;5;42m", c("#00d7af"): "\033[48;5;43m",
c("#00d7d7"): "\033[48;5;44m", c("#00d7ff"): "\033[48;5;45m", c("#00ff00"): "\033[48;5;46m", c("#00ff5f"): "\033[48;5;47m",
c("#00ff87"): "\033[48;5;48m", c("#00ffaf"): "\033[48;5;49m", c("#00ffd7"): "\033[48;5;50m", c("#00ffff"): "\033[48;5;51m",
c("#5f0000"): "\033[48;5;52m", c("#5f005f"): "\033[48;5;53m", c("#5f0087"): "\033[48;5;54m", c("#5f00af"): "\033[48;5;55m",
c("#5f00d7"): "\033[48;5;56m", c("#5f00ff"): "\033[48;5;57m", c("#5f5f00"): "\033[48;5;58m", c("#5f5f5f"): "\033[48;5;59m",
c("#5f5f87"): "\033[48;5;60m", c("#5f5faf"): "\033[48;5;61m", c("#5f5fd7"): "\033[48;5;62m", c("#5f5fff"): "\033[48;5;63m",
c("#5f8700"): "\033[48;5;64m", c("#5f875f"): "\033[48;5;65m", c("#5f8787"): "\033[48;5;66m", c("#5f87af"): "\033[48;5;67m",
c("#5f87d7"): "\033[48;5;68m", c("#5f87ff"): "\033[48;5;69m", c("#5faf00"): "\033[48;5;70m", c("#5faf5f"): "\033[48;5;71m",
c("#5faf87"): "\033[48;5;72m", c("#5fafaf"): "\033[48;5;73m", c("#5fafd7"): "\033[48;5;74m", c("#5fafff"): "\033[48;5;75m",
c("#5fd700"): "\033[48;5;76m", c("#5fd75f"): "\033[48;5;77m", c("#5fd787"): "\033[48;5;78m", c("#5fd7af"): "\033[48;5;79m",
c("#5fd7d7"): "\033[48;5;80m", c("#5fd7ff"): "\033[48;5;81m", c("#5fff00"): "\033[48;5;82m", c("#5fff5f"): "\033[48;5;83m",
c("#5fff87"): "\033[48;5;84m", c("#5fffaf"): "\033[48;5;85m", c("#5fffd7"): "\033[48;5;86m", c("#5fffff"): "\033[48;5;87m",
c("#870000"): "\033[48;5;88m", c("#87005f"): "\033[48;5;89m", c("#870087"): "\033[48;5;90m", c("#8700af"): "\033[48;5;91m",
c("#8700d7"): "\033[48;5;92m", c("#8700ff"): "\033[48;5;93m", c("#875f00"): "\033[48;5;94m", c("#875f5f"): "\033[48;5;95m",
c("#875f87"): "\033[48;5;96m", c("#875faf"): "\033[48;5;97m", c("#875fd7"): "\033[48;5;98m", c("#875fff"): "\033[48;5;99m",
c("#878700"): "\033[48;5;100m", c("#87875f"): "\033[48;5;101m", c("#878787"): "\033[48;5;102m", c("#8787af"): "\033[48;5;103m",
c("#8787d7"): "\033[48;5;104m", c("#8787ff"): "\033[48;5;105m", c("#87af00"): "\033[48;5;106m", c("#87af5f"): "\033[48;5;107m",
c("#87af87"): "\033[48;5;108m", c("#87afaf"): "\033[48;5;109m", c("#87afd7"): "\033[48;5;110m", c("#87afff"): "\033[48;5;111m",
c("#87d700"): "\033[48;5;112m", c("#87d75f"): "\033[48;5;113m", c("#87d787"): "\033[48;5;114m", c("#87d7af"): "\033[48;5;115m",
c("#87d7d7"): "\033[48;5;116m", c("#87d7ff"): "\033[48;5;117m", c("#87ff00"): "\033[48;5;118m", c("#87ff5f"): "\033[48;5;119m",
c("#87ff87"): "\033[48;5;120m", c("#87ffaf"): "\033[48;5;121m", c("#87ffd7"): "\033[48;5;122m", c("#87ffff"): "\033[48;5;123m",
c("#af0000"): "\033[48;5;124m", c("#af005f"): "\033[48;5;125m", c("#af0087"): "\033[48;5;126m", c("#af00af"): "\033[48;5;127m",
c("#af00d7"): "\033[48;5;128m", c("#af00ff"): "\033[48;5;129m", c("#af5f00"): "\033[48;5;130m", c("#af5f5f"): "\033[48;5;131m",
c("#af5f87"): "\033[48;5;132m", c("#af5faf"): "\033[48;5;133m", c("#af5fd7"): "\033[48;5;134m", c("#af5fff"): "\033[48;5;135m",
c("#af8700"): "\033[48;5;136m", c("#af875f"): "\033[48;5;137m", c("#af8787"): "\033[48;5;138m", c("#af87af"): "\033[48;5;139m",
c("#af87d7"): "\033[48;5;140m", c("#af87ff"): "\033[48;5;141m", c("#afaf00"): "\033[48;5;142m", c("#afaf5f"): "\033[48;5;143m",
c("#afaf87"): "\033[48;5;144m", c("#afafaf"): "\033[48;5;145m", c("#afafd7"): "\033[48;5;146m", c("#afafff"): "\033[48;5;147m",
c("#afd700"): "\033[48;5;148m", c("#afd75f"): "\033[48;5;149m", c("#afd787"): "\033[48;5;150m", c("#afd7af"): "\033[48;5;151m",
c("#afd7d7"): "\033[48;5;152m", c("#afd7ff"): "\033[48;5;153m", c("#afff00"): "\033[48;5;154m", c("#afff5f"): "\033[48;5;155m",
c("#afff87"): "\033[48;5;156m", c("#afffaf"): "\033[48;5;157m", c("#afffd7"): "\033[48;5;158m", c("#afffff"): "\033[48;5;159m",
c("#d70000"): "\033[48;5;160m", c("#d7005f"): "\033[48;5;161m", c("#d70087"): "\033[48;5;162m", c("#d700af"): "\033[48;5;163m",
c("#d700d7"): "\033[48;5;164m", c("#d700ff"): "\033[48;5;165m", c("#d75f00"): "\033[48;5;166m", c("#d75f5f"): "\033[48;5;167m",
c("#d75f87"): "\033[48;5;168m", c("#d75faf"): "\033[48;5;169m", c("#d75fd7"): "\033[48;5;170m", c("#d75fff"): "\033[48;5;171m",
c("#d78700"): "\033[48;5;172m", c("#d7875f"): "\033[48;5;173m", c("#d78787"): "\033[48;5;174m", c("#d787af"): "\033[48;5;175m",
c("#d787d7"): "\033[48;5;176m", c("#d787ff"): "\033[48;5;177m", c("#d7af00"): "\033[48;5;178m", c("#d7af5f"): "\033[48;5;179m",
c("#d7af87"): "\033[48;5;180m", c("#d7afaf"): "\033[48;5;181m", c("#d7afd7"): "\033[48;5;182m", c("#d7afff"): "\033[48;5;183m",
c("#d7d700"): "\033[48;5;184m", c("#d7d75f"): "\033[48;5;185m", c("#d7d787"): "\033[48;5;186m", c("#d7d7af"): "\033[48;5;187m",
c("#d7d7d7"): "\033[48;5;188m", c("#d7d7ff"): "\033[48;5;189m", c("#d7ff00"): "\033[48;5;190m", c("#d7ff5f"): "\033[48;5;191m",
c("#d7ff87"): "\033[48;5;192m", c("#d7ffaf"): "\033[48;5;193m", c("#d7ffd7"): "\033[48;5;194m", c("#d7ffff"): "\033[48;5;195m",
c("#ff0000"): "\033[48;5;196m", c("#ff005f"): "\033[48;5;197m", c("#ff0087"): "\033[48;5;198m", c("#ff00af"): "\033[48;5;199m",
c("#ff00d7"): "\033[48;5;200m", c("#ff00ff"): "\033[48;5;201m", c("#ff5f00"): "\033[48;5;202m", c("#ff5f5f"): "\033[48;5;203m",
c("#ff5f87"): "\033[48;5;204m", c("#ff5faf"): "\033[48;5;205m", c("#ff5fd7"): "\033[48;5;206m", c("#ff5fff"): "\033[48;5;207m",
c("#ff8700"): "\033[48;5;208m", c("#ff875f"): "\033[48;5;209m", c("#ff8787"): "\033[48;5;210m", c("#ff87af"): "\033[48;5;211m",
c("#ff87d7"): "\033[48;5;212m", c("#ff87ff"): "\033[48;5;213m", c("#ffaf00"): "\033[48;5;214m", c("#ffaf5f"): "\033[48;5;215m",
c("#ffaf87"): "\033[48;5;216m", c("#ffafaf"): "\033[48;5;217m", c("#ffafd7"): "\033[48;5;218m", c("#ffafff"): "\033[48;5;219m",
c("#ffd700"): "\033[48;5;220m", c("#ffd75f"): "\033[48;5;221m", c("#ffd787"): "\033[48;5;222m", c("#ffd7af"): "\033[48;5;223m",
c("#ffd7d7"): "\033[48;5;224m", c("#ffd7ff"): "\033[48;5;225m", c("#ffff00"): "\033[48;5;226m", c("#ffff5f"): "\033[48;5;227m",
c("#ffff87"): "\033[48;5;228m", c("#ffffaf"): "\033[48;5;229m", c("#ffffd7"): "\033[48;5;230m", c("#ffffff"): "\033[48;5;231m",
c("#080808"): "\033[48;5;232m", c("#121212"): "\033[48;5;233m", c("#1c1c1c"): "\033[48;5;234m", c("#262626"): "\033[48;5;235m",
c("#303030"): "\033[48;5;236m", c("#3a3a3a"): "\033[48;5;237m", c("#444444"): "\033[48;5;238m", c("#4e4e4e"): "\033[48;5;239m",
c("#585858"): "\033[48;5;240m", c("#626262"): "\033[48;5;241m", c("#6c6c6c"): "\033[48;5;242m", c("#767676"): "\033[48;5;243m",
c("#808080"): "\033[48;5;244m", c("#8a8a8a"): "\033[48;5;245m", c("#949494"): "\033[48;5;246m", c("#9e9e9e"): "\033[48;5;247m",
c("#a8a8a8"): "\033[48;5;248m", c("#b2b2b2"): "\033[48;5;249m", c("#bcbcbc"): "\033[48;5;250m", c("#c6c6c6"): "\033[48;5;251m",
c("#d0d0d0"): "\033[48;5;252m", c("#dadada"): "\033[48;5;253m", c("#e4e4e4"): "\033[48;5;254m", c("#eeeeee"): "\033[48;5;255m",
},
},
}
// entryToEscapeSequence renders a style entry as an ANSI escape-sequence
// prefix: bold/underline attributes followed by the nearest indexed
// foreground and background colours from the table.
func entryToEscapeSequence(table *ttyTable, entry chroma.StyleEntry) string {
	seq := ""
	if entry.Bold == chroma.Yes {
		seq += "\033[1m"
	}
	if entry.Underline == chroma.Yes {
		seq += "\033[4m"
	}
	// Colours are approximated by the closest entry in the table.
	if entry.Colour.IsSet() {
		seq += table.foreground[findClosest(table, entry.Colour)]
	}
	if entry.Background.IsSet() {
		seq += table.background[findClosest(table, entry.Background)]
	}
	return seq
}
// findClosest returns the table colour with the smallest distance to
// seeking, or the zero Colour if the table is empty.
//
// Ties are broken by preferring the numerically smallest colour: with a
// strict "<" alone, equidistant candidates would win or lose depending
// on Go's randomized map iteration order, making the result
// nondeterministic between runs.
func findClosest(table *ttyTable, seeking chroma.Colour) chroma.Colour {
	closestColour := chroma.Colour(0)
	closest := math.MaxFloat64 // already a float64 constant; no conversion needed
	for colour := range table.foreground {
		distance := colour.Distance(seeking)
		if distance < closest || (distance == closest && colour < closestColour) {
			closest = distance
			closestColour = colour
		}
	}
	return closestColour
}
// styleToEscapeSequence precomputes the escape-sequence prefix for every
// token type present in the style, keyed by token type.
func styleToEscapeSequence(table *ttyTable, style *chroma.Style) map[chroma.TokenType]string {
	sequences := make(map[chroma.TokenType]string)
	for _, tokenType := range style.Types() {
		sequences[tokenType] = entryToEscapeSequence(table, style.Get(tokenType))
	}
	return sequences
}
// indexedTTYFormatter formats tokens for terminals with a fixed indexed
// palette (8 or 256 colours) using the given colour lookup table.
type indexedTTYFormatter struct {
	table *ttyTable
}
// Format writes the token stream to w using indexed-colour ANSI escape
// sequences derived from style, resetting attributes after each styled
// token.
//
// Iterators may panic; the deferred recover converts such panics into a
// returned error.
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
	defer func() {
		if perr := recover(); perr != nil {
			// A blind perr.(error) assertion would itself panic if the
			// recovered value is not an error; wrap non-error values.
			if e, ok := perr.(error); ok {
				err = e
			} else {
				err = fmt.Errorf("%v", perr)
			}
		}
	}()
	theme := styleToEscapeSequence(c.table, style)
	for token := it(); token != nil; token = it() {
		// TODO: Cache token lookups?
		// Fall back from exact type to sub-category to category.
		clr, ok := theme[token.Type]
		if !ok {
			clr, ok = theme[token.Type.SubCategory()]
			if !ok {
				clr = theme[token.Type.Category()]
			}
		}
		if clr != "" {
			fmt.Fprint(w, clr)
		}
		fmt.Fprint(w, token.Value)
		if clr != "" {
			fmt.Fprint(w, "\033[0m")
		}
	}
	return nil
}
// TTY8 is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
// TTY256 is a 256-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY256 = Register("terminal256", &indexedTTYFormatter{ttyTables[256]})

View File

@ -1,38 +0,0 @@
package formatters
import (
"fmt"
"io"
"github.com/alecthomas/chroma"
)
// TTY16m is a true-colour terminal formatter.
var TTY16m = Register("terminal16m", chroma.FormatterFunc(trueColourFormatter))
// trueColourFormatter writes tokens using 24-bit ("true colour") ANSI
// escape sequences, emitting a reset after each styled token.
func trueColourFormatter(w io.Writer, style *chroma.Style, it chroma.Iterator) error {
	for token := it(); token != nil; token = it() {
		entry := style.Get(token.Type)
		styled := !entry.IsZero()
		if styled {
			prefix := ""
			if entry.Bold == chroma.Yes {
				prefix += "\033[1m"
			}
			if entry.Underline == chroma.Yes {
				prefix += "\033[4m"
			}
			if entry.Colour.IsSet() {
				prefix += fmt.Sprintf("\033[38;2;%d;%d;%dm", entry.Colour.Red(), entry.Colour.Green(), entry.Colour.Blue())
			}
			if entry.Background.IsSet() {
				prefix += fmt.Sprintf("\033[48;2;%d;%d;%dm", entry.Background.Red(), entry.Background.Green(), entry.Background.Blue())
			}
			fmt.Fprint(w, prefix)
		}
		fmt.Fprint(w, token.Value)
		if styled {
			fmt.Fprint(w, "\033[0m")
		}
	}
	return nil
}

View File

@ -1,52 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestTokenTypeClassifiers exercises the category/sub-category
// predicates and the String() representation of token types.
func TestTokenTypeClassifiers(t *testing.T) {
	assert.True(t, GenericDeleted.InCategory(Generic))
	assert.True(t, LiteralStringBacktick.InSubCategory(String))
	assert.Equal(t, "LiteralStringBacktick", LiteralStringBacktick.String())
}
func TestSimpleLexer(t *testing.T) {
lexer, err := NewLexer(
&Config{
Name: "INI",
Aliases: []string{"ini", "cfg"},
Filenames: []string{"*.ini", "*.cfg"},
},
map[string][]Rule{
"root": {
{`\s+`, Whitespace, nil},
{`;.*?$`, Comment, nil},
{`\[.*?\]$`, Keyword, nil},
{`(.*?)(\s*)(=)(\s*)(.*?)$`, ByGroups(Name, Whitespace, Operator, Whitespace, String), nil},
},
},
)
assert.NoError(t, err)
actual, err := Tokenise(lexer, nil, `
; this is a comment
[section]
a = 10
`)
assert.NoError(t, err)
expected := []*Token{
{Whitespace, "\n\t"},
{Comment, "; this is a comment"},
{Whitespace, "\n\t"},
{Keyword, "[section]"},
{Whitespace, "\n\t"},
{Name, "a"},
{Whitespace, " "},
{Operator, "="},
{Whitespace, " "},
{LiteralString, "10"},
{Whitespace, "\n"},
}
assert.Equal(t, expected, actual)
}

View File

@ -1,33 +0,0 @@
package lexers_test
import (
"io/ioutil"
"testing"
"github.com/alecthomas/assert"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
func TestCompileAllRegexes(t *testing.T) {
for _, lexer := range lexers.Registry.Lexers {
it, err := lexer.Tokenise(nil, "")
assert.NoError(t, err, "%s failed", lexer.Config().Name)
err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
assert.NoError(t, err, "%s failed", lexer.Config().Name)
}
}
func TestGet(t *testing.T) {
t.Run("ByName", func(t *testing.T) {
assert.Equal(t, lexers.Get("xml"), lexers.XML)
})
t.Run("ByAlias", func(t *testing.T) {
assert.Equal(t, lexers.Get("as"), lexers.Actionscript)
})
t.Run("ViaFilename", func(t *testing.T) {
assert.Equal(t, lexers.Get("svg"), lexers.XML)
})
}

View File

@ -1,20 +0,0 @@
package lexers
import (
"testing"
"github.com/alecthomas/assert"
"github.com/alecthomas/chroma"
)
// TestDiffLexerWithoutTrailingNewLine verifies that the diff lexer
// appends a newline to the final token when the input lacks one.
// (Renamed from the original misspelling "...WithoutTralingNewLine".)
func TestDiffLexerWithoutTrailingNewLine(t *testing.T) {
	diffLexer := Get("diff")
	it, err := diffLexer.Tokenise(nil, "-foo\n+bar")
	assert.NoError(t, err)
	actual := it.Tokens()
	// Element type is *chroma.Token, so the composite-literal shorthand
	// {...} replaces the redundant &chroma.Token{...}.
	expected := []*chroma.Token{
		{chroma.GenericDeleted, "-foo\n"},
		{chroma.GenericInserted, "+bar\n"},
	}
	assert.Equal(t, expected, actual)
}

View File

@ -1,37 +0,0 @@
package lexers
import (
"testing"
"github.com/alecthomas/assert"
)
const lexerBenchSource = `package chroma
import (
"io"
)
// A Formatter for Chroma lexers.
type Formatter interface {
// Format returns a formatting function for tokens.
Format(w io.Writer, style *Style) (func(*Token), error)
}
// A FormatterFunc is a Formatter implemented as a function.
type FormatterFunc func(io.Writer, *Style) (func(*Token), error)
func (f FormatterFunc) Format(w io.Writer, s *Style) (func(*Token), error) {
return f(w, s)
}
`
// Benchmark measures tokenisation throughput of the Go lexer over a
// small representative source snippet, draining the iterator each round.
func Benchmark(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		it, err := Go.Tokenise(nil, lexerBenchSource)
		assert.NoError(b, err)
		// Consume every token so the full lexing cost is measured.
		for t := it(); t != nil; t = it() {
		}
	}
}

View File

@ -1,63 +0,0 @@
package lexers
import (
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alecthomas/chroma"
)
// Test source files are in the form <key>.<key> and validation data is in the form <key>.<key>.expected.
func TestLexers(t *testing.T) {
files, err := ioutil.ReadDir("testdata")
require.NoError(t, err)
for _, file := range files {
ext := filepath.Ext(file.Name())[1:]
if ext != "actual" {
continue
}
lexer := Get(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())))
if !assert.NotNil(t, lexer) {
continue
}
filename := filepath.Join("testdata", file.Name())
expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
lexer = chroma.Coalesce(lexer)
t.Run(lexer.Config().Name, func(t *testing.T) {
// Read and tokenise source text.
actualText, err := ioutil.ReadFile(filename)
if !assert.NoError(t, err) {
return
}
actual, err := chroma.Tokenise(lexer, nil, string(actualText))
if !assert.NoError(t, err) {
return
}
// Read expected JSON into token slice.
expected := []*chroma.Token{}
r, err := os.Open(expectedFilename)
if !assert.NoError(t, err) {
return
}
err = json.NewDecoder(r).Decode(&expected)
if !assert.NoError(t, err) {
return
}
// Equal?
assert.Equal(t, expected, actual)
})
}
}

View File

@ -1,33 +0,0 @@
# Lexer tests
This directory contains input source and expected output lexer tokens.
Input filenames for lexers are in the form `<name>.actual`. Expected output filenames are in the form `<name>.expected`.
Each input filename is parsed by the corresponding lexer and checked against the expected JSON-encoded token list.
To add/update tests do the following:
1. `export LEXER=csharp`
2. Create/edit a file `lexers/testdata/${LEXER}.actual` (eg. `csharp.actual`).
3. Run `go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected`.
4. Run `go test -v -run TestLexers ./lexers`.
eg.
```bash
$ export LEXER=csharp
$ go run ./cmd/chroma/main.go --lexer ${LEXER} --json lexers/testdata/${LEXER}.actual > lexers/testdata/${LEXER}.expected
$ go test -v -run TestLexers ./lexers
=== RUN TestLexers
=== RUN TestLexers/C#
=== RUN TestLexers/CSS
--- PASS: TestLexers (0.01s)
--- PASS: TestLexers/C# (0.00s)
--- PASS: TestLexers/CSS (0.00s)
PASS
ok github.com/alecthomas/chroma/lexers 0.032s
```

View File

@ -1,11 +0,0 @@
DriveInfo[] drives = DriveInfo.GetDrives();
foreach (DriveInfo drive in drives)
{
IEnumerable<string> driveFolders =
Directory.EnumerateDirectories(drive.RootDirectory.ToString());
foreach (string dir in driveFolders)
{
Console.WriteLine(dir);
}
}

View File

@ -1,73 +0,0 @@
[
{"type":"Name","value":"DriveInfo"},
{"type":"NameAttribute","value":"[]"},
{"type":"Text","value":" "},
{"type":"Name","value":"drives"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"="},
{"type":"Text","value":" "},
{"type":"NameClass","value":"DriveInfo"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"GetDrives"},
{"type":"Punctuation","value":"();"},
{"type":"Text","value":"\n"},
{"type":"Keyword","value":"foreach"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"("},
{"type":"Name","value":"DriveInfo"},
{"type":"Text","value":" "},
{"type":"Name","value":"drive"},
{"type":"Text","value":" "},
{"type":"Keyword","value":"in"},
{"type":"Text","value":" "},
{"type":"Name","value":"drives"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"Name","value":"IEnumerable"},
{"type":"Punctuation","value":"\u003c"},
{"type":"KeywordType","value":"string"},
{"type":"Punctuation","value":"\u003e"},
{"type":"Text","value":" "},
{"type":"Name","value":"driveFolders"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"="},
{"type":"Text","value":"\n "},
{"type":"NameClass","value":"Directory"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"EnumerateDirectories"},
{"type":"Punctuation","value":"("},
{"type":"NameClass","value":"drive"},
{"type":"Punctuation","value":"."},
{"type":"NameClass","value":"RootDirectory"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"ToString"},
{"type":"Punctuation","value":"());"},
{"type":"Text","value":"\n\n "},
{"type":"Keyword","value":"foreach"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"("},
{"type":"KeywordType","value":"string"},
{"type":"Text","value":" "},
{"type":"Name","value":"dir"},
{"type":"Text","value":" "},
{"type":"Keyword","value":"in"},
{"type":"Text","value":" "},
{"type":"Name","value":"driveFolders"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"NameClass","value":"Console"},
{"type":"Punctuation","value":"."},
{"type":"Name","value":"WriteLine"},
{"type":"Punctuation","value":"("},
{"type":"Name","value":"dir"},
{"type":"Punctuation","value":");"},
{"type":"Text","value":"\n "},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
]

View File

@ -1,3 +0,0 @@
:root {
--variable-name: #fff;
}

View File

@ -1,16 +0,0 @@
[
{"type":"Punctuation","value":":"},
{"type":"NameDecorator","value":"root"},
{"type":"Text","value":" "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"NameVariable","value":"--variable-name"},
{"type":"Text","value":""},
{"type":"Punctuation","value":":"},
{"type":"Text","value":" "},
{"type":"LiteralNumberHex","value":"#fff"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
]

View File

@ -1,57 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
func TestInclude(t *testing.T) {
include := Include("other")
actual := CompiledRules{
"root": {{Rule: include}},
"other": {
{Rule: Rule{Pattern: "//.+", Type: Comment}},
{Rule: Rule{Pattern: `"[^"]*"`, Type: String}},
},
}
lexer := &RegexLexer{rules: actual}
err := include.Mutator.(LexerMutator).MutateLexer(lexer.rules, "root", 0)
assert.NoError(t, err)
expected := CompiledRules{
"root": {
{Rule: Rule{
Pattern: "//.+",
Type: Comment,
}},
{Rule: Rule{
Pattern: `"[^"]*"`,
Type: String,
}},
},
"other": {
{Rule: Rule{
Pattern: "//.+",
Type: Comment,
}},
{Rule: Rule{
Pattern: `"[^"]*"`,
Type: String,
}},
},
}
assert.Equal(t, expected, actual)
}
// TestCombine verifies that Combined() pushes several states as a single
// merged state: after matching "hello" the lexer matches rules from the
// "space" and "world" states without explicit per-state transitions.
func TestCombine(t *testing.T) {
	l := MustNewLexer(nil, Rules{
		"root": {{`hello`, String, Combined("world", "bye", "space")}},
		"world": {{`world`, Name, nil}},
		"bye": {{`bye`, Name, nil}},
		"space": {{`\s+`, Whitespace, nil}},
	})
	it, err := l.Tokenise(nil, "hello world")
	assert.NoError(t, err)
	expected := []*Token{{String, `hello`}, {Whitespace, ` `}, {Name, `world`}}
	assert.Equal(t, expected, it.Tokens())
}

View File

@ -1,19 +0,0 @@
package quick_test
import (
"log"
"os"
"github.com/alecthomas/chroma/quick"
)
func Example() {
code := `package main
func main() { }
`
err := quick.Highlight(os.Stdout, code, "go", "html", "monokai")
if err != nil {
log.Fatal(err)
}
}

View File

@ -1,44 +0,0 @@
// Package quick provides simple, no-configuration source code highlighting.
package quick
import (
"io"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/formatters"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
)
// Highlight some text.
//
// Lexer, formatter and style may be empty, in which case a best-effort is made.
func Highlight(w io.Writer, source, lexer, formatter, style string) error {
// Determine lexer.
l := lexers.Get(lexer)
if l == nil {
l = lexers.Analyse(source)
}
if l == nil {
l = lexers.Fallback
}
l = chroma.Coalesce(l)
// Determine formatter.
f := formatters.Get(formatter)
if f == nil {
f = formatters.Fallback
}
// Determine style.
s := styles.Get(style)
if s == nil {
s = styles.Fallback
}
it, err := l.Tokenise(nil, source)
if err != nil {
return err
}
return f.Format(w, s, it)
}

View File

@ -1,27 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestNewlineAtEndOfFile checks the EnsureNL config option: with it set,
// the lexer synthesises a trailing newline so the final rule (which
// requires "\n") can match; without it, the unterminated input falls
// through to a single Error token.
func TestNewlineAtEndOfFile(t *testing.T) {
	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err := l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []*Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())
	// Same rules without EnsureNL: "hello" has no newline, so nothing
	// matches and the input is emitted as an Error token.
	l = Coalesce(MustNewLexer(nil, Rules{
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err = l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []*Token{{Error, "hello"}}, it.Tokens())
}

View File

@ -1,29 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestRemappingLexer checks that TypeRemappingLexer rewrites the token type
// of selected token values — here Name tokens whose text is "if" or "else"
// become Keyword — while leaving every other token unchanged.
func TestRemappingLexer(t *testing.T) {
	var lexer Lexer = MustNewLexer(nil, Rules{
		"root": {
			{`\s+`, Whitespace, nil},
			{`\w+`, Name, nil},
		},
	})
	// Remap (Name, "if") and (Name, "else") to Keyword.
	lexer = TypeRemappingLexer(lexer, TypeMapping{
		{Name, Keyword, []string{"if", "else"}},
	})
	it, err := lexer.Tokenise(nil, `if true then print else end`)
	assert.NoError(t, err)
	expected := []*Token{
		{Keyword, "if"}, {TextWhitespace, " "}, {Name, "true"}, {TextWhitespace, " "}, {Name, "then"},
		{TextWhitespace, " "}, {Name, "print"}, {TextWhitespace, " "}, {Keyword, "else"},
		{TextWhitespace, " "}, {Name, "end"},
	}
	actual := it.Tokens()
	assert.Equal(t, expected, actual)
}

View File

@ -1,37 +0,0 @@
package chroma
import (
"testing"
"github.com/alecthomas/assert"
)
// TestStyleInherit checks that a sub-token entry (NameVariable) inherits
// attributes (bold) from its parent token (Name) while keeping its own colour.
func TestStyleInherit(t *testing.T) {
	style, err := NewStyle("test", StyleEntries{
		Name:         "bold #f00",
		NameVariable: "#fff",
	})
	assert.NoError(t, err)
	entry := style.Get(NameVariable)
	assert.Equal(t, StyleEntry{Colour: 0x1000000, Bold: Yes}, entry)
}
// TestStyleColours checks parsing of foreground, background ("bg:") and
// border ("border:") colour specifiers, including an ANSI colour name.
func TestStyleColours(t *testing.T) {
	style, err := NewStyle("test", StyleEntries{
		Name: "#f00 bg:#001 border:#ansiblue",
	})
	assert.NoError(t, err)
	entry := style.Get(Name)
	assert.Equal(t, StyleEntry{Colour: 0xff0001, Background: 0x000012, Border: 0x000100}, entry)
}
// TestStyleClone checks that a style derived via Builder().Add(...).Build()
// inherits the parent's background without mutating the parent style.
func TestStyleClone(t *testing.T) {
	base, err := NewStyle("test", StyleEntries{
		Background: "bg:#ffffff",
	})
	assert.NoError(t, err)
	derived, err := base.Builder().Add(Comment, "#0f0").Build()
	assert.NoError(t, err)
	assert.Equal(t, "bg:#ffffff", derived.Get(Background).String())
	assert.Equal(t, "#00ff00 bg:#ffffff", derived.Get(Comment).String())
	// The parent must be unaffected by the derived style's additions.
	assert.Equal(t, "bg:#ffffff", base.Get(Comment).String())
}

View File

@ -1,351 +0,0 @@
package fnmatch_test
import (
"testing"
"github.com/danwakefield/fnmatch"
)
// TestMatch is ported from a set of tests for C fnmatch found at
// http://www.mail-archive.com/bug-gnulib@gnu.org/msg14048.html
func TestMatch(t *testing.T) {
	mustMatch := func(p, s string) {
		if !fnmatch.Match(p, s, 0) {
			t.Errorf("Assertion failed: Match(%#v, %#v, 0)", p, s)
		}
	}
	for _, pair := range [][2]string{
		{"", ""},
		{"*", ""},
		{"*", "foo"},
		{"*", "bar"},
		{"*", "*"},
		{"**", "f"},
		{"**", "foo.txt"},
		{"*.*", "foo.txt"},
		{"foo*.txt", "foobar.txt"},
		{"foo.txt", "foo.txt"},
		{"foo\\.txt", "foo.txt"},
	} {
		mustMatch(pair[0], pair[1])
	}
	// With FNM_NOESCAPE the backslash is a literal, so the match must fail.
	if fnmatch.Match("foo\\.txt", "foo.txt", fnmatch.FNM_NOESCAPE) {
		t.Errorf("Assertion failed: Match(%#v, %#v, FNM_NOESCAPE) == false", "foo\\.txt", "foo.txt")
	}
}
// TestWildcard verifies that a lone "*" pattern matches any input, including
// the empty string and multi-byte (UTF-8) text.
func TestWildcard(t *testing.T) {
	// A wildcard pattern "*" should match anything
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		{"*", "", 0, true},
		{"*", "foo", 0, true},
		{"*", "*", 0, true},
		{"*", " ", 0, true},
		{"*", ".foo", 0, true},
		{"*", "わたし", 0, true},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestWildcardSlash verifies that "*" matches '/' by default but not when
// FNM_PATHNAME is set.
//
// Fix: the original ran a second `for _, c := range cases` loop performing
// exactly the same checks as the first (only with a slightly different error
// message) — redundant duplicate verification, removed.
func TestWildcardSlash(t *testing.T) {
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		// Should match / when flags are 0
		{"*", "foo/bar", 0, true},
		{"*", "/", 0, true},
		{"*", "/foo", 0, true},
		{"*", "foo/", 0, true},
		// Shouldnt match / when flags include FNM_PATHNAME
		{"*", "foo/bar", fnmatch.FNM_PATHNAME, false},
		{"*", "/", fnmatch.FNM_PATHNAME, false},
		{"*", "/foo", fnmatch.FNM_PATHNAME, false},
		{"*", "foo/", fnmatch.FNM_PATHNAME, false},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestWildcardFNMPeriod checks that with FNM_PERIOD a "*" does not match a
// leading '.', and how that interacts with FNM_PATHNAME after a '/'.
func TestWildcardFNMPeriod(t *testing.T) {
	// FNM_PERIOD means that . is not matched in some circumstances.
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		{"*", ".foo", fnmatch.FNM_PERIOD, false},
		{"/*", "/.foo", fnmatch.FNM_PERIOD, true},
		{"/*", "/.foo", fnmatch.FNM_PERIOD | fnmatch.FNM_PATHNAME, false},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestQuestionMark verifies that "?" matches exactly one character
// (one rune, per the multi-byte cases), including '/' unless FNM_PATHNAME
// is set.
func TestQuestionMark(t *testing.T) {
	// A question mark pattern "?" should match a single character
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		{"?", "", 0, false},
		{"?", "f", 0, true},
		{"?", ".", 0, true},
		{"?", "?", 0, true},
		{"?", "foo", 0, false},
		{"?", "わ", 0, true},
		{"?", "わた", 0, false},
		// Even '/' when flags are 0
		{"?", "/", 0, true},
		// Except '/' when flags include FNM_PATHNAME
		{"?", "/", fnmatch.FNM_PATHNAME, false},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestQuestionMarkExceptions covers the FNM_PERIOD interaction with "?":
// a leading '.' is protected only in the positions FNM_PERIOD defines.
func TestQuestionMarkExceptions(t *testing.T) {
	// When flags include FNM_PERIOD a '?' might not match a '.' character.
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		{"?", ".", fnmatch.FNM_PERIOD, false},
		{"foo?", "foo.", fnmatch.FNM_PERIOD, true},
		{"/?", "/.", fnmatch.FNM_PERIOD, true},
		{"/?", "/.", fnmatch.FNM_PERIOD | fnmatch.FNM_PATHNAME, false},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestRange exercises bracket expressions: single-char ranges, literal '-'
// placement, negation via '^'/'!', multiple ranges, the FNM_PATHNAME '/'
// exclusion, and FNM_CASEFOLD case folding.
func TestRange(t *testing.T) {
	azPat := "[a-z]"
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		// Should match a single character inside its range
		{azPat, "a", 0, true},
		{azPat, "q", 0, true},
		{azPat, "z", 0, true},
		{"[わ]", "わ", 0, true},
		// Should not match characters outside its range
		{azPat, "-", 0, false},
		{azPat, " ", 0, false},
		{azPat, "D", 0, false},
		{azPat, "é", 0, false},
		// Should only match one character
		{azPat, "ab", 0, false},
		{azPat, "", 0, false},
		// Should not consume more of the pattern than necessary
		{azPat + "foo", "afoo", 0, true},
		// Should match '-' if it is the first/last character or is
		// backslash escaped
		{"[-az]", "-", 0, true},
		{"[-az]", "a", 0, true},
		{"[-az]", "b", 0, false},
		{"[az-]", "-", 0, true},
		{"[a\\-z]", "-", 0, true},
		{"[a\\-z]", "b", 0, false},
		// ignore '\\' when FNM_NOESCAPE is given
		{"[a\\-z]", "\\", fnmatch.FNM_NOESCAPE, true},
		{"[a\\-z]", "-", fnmatch.FNM_NOESCAPE, false},
		// Should be negated if starting with ^ or !"
		{"[^a-z]", "a", 0, false},
		{"[!a-z]", "b", 0, false},
		{"[!a-z]", "é", 0, true},
		{"[!a-z]", "わ", 0, true},
		// Still match '-' if following the negation character
		{"[^-az]", "-", 0, false},
		{"[^-az]", "b", 0, true},
		// Should support multiple characters/ranges
		{"[abc]", "a", 0, true},
		{"[abc]", "c", 0, true},
		{"[abc]", "d", 0, false},
		{"[a-cg-z]", "c", 0, true},
		{"[a-cg-z]", "h", 0, true},
		{"[a-cg-z]", "d", 0, false},
		// Should not match '/' when flags is FNM_PATHNAME
		{"[abc/def]", "/", 0, true},
		{"[abc/def]", "/", fnmatch.FNM_PATHNAME, false},
		{"[.-0]", "/", 0, true}, // The range [.-0] includes /
		{"[.-0]", "/", fnmatch.FNM_PATHNAME, false},
		// Should normally be case-sensitive
		{"[a-z]", "A", 0, false},
		{"[A-Z]", "a", 0, false},
		// Except when FNM_CASEFOLD is given
		{"[a-z]", "A", fnmatch.FNM_CASEFOLD, true},
		{"[A-Z]", "a", fnmatch.FNM_CASEFOLD, true},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestBackSlash verifies backslash escaping of metacharacters, and that
// FNM_NOESCAPE turns the backslash into an ordinary literal character.
func TestBackSlash(t *testing.T) {
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		// A backslash should escape the following characters
		{"\\\\", "\\", 0, true},
		{"\\*", "*", 0, true},
		{"\\*", "foo", 0, false},
		{"\\?", "?", 0, true},
		{"\\?", "f", 0, false},
		{"\\[a-z]", "[a-z]", 0, true},
		{"\\[a-z]", "a", 0, false},
		{"\\foo", "foo", 0, true},
		{"\\わ", "わ", 0, true},
		// Unless FNM_NOESCAPE is given
		{"\\\\", "\\", fnmatch.FNM_NOESCAPE, false},
		{"\\\\", "\\\\", fnmatch.FNM_NOESCAPE, true},
		{"\\*", "foo", fnmatch.FNM_NOESCAPE, false},
		{"\\*", "\\*", fnmatch.FNM_NOESCAPE, true},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestLiteral verifies exact, case-sensitive matching of patterns without
// metacharacters, and case folding under FNM_CASEFOLD.
func TestLiteral(t *testing.T) {
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		// Literal characters should match themselves
		{"foo", "foo", 0, true},
		{"foo", "foobar", 0, false},
		{"foobar", "foo", 0, false},
		{"foo", "Foo", 0, false},
		{"わたし", "わたし", 0, true},
		// And perform case-folding when FNM_CASEFOLD is given
		{"foo", "FOO", fnmatch.FNM_CASEFOLD, true},
		{"FoO", "fOo", fnmatch.FNM_CASEFOLD, true},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}
// TestFNMLeadingDir checks FNM_LEADING_DIR: a pattern matches when it
// matches a leading directory prefix of the input (i.e. trailing "/..."
// text is ignored).
func TestFNMLeadingDir(t *testing.T) {
	cases := []struct {
		pattern string
		input   string
		flags   int
		want    bool
	}{
		// FNM_LEADING_DIR should ignore trailing '/*'
		{"foo", "foo/bar", 0, false},
		{"foo", "foo/bar", fnmatch.FNM_LEADING_DIR, true},
		{"*", "foo/bar", fnmatch.FNM_PATHNAME, false},
		{"*", "foo/bar", fnmatch.FNM_PATHNAME | fnmatch.FNM_LEADING_DIR, true},
	}
	for tc, c := range cases {
		got := fnmatch.Match(c.pattern, c.input, c.flags)
		if got != c.want {
			t.Errorf(
				"Testcase #%d failed: fnmatch.Match('%s', '%s', %d) should be %v not %v",
				tc, c.pattern, c.input, c.flags, c.want, got,
			)
		}
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,43 +0,0 @@
package regexp2
import "testing"
// TestIgnoreCase_Simple checks that the IgnoreCase option makes the whole
// pattern match case-insensitively.
func TestIgnoreCase_Simple(t *testing.T) {
	re := MustCompile("aaamatch thisbbb", IgnoreCase)
	match, err := re.FindStringMatch("AaAMatch thisBBb")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if match == nil {
		t.Fatalf("no match when one was expected")
	}
	if want, got := "AaAMatch thisBBb", match.String(); want != got {
		t.Fatalf("group 0 wanted '%v', got '%v'", want, got)
	}
}
// TestIgnoreCase_Inline checks the inline (?i:...) group, which applies
// case-insensitivity to only part of the pattern.
func TestIgnoreCase_Inline(t *testing.T) {
	re := MustCompile("aaa(?i:match this)bbb", 0)
	match, err := re.FindStringMatch("aaaMaTcH ThIsbbb")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if match == nil {
		t.Fatalf("no match when one was expected")
	}
	if want, got := "aaaMaTcH ThIsbbb", match.String(); want != got {
		t.Fatalf("group 0 wanted '%v', got '%v'", want, got)
	}
}
// TestIgnoreCase_Revert checks that an inline (?-i:...) group turns
// case-insensitivity back off inside a pattern compiled with IgnoreCase,
// so a case-mismatched input must NOT match.
func TestIgnoreCase_Revert(t *testing.T) {
	re := MustCompile("aaa(?-i:match this)bbb", IgnoreCase)
	match, err := re.FindStringMatch("AaAMatch thisBBb")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if match != nil {
		t.Fatalf("had a match but expected no match")
	}
}

View File

@ -1,409 +0,0 @@
package regexp2
import (
"bufio"
"bytes"
"fmt"
"log"
"os"
"regexp"
"strconv"
"strings"
"testing"
"time"
)
// Process the file "testoutput1" from PCRE2 v10.21 (public domain)
var totalCount, failCount = 0, 0
// TestPcre_Basics replays the PCRE2 v10.21 "testoutput1" corpus: it parses
// each pattern (possibly multi-line), compiles it, then checks every
// expected-match line that follows against the engine's actual captures.
// Failures are tallied via problem()/failCount rather than aborting.
func TestPcre_Basics(t *testing.T) {
	defer func() {
		if failCount > 0 {
			t.Logf("%v of %v patterns failed", failCount, totalCount)
		}
	}()
	// open our test patterns file and run through it
	// validating results as we go
	file, err := os.Open("testoutput1")
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	// the high level structure of the file:
	// #comments - ignore only outside of the pattern
	// pattern (could be multi-line, could be surrounded by "" or //) after the / there are the options some we understand, some we don't
	// test case
	// 0: success case
	// \= Expect no match (ignored)
	// another test case
	// No Match
	//
	// another pattern ...etc
	scanner := bufio.NewScanner(file)
	// main pattern loop
	for scanner.Scan() {
		// reading the file a line at a time
		line := scanner.Text()
		if trim := strings.TrimSpace(line); trim == "" || strings.HasPrefix(trim, "#") {
			// skip blanks and comments
			continue
		}
		patternStart := line[0]
		if patternStart != '/' && patternStart != '"' {
			// an error! expected a pattern but we didn't understand what was in the file
			t.Fatalf("Unknown file format, expected line to start with '/' or '\"', line in: %v", line)
		}
		// start building our pattern, handling multi-line patterns
		pattern := line
		totalCount++
		// keep appending the lines to our pattern string until we
		// find our closing tag, don't allow the first char to match on the
		// line start, but subsequent lines could end on the first char
		allowFirst := false
		for !containsEnder(line, patternStart, allowFirst) {
			if !scanner.Scan() {
				// an error! expected more pattern, but got eof
				t.Fatalf("Unknown file format, expected more pattern text, but got EOF, pattern so far: %v", pattern)
			}
			line = scanner.Text()
			pattern += fmt.Sprintf("\n%s", line)
			allowFirst = true
		}
		// we have our raw pattern! -- we need to convert this to a compiled regex
		re := compileRawPattern(t, pattern)
		var (
			capsIdx map[int]int // per-group count of capture lines seen so far
			m       *Match      // most recent match result for this pattern
			toMatch string      // most recent subject text
		)
		// now we need to parse the test cases if there are any
		// they start with 4 spaces -- if we don't get a 4-space start then
		// we're back out to our next pattern
		for scanner.Scan() {
			line = scanner.Text()
			// blank line is our separator for a new pattern
			if strings.TrimSpace(line) == "" {
				break
			}
			// could be either " " or "\= Expect"
			if strings.HasPrefix(line, "\\= Expect") {
				continue
			} else if strings.HasPrefix(line, " ") {
				// NOTE(review): this prefix and the surrounding comments say
				// "4 spaces" and the slice below is line[4:] — the single-space
				// prefix here looks truncated by the diff renderer; verify
				// against upstream regexp2 before relying on it.
				// trim off leading spaces for our text to match
				toMatch = line[4:]
				// trim off trailing spaces too
				toMatch = strings.TrimRight(toMatch, " ")
				m = matchString(t, re, toMatch)
				capsIdx = make(map[int]int)
				continue
				//t.Fatalf("Expected match text to start with 4 spaces, instead got: '%v'", line)
			} else if strings.HasPrefix(line, "No match") {
				validateNoMatch(t, re, m)
				// no match means we're done
				continue
			} else if subs := matchGroup.FindStringSubmatch(line); len(subs) == 3 {
				// a "N: text" line: the capIdx-th capture of group N
				gIdx, _ := strconv.Atoi(subs[1])
				if _, ok := capsIdx[gIdx]; !ok {
					capsIdx[gIdx] = 0
				}
				validateMatch(t, re, m, toMatch, subs[2], gIdx, capsIdx[gIdx])
				capsIdx[gIdx]++
				continue
			} else {
				// no match -- problem
				t.Fatalf("Unknown file format, expected match or match group but got '%v'", line)
			}
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}
// matchGroup parses expected-capture lines of the form "  N: text".
var matchGroup = regexp.MustCompile(`^\s*(\d+): (.*)`)
// problem records one test-suite failure: it bumps the package-level
// failCount tally and reports the formatted message via t.Errorf (non-fatal,
// so the suite keeps running).
func problem(t *testing.T, input string, args ...interface{}) {
	failCount++
	t.Errorf(input, args...)
}
// validateNoMatch asserts that the pattern produced no match. A nil re
// (compile already failed, reported upstream) or a nil m is acceptable;
// only a real match is a failure.
func validateNoMatch(t *testing.T, re *Regexp, m *Match) {
	if re != nil && m != nil {
		problem(t, "Expected no match for pattern '%v', but got '%v'", re.pattern, m.String())
	}
}
// validateMatch checks one expected-capture line from the PCRE corpus:
// the capIdx-th capture of group idx in match m should render (via
// unEscapeGroup) as value. value "<unset>" asserts the group captured
// nothing. Failures are recorded with problem(); nil re means the compile
// already failed and was reported.
func validateMatch(t *testing.T, re *Regexp, m *Match, toMatch, value string, idx, capIdx int) {
	if re == nil {
		// already error'd earlier up stream
		return
	}
	if m == nil {
		// we didn't match, but should have
		problem(t, "Expected match for pattern '%v' with input '%v', but got no match", re.pattern, toMatch)
		return
	}
	g := m.Groups()
	if len(g) <= idx {
		problem(t, "Expected group %v does not exist in pattern '%v' with input '%v'", idx, re.pattern, toMatch)
		return
	}
	if value == "<unset>" {
		// this means we shouldn't have a cap for this group
		if len(g[idx].Captures) > 0 {
			problem(t, "Expected no cap %v in group %v in pattern '%v' with input '%v'", g[idx].Captures[capIdx].String(), idx, re.pattern, toMatch)
		}
		return
	}
	if len(g[idx].Captures) <= capIdx {
		problem(t, "Expected cap %v does not exist in group %v in pattern '%v' with input '%v'", capIdx, idx, re.pattern, toMatch)
		return
	}
	// NOTE(review): compares the group's full text rather than the capIdx-th
	// capture (the capture variant is the commented-out line below).
	escp := unEscapeGroup(g[idx].String())
	//escp := unEscapeGroup(g[idx].Captures[capIdx].String())
	if escp != value {
		problem(t, "Expected '%v' but got '%v' for cap %v, group %v for pattern '%v' with input '%v'", value, escp, capIdx, idx, re.pattern, toMatch)
		return
	}
}
// compileRawPattern strips the PCRE-style delimiters and trailing option
// letters from a raw corpus pattern, maps the options it understands
// (i, s, m, x) onto RegexOptions, and compiles the result. Compile errors
// and panics are recorded via problem(); the returned *Regexp may be nil.
func compileRawPattern(t *testing.T, pattern string) *Regexp {
	// check our end for RegexOptions -trim them off
	index := strings.LastIndexAny(pattern, "/\"")
	//
	// Append "= Debug" to compare details between corefx and regexp2 on the PCRE test suite
	//
	var opts RegexOptions
	if index+1 < len(pattern) {
		textOptions := pattern[index+1:]
		pattern = pattern[:index+1]
		// there are lots of complex options here
		for _, textOpt := range strings.Split(textOptions, ",") {
			switch textOpt {
			case "dupnames":
				// we don't know how to handle this...
			default:
				// Options are single letters that may appear anywhere in the
				// fragment, so substring checks are used rather than equality.
				if strings.Contains(textOpt, "i") {
					opts |= IgnoreCase
				}
				if strings.Contains(textOpt, "s") {
					opts |= Singleline
				}
				if strings.Contains(textOpt, "m") {
					opts |= Multiline
				}
				if strings.Contains(textOpt, "x") {
					opts |= IgnorePatternWhitespace
				}
			}
		}
	}
	// trim off first and last char
	pattern = pattern[1 : len(pattern)-1]
	defer func() {
		if rec := recover(); rec != nil {
			problem(t, "PANIC in compiling \"%v\": %v", pattern, rec)
		}
	}()
	re, err := Compile(pattern, opts)
	if err != nil {
		problem(t, "Error parsing \"%v\": %v", pattern, err)
	}
	return re
}
// matchString runs re against toMatch (unescaped first, unless the text is a
// lone backslash) with a one-second timeout, recording any match error via
// problem(). Returns nil when re is nil or no match was found.
//
// Fix: dropped the redundant `var err error` — the short variable
// declaration below already declares err.
func matchString(t *testing.T, re *Regexp, toMatch string) *Match {
	if re == nil {
		return nil
	}
	re.MatchTimeout = time.Second * 1
	escp := ""
	if toMatch != "\\" {
		escp = unEscapeToMatch(toMatch)
	}
	m, err := re.FindStringMatch(escp)
	if err != nil {
		problem(t, "Error matching \"%v\" in pattern \"%v\": %v", toMatch, re.pattern, err)
	}
	return m
}
// containsEnder reports whether line contains the pattern terminator byte
// ender. A terminator in the first column counts only when allowFirst is
// true (the opening delimiter of a pattern must not close it).
func containsEnder(line string, ender byte, allowFirst bool) bool {
	switch idx := strings.LastIndexByte(line, ender); {
	case idx > 0:
		return true
	case idx == 0:
		return allowFirst
	default:
		return false
	}
}
// unEscapeToMatch converts C-style backslash escapes in a corpus subject
// line (\xNN, \NNN octal, \a \b \e \f \n \r \t \v, \\) into their raw
// bytes. Unrecognised escapes pass the escaped byte through unchanged.
func unEscapeToMatch(line string) string {
	idx := strings.IndexRune(line, '\\')
	// no slashes means no unescape needed
	if idx == -1 {
		return line
	}
	buf := bytes.NewBufferString(line[:idx])
	// get the runes for the rest of the string -- we're going full parser scan on this
	inEscape := false
	// take any \'s and convert them
	for i := idx; i < len(line); i++ {
		ch := line[i]
		if ch == '\\' {
			if inEscape {
				// "\\" collapses to a single literal backslash
				buf.WriteByte(ch)
			}
			inEscape = !inEscape
			continue
		}
		if inEscape {
			switch ch {
			case 'x':
				// two hex digits; scanHex advances i past them
				buf.WriteByte(scanHex(line, &i))
			case 'a':
				buf.WriteByte(0x07)
			case 'b':
				buf.WriteByte('\b')
			case 'e':
				buf.WriteByte(0x1b)
			case 'f':
				buf.WriteByte('\f')
			case 'n':
				buf.WriteByte('\n')
			case 'r':
				buf.WriteByte('\r')
			case 't':
				buf.WriteByte('\t')
			case 'v':
				buf.WriteByte(0x0b)
			default:
				if ch >= '0' && ch <= '7' {
					// up to three octal digits; scanOctal advances i
					buf.WriteByte(scanOctal(line, &i))
				} else {
					buf.WriteByte(ch)
					//panic(fmt.Sprintf("unexpected escape '%v' in %v", string(ch), line))
				}
			}
			inEscape = false
		} else {
			buf.WriteByte(ch)
		}
	}
	return buf.String()
}
// unEscapeGroup renders val with control and non-ASCII bytes (0x00-0x1f and
// 0x7f-0xff) written as \xNN hex escapes so that captured text is printable
// and comparable against the corpus's expected output.
func unEscapeGroup(val string) string {
	var out bytes.Buffer
	for i := 0; i < len(val); i++ {
		b := val[i]
		if b > 0x1f && b < 0x7f {
			// printable ASCII passes through unchanged
			out.WriteByte(b)
		} else {
			fmt.Fprintf(&out, "\\x%.2x", b)
		}
	}
	return out.String()
}
// scanHex consumes the two hex digits that follow line[*idx] and returns
// their byte value, leaving *idx on the last digit consumed. Panics when
// fewer than two characters remain or either digit is invalid.
func scanHex(line string, idx *int) byte {
	if *idx >= len(line)-2 {
		panic(fmt.Sprintf("not enough hex chars in %v at %v", line, *idx))
	}
	*idx += 2
	hi := hexDigit(line[*idx-1])
	lo := hexDigit(line[*idx])
	if hi < 0 || lo < 0 {
		panic("bad hex chars")
	}
	return byte(hi*0x10 + lo)
}
// hexDigit returns the value (0-15) of the hexadecimal digit ch, or -1 when
// ch is not a valid hex digit.
func hexDigit(ch byte) int {
	switch {
	case ch >= '0' && ch <= '9':
		return int(ch - '0')
	case ch >= 'a' && ch <= 'f':
		return int(ch-'a') + 0xa
	case ch >= 'A' && ch <= 'F':
		return int(ch-'A') + 0xa
	default:
		return -1
	}
}
// scanOctal consumes up to three octal digits starting at line[*idx] and
// returns their value truncated to one byte (Perl semantics: high bits of
// values above 0377 are dropped). On return *idx points at the last digit
// consumed — one before the next unread character.
func scanOctal(line string, idx *int) byte {
	// Octal escapes are 1-3 digits; never read past the end of line.
	digitsLeft := 3
	if remaining := len(line) - *idx; digitsLeft > remaining {
		digitsLeft = remaining
	}
	val := 0
	d := int(line[*idx] - '0')
	for digitsLeft > 0 && d <= 7 {
		val = val*8 + d
		digitsLeft--
		(*idx)++
		if *idx < len(line) {
			d = int(line[*idx] - '0')
		}
	}
	(*idx)--
	return byte(val & 0xFF)
}

View File

@ -1,307 +0,0 @@
package regexp2
import (
"strings"
"testing"
)
func BenchmarkLiteral(b *testing.B) {
x := strings.Repeat("x", 50) + "y"
b.StopTimer()
re := MustCompile("y", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkNotLiteral(b *testing.B) {
x := strings.Repeat("x", 50) + "y"
b.StopTimer()
re := MustCompile(".y", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkMatchClass(b *testing.B) {
b.StopTimer()
x := strings.Repeat("xxxx", 20) + "w"
re := MustCompile("[abcdw]", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkMatchClass_InRange(b *testing.B) {
b.StopTimer()
// 'b' is between 'a' and 'c', so the charclass
// range checking is no help here.
x := strings.Repeat("bbbb", 20) + "c"
re := MustCompile("[ac]", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
/*
func BenchmarkReplaceAll(b *testing.B) {
x := "abcdefghijklmnopqrstuvwxyz"
b.StopTimer()
re := MustCompile("[cjrw]", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
re.ReplaceAllString(x, "")
}
}
*/
func BenchmarkAnchoredLiteralShortNonMatch(b *testing.B) {
b.StopTimer()
x := "abcdefghijklmnopqrstuvwxyz"
re := MustCompile("^zbc(d|e)", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); m || err != nil {
b.Fatalf("unexpected match or error! %v", err)
}
}
}
func BenchmarkAnchoredLiteralLongNonMatch(b *testing.B) {
b.StopTimer()
data := "abcdefghijklmnopqrstuvwxyz"
x := make([]rune, 32768*len(data))
for i := 0; i < 32768; /*(2^15)*/ i++ {
for j := 0; j < len(data); j++ {
x[i*len(data)+j] = rune(data[j])
}
}
re := MustCompile("^zbc(d|e)", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchRunes(x); m || err != nil {
b.Fatalf("unexpected match or error! %v", err)
}
}
}
func BenchmarkAnchoredShortMatch(b *testing.B) {
b.StopTimer()
x := "abcdefghijklmnopqrstuvwxyz"
re := MustCompile("^.bc(d|e)", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkAnchoredLongMatch(b *testing.B) {
b.StopTimer()
data := "abcdefghijklmnopqrstuvwxyz"
x := make([]rune, 32768*len(data))
for i := 0; i < 32768; /*(2^15)*/ i++ {
for j := 0; j < len(data); j++ {
x[i*len(data)+j] = rune(data[j])
}
}
re := MustCompile("^.bc(d|e)", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchRunes(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkOnePassShortA(b *testing.B) {
b.StopTimer()
x := "abcddddddeeeededd"
re := MustCompile("^.bc(d|e)*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkNotOnePassShortA(b *testing.B) {
b.StopTimer()
x := "abcddddddeeeededd"
re := MustCompile(".bc(d|e)*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkOnePassShortB(b *testing.B) {
b.StopTimer()
x := "abcddddddeeeededd"
re := MustCompile("^.bc(?:d|e)*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkNotOnePassShortB(b *testing.B) {
b.StopTimer()
x := "abcddddddeeeededd"
re := MustCompile(".bc(?:d|e)*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkOnePassLongPrefix(b *testing.B) {
b.StopTimer()
x := "abcdefghijklmnopqrstuvwxyz"
re := MustCompile("^abcdefghijklmnopqrstuvwxyz.*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
func BenchmarkOnePassLongNotPrefix(b *testing.B) {
b.StopTimer()
x := "abcdefghijklmnopqrstuvwxyz"
re := MustCompile("^.bcdefghijklmnopqrstuvwxyz.*$", 0)
b.StartTimer()
for i := 0; i < b.N; i++ {
if m, err := re.MatchString(x); !m || err != nil {
b.Fatalf("no match or error! %v", err)
}
}
}
// text caches the largest rune slice generated so far, so repeated
// benchmarks can reuse it.
var text []rune

// makeText returns a deterministic pseudo-random slice of n printable-ASCII
// runes (with occasional newlines), regenerating and re-caching only when a
// longer slice than previously produced is requested.
func makeText(n int) []rune {
	if len(text) >= n {
		return text[:n]
	}
	text = make([]rune, n)
	x := ^uint32(0)
	for i := range text {
		// simple xorshift-style bit mixing for repeatable "random" text
		x += x
		x ^= 1
		if int32(x) < 0 {
			x ^= 0x88888eef
		}
		if x%31 == 0 {
			text[i] = '\n'
		} else {
			// map into the printable ASCII range 0x20-0x7E
			text[i] = rune(x%(0x7E+1-0x20) + 0x20)
		}
	}
	return text
}
// benchmark compiles re once, then times matching it against n runes of
// generated text, reporting throughput via SetBytes. Every pattern in the
// corpus below is constructed not to match, so a match is a failure.
func benchmark(b *testing.B, re string, n int) {
	r := MustCompile(re, 0)
	t := makeText(n)
	b.ResetTimer()
	b.SetBytes(int64(n))
	for i := 0; i < b.N; i++ {
		if m, err := r.MatchRunes(t); m {
			b.Fatal("match!")
		} else if err != nil {
			b.Fatalf("Err %v", err)
		}
	}
}
// Pattern corpus shared by the size-parameterised benchmarks below,
// graded from trivially rejectable (easy0) to heavily backtracking
// alternations and capture groups (hard1, parens).
const (
	easy0  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ$"
	easy1  = "A[AB]B[BC]C[CD]D[DE]E[EF]F[FG]G[GH]H[HI]I[IJ]J$"
	medium = "[XYZ]ABCDEFGHIJKLMNOPQRSTUVWXYZ$"
	hard   = "[ -~]*ABCDEFGHIJKLMNOPQRSTUVWXYZ$"
	hard1  = "ABCD|CDEF|EFGH|GHIJ|IJKL|KLMN|MNOP|OPQR|QRST|STUV|UVWX|WXYZ"
	parens = "([ -~])*(A)(B)(C)(D)(E)(F)(G)(H)(I)(J)(K)(L)(M)" +
		"(N)(O)(P)(Q)(R)(S)(T)(U)(V)(W)(X)(Y)(Z)$"
)
// Size-parameterised benchmark wrappers: each pattern class is matched
// against generated text from 32 B up to 32 MB via the benchmark() helper.
func BenchmarkMatchEasy0_32(b *testing.B)   { benchmark(b, easy0, 32<<0) }
func BenchmarkMatchEasy0_1K(b *testing.B)   { benchmark(b, easy0, 1<<10) }
func BenchmarkMatchEasy0_32K(b *testing.B)  { benchmark(b, easy0, 32<<10) }
func BenchmarkMatchEasy0_1M(b *testing.B)   { benchmark(b, easy0, 1<<20) }
func BenchmarkMatchEasy0_32M(b *testing.B)  { benchmark(b, easy0, 32<<20) }
func BenchmarkMatchEasy1_32(b *testing.B)   { benchmark(b, easy1, 32<<0) }
func BenchmarkMatchEasy1_1K(b *testing.B)   { benchmark(b, easy1, 1<<10) }
func BenchmarkMatchEasy1_32K(b *testing.B)  { benchmark(b, easy1, 32<<10) }
func BenchmarkMatchEasy1_1M(b *testing.B)   { benchmark(b, easy1, 1<<20) }
func BenchmarkMatchEasy1_32M(b *testing.B)  { benchmark(b, easy1, 32<<20) }
func BenchmarkMatchMedium_32(b *testing.B)  { benchmark(b, medium, 32<<0) }
func BenchmarkMatchMedium_1K(b *testing.B)  { benchmark(b, medium, 1<<10) }
func BenchmarkMatchMedium_32K(b *testing.B) { benchmark(b, medium, 32<<10) }
func BenchmarkMatchMedium_1M(b *testing.B)  { benchmark(b, medium, 1<<20) }
func BenchmarkMatchMedium_32M(b *testing.B) { benchmark(b, medium, 32<<20) }
func BenchmarkMatchHard_32(b *testing.B)    { benchmark(b, hard, 32<<0) }
func BenchmarkMatchHard_1K(b *testing.B)    { benchmark(b, hard, 1<<10) }
func BenchmarkMatchHard_32K(b *testing.B)   { benchmark(b, hard, 32<<10) }
func BenchmarkMatchHard_1M(b *testing.B)    { benchmark(b, hard, 1<<20) }
func BenchmarkMatchHard_32M(b *testing.B)   { benchmark(b, hard, 32<<20) }
func BenchmarkMatchHard1_32(b *testing.B)   { benchmark(b, hard1, 32<<0) }
func BenchmarkMatchHard1_1K(b *testing.B)   { benchmark(b, hard1, 1<<10) }
func BenchmarkMatchHard1_32K(b *testing.B)  { benchmark(b, hard1, 32<<10) }
func BenchmarkMatchHard1_1M(b *testing.B)   { benchmark(b, hard1, 1<<20) }
func BenchmarkMatchHard1_32M(b *testing.B)  { benchmark(b, hard1, 32<<20) }
// TestProgramTooLongForBacktrack tests that a regex which is too long
// for the backtracker still executes properly.
func TestProgramTooLongForBacktrack(t *testing.T) {
	// A ~100-alternative pattern, large enough to exceed the backtracker's
	// program-size limit and exercise the alternate execution path.
	longRegex := MustCompile(`(one|two|three|four|five|six|seven|eight|nine|ten|eleven|twelve|thirteen|fourteen|fifteen|sixteen|seventeen|eighteen|nineteen|twenty|twentyone|twentytwo|twentythree|twentyfour|twentyfive|twentysix|twentyseven|twentyeight|twentynine|thirty|thirtyone|thirtytwo|thirtythree|thirtyfour|thirtyfive|thirtysix|thirtyseven|thirtyeight|thirtynine|forty|fortyone|fortytwo|fortythree|fortyfour|fortyfive|fortysix|fortyseven|fortyeight|fortynine|fifty|fiftyone|fiftytwo|fiftythree|fiftyfour|fiftyfive|fiftysix|fiftyseven|fiftyeight|fiftynine|sixty|sixtyone|sixtytwo|sixtythree|sixtyfour|sixtyfive|sixtysix|sixtyseven|sixtyeight|sixtynine|seventy|seventyone|seventytwo|seventythree|seventyfour|seventyfive|seventysix|seventyseven|seventyeight|seventynine|eighty|eightyone|eightytwo|eightythree|eightyfour|eightyfive|eightysix|eightyseven|eightyeight|eightynine|ninety|ninetyone|ninetytwo|ninetythree|ninetyfour|ninetyfive|ninetysix|ninetyseven|ninetyeight|ninetynine|onehundred)`, 0)
	if m, err := longRegex.MatchString("two"); !m {
		t.Errorf("longRegex.MatchString(\"two\") was false, want true")
	} else if err != nil {
		t.Errorf("Error: %v", err)
	}
	if m, err := longRegex.MatchString("xxx"); m {
		t.Errorf("longRegex.MatchString(\"xxx\") was true, want false")
	} else if err != nil {
		t.Errorf("Error: %v", err)
	}
}
// BenchmarkLeading times a pattern with a leading character class against
// 1M runes of generated text; unlike the benchmark() corpus, this pattern
// is expected to match.
func BenchmarkLeading(b *testing.B) {
	b.StopTimer()
	r := MustCompile("[a-q][^u-z]{13}x", 0)
	inp := makeText(1000000)
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		if m, err := r.MatchRunes(inp); !m {
			b.Errorf("Expected match")
		} else if err != nil {
			b.Errorf("Error: %v", err)
		}
	}
}

View File

@ -1,915 +0,0 @@
package regexp2
import (
"reflect"
"strings"
"testing"
"time"
"github.com/dlclark/regexp2/syntax"
)
// TestBacktrack_CatastrophicTimeout ensures MatchTimeout aborts a
// catastrophically backtracking pattern with an error instead of hanging.
//
// Fix: the error returned by Compile was never checked before r was
// dereferenced, so a compile failure would panic with a nil pointer rather
// than fail cleanly.
func TestBacktrack_CatastrophicTimeout(t *testing.T) {
	r, err := Compile("(.+)*\\?", 0)
	if err != nil {
		// Fatal: r is nil on compile failure and is used below.
		t.Fatalf("unexpected compile err: %v", err)
	}
	r.MatchTimeout = time.Millisecond * 1
	t.Logf("code dump: %v", r.code.Dump())
	m, err := r.FindStringMatch("Do you think you found the problem string!")
	if err == nil {
		t.Errorf("expected timeout err")
	}
	if m != nil {
		t.Errorf("Expected no match")
	}
}
// TestSetPrefix checks that compiling an anchored pattern computes the
// first-character prefix set [-\s].
//
// Fix: the second failure message claimed the expected set was [\s-] while
// the assertion actually checks for [-\s]; the message now matches the check.
func TestSetPrefix(t *testing.T) {
	r := MustCompile(`^\s*-TEST`, 0)
	if r.code.FcPrefix == nil {
		t.Fatalf("Expected prefix set [-\\s] but was nil")
	}
	if r.code.FcPrefix.PrefixSet.String() != "[-\\s]" {
		t.Fatalf("Expected prefix set [-\\s] but was %v", r.code.FcPrefix.PrefixSet.String())
	}
}
// TestSetInCode checks that compiling this pattern yields exactly one
// character set in the program, rendered as [\s].
func TestSetInCode(t *testing.T) {
	re := MustCompile(`(?<body>\s*(?<name>.+))`, 0)
	t.Logf("code dump: %v", re.code.Dump())
	sets := re.code.Sets
	if want, got := 1, len(sets); want != got {
		t.Fatalf("r.code.Sets wanted %v, got %v", want, got)
	}
	if want, got := "[\\s]", sets[0].String(); want != got {
		t.Fatalf("first set wanted %v, got %v", want, got)
	}
}
// TestRegexp_Basic compiles a pattern with an optional named group and
// checks that it matches a containing string.
//
// Fix: a compile error was reported with Errorf and then the nil *Regexp
// was dereferenced, guaranteeing a panic; the error is now fatal. The dead
// else branch holding only commented-out code was removed.
func TestRegexp_Basic(t *testing.T) {
	r, err := Compile("test(?<named>ing)?", 0)
	if err != nil {
		// Fatal: r is nil on compile failure and is used below.
		t.Fatalf("unexpected compile err: %v", err)
	}
	m, err := r.FindStringMatch("this is a testing stuff")
	if err != nil {
		t.Errorf("unexpected match err: %v", err)
	}
	if m == nil {
		t.Error("Nil match, expected success")
	}
}
// check all our functions and properties around basic capture groups and referential for Group 0
func TestCapture_Basic(t *testing.T) {
	r := MustCompile(`.*\B(SUCCESS)\B.*`, 0)
	m, err := r.FindStringMatch("adfadsfSUCCESSadsfadsf")
	if err != nil {
		t.Fatalf("Unexpected match error: %v", err)
	}
	if m == nil {
		t.Fatalf("Should have matched")
	}
	// The match covers the entire 22-character input.
	if want, got := "adfadsfSUCCESSadsfadsf", m.String(); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 0, m.Index; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 22, m.Length; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 1, len(m.Captures); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	// The match's single capture mirrors the match itself.
	if want, got := m.String(), m.Captures[0].String(); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 0, m.Captures[0].Index; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 22, m.Captures[0].Length; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	g := m.Groups()
	if want, got := 2, len(g); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	// group 0 is always the match
	if want, got := m.String(), g[0].String(); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 1, len(g[0].Captures); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	// group 0's capture is always the match
	if want, got := m.Captures[0].String(), g[0].Captures[0].String(); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	// group 1 is our first explicit group (unnamed)
	if want, got := 7, g[1].Index; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := 7, g[1].Length; want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
	if want, got := "SUCCESS", g[1].String(); want != got {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, got)
	}
}
// TestEscapeUnescape_Basic round-trips a string of regex metacharacters
// through Escape and Unescape.
func TestEscapeUnescape_Basic(t *testing.T) {
	original := "#$^*+(){}<>\\|. "
	escaped := Escape(original)
	unescaped, err := Unescape(escaped)
	if err != nil {
		t.Fatalf("Unexpected error during unescape: %v", err)
	}
	// escaping must produce the expected backslashed form
	if want := `\#\$\^\*\+\(\)\{\}<>\\\|\.\ `; want != escaped {
		t.Fatalf("Wanted '%v'\nGot '%v'", want, escaped)
	}
	// unescaping must restore the original exactly
	if original != unescaped {
		t.Fatalf("Wanted '%v'\nGot '%v'", original, unescaped)
	}
}
// TestGroups_Basic validates group name/number metadata and captured values
// for named, numbered, and duplicate capture groups, covering both the
// (?<name>...) and (?'name'...) syntaxes.
func TestGroups_Basic(t *testing.T) {
	// d pairs a pattern and input with the group names, group numbers, and
	// captured strings the compiled regexp is expected to report
	type d struct {
		p    string
		s    string
		name []string
		num  []int
		strs []string
	}
	data := []d{
		d{"(?<first_name>\\S+)\\s(?<last_name>\\S+)", // example
			"Ryan Byington",
			[]string{"0", "first_name", "last_name"},
			[]int{0, 1, 2},
			[]string{"Ryan Byington", "Ryan", "Byington"}},
		d{"((?<One>abc)\\d+)?(?<Two>xyz)(.*)", // example
			"abc208923xyzanqnakl",
			[]string{"0", "1", "2", "One", "Two"},
			[]int{0, 1, 2, 3, 4},
			[]string{"abc208923xyzanqnakl", "abc208923", "anqnakl", "abc", "xyz"}},
		d{"((?<256>abc)\\d+)?(?<16>xyz)(.*)", // numeric names
			"0272saasdabc8978xyz][]12_+-",
			[]string{"0", "1", "2", "16", "256"},
			[]int{0, 1, 2, 16, 256},
			[]string{"abc8978xyz][]12_+-", "abc8978", "][]12_+-", "xyz", "abc"}},
		d{"((?<4>abc)(?<digits>\\d+))?(?<2>xyz)(?<everything_else>.*)", // mix numeric and string names
			"0272saasdabc8978xyz][]12_+-",
			[]string{"0", "1", "2", "digits", "4", "everything_else"},
			[]int{0, 1, 2, 3, 4, 5},
			[]string{"abc8978xyz][]12_+-", "abc8978", "xyz", "8978", "abc", "][]12_+-"}},
		d{"(?<first_name>\\S+)\\s(?<first_name>\\S+)", // dupe string names
			"Ryan Byington",
			[]string{"0", "first_name"},
			[]int{0, 1},
			[]string{"Ryan Byington", "Byington"}},
		d{"(?<15>\\S+)\\s(?<15>\\S+)", // dupe numeric names
			"Ryan Byington",
			[]string{"0", "15"},
			[]int{0, 15},
			[]string{"Ryan Byington", "Byington"}},
		// *** repeated from above, but with alt cap syntax ***
		d{"(?'first_name'\\S+)\\s(?'last_name'\\S+)", //example
			"Ryan Byington",
			[]string{"0", "first_name", "last_name"},
			[]int{0, 1, 2},
			[]string{"Ryan Byington", "Ryan", "Byington"}},
		d{"((?'One'abc)\\d+)?(?'Two'xyz)(.*)", // example
			"abc208923xyzanqnakl",
			[]string{"0", "1", "2", "One", "Two"},
			[]int{0, 1, 2, 3, 4},
			[]string{"abc208923xyzanqnakl", "abc208923", "anqnakl", "abc", "xyz"}},
		d{"((?'256'abc)\\d+)?(?'16'xyz)(.*)", // numeric names
			"0272saasdabc8978xyz][]12_+-",
			[]string{"0", "1", "2", "16", "256"},
			[]int{0, 1, 2, 16, 256},
			[]string{"abc8978xyz][]12_+-", "abc8978", "][]12_+-", "xyz", "abc"}},
		d{"((?'4'abc)(?'digits'\\d+))?(?'2'xyz)(?'everything_else'.*)", // mix numeric and string names
			"0272saasdabc8978xyz][]12_+-",
			[]string{"0", "1", "2", "digits", "4", "everything_else"},
			[]int{0, 1, 2, 3, 4, 5},
			[]string{"abc8978xyz][]12_+-", "abc8978", "xyz", "8978", "abc", "][]12_+-"}},
		d{"(?'first_name'\\S+)\\s(?'first_name'\\S+)", // dupe string names
			"Ryan Byington",
			[]string{"0", "first_name"},
			[]int{0, 1},
			[]string{"Ryan Byington", "Byington"}},
		d{"(?'15'\\S+)\\s(?'15'\\S+)", // dupe numeric names
			"Ryan Byington",
			[]string{"0", "15"},
			[]int{0, 15},
			[]string{"Ryan Byington", "Byington"}},
	}
	// fatalf fails the test, appending the offending test data and a dump of
	// the compiled program for easier diagnosis
	fatalf := func(re *Regexp, v d, format string, args ...interface{}) {
		args = append(args, v, re.code.Dump())
		t.Fatalf(format+" using test data: %#v\ndump:%v", args...)
	}
	// validateGroupNamesNumbers checks the regexp's reported group metadata
	// against the expected names/numbers, in both directions
	validateGroupNamesNumbers := func(re *Regexp, v d) {
		if len(v.name) != len(v.num) {
			fatalf(re, v, "Invalid data, group name count and number count must match")
		}
		groupNames := re.GetGroupNames()
		if !reflect.DeepEqual(groupNames, v.name) {
			fatalf(re, v, "group names expected: %v, actual: %v", v.name, groupNames)
		}
		groupNums := re.GetGroupNumbers()
		if !reflect.DeepEqual(groupNums, v.num) {
			fatalf(re, v, "group numbers expected: %v, actual: %v", v.num, groupNums)
		}
		// make sure we can freely get names and numbers from each other
		for i := range groupNums {
			if want, got := groupNums[i], re.GroupNumberFromName(groupNames[i]); want != got {
				fatalf(re, v, "group num from name Wanted '%v'\nGot '%v'", want, got)
			}
			if want, got := groupNames[i], re.GroupNameFromNumber(groupNums[i]); want != got {
				fatalf(re, v, "group name from num Wanted '%v'\nGot '%v'", want, got)
			}
		}
	}
	for _, v := range data {
		// compile the regex
		re := MustCompile(v.p, 0)
		// validate our group name/num info before execute
		validateGroupNamesNumbers(re, v)
		m, err := re.FindStringMatch(v.s)
		if err != nil {
			fatalf(re, v, "Unexpected error in match: %v", err)
		}
		if m == nil {
			fatalf(re, v, "Match is nil")
		}
		if want, got := len(v.strs), m.GroupCount(); want != got {
			fatalf(re, v, "GroupCount() Wanted '%v'\nGot '%v'", want, got)
		}
		g := m.Groups()
		if want, got := len(v.strs), len(g); want != got {
			fatalf(re, v, "len(m.Groups()) Wanted '%v'\nGot '%v'", want, got)
		}
		// validate each group's value from the execute
		for i := range v.name {
			grp1 := m.GroupByName(v.name[i])
			grp2 := m.GroupByNumber(v.num[i])
			// should be identical reference
			if grp1 != grp2 {
				fatalf(re, v, "Expected GroupByName and GroupByNumber to return same result for %v, %v", v.name[i], v.num[i])
			}
			if want, got := v.strs[i], grp1.String(); want != got {
				fatalf(re, v, "Value[%v] Wanted '%v'\nGot '%v'", i, want, got)
			}
		}
		// validate our group name/num info after execute
		validateGroupNamesNumbers(re, v)
	}
}
// TestErr_GroupName confirms the exact parse errors produced for illegal
// group names, in both the (?<name>) and (?'name') syntaxes.
func TestErr_GroupName(t *testing.T) {
	// group 0 is off limits
	if _, err := Compile("foo(?<0>bar)", 0); err == nil {
		t.Fatalf("zero group, expected error during compile")
	} else if want, got := "error parsing regexp: capture number cannot be zero in `foo(?<0>bar)`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
	if _, err := Compile("foo(?'0'bar)", 0); err == nil {
		t.Fatalf("zero group, expected error during compile")
	} else if want, got := "error parsing regexp: capture number cannot be zero in `foo(?'0'bar)`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
	// group tag can't start with a num
	if _, err := Compile("foo(?<1bar>)", 0); err == nil {
		t.Fatalf("invalid group name, expected error during compile")
	} else if want, got := "error parsing regexp: invalid group name: group names must begin with a word character and have a matching terminator in `foo(?<1bar>)`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
	if _, err := Compile("foo(?'1bar')", 0); err == nil {
		t.Fatalf("invalid group name, expected error during compile")
	} else if want, got := "error parsing regexp: invalid group name: group names must begin with a word character and have a matching terminator in `foo(?'1bar')`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
	// missing closing group tag
	if _, err := Compile("foo(?<bar)", 0); err == nil {
		t.Fatalf("invalid group name, expected error during compile")
	} else if want, got := "error parsing regexp: invalid group name: group names must begin with a word character and have a matching terminator in `foo(?<bar)`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
	if _, err := Compile("foo(?'bar)", 0); err == nil {
		t.Fatalf("invalid group name, expected error during compile")
	} else if want, got := "error parsing regexp: invalid group name: group names must begin with a word character and have a matching terminator in `foo(?'bar)`", err.Error(); want != got {
		t.Fatalf("invalid error text, want '%v', got '%v'", want, got)
	}
}
// TestConstantUneffected guards against a historical bug where "constant"
// shared sets (like \s) were mutated in place by alternations, poisoning
// them for the rest of the process.
func TestConstantUneffected(t *testing.T) {
	re := MustCompile(`(\s|\*)test\s`, 0)
	sets := re.code.Sets
	if got := len(sets); got != 2 {
		t.Fatalf("wanted %v sets, got %v", 2, got)
	}
	if got := sets[0].String(); got != "[\\*\\s]" {
		t.Fatalf("wanted set 0 %v, got %v", "[\\*\\s]", got)
	}
	if got := sets[1].String(); got != "[\\s]" {
		t.Fatalf("wanted set 1 %v, got %v", "[\\s]", got)
	}
}
// TestAlternationConstAndEscape checks that an alternation of an escaped
// literal and a constant class collapses into a single combined set.
func TestAlternationConstAndEscape(t *testing.T) {
	re := MustCompile(`\:|\s`, 0)
	sets := re.code.Sets
	if got := len(sets); got != 1 {
		t.Fatalf("wanted %v sets, got %v", 1, got)
	}
	if got := sets[0].String(); got != "[:\\s]" {
		t.Fatalf("wanted set 0 %v, got %v", "[:\\s]", got)
	}
}
// TestStartingCharsOptionalNegate confirms that patterns built from negative
// char classes do not produce a first-char prefix optimization.
func TestStartingCharsOptionalNegate(t *testing.T) {
	// to maintain matching with the corefx we've made the negative char classes be negative and the
	// categories they contain positive. This means they're not combinable or suitable for prefixes.
	// In general this could be a fine thing since negatives are extremely wide groups and not
	// missing much on prefix optimizations.
	// the below expression *could* have a prefix of [\S\d] but
	// this requires a change in charclass.go when setting
	// NotSpaceClass = getCharSetFromCategoryString()
	// to negate the individual categories rather than the CharSet itself
	// this would deviate from corefx
	re := MustCompile(`(^(\S{2} )?\S{2}(\d+|/) *\S{3}\S{3} ?\d{2,4}[A-Z] ?\d{2}[A-Z]{3}|(\S{2} )?\d{2,4})`, 0)
	if re.code.FcPrefix != nil {
		t.Fatalf("FcPrefix wanted nil, got %v", re.code.FcPrefix)
	}
}
// TestParseNegativeDigit checks that \D parses into a single negated
// decimal-digit category set.
func TestParseNegativeDigit(t *testing.T) {
	re := MustCompile(`\D`, 0)
	sets := re.code.Sets
	if got := len(sets); got != 1 {
		t.Fatalf("wanted %v sets, got %v", 1, got)
	}
	if got := sets[0].String(); got != "[\\P{Nd}]" {
		t.Fatalf("wanted set 0 %v, got %v", "[\\P{Nd}]", got)
	}
}
// TestRunNegativeDigit checks that \D matches a non-digit at runtime.
func TestRunNegativeDigit(t *testing.T) {
	re := MustCompile(`\D`, 0)
	matched, err := re.MatchString("this is a test")
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if !matched {
		t.Fatalf("Expected match")
	}
}
// TestCancellingClasses checks that a class covering every rune collapses to
// the canonical "any" class.
func TestCancellingClasses(t *testing.T) {
	// [\w\W\s] means "anything", so it should become "."
	re := MustCompile(`[\w\W\s]`, 0)
	if got := len(re.code.Sets); got != 1 {
		t.Fatalf("wanted %v sets, got %v", 1, got)
	}
	if want, got := syntax.AnyClass().String(), re.code.Sets[0].String(); want != got {
		t.Fatalf("wanted set 0 %v, got %v", want, got)
	}
}
// TestConcatLoopCaptureSet guards against prefix-computation mutating the
// parse tree's original char sets.
func TestConcatLoopCaptureSet(t *testing.T) {
	//(A|B)*?CD different Concat/Loop/Capture/Set (had [A-Z] should be [AB])
	// we were not copying the Sets in the prefix FC stack, so the underlying sets were unexpectedly mutating
	// so set [AB] becomes [ABC] when we see the static C in FC stack generation (which are the valid start chars),
	// but that was mutating the tree node's original set [AB] because even though we copied the slice header,
	// the two headers pointed to the same underlying byte array...which was mutated.
	re := MustCompile(`(A|B)*CD`, 0)
	if want, got := 1, len(re.code.Sets); want != got {
		t.Fatalf("wanted %v sets, got %v", want, got)
	}
	if want, got := "[AB]", re.code.Sets[0].String(); want != got {
		t.Fatalf("wanted set 0 %v, got %v", want, got)
	}
}
// TestFirstcharsIgnoreCase guards against a bug where the first-char prefix
// set was not canonicalized after lower-casing, breaking prefix searches
// (the prefix came out as [da] instead of [ad]).
func TestFirstcharsIgnoreCase(t *testing.T) {
	re := MustCompile(`((?i)AB(?-i)C|D)E`, 0)
	prefix := re.code.FcPrefix
	if prefix == nil {
		t.Fatalf("wanted prefix, got nil")
	}
	if got := prefix.PrefixSet.String(); got != "[ad]" {
		t.Fatalf("wanted prefix %v, got %v", "[ad]", got)
	}
}
// TestRepeatingGroup verifies that a repeated capturing group records one
// capture per iteration and that the group's embedded Capture is the last
// capture taken.
func TestRepeatingGroup(t *testing.T) {
	re := MustCompile(`(data?)+`, 0)
	m, err := re.FindStringMatch("datadat")
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if m == nil {
		t.Fatalf("Expected match")
	}
	g := m.GroupByNumber(1)
	if g == nil {
		t.Fatalf("Expected group")
	}
	if want, got := 2, len(g.Captures); want != got {
		t.Fatalf("wanted cap count %v, got %v", want, got)
	}
	// FIX: the original message discarded the want/got values it computed,
	// making a failure here undiagnosable
	if want, got := g.Captures[1].String(), g.Capture.String(); want != got {
		t.Fatalf("expected last capture of the group to be embedded, wanted %v, got %v", want, got)
	}
	if want, got := "data", g.Captures[0].String(); want != got {
		t.Fatalf("expected cap 0 to be %v, got %v", want, got)
	}
	if want, got := "dat", g.Captures[1].String(); want != got {
		t.Fatalf("expected cap 1 to be %v, got %v", want, got)
	}
}
// TestFindNextMatch_Basic walks successive matches of a lookahead pattern
// through an input and checks each match's start index.
//
// Refactored from four copy-pasted stanzas into one data-driven loop; the
// expected sequence of start indexes is unchanged.
func TestFindNextMatch_Basic(t *testing.T) {
	re := MustCompile(`(T|E)(?=h|E|S|$)`, 0)
	var m *Match
	var err error
	// expected start index of each successive match in "This is a TEST"
	for i, wantIdx := range []int{0, 10, 11, 13} {
		if i == 0 {
			m, err = re.FindStringMatch(`This is a TEST`)
		} else {
			m, err = re.FindNextMatch(m)
		}
		if err != nil {
			t.Fatalf("Unexpected err %v: %v", i, err)
		}
		if m == nil {
			t.Fatalf("Expected match %v", i)
		}
		if got := m.Index; got != wantIdx {
			t.Fatalf("expected match %v to start at %v, got %v", i, wantIdx, got)
		}
	}
}
// TestUnicodeSupplementaryCharSetMatch checks a char class whose bounds lie
// in the supplementary plane (runes 0x2070E-0x20779).
func TestUnicodeSupplementaryCharSetMatch(t *testing.T) {
	//0x2070E 0x20731 𠜱 0x20779 𠝹
	re := MustCompile("[𠜎-𠝹]", 0)
	// \u2070 is a BMP rune well below the range: must not match
	matched, err := re.MatchString("\u2070")
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if matched {
		t.Fatalf("Unexpected match")
	}
	// 𠜱 (0x20731) is inside the range: must match
	matched, err = re.MatchString("𠜱")
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if !matched {
		t.Fatalf("Expected match")
	}
}
// TestUnicodeSupplementaryCharInRange checks that "." matches both BMP and
// supplementary-plane runes.
func TestUnicodeSupplementaryCharInRange(t *testing.T) {
	//0x2070E 0x20731 𠜱 0x20779 𠝹
	re := MustCompile(".", 0)
	for _, input := range []string{"\u2070", "𠜱"} {
		matched, err := re.MatchString(input)
		if err != nil {
			t.Fatalf("Unexpected err: %v", err)
		}
		if !matched {
			t.Fatalf("Expected match")
		}
	}
}
// TestUnicodeScriptSets checks matching against a Unicode script category.
func TestUnicodeScriptSets(t *testing.T) {
	re := MustCompile(`\p{Katakana}+`, 0)
	matched, err := re.MatchString("\u30A0\u30FF")
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if !matched {
		t.Fatalf("Expected match")
	}
}
// TestHexadecimalCurlyBraces exercises the \xNN and \x{...} hex escapes:
// well-formed escapes must match their literal rune, and malformed escapes
// must fail to compile.
func TestHexadecimalCurlyBraces(t *testing.T) {
	// well-formed escapes paired with the input they must match
	valid := []struct{ pattern, input string }{
		{`\x20`, " "},
		{`\x{C4}`, "Ä"},
		{`\x{0C5}`, "Å"},
		{`\x{00C6}`, "Æ"},
		{`\x{1FF}`, "ǿ"},
		{`\x{02FF}`, "˿"},
		{`\x{1392}`, "᎒"},
		// FIX: the original used string(0x10ffff); int-to-string conversion
		// is flagged by go vet's stringintconv check (run by `go test` since
		// Go 1.15). string(rune(...)) yields the identical string.
		{`\x{0010ffff}`, string(rune(0x10ffff))},
	}
	for _, c := range valid {
		re := MustCompile(c.pattern, 0)
		if m, err := re.MatchString(c.input); err != nil {
			t.Fatalf("Unexpected err: %v", err)
		} else if !m {
			t.Fatalf("Expected match")
		}
	}
	// malformed escapes: every one must be a compile error
	invalid := []string{
		`\x2R`, `\x0`, `\x`, `\x{`, `\x{2`, `\x{2R`, `\x{2R}`,
		`\x{}`, `\x{10000`, `\x{1234`, `\x{123456789}`,
	}
	for _, p := range invalid {
		if _, err := Compile(p, 0); err == nil {
			t.Fatalf("Expected error for pattern %q", p)
		}
	}
}
// TestEmptyCharClass checks that [] is rejected outside ECMAScript mode.
func TestEmptyCharClass(t *testing.T) {
	_, err := Compile("[]", 0)
	if err == nil {
		t.Fatal("Empty char class isn't valid outside of ECMAScript mode")
	}
}
// TestECMAEmptyCharClass checks that in ECMAScript mode [] compiles but
// matches nothing.
func TestECMAEmptyCharClass(t *testing.T) {
	re := MustCompile("[]", ECMAScript)
	matched, err := re.MatchString("a")
	if err != nil {
		t.Fatal(err)
	}
	if matched {
		t.Fatal("Expected no match")
	}
}
// TestDot checks that "." matches \r in default mode.
func TestDot(t *testing.T) {
	re := MustCompile(".", 0)
	matched, err := re.MatchString("\r")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestECMADot checks that "." does NOT match \r in ECMAScript mode.
func TestECMADot(t *testing.T) {
	re := MustCompile(".", ECMAScript)
	matched, err := re.MatchString("\r")
	if err != nil {
		t.Fatal(err)
	}
	if matched {
		t.Fatal("Expected no match")
	}
}
// TestDecimalLookahead checks that a \1 backreference preceding its group
// yields no match in default mode.
func TestDecimalLookahead(t *testing.T) {
	re := MustCompile(`\1(A)`, 0)
	m, err := re.FindStringMatch("AA")
	if err != nil {
		t.Fatal(err)
	}
	if m != nil {
		t.Fatal("Expected no match")
	}
}
// TestECMADecimalLookahead checks that in ECMAScript mode a forward \1
// reference is tolerated and the pattern still matches with its group.
func TestECMADecimalLookahead(t *testing.T) {
	re := MustCompile(`\1(A)`, ECMAScript)
	m, err := re.FindStringMatch("AA")
	if err != nil {
		t.Fatal(err)
	}
	// FIX: guard against a nil match before dereferencing it — the original
	// would panic with a nil-pointer instead of reporting a clean failure
	if m == nil {
		t.Fatal("Expected match")
	}
	if c := m.GroupCount(); c != 2 {
		t.Fatalf("Group count !=2 (%d)", c)
	}
	if s := m.GroupByNumber(0).String(); s != "A" {
		t.Fatalf("Group0 != 'A' ('%s')", s)
	}
	if s := m.GroupByNumber(1).String(); s != "A" {
		t.Fatalf("Group1 != 'A' ('%s')", s)
	}
}
// TestECMAOctal checks that \100 is treated as octal (0x40, '@') in
// ECMAScript mode.
func TestECMAOctal(t *testing.T) {
	re := MustCompile(`\100`, ECMAScript)
	matched, err := re.MatchString("@")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestNegateRange checks [\D] (negated class inside a class) in default mode.
func TestNegateRange(t *testing.T) {
	re := MustCompile(`[\D]`, 0)
	matched, err := re.MatchString("A")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestECMANegateRange checks [\D] in ECMAScript mode.
func TestECMANegateRange(t *testing.T) {
	re := MustCompile(`[\D]`, ECMAScript)
	matched, err := re.MatchString("A")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestThreeByteUnicode_InputOnly confirms the bmprefix properly ignores
// 3-byte unicode in the input value; this used to panic.
func TestThreeByteUnicode_InputOnly(t *testing.T) {
	re := MustCompile("高", 0)
	matched, err := re.MatchString("📍Test高")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestMultibyteUnicode_MatchPartialPattern checks that a partial overlap of a
// multibyte pattern does not count as a match.
func TestMultibyteUnicode_MatchPartialPattern(t *testing.T) {
	re := MustCompile("猟な", 0)
	matched, err := re.MatchString("なあ🍺な")
	if err != nil {
		t.Fatal(err)
	}
	if matched {
		t.Fatal("Expected no match")
	}
}
// TestMultibyteUnicode_Match checks a full multibyte pattern match.
func TestMultibyteUnicode_Match(t *testing.T) {
	re := MustCompile("猟な", 0)
	matched, err := re.MatchString("なあ🍺猟な")
	if err != nil {
		t.Fatal(err)
	}
	if !matched {
		t.Fatal("Expected match")
	}
}
// TestAlternationNamedOptions_Errors feeds malformed alternation/option
// constructs to Compile and expects every one to produce a parse error.
func TestAlternationNamedOptions_Errors(t *testing.T) {
	// all of these should give an error "error parsing regexp:"
	data := []string{
		"(?(?e))", "(?(?a)", "(?(?", "(?(", "?(a:b)", "?(a)", "?(a|b)", "?((a)", "?((a)a", "?((a)a|", "?((a)a|b",
		"(?(?i))", "(?(?I))", "(?(?m))", "(?(?M))", "(?(?s))", "(?(?S))", "(?(?x))", "(?(?X))", "(?(?n))", "(?(?N))", " (?(?n))",
	}
	for _, p := range data {
		re, err := Compile(p, 0)
		if err == nil {
			t.Fatal("Expected error, got nil")
		}
		// a failed compile must not also return a regexp
		if re != nil {
			t.Fatal("Expected unparsed regexp, got non-nil")
		}
		if !strings.HasPrefix(err.Error(), "error parsing regexp: ") {
			t.Fatalf("Wanted parse error, got '%v'", err)
		}
	}
}
// TestAlternationNamedOptions_Success runs valid (?(name)yes|no) conditional
// alternations against inputs and checks whether they match and what they
// capture.
func TestAlternationNamedOptions_Success(t *testing.T) {
	// each case: pattern, input, whether a match is expected, and the
	// expected overall match text when one is found
	data := []struct {
		pattern       string
		input         string
		expectSuccess bool
		matchVal      string
	}{
		{"(?(cat)|dog)", "cat", true, ""},
		{"(?(cat)|dog)", "catdog", true, ""},
		{"(?(cat)dog1|dog2)", "catdog1", false, ""},
		{"(?(cat)dog1|dog2)", "catdog2", true, "dog2"},
		{"(?(cat)dog1|dog2)", "catdog1dog2", true, "dog2"},
		{"(?(dog2))", "dog2", true, ""},
		{"(?(cat)|dog)", "oof", false, ""},
		{"(?(a:b))", "a", true, ""},
		{"(?(a:))", "a", true, ""},
	}
	for _, p := range data {
		re := MustCompile(p.pattern, 0)
		m, err := re.FindStringMatch(p.input)
		if err != nil {
			t.Fatalf("Unexpected error during match: %v", err)
		}
		if want, got := p.expectSuccess, m != nil; want != got {
			t.Fatalf("Success mismatch for %v, wanted %v, got %v", p.pattern, want, got)
		}
		if m != nil {
			if want, got := p.matchVal, m.String(); want != got {
				t.Fatalf("Match val mismatch for %v, wanted %v, got %v", p.pattern, want, got)
			}
		}
	}
}
// TestAlternationConstruct_Matches iterates the matches of a conditional
// alternation across an input containing both branches.
func TestAlternationConstruct_Matches(t *testing.T) {
	re := MustCompile("(?(A)A123|C789)", 0)
	// first match takes the "A" branch of the conditional
	m, err := re.FindStringMatch("A123 B456 C789")
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if m == nil {
		t.Fatal("Expected match, got nil")
	}
	if got := m.String(); got != "A123" {
		t.Fatalf("Wanted %v, got %v", "A123", got)
	}
	// second match takes the alternate branch
	m, err = re.FindNextMatch(m)
	if err != nil {
		t.Fatalf("Unexpected err in second match: %v", err)
	}
	if m == nil {
		t.Fatal("Expected second match, got nil")
	}
	if got := m.String(); got != "C789" {
		t.Fatalf("Wanted %v, got %v", "C789", got)
	}
	// there must be no third match
	m, err = re.FindNextMatch(m)
	if err != nil {
		t.Fatalf("Unexpected err in third match: %v", err)
	}
	if m != nil {
		t.Fatal("Did not expect third match")
	}
}
// TestParserFuzzCrashes replays fuzzer-discovered inputs that used to panic
// the parser; only the absence of a crash is being tested.
func TestParserFuzzCrashes(t *testing.T) {
	crashes := []string{
		"(?'-", "(\\c0)", "(\\00(?())", "[\\p{0}", "(\x00?.*.()?(()?)?)*.x\xcb?&(\\s\x80)", "\\p{0}", "[0-[\\p{0}",
	}
	for _, pattern := range crashes {
		t.Log(pattern)
		// results are intentionally ignored: compiling must simply not panic
		Compile(pattern, 0)
	}
}
// TestParserFuzzHangs replays fuzzer-discovered inputs that used to hang the
// parser; the test passes if compiling terminates.
func TestParserFuzzHangs(t *testing.T) {
	hangs := []string{
		"\r{865720113}z\xd5{\r{861o", "\r{915355}\r{9153}", "\r{525005}", "\x01{19765625}", "(\r{068828256})", "\r{677525005}",
	}
	for _, pattern := range hangs {
		t.Log(pattern)
		// results are intentionally ignored: compiling must simply terminate
		Compile(pattern, 0)
	}
}
// BenchmarkParserPrefixLongLen measures matching a pattern with a very long
// repeated prefix against a large non-matching input.
func BenchmarkParserPrefixLongLen(b *testing.B) {
	re := MustCompile("\r{100001}T+", 0)
	input := strings.Repeat("testing", 10000) + strings.Repeat("\r", 100000) + "TTTT"
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		matched, err := re.MatchString(input)
		if err != nil {
			b.Fatalf("Unexpected err: %v", err)
		}
		if matched {
			b.Fatalf("Expected no match")
		}
	}
}
/*
func TestPcreStuff(t *testing.T) {
re := MustCompile(`(?(?=(a))a)`, Debug)
inp := unEscapeToMatch(`a`)
fmt.Printf("Inp %q\n", inp)
m, err := re.FindStringMatch(inp)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if m == nil {
t.Fatalf("Expected match")
}
fmt.Printf("Match %s\n", m.dump())
fmt.Printf("Text: %v\n", unEscapeGroup(m.String()))
}
*/
//(.*)(\d+) different FirstChars ([\x00-\t\v-\x08] OR [\x00-\t\v-\uffff\p{Nd}]

View File

@ -1,172 +0,0 @@
package regexp2
import (
"strconv"
"testing"
)
// TestReplace_Basic replaces a literal pattern with a literal string.
func TestReplace_Basic(t *testing.T) {
	re := MustCompile(`test`, 0)
	got, err := re.Replace("this is a test", "unit", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "this is a unit" {
		t.Fatalf("Replace failed, wanted %v, got %v", "this is a unit", got)
	}
}
// TestReplace_NamedGroup substitutes a named group via ${name} in the
// replacement text.
func TestReplace_NamedGroup(t *testing.T) {
	re := MustCompile(`[^ ]+\s(?<time>)`, 0)
	got, err := re.Replace("08/10/99 16:00", "${time}", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "16:00" {
		t.Fatalf("Replace failed, wanted %v, got %v", "16:00", got)
	}
}
// TestReplace_IgnoreCaseUpper replaces a case-insensitive match with an
// upper-case replacement.
func TestReplace_IgnoreCaseUpper(t *testing.T) {
	re := MustCompile(`dog`, IgnoreCase)
	got, err := re.Replace("my dog has fleas", "CAT", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "my CAT has fleas" {
		t.Fatalf("Replace failed, wanted %v, got %v", "my CAT has fleas", got)
	}
}
// TestReplace_IgnoreCaseLower replaces a case-insensitive match with a
// lower-case replacement, preserving the unmatched leading rune.
func TestReplace_IgnoreCaseLower(t *testing.T) {
	re := MustCompile(`olang`, IgnoreCase)
	got, err := re.Replace("GoLAnG", "olang", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "Golang" {
		t.Fatalf("Replace failed, wanted %v, got %v", "Golang", got)
	}
}
// TestReplace_NumberGroup substitutes a numbered group via $1.
func TestReplace_NumberGroup(t *testing.T) {
	re := MustCompile(`D\.(.+)`, None)
	got, err := re.Replace("D.Bau", "David $1", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "David Bau" {
		t.Fatalf("Replace failed, wanted %v, got %v", "David Bau", got)
	}
}
// TestReplace_LimitCount limits the replacement to the first 2 occurrences
// starting at index 0.
func TestReplace_LimitCount(t *testing.T) {
	re := MustCompile(`a`, None)
	got, err := re.Replace("aaaaa", "b", 0, 2)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "bbaaa" {
		t.Fatalf("Replace failed, wanted %v, got %v", "bbaaa", got)
	}
}
// TestReplace_LimitCountSlice limits the replacement to 2 occurrences
// starting at index 3.
func TestReplace_LimitCountSlice(t *testing.T) {
	re := MustCompile(`a`, None)
	input := "aaaaa"
	got, err := re.Replace(input, "b", 3, 2)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "aaabb" {
		t.Fatalf("Replace failed, wanted %v, got %v", "aaabb", got)
	}
}
// TestReplace_BeginBeforeAfterEnd replaces matches at the start, middle, and
// end of the input with an unlimited count.
func TestReplace_BeginBeforeAfterEnd(t *testing.T) {
	re := MustCompile(`a`, None)
	input := "a test a blah and a"
	got, err := re.Replace(input, "stuff", -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "stuff test stuff blstuffh stuffnd stuff" {
		t.Fatalf("Replace failed, wanted %v, got %v", "stuff test stuff blstuffh stuffnd stuff", got)
	}
}
// TestReplace_BadSyntax checks that an out-of-range group reference in the
// replacement text produces an error.
func TestReplace_BadSyntax(t *testing.T) {
	re := MustCompile(`a`, None)
	input := "this is a test"
	if _, err := re.Replace(input, `$5000000000`, -1, -1); err == nil {
		t.Fatalf("Expected err")
	}
}
// TestReplaceFunc_Basic replaces a match using a callback that returns a
// fixed string.
func TestReplaceFunc_Basic(t *testing.T) {
	re := MustCompile(`test`, None)
	got, err := re.ReplaceFunc("this is a test", func(m Match) string { return "unit" }, -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "this is a unit" {
		t.Fatalf("Replace failed, wanted %v, got %v", "this is a unit", got)
	}
}
// TestReplaceFunc_Multiple checks that the callback is invoked once per
// occurrence, in order.
func TestReplaceFunc_Multiple(t *testing.T) {
	re := MustCompile(`test`, None)
	n := 0
	got, err := re.ReplaceFunc("This test is another test for stuff", func(m Match) string {
		n++
		return strconv.Itoa(n)
	}, -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "This 1 is another 2 for stuff" {
		t.Fatalf("Replace failed, wanted %v, got %v", "This 1 is another 2 for stuff", got)
	}
}
// TestReplaceFunc_Groups checks that the callback can inspect a named group
// of each match and vary its replacement accordingly.
func TestReplaceFunc_Groups(t *testing.T) {
	re := MustCompile(`test(?<sub>ing)?`, None)
	n := 0
	got, err := re.ReplaceFunc("This testing is another test testingly junk", func(m Match) string {
		n++
		if m.GroupByName("sub").Length > 0 {
			// we have an "ing", make it negative
			return strconv.Itoa(-n)
		}
		return strconv.Itoa(n)
	}, -1, -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "This -1 is another 2 -3ly junk" {
		t.Fatalf("Replace failed, wanted %v, got %v", "This -1 is another 2 -3ly junk", got)
	}
}
// TestReplace_RefNumsDollarAmbiguous checks that $1456 binds greedily to a
// non-existent group number and is therefore emitted literally.
func TestReplace_RefNumsDollarAmbiguous(t *testing.T) {
	re := MustCompile("(123)hello(789)", None)
	got, err := re.Replace("123hello789", "$1456$2", -1, -1)
	if err != nil {
		t.Fatal(err)
	}
	if got != "$1456789" {
		t.Fatalf("Wrong result: %s", got)
	}
}
// TestReplace_NestedGroups replaces currency-annotated numbers with just the
// numeric group, exercising nested and conditional groups in the pattern.
func TestReplace_NestedGroups(t *testing.T) {
	re := MustCompile(`(\p{Sc}\s?)?(\d+\.?((?<=\.)\d+)?)(?(1)|\s?\p{Sc})?`, None)
	got, err := re.Replace("$17.43  €2 16.33  £0.98  0.43   £43   12€  17", "$2", -1, -1)
	if err != nil {
		t.Fatal(err)
	}
	if got != "17.43  2 16.33  0.98  0.43   43   12  17" {
		t.Fatalf("Wrong result: %s", got)
	}
}

View File

@ -1,52 +0,0 @@
package regexp2
import "testing"
// TestRightToLeft_Basic checks that RightToLeft mode finds matches starting
// from the end of the input and iterates toward the beginning.
func TestRightToLeft_Basic(t *testing.T) {
	re := MustCompile(`foo\d+`, RightToLeft)
	input := "0123456789foo4567890foo1foo  0987"
	// the rightmost match comes first
	m, err := re.FindStringMatch(input)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got := m.String(); got != "foo1" {
		t.Fatalf("Match 0 failed, wanted %v, got %v", "foo1", got)
	}
	// the next match is further left
	m, err = re.FindNextMatch(m)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got := m.String(); got != "foo4567890" {
		t.Fatalf("Match 1 failed, wanted %v, got %v", "foo4567890", got)
	}
}
// TestRightToLeft_StartAt checks that a start index of -1 in RightToLeft mode
// begins the scan at the end of the input.
func TestRightToLeft_StartAt(t *testing.T) {
	re := MustCompile(`\d`, RightToLeft)
	m, err := re.FindStringMatchStartingAt("0123", -1)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if m == nil {
		t.Fatalf("Expected match")
	}
	if got := m.String(); got != "3" {
		t.Fatalf("Find failed, wanted '%v', got '%v'", "3", got)
	}
}
// TestRightToLeft_Replace checks that a limited-count replace in RightToLeft
// mode consumes occurrences from the right end of the input.
func TestRightToLeft_Replace(t *testing.T) {
	re := MustCompile(`\d`, RightToLeft)
	input := "0123456789foo4567890foo         "
	got, err := re.Replace(input, "#", -1, 7)
	if err != nil {
		t.Fatalf("Unexpected err: %v", err)
	}
	if got != "0123456789foo#######foo         " {
		t.Fatalf("Replace failed, wanted '%v', got '%v'", "0123456789foo#######foo         ", got)
	}
}

View File

@ -1 +0,0 @@
\b\b\b\b\b\b\b\b\b

View File

@ -1 +0,0 @@
\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\11(\1

View File

@ -1 +0,0 @@
(?-----------------

View File

@ -1 +0,0 @@
((?'256'abc)\d+)?(?'16')(.*)

View File

@ -1 +0,0 @@
((((((){2147483647}((){2147483647}(){2147483647})){2147483647}))))

View File

@ -1 +0,0 @@
(?I)іііііΉііΉіΉ

View File

@ -1,2 +0,0 @@
((')'()'()'(')'()
)

View File

@ -1 +0,0 @@
[ケ-ケ-[[-ケ-[ケ]]][ケ-ケ-[[-ケ-[ケ]]]

View File

@ -1 +0,0 @@
(?=)((?=)(?=)(?=)(?=)(?=)(?=)(?=))(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)(?=)

View File

@ -1 +0,0 @@
[cA2sx5fl7Uv_10)][cA2sx5fl7Uv_10]

View File

@ -1 +0,0 @@
((()?)?)?(()?)?(()?)?(((()?)?)?)?(()?)?(((()?)?((()?)?)?(((()?)?)?(()?)?)?)?)?(()?)?((((()?)?)?)?)?

View File

@ -1 +0,0 @@
(?'𠜎𠜎𠹝𠹝

View File

@ -1 +0,0 @@
(A|9)(A|9)(A|A||A|9)(A|9)(A|A||A(A|9)(A|A||A|9)(A|{Î)(A|A||A|9)|9)

View File

@ -1 +0,0 @@
((?'256'bc)\d+)?(?'16')(.)

View File

@ -1 +0,0 @@
(?'-U'(?'-U'(?'-U'(?'-U'(?'U

View File

@ -1 +0,0 @@
['-Q'-?'-Q'-?-''-Q'-?-n\n-''-/'-6-''-Q'-?-n\n-''-/'-6

View File

@ -1 +0,0 @@
[\u8333\u8f3a\u8f3a\u833a\u833a\u833a\u833a\u833a\u8f3a\u8333\u833a\u8f33

View File

@ -1 +0,0 @@
(?'U-6'(?'U-6'(?'U-6'(?'U-6'(?'6'(?'U-

View File

@ -1 +0,0 @@
(?n)()()(()(()()))()((())

View File

@ -1 +0,0 @@
(?I)[[-Ƹ][[-Ƹ][[-Ƹ]+[[-Ƹ]+[[-Ƹ][[-Ƹ]+

View File

@ -1 +0,0 @@
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$

View File

@ -1 +0,0 @@
(?I)(A9A7450580596923828125)

View File

@ -1 +0,0 @@
(?I)(.*\3826658AA)

View File

@ -1 +0,0 @@
((8((((((((((((((9(((((((((((((((((((((?'251(((((((((

View File

@ -1 +0,0 @@
\A\A\A\A\A\A\A(\A\A\A\A

View File

@ -1 +0,0 @@
[<5B>-<2D>-[<5B>-<2D>-[<5B>]]<5D>

View File

@ -1 +0,0 @@
(?#))(?#))(?#)((?#))(?#))(?#

View File

@ -1 +0,0 @@
(?!(?!(?k(?!(?!(?!

View File

@ -1 +0,0 @@
(c?]?]??]??`?]?`?]??]??`?]?`?]?)

View File

@ -1 +0,0 @@
(?(?<=(?(?<=(?(?<=(?(?<=

View File

@ -1 +0,0 @@
[\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\pp\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p}\p\p

View File

@ -1 +0,0 @@
(?I)'''''invalid group name: group names must begin with a word character and have a matching terminator'

View File

@ -1 +0,0 @@
(?I)[RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][RLOKQNGAXBWH][LOKNGH][ROQNGH][ONGAXBWH][RLOKQNGAXBWH][LOKNGAXBWH][LOKNGH][ROQNGH][ONGAXBWH][RLOKQNGAXBWH][LOKNGH][ROQNGAXBWH]

View File

@ -1 +0,0 @@
(?n:(?I:(?I:(?I:(?I:(?I:(?I:(?I:(?I:

View File

@ -1 +0,0 @@
(()(())(())(())(()))()(())()((()(())(())(())(()))(()(())()(())(())(()()())(()))()(()())()()()(())

View File

@ -1 +0,0 @@
(?'e69(?'Call'(?'e69(?'Call

View File

@ -1 +0,0 @@
[[::][::][::][::][::][::][::]]

View File

@ -1 +0,0 @@
((|9|A|A|A|A|A|A|A|A|9|A|A|A|9)A|9|A|A|A|9)

Some files were not shown because too many files have changed in this diff Show More