
suggest files when picking a path to filter on

async fetching of suggestions

remove limit

cache the trie for future use

more

more
Jesse Duffield 2021-10-18 19:00:03 +11:00
parent a496858c62
commit ca7252ef8e
54 changed files with 4615 additions and 17 deletions

go.mod (5)

@ -9,6 +9,7 @@ require (
github.com/cli/safeexec v1.0.0
github.com/cloudfoundry/jibber_jabber v0.0.0-20151120183258-bcc4c8345a21
github.com/creack/pty v1.1.11
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/fatih/color v1.9.0 // indirect
github.com/fsnotify/fsnotify v1.4.7
github.com/go-errors/errors v1.4.1
@ -19,8 +20,10 @@ require (
github.com/gookit/color v1.4.2
github.com/imdario/mergo v0.3.11
github.com/integrii/flaggy v1.4.0
github.com/iriri/minimal/gitignore v0.3.2 // indirect
github.com/jesseduffield/go-git/v5 v5.1.2-0.20201006095850-341962be15a4
github.com/jesseduffield/gocui v0.3.1-0.20211017220056-b2fc03c74a6f
github.com/jesseduffield/minimal/gitignore v0.3.3-0.20211018110810-9cde264e6b1e
github.com/jesseduffield/yaml v2.1.0+incompatible
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
github.com/konsorten/go-windows-terminal-sequences v1.0.2 // indirect
@ -32,6 +35,7 @@ require (
github.com/mgutz/str v1.2.0
github.com/onsi/ginkgo v1.10.3 // indirect
github.com/onsi/gomega v1.7.1 // indirect
github.com/ozeidan/fuzzy-patricia v3.0.0+incompatible // indirect
github.com/sahilm/fuzzy v0.1.0
github.com/sirupsen/logrus v1.4.2
github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad
@ -42,4 +46,5 @@ require (
golang.org/x/sys v0.0.0-20211015200801-69063c4bb744 // indirect
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
golang.org/x/text v0.3.7 // indirect
gopkg.in/ozeidan/fuzzy-patricia.v3 v3.0.0
)

go.sum (13)

@ -48,6 +48,8 @@ github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0 h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3 h1:zN2lZNZRflqFyxVaTIU61KNKQ9C0055u9CAfpmqUvo4=
github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3/go.mod h1:nPpo7qLxd6XL3hWJG/O60sR8ZKfMCiIoNap5GvD12KU=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@ -65,6 +67,9 @@ github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/integrii/flaggy v1.4.0 h1:A1x7SYx4jqu5NSrY14z8Z+0UyX2S5ygfJJrfolWR3zM=
github.com/integrii/flaggy v1.4.0/go.mod h1:tnTxHeTJbah0gQ6/K0RW0J7fMUBk9MCF5blhm43LNpI=
github.com/iriri/minimal v0.0.0-20180828191352-9b2348d09c1a h1:mCZYG6QcX0dz/J0rFc1tcRYGeixlDcCGSPXuPMbiS5U=
github.com/iriri/minimal/gitignore v0.3.2 h1:MnTVH89iuwiyZ/a1pByw/mAU2ShWai1yvv0tgHSq5Ww=
github.com/iriri/minimal/gitignore v0.3.2/go.mod h1:v7YhsYBAInyAnQligwCIGRuQmtwQyYxkVy5vEdy2wPU=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jesseduffield/go-git/v5 v5.1.2-0.20201006095850-341962be15a4 h1:GOQrmaE8i+KEdB8NzAegKYd4tPn/inM0I1uo0NXFerg=
@ -79,6 +84,9 @@ github.com/jesseduffield/gocui v0.3.1-0.20211017091015-8bf4a4666b77 h1:MQUxSxVBT
github.com/jesseduffield/gocui v0.3.1-0.20211017091015-8bf4a4666b77/go.mod h1:znJuCDnF2Ph40YZSlBwdX/4GEofnIoWLGdT4mK5zRAU=
github.com/jesseduffield/gocui v0.3.1-0.20211017220056-b2fc03c74a6f h1:JHrb78pj+gYC3KiJKL1WW6lYzlatBIF46oREn68plTM=
github.com/jesseduffield/gocui v0.3.1-0.20211017220056-b2fc03c74a6f/go.mod h1:znJuCDnF2Ph40YZSlBwdX/4GEofnIoWLGdT4mK5zRAU=
github.com/jesseduffield/minimal v0.0.0-20211018110810-9cde264e6b1e h1:WZc73tBVMMhcO6zXyZBItLEF4jgBpBH0lFCZzDgrjDg=
github.com/jesseduffield/minimal/gitignore v0.3.3-0.20211018110810-9cde264e6b1e h1:uw/oo+kg7t/oeMs6sqlAwr85ND/9cpO3up3VxphxY0U=
github.com/jesseduffield/minimal/gitignore v0.3.3-0.20211018110810-9cde264e6b1e/go.mod h1:u60qdFGXRd36jyEXxetz0vQceQIxzI13lIo3EFUDf4I=
github.com/jesseduffield/yaml v2.1.0+incompatible h1:HWQJ1gIv2zHKbDYNp0Jwjlj24K8aqpFHnMCynY1EpmE=
github.com/jesseduffield/yaml v2.1.0+incompatible/go.mod h1:w0xGhOSIJCGYYW+hnFPTutCy5aACpkcwbmORt5axGqk=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
@ -126,6 +134,9 @@ github.com/onsi/ginkgo v1.10.3 h1:OoxbjfXVZyod1fmWYhI7SEyaD8B00ynP3T+D5GiyHOY=
github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.7.1 h1:K0jcRCwNQM3vFGh1ppMtDh/+7ApJrjldlX8fA0jDTLQ=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/ozeidan/fuzzy-patricia v1.0.1 h1:YExnavqXH3OvCCqE2TunuJJHdFcFQdVEfUoWzrnPxSg=
github.com/ozeidan/fuzzy-patricia v3.0.0+incompatible h1:Pl61eMyfJqgY/wytiI4vamqPYribq6d8VxeP1CNyg9M=
github.com/ozeidan/fuzzy-patricia v3.0.0+incompatible/go.mod h1:zgvuCcYS7wB7fVCGblsaFFmEe8+aAH13dTYm8FbrpsM=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@ -198,6 +209,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/ozeidan/fuzzy-patricia.v3 v3.0.0 h1:KzcWKJ0nMAmGoBhYVMnkWc1rXjB42lKy5aIys4TdLOA=
gopkg.in/ozeidan/fuzzy-patricia.v3 v3.0.0/go.mod h1:XoytMOotjRRJVkIsQdxsPIioRLYFISEaY9a4tftOXAo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=


@ -189,7 +189,7 @@ func (gui *Gui) prepareConfirmationPanel(title, prompt string, hasLoader bool, f
if err != nil {
return err
}
suggestionsView.Wrap = true
suggestionsView.Wrap = false
suggestionsView.FgColor = theme.GocuiDefaultTextColor
gui.setSuggestions([]*types.Suggestion{})
suggestionsView.Visible = true


@ -77,8 +77,10 @@ func (gui *Gui) defaultEditor(v *gocui.View, key gocui.Key, ch rune, mod gocui.M
if gui.findSuggestions != nil {
input := v.TextArea.GetContent()
suggestions := gui.findSuggestions(input)
gui.setSuggestions(suggestions)
gui.suggestionsAsyncHandler.Do(func() func() {
suggestions := gui.findSuggestions(input)
return func() { gui.setSuggestions(suggestions) }
})
}
return matched


@ -1,5 +1,14 @@
package gui
import (
"os"
"github.com/jesseduffield/lazygit/pkg/gui/types"
"github.com/jesseduffield/lazygit/pkg/utils"
"github.com/jesseduffield/minimal/gitignore"
"gopkg.in/ozeidan/fuzzy-patricia.v3/patricia"
)
func (gui *Gui) validateNotInFilterMode() (bool, error) {
if gui.State.Modes.Filtering.Active() {
err := gui.ask(askOpts{
@ -40,3 +49,60 @@ func (gui *Gui) setFiltering(path string) error {
gui.State.Contexts.BranchCommits.GetPanelState().SetSelectedLineIdx(0)
}})
}
// here we asynchronously fetch the latest set of paths in the repo and store them in
// gui.State.FilesTrie. On the main thread we do a fuzzy search against
// gui.State.FilesTrie, so if we've looked for a file previously we'll start with
// the old trie and it will eventually be swapped out for the new one.
func (gui *Gui) getFindSuggestionsForFilterPath() func(string) []*types.Suggestion {
_ = gui.WithWaitingStatus(gui.Tr.LcLoadingFileSuggestions, func() error {
trie := patricia.NewTrie()
// load every non-gitignored file in the repo
ignore, err := gitignore.FromGit()
if err != nil {
return err
}
err = ignore.Walk(".",
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
trie.Insert(patricia.Prefix(path), path)
return nil
})
// cache the trie for future use
gui.State.FilesTrie = trie
// refresh the selections view
gui.suggestionsAsyncHandler.Do(func() func() {
// assuming here that the confirmation view is what we're typing into.
// This assumption may prove false over time
suggestions := gui.findSuggestions(gui.Views.Confirmation.TextArea.GetContent())
return func() { gui.setSuggestions(suggestions) }
})
return err
})
return func(input string) []*types.Suggestion {
matchingNames := []string{}
_ = gui.State.FilesTrie.VisitFuzzy(patricia.Prefix(input), true, func(prefix patricia.Prefix, item patricia.Item, skipped int) error {
matchingNames = append(matchingNames, item.(string))
return nil
})
// doing another fuzzy search for good measure
matchingNames = utils.FuzzySearch(input, matchingNames)
suggestions := make([]*types.Suggestion, len(matchingNames))
for i, name := range matchingNames {
suggestions[i] = &types.Suggestion{
Value: name,
Label: name,
}
}
return suggestions
}
}


@ -35,7 +35,8 @@ func (gui *Gui) handleCreateFilteringMenuPanel() error {
displayString: gui.Tr.LcFilterPathOption,
onPress: func() error {
return gui.prompt(promptOpts{
title: gui.Tr.LcEnterFileName,
findSuggestionsFunc: gui.getFindSuggestionsForFilterPath(),
title: gui.Tr.LcEnterFileName,
handleConfirm: func(response string) error {
return gui.setFiltering(strings.TrimSpace(response))
},


@ -30,6 +30,7 @@ import (
"github.com/jesseduffield/lazygit/pkg/updates"
"github.com/jesseduffield/lazygit/pkg/utils"
"github.com/sirupsen/logrus"
"gopkg.in/ozeidan/fuzzy-patricia.v3/patricia"
)
// screen sizing determines how much space your selected window takes up (window
@ -116,6 +117,8 @@ type Gui struct {
// the extras window contains things like the command log
ShowExtrasWindow bool
suggestionsAsyncHandler *tasks.AsyncHandler
}
type listPanelState struct {
@ -336,6 +339,9 @@ type guiState struct {
// flag as to whether or not the diff view should ignore whitespace
IgnoreWhitespaceInDiffView bool
// for displaying suggestions while typing in a file name
FilesTrie *patricia.Trie
}
// reuseState determines if we pull the repo state from our repo state map or
@ -412,6 +418,7 @@ func (gui *Gui) resetState(filterPath string, reuseState bool) {
// TODO: put contexts in the context manager
ContextManager: NewContextManager(initialContext),
Contexts: contexts,
FilesTrie: patricia.NewTrie(),
}
gui.RepoStateMap[Repo(currentDir)] = gui.State
@ -421,19 +428,20 @@ func (gui *Gui) resetState(filterPath string, reuseState bool) {
// NewGui builds a new gui handler
func NewGui(log *logrus.Entry, gitCommand *commands.GitCommand, oSCommand *oscommands.OSCommand, tr *i18n.TranslationSet, config config.AppConfigurer, updater *updates.Updater, filterPath string, showRecentRepos bool) (*Gui, error) {
gui := &Gui{
Log: log,
GitCommand: gitCommand,
OSCommand: oSCommand,
Config: config,
Tr: tr,
Updater: updater,
statusManager: &statusManager{},
viewBufferManagerMap: map[string]*tasks.ViewBufferManager{},
showRecentRepos: showRecentRepos,
RepoPathStack: []string{},
RepoStateMap: map[Repo]*guiState{},
CmdLog: []string{},
ShowExtrasWindow: config.GetUserConfig().Gui.ShowCommandLog,
Log: log,
GitCommand: gitCommand,
OSCommand: oSCommand,
Config: config,
Tr: tr,
Updater: updater,
statusManager: &statusManager{},
viewBufferManagerMap: map[string]*tasks.ViewBufferManager{},
showRecentRepos: showRecentRepos,
RepoPathStack: []string{},
RepoStateMap: map[Repo]*guiState{},
CmdLog: []string{},
ShowExtrasWindow: config.GetUserConfig().Gui.ShowCommandLog,
suggestionsAsyncHandler: tasks.NewAsyncHandler(),
}
gui.resetState(filterPath, false)


@ -430,6 +430,7 @@ type TranslationSet struct {
CreatingPullRequestAtUrl string
SelectConfigFile string
NoConfigFileFoundErr string
LcLoadingFileSuggestions string
Spans Spans
}
@ -954,6 +955,7 @@ func englishTranslationSet() TranslationSet {
CreatingPullRequestAtUrl: "Creating pull request at URL: %s",
SelectConfigFile: "Select config file",
NoConfigFileFoundErr: "No config file found",
LcLoadingFileSuggestions: "loading file suggestions",
Spans: Spans{
// TODO: combine this with the original keybinding descriptions (those are all in lowercase atm)
CheckoutCommit: "Checkout commit",


@ -0,0 +1,56 @@
package tasks
import (
"sync"
"github.com/jesseduffield/lazygit/pkg/utils"
)
// the purpose of an AsyncHandler is to ensure that if we have multiple long-running
// requests, we only handle the result of the latest one. For example, if I am
// searching for 'abc' and I have to type 'a' then 'b' then 'c' and each keypress
dispatches a request to search for things with the string so far, we'll be searching
// for 'a', 'ab', and 'abc', and it may be that 'abc' comes back first, then 'ab',
// then 'a' and we don't want to display the result for 'a' just because it came
// back last. AsyncHandler keeps track of the order in which things were dispatched
// so that we can ignore anything that comes back late.
type AsyncHandler struct {
currentId int
lastId int
mutex sync.Mutex
onReject func()
}
func NewAsyncHandler() *AsyncHandler {
return &AsyncHandler{
mutex: sync.Mutex{},
}
}
func (self *AsyncHandler) Do(f func() func()) {
self.mutex.Lock()
self.currentId++
id := self.currentId
self.mutex.Unlock()
go utils.Safe(func() {
after := f()
self.handle(after, id)
})
}
// f here is expected to be a function that doesn't take long to run
func (self *AsyncHandler) handle(f func(), id int) {
self.mutex.Lock()
defer self.mutex.Unlock()
if id < self.lastId {
if self.onReject != nil {
self.onReject()
}
return
}
self.lastId = id
f()
}


@ -0,0 +1,44 @@
package tasks
import (
"fmt"
"sync"
"testing"
"github.com/stretchr/testify/assert"
)
func TestAsyncHandler(t *testing.T) {
wg := sync.WaitGroup{}
wg.Add(2)
handler := NewAsyncHandler()
handler.onReject = func() {
wg.Done()
}
result := 0
wg2 := sync.WaitGroup{}
wg2.Add(1)
handler.Do(func() func() {
wg2.Wait()
return func() {
fmt.Println("setting to 1")
result = 1
}
})
handler.Do(func() func() {
return func() {
fmt.Println("setting to 2")
result = 2
wg.Done()
wg2.Done()
}
})
wg.Wait()
assert.EqualValues(t, 2, result)
}

vendor/github.com/gobwas/glob/.gitignore (generated vendored, 8 lines)

@ -0,0 +1,8 @@
glob.iml
.idea
*.cpu
*.mem
*.test
*.dot
*.png
*.svg

vendor/github.com/gobwas/glob/LICENSE (generated vendored, 21 lines)

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 Sergey Kamardin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

vendor/github.com/gobwas/glob/bench.sh (generated vendored, 26 lines)

@ -0,0 +1,26 @@
#! /bin/bash
bench() {
filename="/tmp/$1-$2.bench"
if test -e "${filename}";
then
echo "Already exists ${filename}"
else
backup=`git rev-parse --abbrev-ref HEAD`
git checkout $1
echo -n "Creating ${filename}... "
go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
echo "OK"
git checkout ${backup}
sleep 5
fi
}
to=$1
current=`git rev-parse --abbrev-ref HEAD`
bench ${to} $2
bench ${current} $2
benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"

vendor/github.com/gobwas/glob/compiler/compiler.go (generated vendored, 525 lines)

@ -0,0 +1,525 @@
package compiler
// TODO: use a constructor with all matchers, and make their structs private
// TODO: glue multiple Text nodes (e.g. after QuoteMeta)
import (
"fmt"
"reflect"
"github.com/gobwas/glob/match"
"github.com/gobwas/glob/syntax/ast"
"github.com/gobwas/glob/util/runes"
)
func optimizeMatcher(matcher match.Matcher) match.Matcher {
switch m := matcher.(type) {
case match.Any:
if len(m.Separators) == 0 {
return match.NewSuper()
}
case match.AnyOf:
if len(m.Matchers) == 1 {
return m.Matchers[0]
}
return m
case match.List:
if m.Not == false && len(m.List) == 1 {
return match.NewText(string(m.List))
}
return m
case match.BTree:
m.Left = optimizeMatcher(m.Left)
m.Right = optimizeMatcher(m.Right)
r, ok := m.Value.(match.Text)
if !ok {
return m
}
var (
leftNil = m.Left == nil
rightNil = m.Right == nil
)
if leftNil && rightNil {
return match.NewText(r.Str)
}
_, leftSuper := m.Left.(match.Super)
lp, leftPrefix := m.Left.(match.Prefix)
la, leftAny := m.Left.(match.Any)
_, rightSuper := m.Right.(match.Super)
rs, rightSuffix := m.Right.(match.Suffix)
ra, rightAny := m.Right.(match.Any)
switch {
case leftSuper && rightSuper:
return match.NewContains(r.Str, false)
case leftSuper && rightNil:
return match.NewSuffix(r.Str)
case rightSuper && leftNil:
return match.NewPrefix(r.Str)
case leftNil && rightSuffix:
return match.NewPrefixSuffix(r.Str, rs.Suffix)
case rightNil && leftPrefix:
return match.NewPrefixSuffix(lp.Prefix, r.Str)
case rightNil && leftAny:
return match.NewSuffixAny(r.Str, la.Separators)
case leftNil && rightAny:
return match.NewPrefixAny(r.Str, ra.Separators)
}
return m
}
return matcher
}
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
if len(matchers) == 0 {
return nil, fmt.Errorf("compile error: need at least one matcher")
}
if len(matchers) == 1 {
return matchers[0], nil
}
if m := glueMatchers(matchers); m != nil {
return m, nil
}
idx := -1
maxLen := -1
var val match.Matcher
for i, matcher := range matchers {
if l := matcher.Len(); l != -1 && l >= maxLen {
maxLen = l
idx = i
val = matcher
}
}
if val == nil { // not found matcher with static length
r, err := compileMatchers(matchers[1:])
if err != nil {
return nil, err
}
return match.NewBTree(matchers[0], nil, r), nil
}
left := matchers[:idx]
var right []match.Matcher
if len(matchers) > idx+1 {
right = matchers[idx+1:]
}
var l, r match.Matcher
var err error
if len(left) > 0 {
l, err = compileMatchers(left)
if err != nil {
return nil, err
}
}
if len(right) > 0 {
r, err = compileMatchers(right)
if err != nil {
return nil, err
}
}
return match.NewBTree(val, l, r), nil
}
func glueMatchers(matchers []match.Matcher) match.Matcher {
if m := glueMatchersAsEvery(matchers); m != nil {
return m
}
if m := glueMatchersAsRow(matchers); m != nil {
return m
}
return nil
}
func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
c []match.Matcher
l int
)
for _, matcher := range matchers {
if ml := matcher.Len(); ml == -1 {
return nil
} else {
c = append(c, matcher)
l += ml
}
}
return match.NewRow(l, c...)
}
func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
hasAny bool
hasSuper bool
hasSingle bool
min int
separator []rune
)
for i, matcher := range matchers {
var sep []rune
switch m := matcher.(type) {
case match.Super:
sep = []rune{}
hasSuper = true
case match.Any:
sep = m.Separators
hasAny = true
case match.Single:
sep = m.Separators
hasSingle = true
min++
case match.List:
if !m.Not {
return nil
}
sep = m.List
hasSingle = true
min++
default:
return nil
}
// initialize
if i == 0 {
separator = sep
}
if runes.Equal(sep, separator) {
continue
}
return nil
}
if hasSuper && !hasAny && !hasSingle {
return match.NewSuper()
}
if hasAny && !hasSuper && !hasSingle {
return match.NewAny(separator)
}
if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
return match.NewMin(min)
}
every := match.NewEveryOf()
if min > 0 {
every.Add(match.NewMin(min))
if !hasAny && !hasSuper {
every.Add(match.NewMax(min))
}
}
if len(separator) > 0 {
every.Add(match.NewContains(string(separator), true))
}
return every
}
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
var done match.Matcher
var left, right, count int
for l := 0; l < len(matchers); l++ {
for r := len(matchers); r > l; r-- {
if glued := glueMatchers(matchers[l:r]); glued != nil {
var swap bool
if done == nil {
swap = true
} else {
cl, gl := done.Len(), glued.Len()
swap = cl > -1 && gl > -1 && gl > cl
swap = swap || count < r-l
}
if swap {
done = glued
left = l
right = r
count = r - l
}
}
}
}
if done == nil {
return matchers
}
next := append(append([]match.Matcher{}, matchers[:left]...), done)
if right < len(matchers) {
next = append(next, matchers[right:]...)
}
if len(next) == len(matchers) {
return next
}
return minimizeMatchers(next)
}
// minimizeTree tries to apply some heuristics to minimize the number of nodes in the given tree
func minimizeTree(tree *ast.Node) *ast.Node {
switch tree.Kind {
case ast.KindAnyOf:
return minimizeTreeAnyOf(tree)
default:
return nil
}
}
// minimizeTreeAnyOf tries to find common children of the given AnyOf node.
// It searches for common children from the left and from the right;
// if any are found it returns a new, optimized ast tree,
// otherwise it returns nil.
func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
if !areOfSameKind(tree.Children, ast.KindPattern) {
return nil
}
commonLeft, commonRight := commonChildren(tree.Children)
commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
return nil
}
var result []*ast.Node
if commonLeftCount > 0 {
result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
}
var anyOf []*ast.Node
for _, child := range tree.Children {
reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
var node *ast.Node
if len(reuse) == 0 {
// this pattern is completely reduced by commonLeft and commonRight patterns
// so it become nothing
node = ast.NewNode(ast.KindNothing, nil)
} else {
node = ast.NewNode(ast.KindPattern, nil, reuse...)
}
anyOf = appendIfUnique(anyOf, node)
}
switch {
case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
result = append(result, anyOf[0])
case len(anyOf) > 1:
result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
}
if commonRightCount > 0 {
result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
}
return ast.NewNode(ast.KindPattern, nil, result...)
}
func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
if len(nodes) <= 1 {
return
}
// find node that has least number of children
idx := leastChildren(nodes)
if idx == -1 {
return
}
tree := nodes[idx]
treeLength := len(tree.Children)
// allocate max able size for rightCommon slice
// to get ability insert elements in reverse order (from end to start)
// without sorting
commonRight = make([]*ast.Node, treeLength)
lastRight := treeLength // will use this to get results as commonRight[lastRight:]
var (
breakLeft bool
breakRight bool
commonTotal int
)
for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
treeLeft := tree.Children[i]
treeRight := tree.Children[j]
for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
// skip least children node
if k == idx {
continue
}
restLeft := nodes[k].Children[i]
restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]
breakLeft = breakLeft || !treeLeft.Equal(restLeft)
// disable searching for right common parts, if left part is already overlapping
breakRight = breakRight || (!breakLeft && j <= i)
breakRight = breakRight || !treeRight.Equal(restRight)
}
if !breakLeft {
commonTotal++
commonLeft = append(commonLeft, treeLeft)
}
if !breakRight {
commonTotal++
lastRight = j
commonRight[j] = treeRight
}
}
commonRight = commonRight[lastRight:]
return
}
func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
for _, n := range target {
if reflect.DeepEqual(n, val) {
return target
}
}
return append(target, val)
}
func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
for _, n := range nodes {
if n.Kind != kind {
return false
}
}
return true
}
func leastChildren(nodes []*ast.Node) int {
min := -1
idx := -1
for i, n := range nodes {
if idx == -1 || (len(n.Children) < min) {
min = len(n.Children)
idx = i
}
}
return idx
}
func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
var matchers []match.Matcher
for _, desc := range tree.Children {
m, err := compile(desc, sep)
if err != nil {
return nil, err
}
matchers = append(matchers, optimizeMatcher(m))
}
return matchers, nil
}
func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
switch tree.Kind {
case ast.KindAnyOf:
// todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
if n := minimizeTree(tree); n != nil {
return compile(n, sep)
}
matchers, err := compileTreeChildren(tree, sep)
if err != nil {
return nil, err
}
return match.NewAnyOf(matchers...), nil
case ast.KindPattern:
if len(tree.Children) == 0 {
return match.NewNothing(), nil
}
matchers, err := compileTreeChildren(tree, sep)
if err != nil {
return nil, err
}
m, err = compileMatchers(minimizeMatchers(matchers))
if err != nil {
return nil, err
}
case ast.KindAny:
m = match.NewAny(sep)
case ast.KindSuper:
m = match.NewSuper()
case ast.KindSingle:
m = match.NewSingle(sep)
case ast.KindNothing:
m = match.NewNothing()
case ast.KindList:
l := tree.Value.(ast.List)
m = match.NewList([]rune(l.Chars), l.Not)
case ast.KindRange:
r := tree.Value.(ast.Range)
m = match.NewRange(r.Lo, r.Hi, r.Not)
case ast.KindText:
t := tree.Value.(ast.Text)
m = match.NewText(t.Text)
default:
return nil, fmt.Errorf("could not compile tree: unknown node type")
}
return optimizeMatcher(m), nil
}
func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
m, err := compile(tree, sep)
if err != nil {
return nil, err
}
return m, nil
}

vendor/github.com/gobwas/glob/glob.go (generated vendored, 80 lines)

@ -0,0 +1,80 @@
package glob
import (
"github.com/gobwas/glob/compiler"
"github.com/gobwas/glob/syntax"
)
// Glob represents compiled glob pattern.
type Glob interface {
Match(string) bool
}
// Compile creates a Glob for the given pattern, using any runes passed after the pattern as separators.
// The pattern syntax is:
//
// pattern:
// { term }
//
// term:
// `*` matches any sequence of non-separator characters
// `**` matches any sequence of characters
// `?` matches any single non-separator character
// `[` [ `!` ] { character-range } `]`
// character class (must be non-empty)
// `{` pattern-list `}`
// pattern alternatives
// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
// `\` c matches character c
//
// character-range:
// c matches character c (c != `\\`, `-`, `]`)
// `\` c matches character c
// lo `-` hi matches character c for lo <= c <= hi
//
// pattern-list:
// pattern { `,` pattern }
// comma-separated (without spaces) patterns
//
func Compile(pattern string, separators ...rune) (Glob, error) {
ast, err := syntax.Parse(pattern)
if err != nil {
return nil, err
}
matcher, err := compiler.Compile(ast, separators)
if err != nil {
return nil, err
}
return matcher, nil
}
// MustCompile is the same as Compile, except that it panics if Compile returns an error
func MustCompile(pattern string, separators ...rune) Glob {
g, err := Compile(pattern, separators...)
if err != nil {
panic(err)
}
return g
}
// QuoteMeta returns a string that quotes all glob pattern meta characters
// inside the argument text. For example, QuoteMeta(`{foo*}`) returns `\{foo\*\}`.
func QuoteMeta(s string) string {
b := make([]byte, 2*len(s))
// a byte loop is correct because all meta characters are ASCII
j := 0
for i := 0; i < len(s); i++ {
if syntax.Special(s[i]) {
b[j] = '\\'
j++
}
b[j] = s[i]
j++
}
return string(b[0:j])
}

vendor/github.com/gobwas/glob/match/any.go (generated vendored, 45 lines)

@ -0,0 +1,45 @@
package match
import (
"fmt"
"github.com/gobwas/glob/util/strings"
)
type Any struct {
Separators []rune
}
func NewAny(s []rune) Any {
return Any{s}
}
func (self Any) Match(s string) bool {
return strings.IndexAnyRunes(s, self.Separators) == -1
}
func (self Any) Index(s string) (int, []int) {
found := strings.IndexAnyRunes(s, self.Separators)
switch found {
case -1:
case 0:
return 0, segments0
default:
s = s[:found]
}
segments := acquireSegments(len(s))
for i := range s {
segments = append(segments, i)
}
segments = append(segments, len(s))
return 0, segments
}
func (self Any) Len() int {
return lenNo
}
func (self Any) String() string {
return fmt.Sprintf("<any:![%s]>", string(self.Separators))
}

vendor/github.com/gobwas/glob/match/any_of.go (generated vendored, 82 lines)

@ -0,0 +1,82 @@
package match
import "fmt"
type AnyOf struct {
Matchers Matchers
}
func NewAnyOf(m ...Matcher) AnyOf {
return AnyOf{Matchers(m)}
}
func (self *AnyOf) Add(m Matcher) error {
self.Matchers = append(self.Matchers, m)
return nil
}
func (self AnyOf) Match(s string) bool {
for _, m := range self.Matchers {
if m.Match(s) {
return true
}
}
return false
}
func (self AnyOf) Index(s string) (int, []int) {
index := -1
segments := acquireSegments(len(s))
for _, m := range self.Matchers {
idx, seg := m.Index(s)
if idx == -1 {
continue
}
if index == -1 || idx < index {
index = idx
segments = append(segments[:0], seg...)
continue
}
if idx > index {
continue
}
// here idx == index
segments = appendMerge(segments, seg)
}
if index == -1 {
releaseSegments(segments)
return -1, nil
}
return index, segments
}
func (self AnyOf) Len() (l int) {
l = -1
for _, m := range self.Matchers {
ml := m.Len()
switch {
case l == -1:
l = ml
continue
case ml == -1:
return -1
case l != ml:
return -1
}
}
return
}
func (self AnyOf) String() string {
return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
}

vendor/github.com/gobwas/glob/match/btree.go (generated vendored, 146 lines)

@ -0,0 +1,146 @@
package match
import (
"fmt"
"unicode/utf8"
)
type BTree struct {
Value Matcher
Left Matcher
Right Matcher
ValueLengthRunes int
LeftLengthRunes int
RightLengthRunes int
LengthRunes int
}
func NewBTree(Value, Left, Right Matcher) (tree BTree) {
tree.Value = Value
tree.Left = Left
tree.Right = Right
lenOk := true
if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
lenOk = false
}
if Left != nil {
if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
lenOk = false
}
}
if Right != nil {
if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
lenOk = false
}
}
if lenOk {
tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
} else {
tree.LengthRunes = -1
}
return tree
}
func (self BTree) Len() int {
return self.LengthRunes
}
// todo?
func (self BTree) Index(s string) (int, []int) {
return -1, nil
}
func (self BTree) Match(s string) bool {
inputLen := len(s)
// self.LengthRunes, self.LeftLengthRunes and self.RightLengthRunes hold the rune length of each part.
// Here we manipulate byte lengths for better optimization,
// but the checks below still hold, because the minimum byte length of a 1-rune string is 1 byte.
if self.LengthRunes != -1 && self.LengthRunes > inputLen {
return false
}
// try to cut unnecessary parts
// by knowledge of length of right and left part
var offset, limit int
if self.LeftLengthRunes >= 0 {
offset = self.LeftLengthRunes
}
if self.RightLengthRunes >= 0 {
limit = inputLen - self.RightLengthRunes
} else {
limit = inputLen
}
for offset < limit {
// search for matching part in substring
index, segments := self.Value.Index(s[offset:limit])
if index == -1 {
releaseSegments(segments)
return false
}
l := s[:offset+index]
var left bool
if self.Left != nil {
left = self.Left.Match(l)
} else {
left = l == ""
}
if left {
for i := len(segments) - 1; i >= 0; i-- {
length := segments[i]
var right bool
var r string
// if there is no string for the right branch
if inputLen <= offset+index+length {
r = ""
} else {
r = s[offset+index+length:]
}
if self.Right != nil {
right = self.Right.Match(r)
} else {
right = r == ""
}
if right {
releaseSegments(segments)
return true
}
}
}
_, step := utf8.DecodeRuneInString(s[offset+index:])
offset += index + step
releaseSegments(segments)
}
return false
}
func (self BTree) String() string {
const n string = "<nil>"
var l, r string
if self.Left == nil {
l = n
} else {
l = self.Left.String()
}
if self.Right == nil {
r = n
} else {
r = self.Right.String()
}
return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
}

vendor/github.com/gobwas/glob/match/contains.go (generated vendored, 58 lines)

@ -0,0 +1,58 @@
package match
import (
"fmt"
"strings"
)
type Contains struct {
Needle string
Not bool
}
func NewContains(needle string, not bool) Contains {
return Contains{needle, not}
}
func (self Contains) Match(s string) bool {
return strings.Contains(s, self.Needle) != self.Not
}
func (self Contains) Index(s string) (int, []int) {
var offset int
idx := strings.Index(s, self.Needle)
if !self.Not {
if idx == -1 {
return -1, nil
}
offset = idx + len(self.Needle)
if len(s) <= offset {
return 0, []int{offset}
}
s = s[offset:]
} else if idx != -1 {
s = s[:idx]
}
segments := acquireSegments(len(s) + 1)
for i := range s {
segments = append(segments, offset+i)
}
return 0, append(segments, offset+len(s))
}
func (self Contains) Len() int {
return lenNo
}
func (self Contains) String() string {
var not string
if self.Not {
not = "!"
}
return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
}

vendor/github.com/gobwas/glob/match/every_of.go (generated vendored, 99 lines)

@ -0,0 +1,99 @@
package match
import (
"fmt"
)
type EveryOf struct {
Matchers Matchers
}
func NewEveryOf(m ...Matcher) EveryOf {
return EveryOf{Matchers(m)}
}
func (self *EveryOf) Add(m Matcher) error {
self.Matchers = append(self.Matchers, m)
return nil
}
func (self EveryOf) Len() (l int) {
for _, m := range self.Matchers {
if ml := m.Len(); l > 0 {
l += ml
} else {
return -1
}
}
return
}
func (self EveryOf) Index(s string) (int, []int) {
var index int
var offset int
// allocate the segment buffers with capacity len(s),
// since that is the maximum size of the output segment values
next := acquireSegments(len(s))
current := acquireSegments(len(s))
sub := s
for i, m := range self.Matchers {
idx, seg := m.Index(sub)
if idx == -1 {
releaseSegments(next)
releaseSegments(current)
return -1, nil
}
if i == 0 {
// we append a copy here instead of doing `current = seg`,
// because seg is a slice from a reusable buffer
// and could be overwritten in the next iteration
current = append(current, seg...)
} else {
// clear the next
next = next[:0]
delta := index - (idx + offset)
for _, ex := range current {
for _, n := range seg {
if ex+delta == n {
next = append(next, n)
}
}
}
if len(next) == 0 {
releaseSegments(next)
releaseSegments(current)
return -1, nil
}
current = append(current[:0], next...)
}
index = idx + offset
sub = s[index:]
offset += idx
}
releaseSegments(next)
return index, current
}
func (self EveryOf) Match(s string) bool {
for _, m := range self.Matchers {
if !m.Match(s) {
return false
}
}
return true
}
func (self EveryOf) String() string {
return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
}

vendor/github.com/gobwas/glob/match/list.go (generated vendored, 49 lines)

@ -0,0 +1,49 @@
package match
import (
"fmt"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)
type List struct {
List []rune
Not bool
}
func NewList(list []rune, not bool) List {
return List{list, not}
}
func (self List) Match(s string) bool {
r, w := utf8.DecodeRuneInString(s)
if len(s) > w {
return false
}
inList := runes.IndexRune(self.List, r) != -1
return inList == !self.Not
}
func (self List) Len() int {
return lenOne
}
func (self List) Index(s string) (int, []int) {
for i, r := range s {
if self.Not == (runes.IndexRune(self.List, r) == -1) {
return i, segmentsByRuneLength[utf8.RuneLen(r)]
}
}
return -1, nil
}
func (self List) String() string {
var not string
if self.Not {
not = "!"
}
return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
}

vendor/github.com/gobwas/glob/match/match.go (generated vendored, 81 lines)

@ -0,0 +1,81 @@
package match
// todo common table of rune's length
import (
"fmt"
"strings"
)
const lenOne = 1
const lenZero = 0
const lenNo = -1
type Matcher interface {
Match(string) bool
Index(string) (int, []int)
Len() int
String() string
}
type Matchers []Matcher
func (m Matchers) String() string {
var s []string
for _, matcher := range m {
s = append(s, fmt.Sprint(matcher))
}
return fmt.Sprintf("%s", strings.Join(s, ","))
}
// appendMerge merges and sorts given already SORTED and UNIQUE segments.
func appendMerge(target, sub []int) []int {
lt, ls := len(target), len(sub)
out := make([]int, 0, lt+ls)
for x, y := 0, 0; x < lt || y < ls; {
if x >= lt {
out = append(out, sub[y:]...)
break
}
if y >= ls {
out = append(out, target[x:]...)
break
}
xValue := target[x]
yValue := sub[y]
switch {
case xValue == yValue:
out = append(out, xValue)
x++
y++
case xValue < yValue:
out = append(out, xValue)
x++
case yValue < xValue:
out = append(out, yValue)
y++
}
}
target = append(target[:0], out...)
return target
}
func reverseSegments(input []int) {
l := len(input)
m := l / 2
for i := 0; i < m; i++ {
input[i], input[l-i-1] = input[l-i-1], input[i]
}
}

vendor/github.com/gobwas/glob/match/max.go (generated vendored, 49 lines)

@ -0,0 +1,49 @@
package match
import (
"fmt"
"unicode/utf8"
)
type Max struct {
Limit int
}
func NewMax(l int) Max {
return Max{l}
}
func (self Max) Match(s string) bool {
var l int
for range s {
l += 1
if l > self.Limit {
return false
}
}
return true
}
func (self Max) Index(s string) (int, []int) {
segments := acquireSegments(self.Limit + 1)
segments = append(segments, 0)
var count int
for i, r := range s {
count++
if count > self.Limit {
break
}
segments = append(segments, i+utf8.RuneLen(r))
}
return 0, segments
}
func (self Max) Len() int {
return lenNo
}
func (self Max) String() string {
return fmt.Sprintf("<max:%d>", self.Limit)
}

vendor/github.com/gobwas/glob/match/min.go (generated vendored, 57 lines)

@ -0,0 +1,57 @@
package match
import (
"fmt"
"unicode/utf8"
)
type Min struct {
Limit int
}
func NewMin(l int) Min {
return Min{l}
}
func (self Min) Match(s string) bool {
var l int
for range s {
l += 1
if l >= self.Limit {
return true
}
}
return false
}
func (self Min) Index(s string) (int, []int) {
var count int
c := len(s) - self.Limit + 1
if c <= 0 {
return -1, nil
}
segments := acquireSegments(c)
for i, r := range s {
count++
if count >= self.Limit {
segments = append(segments, i+utf8.RuneLen(r))
}
}
if len(segments) == 0 {
return -1, nil
}
return 0, segments
}
func (self Min) Len() int {
return lenNo
}
func (self Min) String() string {
return fmt.Sprintf("<min:%d>", self.Limit)
}

vendor/github.com/gobwas/glob/match/nothing.go (generated vendored, 27 lines)

@ -0,0 +1,27 @@
package match
import (
"fmt"
)
type Nothing struct{}
func NewNothing() Nothing {
return Nothing{}
}
func (self Nothing) Match(s string) bool {
return len(s) == 0
}
func (self Nothing) Index(s string) (int, []int) {
return 0, segments0
}
func (self Nothing) Len() int {
return lenZero
}
func (self Nothing) String() string {
return fmt.Sprintf("<nothing>")
}

vendor/github.com/gobwas/glob/match/prefix.go (generated vendored, 50 lines)

@ -0,0 +1,50 @@
package match
import (
"fmt"
"strings"
"unicode/utf8"
)
type Prefix struct {
Prefix string
}
func NewPrefix(p string) Prefix {
return Prefix{p}
}
func (self Prefix) Index(s string) (int, []int) {
idx := strings.Index(s, self.Prefix)
if idx == -1 {
return -1, nil
}
length := len(self.Prefix)
var sub string
if len(s) > idx+length {
sub = s[idx+length:]
} else {
sub = ""
}
segments := acquireSegments(len(sub) + 1)
segments = append(segments, length)
for i, r := range sub {
segments = append(segments, length+i+utf8.RuneLen(r))
}
return idx, segments
}
func (self Prefix) Len() int {
return lenNo
}
func (self Prefix) Match(s string) bool {
return strings.HasPrefix(s, self.Prefix)
}
func (self Prefix) String() string {
return fmt.Sprintf("<prefix:%s>", self.Prefix)
}

vendor/github.com/gobwas/glob/match/prefix_any.go (generated vendored, 55 lines)

@ -0,0 +1,55 @@
package match
import (
"fmt"
"strings"
"unicode/utf8"
sutil "github.com/gobwas/glob/util/strings"
)
type PrefixAny struct {
Prefix string
Separators []rune
}
func NewPrefixAny(s string, sep []rune) PrefixAny {
return PrefixAny{s, sep}
}
func (self PrefixAny) Index(s string) (int, []int) {
idx := strings.Index(s, self.Prefix)
if idx == -1 {
return -1, nil
}
n := len(self.Prefix)
sub := s[idx+n:]
i := sutil.IndexAnyRunes(sub, self.Separators)
if i > -1 {
sub = sub[:i]
}
seg := acquireSegments(len(sub) + 1)
seg = append(seg, n)
for i, r := range sub {
seg = append(seg, n+i+utf8.RuneLen(r))
}
return idx, seg
}
func (self PrefixAny) Len() int {
return lenNo
}
func (self PrefixAny) Match(s string) bool {
if !strings.HasPrefix(s, self.Prefix) {
return false
}
return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1
}
func (self PrefixAny) String() string {
return fmt.Sprintf("<prefix_any:%s![%s]>", self.Prefix, string(self.Separators))
}

vendor/github.com/gobwas/glob/match/prefix_suffix.go (generated vendored, 62 lines)

@ -0,0 +1,62 @@
package match
import (
"fmt"
"strings"
)
type PrefixSuffix struct {
Prefix, Suffix string
}
func NewPrefixSuffix(p, s string) PrefixSuffix {
return PrefixSuffix{p, s}
}
func (self PrefixSuffix) Index(s string) (int, []int) {
prefixIdx := strings.Index(s, self.Prefix)
if prefixIdx == -1 {
return -1, nil
}
suffixLen := len(self.Suffix)
if suffixLen <= 0 {
return prefixIdx, []int{len(s) - prefixIdx}
}
if (len(s) - prefixIdx) <= 0 {
return -1, nil
}
segments := acquireSegments(len(s) - prefixIdx)
for sub := s[prefixIdx:]; ; {
suffixIdx := strings.LastIndex(sub, self.Suffix)
if suffixIdx == -1 {
break
}
segments = append(segments, suffixIdx+suffixLen)
sub = sub[:suffixIdx]
}
if len(segments) == 0 {
releaseSegments(segments)
return -1, nil
}
reverseSegments(segments)
return prefixIdx, segments
}
func (self PrefixSuffix) Len() int {
return lenNo
}
func (self PrefixSuffix) Match(s string) bool {
return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix)
}
func (self PrefixSuffix) String() string {
return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix)
}

vendor/github.com/gobwas/glob/match/range.go (generated vendored, 48 lines)

@ -0,0 +1,48 @@
package match
import (
"fmt"
"unicode/utf8"
)
type Range struct {
Lo, Hi rune
Not bool
}
func NewRange(lo, hi rune, not bool) Range {
return Range{lo, hi, not}
}
func (self Range) Len() int {
return lenOne
}
func (self Range) Match(s string) bool {
r, w := utf8.DecodeRuneInString(s)
if len(s) > w {
return false
}
inRange := r >= self.Lo && r <= self.Hi
return inRange == !self.Not
}
func (self Range) Index(s string) (int, []int) {
for i, r := range s {
if self.Not != (r >= self.Lo && r <= self.Hi) {
return i, segmentsByRuneLength[utf8.RuneLen(r)]
}
}
return -1, nil
}
func (self Range) String() string {
var not string
if self.Not {
not = "!"
}
return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi))
}

vendor/github.com/gobwas/glob/match/row.go (generated vendored, 77 lines)

@ -0,0 +1,77 @@
package match
import (
"fmt"
)
type Row struct {
Matchers Matchers
RunesLength int
Segments []int
}
func NewRow(len int, m ...Matcher) Row {
return Row{
Matchers: Matchers(m),
RunesLength: len,
Segments: []int{len},
}
}
func (self Row) matchAll(s string) bool {
var idx int
for _, m := range self.Matchers {
length := m.Len()
var next, i int
for next = range s[idx:] {
i++
if i == length {
break
}
}
if i < length || !m.Match(s[idx:idx+next+1]) {
return false
}
idx += next + 1
}
return true
}
func (self Row) lenOk(s string) bool {
var i int
for range s {
i++
if i > self.RunesLength {
return false
}
}
return self.RunesLength == i
}
func (self Row) Match(s string) bool {
return self.lenOk(s) && self.matchAll(s)
}
func (self Row) Len() (l int) {
return self.RunesLength
}
func (self Row) Index(s string) (int, []int) {
for i := range s {
if len(s[i:]) < self.RunesLength {
break
}
if self.matchAll(s[i:]) {
return i, self.Segments
}
}
return -1, nil
}
func (self Row) String() string {
return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers)
}

vendor/github.com/gobwas/glob/match/segments.go (generated vendored, 91 lines)

@ -0,0 +1,91 @@
package match
import (
"sync"
)
type SomePool interface {
Get() []int
Put([]int)
}
var segmentsPools [1024]sync.Pool
func toPowerOfTwo(v int) int {
v--
v |= v >> 1
v |= v >> 2
v |= v >> 4
v |= v >> 8
v |= v >> 16
v++
return v
}
const (
cacheFrom = 16
cacheToAndHigher = 1024
cacheFromIndex = 15
cacheToAndHigherIndex = 1023
)
var (
segments0 = []int{0}
segments1 = []int{1}
segments2 = []int{2}
segments3 = []int{3}
segments4 = []int{4}
)
var segmentsByRuneLength [5][]int = [5][]int{
0: segments0,
1: segments1,
2: segments2,
3: segments3,
4: segments4,
}
func init() {
for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 {
func(i int) {
segmentsPools[i-1] = sync.Pool{New: func() interface{} {
return make([]int, 0, i)
}}
}(i)
}
}
func getTableIndex(c int) int {
p := toPowerOfTwo(c)
switch {
case p >= cacheToAndHigher:
return cacheToAndHigherIndex
case p <= cacheFrom:
return cacheFromIndex
default:
return p - 1
}
}
func acquireSegments(c int) []int {
// make []int with less capacity than cacheFrom
// is faster than acquiring it from pool
if c < cacheFrom {
return make([]int, 0, c)
}
return segmentsPools[getTableIndex(c)].Get().([]int)[:0]
}
func releaseSegments(s []int) {
c := cap(s)
// make []int with less capacity than cacheFrom
// is faster than acquiring it from pool
if c < cacheFrom {
return
}
segmentsPools[getTableIndex(c)].Put(s)
}

vendor/github.com/gobwas/glob/match/single.go (generated vendored, 43 lines)

@ -0,0 +1,43 @@
package match
import (
"fmt"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)
// Single represents the `?` wildcard: a single non-separator character
type Single struct {
Separators []rune
}
func NewSingle(s []rune) Single {
return Single{s}
}
func (self Single) Match(s string) bool {
r, w := utf8.DecodeRuneInString(s)
if len(s) > w {
return false
}
return runes.IndexRune(self.Separators, r) == -1
}
func (self Single) Len() int {
return lenOne
}
func (self Single) Index(s string) (int, []int) {
for i, r := range s {
if runes.IndexRune(self.Separators, r) == -1 {
return i, segmentsByRuneLength[utf8.RuneLen(r)]
}
}
return -1, nil
}
func (self Single) String() string {
return fmt.Sprintf("<single:![%s]>", string(self.Separators))
}

vendor/github.com/gobwas/glob/match/suffix.go (generated vendored, 35 lines)

@ -0,0 +1,35 @@
package match
import (
"fmt"
"strings"
)
type Suffix struct {
Suffix string
}
func NewSuffix(s string) Suffix {
return Suffix{s}
}
func (self Suffix) Len() int {
return lenNo
}
func (self Suffix) Match(s string) bool {
return strings.HasSuffix(s, self.Suffix)
}
func (self Suffix) Index(s string) (int, []int) {
idx := strings.Index(s, self.Suffix)
if idx == -1 {
return -1, nil
}
return 0, []int{idx + len(self.Suffix)}
}
func (self Suffix) String() string {
return fmt.Sprintf("<suffix:%s>", self.Suffix)
}

vendor/github.com/gobwas/glob/match/suffix_any.go (generated vendored, 43 lines)

@ -0,0 +1,43 @@
package match
import (
"fmt"
"strings"
sutil "github.com/gobwas/glob/util/strings"
)
type SuffixAny struct {
Suffix string
Separators []rune
}
func NewSuffixAny(s string, sep []rune) SuffixAny {
return SuffixAny{s, sep}
}
func (self SuffixAny) Index(s string) (int, []int) {
idx := strings.Index(s, self.Suffix)
if idx == -1 {
return -1, nil
}
i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1
return i, []int{idx + len(self.Suffix) - i}
}
func (self SuffixAny) Len() int {
return lenNo
}
func (self SuffixAny) Match(s string) bool {
if !strings.HasSuffix(s, self.Suffix) {
return false
}
return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1
}
func (self SuffixAny) String() string {
return fmt.Sprintf("<suffix_any:![%s]%s>", string(self.Separators), self.Suffix)
}

vendor/github.com/gobwas/glob/match/super.go (generated vendored, 33 lines)

@ -0,0 +1,33 @@
package match
import (
"fmt"
)
type Super struct{}
func NewSuper() Super {
return Super{}
}
func (self Super) Match(s string) bool {
return true
}
func (self Super) Len() int {
return lenNo
}
func (self Super) Index(s string) (int, []int) {
segments := acquireSegments(len(s) + 1)
for i := range s {
segments = append(segments, i)
}
segments = append(segments, len(s))
return 0, segments
}
func (self Super) String() string {
return fmt.Sprintf("<super>")
}

vendor/github.com/gobwas/glob/match/text.go (generated vendored, 45 lines)

@ -0,0 +1,45 @@
package match
import (
"fmt"
"strings"
"unicode/utf8"
)
// Text represents a raw string to match
type Text struct {
Str string
RunesLength int
BytesLength int
Segments []int
}
func NewText(s string) Text {
return Text{
Str: s,
RunesLength: utf8.RuneCountInString(s),
BytesLength: len(s),
Segments: []int{len(s)},
}
}
func (self Text) Match(s string) bool {
return self.Str == s
}
func (self Text) Len() int {
return self.RunesLength
}
func (self Text) Index(s string) (int, []int) {
index := strings.Index(s, self.Str)
if index == -1 {
return -1, nil
}
return index, self.Segments
}
func (self Text) String() string {
return fmt.Sprintf("<text:`%v`>", self.Str)
}

vendor/github.com/gobwas/glob/readme.md (generated vendored, 148 lines)

@ -0,0 +1,148 @@
# glob.[go](https://golang.org)
[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url]
> Go Globbing Library.
## Install
```shell
go get github.com/gobwas/glob
```
## Example
```go
package main
import "github.com/gobwas/glob"
func main() {
var g glob.Glob
// create simple glob
g = glob.MustCompile("*.github.com")
g.Match("api.github.com") // true
// quote meta characters and then create simple glob
g = glob.MustCompile(glob.QuoteMeta("*.github.com"))
g.Match("*.github.com") // true
// create new glob with set of delimiters as ["."]
g = glob.MustCompile("api.*.com", '.')
g.Match("api.github.com") // true
g.Match("api.gi.hub.com") // false
// create new glob with set of delimiters as ["."]
// but now with super wildcard
g = glob.MustCompile("api.**.com", '.')
g.Match("api.github.com") // true
g.Match("api.gi.hub.com") // true
// create glob with single symbol wildcard
g = glob.MustCompile("?at")
g.Match("cat") // true
g.Match("fat") // true
g.Match("at") // false
// create glob with single symbol wildcard and delimiters ['f']
g = glob.MustCompile("?at", 'f')
g.Match("cat") // true
g.Match("fat") // false
g.Match("at") // false
// create glob with character-list matchers
g = glob.MustCompile("[abc]at")
g.Match("cat") // true
g.Match("bat") // true
g.Match("fat") // false
g.Match("at") // false
// create glob with character-list matchers
g = glob.MustCompile("[!abc]at")
g.Match("cat") // false
g.Match("bat") // false
g.Match("fat") // true
g.Match("at") // false
// create glob with character-range matchers
g = glob.MustCompile("[a-c]at")
g.Match("cat") // true
g.Match("bat") // true
g.Match("fat") // false
g.Match("at") // false
// create glob with character-range matchers
g = glob.MustCompile("[!a-c]at")
g.Match("cat") // false
g.Match("bat") // false
g.Match("fat") // true
g.Match("at") // false
// create glob with pattern-alternatives list
g = glob.MustCompile("{cat,bat,[fr]at}")
g.Match("cat") // true
g.Match("bat") // true
g.Match("fat") // true
g.Match("rat") // true
g.Match("at") // false
g.Match("zat") // false
}
```
## Performance
This library is designed for compile-once patterns: compilation can take some time, but matching strings against a compiled pattern is then faster than re-parsing the pattern every time.
If you do not reuse the compiled `glob.Glob` object and instead call `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower.
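In practice the difference is just whether the compiled `Glob` is reused; a small illustrative sketch using only the `MustCompile` and `Match` calls documented above:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	hosts := []string{"api.github.com", "github.com", "api.gitlab.com"}

	// Compile once, match many times: parsing the pattern happens a single time.
	g := glob.MustCompile("api.*.com", '.')
	for _, h := range hosts {
		fmt.Println(h, g.Match(h))
	}

	// Anti-pattern: recompiling on every call repeats the parsing cost,
	// which is what the benchmarks below are measuring against.
	for _, h := range hosts {
		fmt.Println(h, glob.MustCompile("api.*.com", '.').Match(h))
	}
}
```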
Run `go test -bench=.` from source root to see the benchmarks:
Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199
`https://*.google.*` | `https://account.google.com` | `true` | 96
`https://*.google.*` | `https://google.com` | `false` | 66
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197
`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22
`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24
`abc*` | `abcdef` | `true` | 8.15
`abc*` | `af` | `false` | 5.68
`*def` | `abcdef` | `true` | 8.84
`*def` | `af` | `false` | 5.74
`ab*ef` | `abcdef` | `true` | 15.2
`ab*ef` | `af` | `false` | 10.4
The same cases with the `regexp` package:
Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383
`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205
`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272
`^abc.*$` | `abcdef` | `true` | 237
`^abc.*$` | `af` | `false` | 100
`^.*def$` | `abcdef` | `true` | 464
`^.*def$` | `af` | `false` | 265
`^ab.*ef$` | `abcdef` | `true` | 375
`^ab.*ef$` | `af` | `false` | 145
[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg
[godoc-url]: https://godoc.org/github.com/gobwas/glob
[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master
[travis-url]: https://travis-ci.org/gobwas/glob
## Syntax
Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm),
except that `**` (the super-asterisk) is not sensitive to separators.
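In other words, `*` and `**` only differ once separators are supplied; a short sketch (same calls as in the example above, behaviour inferred from the compiler's `optimizeMatcher`):

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// With '.' registered as a separator, `*` stops at dots but `**` does not.
	starSep := glob.MustCompile("api.*.com", '.')
	superSep := glob.MustCompile("api.**.com", '.')
	fmt.Println(starSep.Match("api.gi.hub.com"))  // false
	fmt.Println(superSep.Match("api.gi.hub.com")) // true

	// With no separators at all, `*` already matches any sequence of characters,
	// so it behaves like `**`.
	starNoSep := glob.MustCompile("api.*.com")
	fmt.Println(starNoSep.Match("api.gi.hub.com")) // true
}
```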

vendor/github.com/gobwas/glob/syntax/ast/ast.go (generated vendored, 122 lines)

@ -0,0 +1,122 @@
package ast
import (
"bytes"
"fmt"
)
type Node struct {
Parent *Node
Children []*Node
Value interface{}
Kind Kind
}
func NewNode(k Kind, v interface{}, ch ...*Node) *Node {
n := &Node{
Kind: k,
Value: v,
}
for _, c := range ch {
Insert(n, c)
}
return n
}
func (a *Node) Equal(b *Node) bool {
if a.Kind != b.Kind {
return false
}
if a.Value != b.Value {
return false
}
if len(a.Children) != len(b.Children) {
return false
}
for i, c := range a.Children {
if !c.Equal(b.Children[i]) {
return false
}
}
return true
}
func (a *Node) String() string {
var buf bytes.Buffer
buf.WriteString(a.Kind.String())
if a.Value != nil {
buf.WriteString(" =")
buf.WriteString(fmt.Sprintf("%v", a.Value))
}
if len(a.Children) > 0 {
buf.WriteString(" [")
for i, c := range a.Children {
if i > 0 {
buf.WriteString(", ")
}
buf.WriteString(c.String())
}
buf.WriteString("]")
}
return buf.String()
}
func Insert(parent *Node, children ...*Node) {
parent.Children = append(parent.Children, children...)
for _, ch := range children {
ch.Parent = parent
}
}
type List struct {
Not bool
Chars string
}
type Range struct {
Not bool
Lo, Hi rune
}
type Text struct {
Text string
}
type Kind int
const (
KindNothing Kind = iota
KindPattern
KindList
KindRange
KindText
KindAny
KindSuper
KindSingle
KindAnyOf
)
func (k Kind) String() string {
switch k {
case KindNothing:
return "Nothing"
case KindPattern:
return "Pattern"
case KindList:
return "List"
case KindRange:
return "Range"
case KindText:
return "Text"
case KindAny:
return "Any"
case KindSuper:
return "Super"
case KindSingle:
return "Single"
case KindAnyOf:
return "AnyOf"
default:
return ""
}
}

157
vendor/github.com/gobwas/glob/syntax/ast/parser.go generated vendored Normal file
View File

@ -0,0 +1,157 @@
package ast
import (
"errors"
"fmt"
"github.com/gobwas/glob/syntax/lexer"
"unicode/utf8"
)
type Lexer interface {
Next() lexer.Token
}
type parseFn func(*Node, Lexer) (parseFn, *Node, error)
func Parse(lexer Lexer) (*Node, error) {
var parser parseFn
root := NewNode(KindPattern, nil)
var (
tree *Node
err error
)
for parser, tree = parserMain, root; parser != nil; {
parser, tree, err = parser(tree, lexer)
if err != nil {
return nil, err
}
}
return root, nil
}
func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) {
for {
token := lex.Next()
switch token.Type {
case lexer.EOF:
return nil, tree, nil
case lexer.Error:
return nil, tree, errors.New(token.Raw)
case lexer.Text:
Insert(tree, NewNode(KindText, Text{token.Raw}))
return parserMain, tree, nil
case lexer.Any:
Insert(tree, NewNode(KindAny, nil))
return parserMain, tree, nil
case lexer.Super:
Insert(tree, NewNode(KindSuper, nil))
return parserMain, tree, nil
case lexer.Single:
Insert(tree, NewNode(KindSingle, nil))
return parserMain, tree, nil
case lexer.RangeOpen:
return parserRange, tree, nil
case lexer.TermsOpen:
a := NewNode(KindAnyOf, nil)
Insert(tree, a)
p := NewNode(KindPattern, nil)
Insert(a, p)
return parserMain, p, nil
case lexer.Separator:
p := NewNode(KindPattern, nil)
Insert(tree.Parent, p)
return parserMain, p, nil
case lexer.TermsClose:
return parserMain, tree.Parent.Parent, nil
default:
return nil, tree, fmt.Errorf("unexpected token: %s", token)
}
}
return nil, tree, fmt.Errorf("unknown error")
}
func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) {
var (
not bool
lo rune
hi rune
chars string
)
for {
token := lex.Next()
switch token.Type {
case lexer.EOF:
return nil, tree, errors.New("unexpected end")
case lexer.Error:
return nil, tree, errors.New(token.Raw)
case lexer.Not:
not = true
case lexer.RangeLo:
r, w := utf8.DecodeRuneInString(token.Raw)
if len(token.Raw) > w {
return nil, tree, fmt.Errorf("unexpected length of lo character")
}
lo = r
case lexer.RangeBetween:
//
case lexer.RangeHi:
r, w := utf8.DecodeRuneInString(token.Raw)
if len(token.Raw) > w {
return nil, tree, fmt.Errorf("unexpected length of lo character")
}
hi = r
if hi < lo {
return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
}
case lexer.Text:
chars = token.Raw
case lexer.RangeClose:
isRange := lo != 0 && hi != 0
isChars := chars != ""
if isChars == isRange {
return nil, tree, fmt.Errorf("could not parse range")
}
if isRange {
Insert(tree, NewNode(KindRange, Range{
Lo: lo,
Hi: hi,
Not: not,
}))
} else {
Insert(tree, NewNode(KindList, List{
Chars: chars,
Not: not,
}))
}
return parserMain, tree, nil
}
}
}

273
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go generated vendored Normal file
View File

@ -0,0 +1,273 @@
package lexer
import (
"bytes"
"fmt"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)
const (
char_any = '*'
char_comma = ','
char_single = '?'
char_escape = '\\'
char_range_open = '['
char_range_close = ']'
char_terms_open = '{'
char_terms_close = '}'
char_range_not = '!'
char_range_between = '-'
)
var specials = []byte{
char_any,
char_single,
char_escape,
char_range_open,
char_range_close,
char_terms_open,
char_terms_close,
}
func Special(c byte) bool {
return bytes.IndexByte(specials, c) != -1
}
type tokens []Token
func (i *tokens) shift() (ret Token) {
ret = (*i)[0]
copy(*i, (*i)[1:])
*i = (*i)[:len(*i)-1]
return
}
func (i *tokens) push(v Token) {
*i = append(*i, v)
}
func (i *tokens) empty() bool {
return len(*i) == 0
}
var eof rune = 0
type lexer struct {
data string
pos int
err error
tokens tokens
termsLevel int
lastRune rune
lastRuneSize int
hasRune bool
}
func NewLexer(source string) *lexer {
l := &lexer{
data: source,
tokens: tokens(make([]Token, 0, 4)),
}
return l
}
func (l *lexer) Next() Token {
if l.err != nil {
return Token{Error, l.err.Error()}
}
if !l.tokens.empty() {
return l.tokens.shift()
}
l.fetchItem()
return l.Next()
}
func (l *lexer) peek() (r rune, w int) {
if l.pos == len(l.data) {
return eof, 0
}
r, w = utf8.DecodeRuneInString(l.data[l.pos:])
if r == utf8.RuneError {
l.errorf("could not read rune")
r = eof
w = 0
}
return
}
func (l *lexer) read() rune {
if l.hasRune {
l.hasRune = false
l.seek(l.lastRuneSize)
return l.lastRune
}
r, s := l.peek()
l.seek(s)
l.lastRune = r
l.lastRuneSize = s
return r
}
func (l *lexer) seek(w int) {
l.pos += w
}
func (l *lexer) unread() {
if l.hasRune {
l.errorf("could not unread rune")
return
}
l.seek(-l.lastRuneSize)
l.hasRune = true
}
func (l *lexer) errorf(f string, v ...interface{}) {
l.err = fmt.Errorf(f, v...)
}
func (l *lexer) inTerms() bool {
return l.termsLevel > 0
}
func (l *lexer) termsEnter() {
l.termsLevel++
}
func (l *lexer) termsLeave() {
l.termsLevel--
}
var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open}
var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma)
func (l *lexer) fetchItem() {
r := l.read()
switch {
case r == eof:
l.tokens.push(Token{EOF, ""})
case r == char_terms_open:
l.termsEnter()
l.tokens.push(Token{TermsOpen, string(r)})
case r == char_comma && l.inTerms():
l.tokens.push(Token{Separator, string(r)})
case r == char_terms_close && l.inTerms():
l.tokens.push(Token{TermsClose, string(r)})
l.termsLeave()
case r == char_range_open:
l.tokens.push(Token{RangeOpen, string(r)})
l.fetchRange()
case r == char_single:
l.tokens.push(Token{Single, string(r)})
case r == char_any:
if l.read() == char_any {
l.tokens.push(Token{Super, string(r) + string(r)})
} else {
l.unread()
l.tokens.push(Token{Any, string(r)})
}
default:
l.unread()
var breakers []rune
if l.inTerms() {
breakers = inTermsBreakers
} else {
breakers = inTextBreakers
}
l.fetchText(breakers)
}
}
func (l *lexer) fetchRange() {
var wantHi bool
var wantClose bool
var seenNot bool
for {
r := l.read()
if r == eof {
l.errorf("unexpected end of input")
return
}
if wantClose {
if r != char_range_close {
l.errorf("expected close range character")
} else {
l.tokens.push(Token{RangeClose, string(r)})
}
return
}
if wantHi {
l.tokens.push(Token{RangeHi, string(r)})
wantClose = true
continue
}
if !seenNot && r == char_range_not {
l.tokens.push(Token{Not, string(r)})
seenNot = true
continue
}
if n, w := l.peek(); n == char_range_between {
l.seek(w)
l.tokens.push(Token{RangeLo, string(r)})
l.tokens.push(Token{RangeBetween, string(n)})
wantHi = true
continue
}
l.unread() // unread first peek and fetch as text
l.fetchText([]rune{char_range_close})
wantClose = true
}
}
func (l *lexer) fetchText(breakers []rune) {
var data []rune
var escaped bool
reading:
for {
r := l.read()
if r == eof {
break
}
if !escaped {
if r == char_escape {
escaped = true
continue
}
if runes.IndexRune(breakers, r) != -1 {
l.unread()
break reading
}
}
escaped = false
data = append(data, r)
}
if len(data) > 0 {
l.tokens.push(Token{Text, string(data)})
}
}
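
As a rough usage sketch of this lexer (the pattern string is an arbitrary example, not something the vendored code ships), tokens can be pulled with Next until EOF or Error is returned:

package main

import (
	"fmt"

	"github.com/gobwas/glob/syntax/lexer"
)

func main() {
	// Tokenize a glob pattern; Next yields one token per call.
	l := lexer.NewLexer("ab*[!c-d]{e,f}")
	for {
		tok := l.Next()
		fmt.Println(tok) // e.g. text<"ab">, any<"*">, range_open<"[">, ...
		if tok.Type == lexer.EOF || tok.Type == lexer.Error {
			break
		}
	}
}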

88
vendor/github.com/gobwas/glob/syntax/lexer/token.go generated vendored Normal file
View File

@ -0,0 +1,88 @@
package lexer
import "fmt"
type TokenType int
const (
EOF TokenType = iota
Error
Text
Char
Any
Super
Single
Not
Separator
RangeOpen
RangeClose
RangeLo
RangeHi
RangeBetween
TermsOpen
TermsClose
)
func (tt TokenType) String() string {
switch tt {
case EOF:
return "eof"
case Error:
return "error"
case Text:
return "text"
case Char:
return "char"
case Any:
return "any"
case Super:
return "super"
case Single:
return "single"
case Not:
return "not"
case Separator:
return "separator"
case RangeOpen:
return "range_open"
case RangeClose:
return "range_close"
case RangeLo:
return "range_lo"
case RangeHi:
return "range_hi"
case RangeBetween:
return "range_between"
case TermsOpen:
return "terms_open"
case TermsClose:
return "terms_close"
default:
return "undef"
}
}
type Token struct {
Type TokenType
Raw string
}
func (t Token) String() string {
return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
}

14
vendor/github.com/gobwas/glob/syntax/syntax.go generated vendored Normal file
View File

@ -0,0 +1,14 @@
package syntax
import (
"github.com/gobwas/glob/syntax/ast"
"github.com/gobwas/glob/syntax/lexer"
)
func Parse(s string) (*ast.Node, error) {
return ast.Parse(lexer.NewLexer(s))
}
func Special(b byte) bool {
return lexer.Special(b)
}
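
A minimal sketch of the wrapper above: feed a pattern through syntax.Parse and print the resulting AST via Node.String() (the pattern is an arbitrary example):

package main

import (
	"fmt"

	"github.com/gobwas/glob/syntax"
)

func main() {
	// Parse combines the lexer and the AST parser shown above.
	node, err := syntax.Parse("ab*[c-d]{e,f}")
	if err != nil {
		panic(err)
	}
	// Prints roughly: Pattern [Text =ab, Any, Range =..., AnyOf [Pattern [Text =e], Pattern [Text =f]]]
	fmt.Println(node)
}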

154
vendor/github.com/gobwas/glob/util/runes/runes.go generated vendored Normal file
View File

@ -0,0 +1,154 @@
package runes
func Index(s, needle []rune) int {
ls, ln := len(s), len(needle)
switch {
case ln == 0:
return 0
case ln == 1:
return IndexRune(s, needle[0])
case ln == ls:
if Equal(s, needle) {
return 0
}
return -1
case ln > ls:
return -1
}
head:
for i := 0; i < ls && ls-i >= ln; i++ {
for y := 0; y < ln; y++ {
if s[i+y] != needle[y] {
continue head
}
}
return i
}
return -1
}
func LastIndex(s, needle []rune) int {
ls, ln := len(s), len(needle)
switch {
case ln == 0:
if ls == 0 {
return 0
}
return ls
case ln == 1:
return IndexLastRune(s, needle[0])
case ln == ls:
if Equal(s, needle) {
return 0
}
return -1
case ln > ls:
return -1
}
head:
for i := ls - 1; i >= 0 && i >= ln; i-- {
for y := ln - 1; y >= 0; y-- {
if s[i-(ln-y-1)] != needle[y] {
continue head
}
}
return i - ln + 1
}
return -1
}
// IndexAny returns the index of the first instance of any Unicode code point
// from chars in s, or -1 if no Unicode code point from chars is present in s.
func IndexAny(s, chars []rune) int {
if len(chars) > 0 {
for i, c := range s {
for _, m := range chars {
if c == m {
return i
}
}
}
}
return -1
}
func Contains(s, needle []rune) bool {
return Index(s, needle) >= 0
}
func Max(s []rune) (max rune) {
for _, r := range s {
if r > max {
max = r
}
}
return
}
func Min(s []rune) rune {
min := rune(-1)
for _, r := range s {
if min == -1 {
min = r
continue
}
if r < min {
min = r
}
}
return min
}
func IndexRune(s []rune, r rune) int {
for i, c := range s {
if c == r {
return i
}
}
return -1
}
func IndexLastRune(s []rune, r rune) int {
for i := len(s) - 1; i >= 0; i-- {
if s[i] == r {
return i
}
}
return -1
}
func Equal(a, b []rune) bool {
if len(a) == len(b) {
for i := 0; i < len(a); i++ {
if a[i] != b[i] {
return false
}
}
return true
}
return false
}
// HasPrefix tests whether the string s begins with prefix.
func HasPrefix(s, prefix []rune) bool {
return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix)
}
// HasSuffix tests whether the string s ends with suffix.
func HasSuffix(s, suffix []rune) bool {
return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix)
}

39
vendor/github.com/gobwas/glob/util/strings/strings.go generated vendored Normal file
View File

@ -0,0 +1,39 @@
package strings
import (
"strings"
"unicode/utf8"
)
func IndexAnyRunes(s string, rs []rune) int {
for _, r := range rs {
if i := strings.IndexRune(s, r); i != -1 {
return i
}
}
return -1
}
func LastIndexAnyRunes(s string, rs []rune) int {
for _, r := range rs {
i := -1
if 0 <= r && r < utf8.RuneSelf {
i = strings.LastIndexByte(s, byte(r))
} else {
sub := s
for len(sub) > 0 {
j := strings.IndexRune(s, r)
if j == -1 {
break
}
i = j
sub = sub[i+1:]
}
}
if i != -1 {
return i
}
}
return -1
}

View File

@ -0,0 +1,24 @@
Copyright (c) 2018, iriri
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,313 @@
// Copyright 2018 iriri. All rights reserved. Use of this source code is
// governed by a BSD-style license which can be found in the LICENSE file.
// Package gitignore can be used to parse .gitignore-style files into lists of
// globs that can be used to test against paths or selectively walk a file
// tree. Gobwas's glob package is used for matching because it is faster than
// using regexp, which is overkill, and supports globstars (**), unlike
// filepath.Match.
package gitignore
import (
"bufio"
"errors"
"os"
"os/user"
"path/filepath"
"strings"
"github.com/gobwas/glob"
)
type ignoreFile struct {
globs []glob.Glob
abspath []string
}
type IgnoreList struct {
files []ignoreFile
cwd []string
}
func toSplit(path string) []string {
return strings.Split(filepath.ToSlash(path), "/")
}
func fromSplit(path []string) string {
return filepath.FromSlash(strings.Join(path, "/"))
}
// New creates a new ignore list.
func New() (IgnoreList, error) {
cwd, err := filepath.Abs(".")
if err != nil {
return IgnoreList{}, err
}
files := make([]ignoreFile, 1, 4)
files[0].globs = make([]glob.Glob, 0, 16)
return IgnoreList{
files,
toSplit(cwd),
}, nil
}
// From creates a new ignore list and populates the first entry with the
// contents of the specified file.
func From(path string) (IgnoreList, error) {
ign, err := New()
if err == nil {
err = ign.append(path, nil)
}
return ign, err
}
// FromGit finds the root directory of the current git repository and creates a
// new ignore list with the contents of all .gitignore files in that git
// repository.
func FromGit() (IgnoreList, error) {
ign, err := New()
if err == nil {
err = ign.AppendGit()
}
return ign, err
}
func clean(s string) string {
i := len(s) - 1
for ; i >= 0; i-- {
if s[i] != ' ' || i > 0 && s[i-1] == '\\' {
return s[:i+1]
}
}
return ""
}
// AppendGlob appends a single glob as a new entry in the ignore list. The root
// (relevant for matching patterns that begin with "/") is assumed to be the
// current working directory.
func (ign *IgnoreList) AppendGlob(s string) error {
g, err := glob.Compile(clean(s), '/')
if err == nil {
ign.files[0].globs = append(ign.files[0].globs, g)
}
return err
}
func toRelpath(s string, dir, cwd []string) string {
if s != "" {
if s[0] != '/' {
return s
}
if dir == nil || cwd == nil {
return s[1:]
}
dir = append(dir, toSplit(s[1:])...)
}
i := 0
min := len(cwd)
if len(dir) < min {
min = len(dir)
}
for ; i < min; i++ {
if dir[i] != cwd[i] {
break
}
}
if i == min && len(cwd) == len(dir) {
return "."
}
ss := make([]string, (len(cwd)-i)+(len(dir)-i))
j := 0
for ; j < len(cwd)-i; j++ {
ss[j] = ".."
}
for k := 0; j < len(ss); j, k = j+1, k+1 {
ss[j] = dir[i+k]
}
return fromSplit(ss)
}
func (ign *IgnoreList) append(path string, dir []string) error {
f, err := os.Open(path)
if err != nil {
return err
}
defer f.Close()
var ignf *ignoreFile
if dir != nil {
ignf = &ign.files[0]
} else {
d, err := filepath.Abs(filepath.Dir(path))
if err != nil {
return err
}
if d != fromSplit(ign.cwd) {
dir = toSplit(d)
ignf = &ignoreFile{
make([]glob.Glob, 0, 16),
dir,
}
} else {
ignf = &ign.files[0]
}
}
scn := bufio.NewScanner(bufio.NewReader(f))
for scn.Scan() {
s := scn.Text()
if s == "" || s[0] == '#' {
continue
}
g, err := glob.Compile(toRelpath(clean(s), dir, ign.cwd), '/')
if err != nil {
return err
}
ignf.globs = append(ignf.globs, g)
}
ign.files = append(ign.files, *ignf)
return nil
}
// Append appends the globs in the specified file to the ignore list. Files are
// expected to have the same format as .gitignore files.
func (ign *IgnoreList) Append(path string) error {
return ign.append(path, nil)
}
func exists(path string) bool {
_, err := os.Stat(path)
return !os.IsNotExist(err)
}
func findGitRoot(cwd []string) (string, error) {
p := fromSplit(cwd)
for !exists(p + "/.git") {
if len(cwd) == 1 {
return "", errors.New("not in a git repository")
}
cwd = cwd[:len(cwd)-1]
p = fromSplit(cwd)
}
return p, nil
}
func (ign *IgnoreList) appendAll(fname, root string) error {
return filepath.Walk(
root,
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if filepath.Base(path) == fname {
ign.append(path, nil)
}
return nil
})
}
// AppendGit finds the root directory of the current git repository and appends
// the contents of all .gitignore files in that git repository to the ignore
// list.
func (ign *IgnoreList) AppendGit() error {
gitRoot, err := findGitRoot(ign.cwd)
if err != nil {
return err
}
if err = ign.AppendGlob(".git"); err != nil {
return err
}
usr, err := user.Current()
if err != nil {
return err
}
if gg := filepath.Join(usr.HomeDir, ".gitignore_global"); exists(gg) {
if err = ign.append(gg, toSplit(gitRoot)); err != nil {
return err
}
}
return ign.appendAll(".gitignore", gitRoot)
}
func isPrefix(abspath, dir []string) bool {
if len(abspath) > len(dir) {
return false
}
for i := range abspath {
if abspath[i] != dir[i] {
return false
}
}
return true
}
func (ign *IgnoreList) match(path string, info os.FileInfo) bool {
if path == "." {
return false
}
ss := make([]string, 0, 4)
base := filepath.Base(path)
ss = append(ss, path)
if base != path {
ss = append(ss, base)
} else {
ss = append(ss, "./"+path)
}
if info != nil && info.IsDir() {
ss = append(ss, path+"/")
if base != path {
ss = append(ss, base+"/")
} else {
ss = append(ss, "./"+path+"/")
}
}
d, err := filepath.Abs(filepath.Dir(path))
if err != nil {
return false
}
dir := toSplit(d)
for _, f := range ign.files {
if isPrefix(f.abspath, dir) || len(f.abspath) == 0 {
for _, g := range f.globs {
for _, s := range ss {
if g.Match(s) {
return true
}
}
}
}
}
return false
}
// Match returns whether any of the globs in the ignore list match the
// specified path. Uses the same matching rules as .gitignore files.
func (ign *IgnoreList) Match(path string) bool {
return ign.match(path, nil)
}
// Walk walks the file tree with the specified root and calls fn on each file
// or directory. Files and directories that match any of the globs in the
// ignore list are skipped.
func (ign *IgnoreList) Walk(root string, fn filepath.WalkFunc) error {
abs, err := filepath.Abs(root)
if err != nil {
return err
}
return filepath.Walk(
toRelpath("", toSplit(abs), ign.cwd),
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if ign.match(path, info) {
if info.IsDir() {
return filepath.SkipDir
}
return err
}
return fn(path, info, err)
})
}

View File

@ -0,0 +1,5 @@
module github.com/jesseduffield/minimal/gitignore
go 1.15
require github.com/gobwas/glob v0.2.3

View File

@ -0,0 +1,2 @@
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=

View File

@ -0,0 +1,10 @@
testgitignore
*.o
*.out*
#*.ou
aa
bbb
ccc/
**/Makefile
/testfs/ignoredfile*

3
vendor/gopkg.in/ozeidan/fuzzy-patricia.v3/AUTHORS generated vendored Normal file
View File

@ -0,0 +1,3 @@
This is the complete list of go-patricia copyright holders:
Ondřej Kupka <ondra.cap@gmail.com>

20
vendor/gopkg.in/ozeidan/fuzzy-patricia.v3/LICENSE generated vendored Normal file
View File

@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2014 The AUTHORS
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,182 @@
// Copyright (c) 2014 The go-patricia AUTHORS
//
// Use of this source code is governed by The MIT License
// that can be found in the LICENSE file.
package patricia
import (
"io"
"sort"
)
type childList interface {
length() int
head() *Trie
add(child *Trie) childList
remove(b byte)
replace(b byte, child *Trie)
next(b byte) *Trie
combinedMask() uint64
getChildren() []*Trie
walk(prefix *Prefix, visitor VisitorFunc) error
print(w io.Writer, indent int)
clone() childList
total() int
}
type tries []*Trie
func (t tries) Len() int {
return len(t)
}
func (t tries) Less(i, j int) bool {
strings := sort.StringSlice{string(t[i].prefix), string(t[j].prefix)}
return strings.Less(0, 1)
}
func (t tries) Swap(i, j int) {
t[i], t[j] = t[j], t[i]
}
type childContainer struct {
char byte
node *Trie
}
type superDenseChildList struct {
children []childContainer
}
func newSuperDenseChildList() childList {
return &superDenseChildList{
make([]childContainer, 0),
}
}
func (list *superDenseChildList) length() int {
return len(list.children)
}
func (list *superDenseChildList) head() *Trie {
if len(list.children) > 0 {
return list.children[0].node
}
return nil
}
func (list *superDenseChildList) add(child *Trie) childList {
char := child.prefix[0]
list.children = append(list.children, childContainer{
char,
child,
})
return list
}
func (list *superDenseChildList) remove(b byte) {
children := list.children
for i := 0; i < len(children); i++ {
if children[i].char == b {
// children[i] = children[len(children)-1]
// children = children[:len(children)-1]
// children = append(children[:i], children[i+1:]...)
newChildren := make([]childContainer, len(children)-1)
// copy the elements over to avoid "memory leaks"
copy(newChildren, children[:i])
copy(newChildren[i:], children[i+1:])
list.children = newChildren
// list.children = make([]childContainer, len(children))
// copy(list.children, children)
// children = nil
return
}
}
}
func (list *superDenseChildList) replace(b byte, child *Trie) {
children := list.children
for i := 0; i < len(list.children); i++ {
if children[i].char == b {
children[i].node = child
return
}
}
}
func (list *superDenseChildList) next(b byte) *Trie {
children := list.children
for i := 0; i < len(list.children); i++ {
if children[i].char == b {
return children[i].node
}
}
return nil
}
func (list superDenseChildList) combinedMask() uint64 {
var mask uint64
for _, child := range list.children {
// fmt.Printf("child = %+v\n", child)
mask |= child.node.mask
}
return mask
}
func (list *superDenseChildList) getChildren() []*Trie {
children := make([]*Trie, 0, len(list.children))
for _, child := range list.children {
children = append(children, child.node)
}
return children
}
func (list *superDenseChildList) walk(prefix *Prefix, visitor VisitorFunc) error {
for _, child := range list.children {
node := child.node
*prefix = append(*prefix, node.prefix...)
if node.item != nil {
if err := visitor(*prefix, node.item); err != nil {
if err == SkipSubtree {
*prefix = (*prefix)[:len(*prefix)-len(node.prefix)]
continue
}
*prefix = (*prefix)[:len(*prefix)-len(node.prefix)]
return err
}
}
err := node.children.walk(prefix, visitor)
*prefix = (*prefix)[:len(*prefix)-len(node.prefix)]
if err != nil {
return err
}
}
return nil
}
func (list *superDenseChildList) print(w io.Writer, indent int) {
for _, child := range list.children {
child.node.print(w, indent)
}
}
func (list *superDenseChildList) clone() childList {
clones := make([]childContainer, len(list.children))
for i := 0; i < len(list.children); i++ {
child := list.children[i]
clones[i] = childContainer{child.char, child.node.Clone()}
}
return &superDenseChildList{
clones,
}
}
func (list *superDenseChildList) total() int {
return len(list.children)
}

View File

@ -0,0 +1,892 @@
// Copyright (c) 2014 The go-patricia AUTHORS
//
// Use of this source code is governed by The MIT License
// that can be found in the LICENSE file.
package patricia
import (
"bytes"
"errors"
"fmt"
"io"
"strings"
)
//------------------------------------------------------------------------------
// Trie
//------------------------------------------------------------------------------
const (
defaultMaxPrefixPerNode = 10
)
var (
maxPrefixPerNode = defaultMaxPrefixPerNode
)
type (
// Prefix is the type of node prefixes
Prefix []byte
// Item is just interface{}
Item interface{}
// VisitorFunc is the type of functions passed to visit function
VisitorFunc func(prefix Prefix, item Item) error
// FuzzyVisitorFunc is additionally passed how many characters were skipped, which callers can sort on
FuzzyVisitorFunc func(prefix Prefix, item Item, skipped int) error
)
// Trie is a generic patricia trie that allows fast retrieval of items by prefix.
// and other funky stuff.
//
// Trie is not thread-safe.
type Trie struct {
prefix Prefix
item Item
mask uint64
children childList
}
// Public API ------------------------------------------------------------------
// NewTrie constructs a new trie.
func NewTrie() *Trie {
trie := &Trie{}
trie.children = newSuperDenseChildList()
trie.mask = 0
return trie
}
// SetMaxPrefixPerNode sets the maximum length of a prefix before it is split into two nodes
func SetMaxPrefixPerNode(value int) {
maxPrefixPerNode = value
}
// Clone makes a copy of an existing trie.
// Items stored in both tries become shared, obviously.
func (trie *Trie) Clone() *Trie {
return &Trie{
prefix: append(Prefix(nil), trie.prefix...),
item: trie.item,
children: trie.children.clone(),
}
}
// Item returns the item stored in the root of this trie.
func (trie *Trie) Item() Item {
return trie.item
}
// Insert inserts a new item into the trie using the given prefix. Insert does
// not replace existing items. It returns false if an item was already in place.
func (trie *Trie) Insert(key Prefix, item Item) (inserted bool) {
return trie.put(key, item, false)
}
// Set works much like Insert, but it always sets the item, possibly replacing
// the item previously inserted.
func (trie *Trie) Set(key Prefix, item Item) {
trie.put(key, item, true)
}
// Get returns the item located at key.
//
// This method is a bit dangerous, because Get can as well end up in an internal
// node that is not really representing any user-defined value. So when nil is
// a valid value being used, it is not possible to tell if the value was inserted
// into the tree by the user or not. A possible workaround for this is not to use
// nil interface as a valid value, even using zero value of any type is enough
// to prevent this bad behaviour.
func (trie *Trie) Get(key Prefix) (item Item) {
_, node, found, leftover := trie.findSubtree(key)
if !found || len(leftover) != 0 {
return nil
}
return node.item
}
// Match returns what Get(prefix) != nil would return. The same warning as for
// Get applies here as well.
func (trie *Trie) Match(prefix Prefix) (matchedExactly bool) {
return trie.Get(prefix) != nil
}
// MatchSubtree returns true when there is a subtree representing extensions
// to key, that is if there are any keys in the tree which have key as prefix.
func (trie *Trie) MatchSubtree(key Prefix) (matched bool) {
_, _, matched, _ = trie.findSubtree(key)
return
}
// Visit calls visitor on every node containing a non-nil item
// in alphabetical order.
//
// If an error is returned from visitor, the function stops visiting the tree
// and returns that error, unless it is a special error - SkipSubtree. In that
// case Visit skips the subtree represented by the current node and continues
// elsewhere.
func (trie *Trie) Visit(visitor VisitorFunc) error {
return trie.walk(nil, visitor)
}
func (trie *Trie) size() int {
n := 0
err := trie.walk(nil, func(prefix Prefix, item Item) error {
n++
return nil
})
if err != nil {
panic(err)
}
return n
}
func (trie *Trie) total() int {
return 1 + trie.children.total()
}
// VisitSubtree works much like Visit, but it only visits nodes matching prefix.
func (trie *Trie) VisitSubtree(prefix Prefix, visitor VisitorFunc) error {
// Nil prefix not allowed.
if prefix == nil {
panic(ErrNilPrefix)
}
// Empty trie must be handled explicitly.
if trie.prefix == nil {
return nil
}
// Locate the relevant subtree.
_, root, found, leftover := trie.findSubtree(prefix)
if !found {
return nil
}
prefix = append(prefix, leftover...)
// Visit it.
return root.walk(prefix, visitor)
}
type potentialSubtree struct {
idx int
skipped int
prefix Prefix
node *Trie
}
// VisitFuzzy visits every node that is successfully matched via fuzzy matching
func (trie *Trie) VisitFuzzy(partial Prefix, caseInsensitive bool, visitor FuzzyVisitorFunc) error {
if len(partial) == 0 {
return trie.VisitPrefixes(partial, caseInsensitive, func(prefix Prefix, item Item) error {
return visitor(prefix, item, 0)
})
}
var (
m uint64
cmp uint64
i int
p potentialSubtree
)
potential := []potentialSubtree{potentialSubtree{node: trie, prefix: Prefix(""), idx: 0}}
for l := len(potential); l > 0; l = len(potential) {
i = l - 1
p = potential[i]
potential = potential[:i]
m = makePrefixMask(partial[p.idx:])
if caseInsensitive {
cmp = caseInsensitiveMask(p.node.mask)
} else {
cmp = p.node.mask
}
if (cmp & m) != m {
continue
}
matchCount, skipped := fuzzyMatchCount(p.node.prefix,
partial[p.idx:], p.idx, caseInsensitive)
p.idx += matchCount
if p.idx != 0 {
p.skipped += skipped
}
if p.idx == len(partial) {
fullPrefix := append(p.prefix, p.node.prefix...)
err := p.node.walk(Prefix(""), func(prefix Prefix, item Item) error {
key := make([]byte, len(fullPrefix), len(fullPrefix)+len(prefix))
copy(key, fullPrefix)
key = append(key, prefix...)
err := visitor(key, item, p.skipped)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}
continue
}
for _, c := range p.node.children.getChildren() {
if c != nil {
newPrefix := make(Prefix, len(p.prefix), len(p.prefix)+len(p.node.prefix))
copy(newPrefix, p.prefix)
newPrefix = append(newPrefix, p.node.prefix...)
potential = append(potential, potentialSubtree{
node: c,
prefix: newPrefix,
idx: p.idx,
skipped: p.skipped,
})
} else {
fmt.Println("warning, child isn il")
}
}
}
return nil
}
func fuzzyMatchCount(prefix, query Prefix, idx int, caseInsensitive bool) (count, skipped int) {
for i := 0; i < len(prefix); i++ {
var match bool
if caseInsensitive {
match = matchCaseInsensitive(prefix[i], query[count])
} else {
match = prefix[i] == query[count]
}
if !match {
if count+idx > 0 {
skipped++
}
continue
}
count++
if count >= len(query) {
return
}
}
return
}
// VisitSubstring takes a substring and visits all the nodes whose prefix contains this substring
func (trie *Trie) VisitSubstring(substring Prefix, caseInsensitive bool, visitor VisitorFunc) error {
if len(substring) == 0 {
return trie.VisitSubtree(substring, visitor)
}
var (
m uint64
cmp uint64
i int
p potentialSubtree
suffixLen int
maxSuffixLen = len(substring) - 1
)
potential := []potentialSubtree{potentialSubtree{node: trie, prefix: nil}}
for l := len(potential); l > 0; l = len(potential) {
i = l - 1
p = potential[i]
potential = potential[:i]
if len(p.prefix) < maxSuffixLen {
suffixLen = len(p.prefix)
} else {
suffixLen = maxSuffixLen
}
searchBytes := append(p.prefix[len(p.prefix)-suffixLen:], p.node.prefix...)
contains := false
if caseInsensitive {
contains = bytes.Contains(bytes.ToUpper(searchBytes), bytes.ToUpper(substring))
} else {
contains = bytes.Contains(searchBytes, substring)
}
if contains {
fullPrefix := append(p.prefix, p.node.prefix...)
err := p.node.walk(Prefix(""), func(prefix Prefix, item Item) error {
key := make([]byte, len(fullPrefix), len(fullPrefix)+len(prefix))
copy(key, fullPrefix)
key = append(key, prefix...)
copy(key, append(fullPrefix, prefix...))
err := visitor(key, item)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}
}
newPrefix := make(Prefix, len(p.prefix), len(p.prefix)+len(p.node.prefix))
copy(newPrefix, p.prefix)
newPrefix = append(newPrefix, p.node.prefix...)
overLap := overlapLength(newPrefix, substring, caseInsensitive)
m = makePrefixMask(substring[overLap:])
for _, c := range p.node.children.getChildren() {
if caseInsensitive {
cmp = caseInsensitiveMask(c.mask)
} else {
cmp = c.mask
}
if c != nil && (cmp&m == m) {
potential = append(potential, potentialSubtree{
node: c,
prefix: newPrefix,
})
}
}
}
return nil
}
func overlapLength(prefix, query Prefix, caseInsensitive bool) int {
startLength := len(query) - 1
if len(prefix) < startLength {
startLength = len(prefix)
}
for i := startLength; i > 0; i-- {
suffix := prefix[len(prefix)-i:]
queryPrefix := query[:i]
if caseInsensitive {
if bytes.EqualFold(suffix, queryPrefix) {
return i
}
} else if bytes.Equal(suffix, queryPrefix) {
return i
}
}
return 0
}
// VisitPrefixes visits only nodes that represent prefixes of key.
// To say the obvious, returning SkipSubtree from visitor makes no sense here.
func (trie *Trie) VisitPrefixes(key Prefix, caseInsensitive bool, visitor VisitorFunc) error {
// Nil key not allowed.
if key == nil {
panic(ErrNilPrefix)
}
// Empty trie must be handled explicitly.
if trie.prefix == nil {
return nil
}
// Walk the path matching key prefixes.
node := trie
prefix := key
offset := 0
for {
// Compute what part of prefix matches.
common := node.longestCommonPrefixLength(key, caseInsensitive)
key = key[common:]
offset += common
// Partial match means that there is no subtree matching prefix.
if common < len(node.prefix) {
return nil
}
// Call the visitor.
if item := node.item; item != nil {
if err := visitor(prefix[:offset], item); err != nil {
return err
}
}
if len(key) == 0 {
// This node represents key, we are finished.
return nil
}
// There is some key suffix left, move to the children.
child := node.children.next(key[0])
if child == nil {
// There is nowhere to continue, return.
return nil
}
node = child
}
}
// Delete deletes the item represented by the given prefix.
//
// True is returned if the matching node was found and deleted.
func (trie *Trie) Delete(key Prefix) (deleted bool) {
// Nil prefix not allowed.
if key == nil {
panic(ErrNilPrefix)
}
// Empty trie must be handled explicitly.
if trie.prefix == nil {
return false
}
// Find the relevant node.
path, found, _ := trie.findSubtreePath(key)
if !found {
return false
}
node := path[len(path)-1]
var parent *Trie
if len(path) != 1 {
parent = path[len(path)-2]
}
// If the item is already set to nil, there is nothing to do.
if node.item == nil {
return false
}
// Delete the item.
node.item = nil
// Initialise i before goto.
// Will be used later in a loop.
i := len(path) - 1
// In case there are some child nodes, we cannot drop the whole subtree.
// We can try to compact nodes, though.
if node.children.length() != 0 {
goto Compact
}
// In case we are at the root, just reset it and we are done.
if parent == nil {
node.reset()
return true
}
// We can drop a subtree.
// Find the first ancestor that has its value set or it has 2 or more child nodes.
// That will be the node where to drop the subtree at.
for ; i >= 0; i-- {
if current := path[i]; current.item != nil || current.children.length() >= 2 {
break
}
}
// Handle the case when there is no such node.
// In other words, we can reset the whole tree.
if i == -1 {
path[0].reset()
return true
}
// We can just remove the subtree here.
node = path[i]
if i == 0 {
parent = nil
} else {
parent = path[i-1]
}
// i+1 is always a valid index since i is never pointing to the last node.
// The loop above skips at least the last node since we are sure that the item
// is set to nil and it has no children, otherwise we would be compacting instead.
node.children.remove(path[i+1].prefix[0])
// lastly, the bitmasks of all of the parent nodes have to be updated again, since
// a child node of all of them has been removed
for ; i >= 0; i-- {
n := path[i]
n.mask = n.children.combinedMask()
}
Compact:
// The node is set to the first non-empty ancestor,
// so try to compact since that might be possible now.
if compacted := node.compact(); compacted != node {
if parent == nil {
*node = *compacted
} else {
parent.children.replace(node.prefix[0], compacted)
*parent = *parent.compact()
}
}
return true
}
// DeleteSubtree finds the subtree exactly matching prefix and deletes it.
//
// True is returned if the subtree was found and deleted.
func (trie *Trie) DeleteSubtree(prefix Prefix) (deleted bool) {
// Nil prefix not allowed.
if prefix == nil {
panic(ErrNilPrefix)
}
// Empty trie must be handled explicitly.
if trie.prefix == nil {
return false
}
// Locate the relevant subtree.
parent, root, found, _ := trie.findSubtree(prefix)
path, _, _ := trie.findSubtreePath(prefix)
if !found {
return false
}
// If we are in the root of the trie, reset the trie.
if parent == nil {
root.reset()
return true
}
// Otherwise remove the root node from its parent.
parent.children.remove(root.prefix[0])
// update masks
parent.mask = parent.children.combinedMask()
for i := len(path) - 1; i >= 0; i-- {
n := path[i]
n.mask = n.children.combinedMask()
}
return true
}
// Internal helper methods -----------------------------------------------------
func (trie *Trie) empty() bool {
return trie.item == nil && trie.children.length() == 0
}
func (trie *Trie) reset() {
trie.prefix = nil
trie.children = newSuperDenseChildList()
}
func makePrefixMask(key Prefix) uint64 {
var mask uint64
for _, b := range key {
if b >= '0' && b <= '9' {
// 0-9 bits: 0-9
b -= 48
} else if b >= 'A' && b <= 'Z' {
// A-Z bits: 10-35
b -= 55
} else if b >= 'a' && b <= 'z' {
// a-z bits: 36-61
b -= 61
} else if b == '.' {
b = 62
} else if b == '-' {
b = 63
} else {
continue
}
mask |= uint64(1) << uint64(b)
}
return mask
}
const upperBits = 0xFFFFFFC00
const lowerBits = 0x3FFFFFF000000000
func caseInsensitiveMask(mask uint64) uint64 {
mask |= (mask & upperBits) << uint64(26)
mask |= (mask & lowerBits) >> uint64(26)
return mask
}
var charmap = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz.-"
func (trie *Trie) put(key Prefix, item Item, replace bool) (inserted bool) {
// Nil prefix not allowed.
if key == nil {
panic(ErrNilPrefix)
}
var (
common int
node = trie
child *Trie
mask uint64
)
mask = makePrefixMask(key)
if node.prefix == nil {
node.mask |= mask
if len(key) <= maxPrefixPerNode {
node.prefix = key
goto InsertItem
}
node.prefix = key[:maxPrefixPerNode]
key = key[maxPrefixPerNode:]
mask = makePrefixMask(key)
goto AppendChild
}
for {
// Compute the longest common prefix length.
common = node.longestCommonPrefixLength(key, false)
key = key[common:]
// Only a part matches, split.
if common < len(node.prefix) {
goto SplitPrefix
}
// common == len(node.prefix) since never (common > len(node.prefix))
// common == len(former key) <-> 0 == len(key)
// -> former key == node.prefix
if len(key) == 0 {
goto InsertItem
}
node.mask |= mask
// Check children for matching prefix.
child = node.children.next(key[0])
if child == nil {
goto AppendChild
}
node = child
}
SplitPrefix:
// Split the prefix if necessary.
child = new(Trie)
*child = *node
*node = *NewTrie()
node.prefix = child.prefix[:common]
child.prefix = child.prefix[common:]
child = child.compact()
node.children = node.children.add(child)
node.mask = child.mask
node.mask |= mask
mask = makePrefixMask(key)
AppendChild:
// Keep appending children until whole prefix is inserted.
// This loop starts with empty node.prefix that needs to be filled.
for len(key) != 0 {
child := NewTrie()
child.mask = mask
if len(key) <= maxPrefixPerNode {
child.prefix = key
node.children = node.children.add(child)
node = child
goto InsertItem
} else {
child.prefix = key[:maxPrefixPerNode]
key = key[maxPrefixPerNode:]
mask = makePrefixMask(key)
node.children = node.children.add(child)
node = child
}
}
InsertItem:
// Try to insert the item if possible.
if replace || node.item == nil {
node.item = item
return true
}
return false
}
func (trie *Trie) compact() *Trie {
// Only a node with a single child can be compacted.
if trie.children.length() != 1 {
return trie
}
child := trie.children.head()
// If any item is set, we cannot compact since we want to retain
// the ability to do searching by key. This makes compaction less usable,
// but that simply cannot be avoided.
if trie.item != nil || child.item != nil {
return trie
}
// Make sure the combined prefixes fit into a single node.
if len(trie.prefix)+len(child.prefix) > maxPrefixPerNode {
return trie
}
// Concatenate the prefixes, move the items.
child.prefix = append(trie.prefix, child.prefix...)
child.mask = trie.mask
if trie.item != nil {
child.item = trie.item
}
return child
}
func (trie *Trie) findSubtree(prefix Prefix) (parent *Trie, root *Trie, found bool, leftover Prefix) {
// Find the subtree matching prefix.
root = trie
for {
// Compute what part of prefix matches.
common := root.longestCommonPrefixLength(prefix, false)
prefix = prefix[common:]
// We used up the whole prefix, subtree found.
if len(prefix) == 0 {
found = true
leftover = root.prefix[common:]
return
}
// Partial match means that there is no subtree matching prefix.
if common < len(root.prefix) {
leftover = root.prefix[common:]
return
}
// There is some prefix left, move to the children.
child := root.children.next(prefix[0])
if child == nil {
// There is nowhere to continue, there is no subtree matching prefix.
return
}
parent = root
root = child
}
}
func (trie *Trie) findSubtreePath(prefix Prefix) (path []*Trie, found bool, leftover Prefix) {
// Find the subtree matching prefix.
root := trie
var subtreePath []*Trie
for {
// Append the current root to the path.
subtreePath = append(subtreePath, root)
// Compute what part of prefix matches.
common := root.longestCommonPrefixLength(prefix, false)
prefix = prefix[common:]
// We used up the whole prefix, subtree found.
if len(prefix) == 0 {
path = subtreePath
found = true
leftover = root.prefix[common:]
return
}
// Partial match means that there is no subtree matching prefix.
if common < len(root.prefix) {
leftover = root.prefix[common:]
return
}
// There is some prefix left, move to the children.
child := root.children.next(prefix[0])
if child == nil {
// There is nowhere to continue, there is no subtree matching prefix.
return
}
root = child
}
}
func (trie *Trie) walk(actualRootPrefix Prefix, visitor VisitorFunc) error {
var prefix Prefix
// Allocate a bit more space for prefix at the beginning.
if actualRootPrefix == nil {
prefix = make(Prefix, 32+len(trie.prefix))
copy(prefix, trie.prefix)
prefix = prefix[:len(trie.prefix)]
} else {
prefix = make(Prefix, 32+len(actualRootPrefix))
copy(prefix, actualRootPrefix)
prefix = prefix[:len(actualRootPrefix)]
}
// Visit the root first. Note that this works for the empty trie as well since
// in that case item == nil && len(children) == 0.
if trie.item != nil {
if err := visitor(prefix, trie.item); err != nil {
if err == SkipSubtree {
return nil
}
return err
}
}
// Then continue to the children.
return trie.children.walk(&prefix, visitor)
}
func (trie *Trie) longestCommonPrefixLength(prefix Prefix, caseInsensitive bool) (i int) {
for ; i < len(prefix) && i < len(trie.prefix); i++ {
p := prefix[i]
t := trie.prefix[i]
if caseInsensitive {
if !(matchCaseInsensitive(t, p)) {
break
}
} else {
if p != t {
break
}
}
}
return
}
func matchCaseInsensitive(a byte, b byte) bool {
return a == b+32 || b == a+32 || a == b
}
func (trie *Trie) dump() string {
writer := &bytes.Buffer{}
trie.print(writer, 0)
return writer.String()
}
func (trie *Trie) print(writer io.Writer, indent int) {
fmt.Fprintf(writer, "%s%s %v\n", strings.Repeat(" ", indent), string(trie.prefix), trie.item)
trie.children.print(writer, indent+2)
}
// Errors ----------------------------------------------------------------------
var (
SkipSubtree = errors.New("Skip this subtree")
ErrNilPrefix = errors.New("Nil prefix passed into a method call")
)
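
To show how the trie above can back fuzzy filename matching, here is a minimal sketch (the indexed paths and the query "pgm" are invented for illustration): insert a few keys, then let VisitFuzzy report each match together with how many characters were skipped, which a caller can sort on to rank closer matches first.

package main

import (
	"fmt"

	"gopkg.in/ozeidan/fuzzy-patricia.v3/patricia"
)

func main() {
	trie := patricia.NewTrie()
	for _, path := range []string{"pkg/gui/menu.go", "pkg/commands/git.go", "docs/README.md"} {
		trie.Insert(patricia.Prefix(path), path)
	}
	// Case-insensitive fuzzy match: keys containing "pgm" as a subsequence are
	// visited, along with the number of characters skipped along the way.
	_ = trie.VisitFuzzy(patricia.Prefix("pgm"), true, func(prefix patricia.Prefix, item patricia.Item, skipped int) error {
		fmt.Printf("%s (skipped %d)\n", item, skipped)
		return nil
	})
}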

20
vendor/modules.txt vendored
View File

@ -17,6 +17,7 @@ github.com/cloudfoundry/jibber_jabber
## explicit
github.com/creack/pty
# github.com/davecgh/go-spew v1.1.1
## explicit
github.com/davecgh/go-spew/spew
# github.com/emirpasic/gods v1.12.0
github.com/emirpasic/gods/containers
@ -90,6 +91,15 @@ github.com/go-git/go-billy/v5/util
# github.com/go-logfmt/logfmt v0.5.0
## explicit
github.com/go-logfmt/logfmt
# github.com/gobwas/glob v0.2.3
github.com/gobwas/glob
github.com/gobwas/glob/compiler
github.com/gobwas/glob/match
github.com/gobwas/glob/syntax
github.com/gobwas/glob/syntax/ast
github.com/gobwas/glob/syntax/lexer
github.com/gobwas/glob/util/runes
github.com/gobwas/glob/util/strings
# github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3
## explicit
github.com/golang-collections/collections/stack
@ -106,6 +116,8 @@ github.com/imdario/mergo
# github.com/integrii/flaggy v1.4.0
## explicit
github.com/integrii/flaggy
# github.com/iriri/minimal/gitignore v0.3.2
## explicit
# github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99
github.com/jbenet/go-context/io
# github.com/jesseduffield/go-git/v5 v5.1.2-0.20201006095850-341962be15a4
@ -155,6 +167,9 @@ github.com/jesseduffield/go-git/v5/utils/merkletrie/noder
# github.com/jesseduffield/gocui v0.3.1-0.20211017220056-b2fc03c74a6f
## explicit
github.com/jesseduffield/gocui
# github.com/jesseduffield/minimal/gitignore v0.3.3-0.20211018110810-9cde264e6b1e
## explicit
github.com/jesseduffield/minimal/gitignore
# github.com/jesseduffield/yaml v2.1.0+incompatible
## explicit
github.com/jesseduffield/yaml
@ -193,6 +208,8 @@ github.com/mitchellh/go-homedir
## explicit
# github.com/onsi/gomega v1.7.1
## explicit
# github.com/ozeidan/fuzzy-patricia v3.0.0+incompatible
## explicit
# github.com/pmezard/go-difflib v1.0.0
github.com/pmezard/go-difflib/difflib
# github.com/rivo/uniseg v0.2.0
@ -256,6 +273,9 @@ golang.org/x/term
golang.org/x/text/encoding
golang.org/x/text/encoding/internal/identifier
golang.org/x/text/transform
# gopkg.in/ozeidan/fuzzy-patricia.v3 v3.0.0
## explicit
gopkg.in/ozeidan/fuzzy-patricia.v3/patricia
# gopkg.in/warnings.v0 v0.1.2
gopkg.in/warnings.v0
# gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c