mirror of https://github.com/jesseduffield/lazygit.git synced 2025-06-15 00:15:32 +02:00

Bump go-git

Author: Stefan Haller
Date:   2025-04-09 10:38:46 +02:00
Parent: da0105c16b
Commit: 4cf49ff449

527 changed files with 70489 additions and 10167 deletions

View File

@@ -0,0 +1,29 @@
+package path_util
+
+import (
+	"os"
+	"os/user"
+	"strings"
+)
+
+func ReplaceTildeWithHome(path string) (string, error) {
+	if strings.HasPrefix(path, "~") {
+		firstSlash := strings.Index(path, "/")
+		if firstSlash == 1 {
+			home, err := os.UserHomeDir()
+			if err != nil {
+				return path, err
+			}
+			return strings.Replace(path, "~", home, 1), nil
+		} else if firstSlash > 1 {
+			username := path[1:firstSlash]
+			userAccount, err := user.Lookup(username)
+			if err != nil {
+				return path, err
+			}
+			return strings.Replace(path, path[:firstSlash], userAccount.HomeDir, 1), nil
+		}
+	}
+	return path, nil
+}
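
For orientation, a hedged sketch of how this helper might be called; the import path below is assumed from the package name and lazygit's layout, not taken from the diff:

package main

import (
	"fmt"

	"github.com/jesseduffield/lazygit/pkg/utils/path_util" // assumed import path
)

func main() {
	// "~/repos/lazygit" expands via os.UserHomeDir(); "~bob/repos" would be
	// expanded via user.Lookup("bob"); a bare "~" with no slash is returned as-is.
	expanded, err := path_util.ReplaceTildeWithHome("~/repos/lazygit")
	if err != nil {
		fmt.Println("could not expand:", err)
		return
	}
	fmt.Println(expanded) // e.g. /home/alice/repos/lazygit
}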

View File

@@ -322,6 +322,8 @@ func (p *Parser) parseAt() (Revisioner, error) {
 			}
 			return AtDate{t}, nil
+		case tok == eof:
+			return nil, &ErrInvalidRevision{s: `missing "}" in @{<data>} structure`}
 		default:
 			date += lit
 		}
@@ -424,6 +426,8 @@ func (p *Parser) parseCaretBraces() (Revisioner, error) {
 			p.unscan()
 		case tok != slash && start:
 			return nil, &ErrInvalidRevision{fmt.Sprintf(`"%s" is not a valid revision suffix brace component`, lit)}
+		case tok == eof:
+			return nil, &ErrInvalidRevision{s: `missing "}" in ^{<data>} structure`}
 		case tok != cbrace:
 			p.unscan()
 			re += lit
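
The two added eof cases make the revision parser fail fast on an unterminated "@{...}" or "^{...}" suffix instead of reading on. A rough illustration through go-git's public ResolveRevision API; the repository path and revision string here are made up for the example:

package main

import (
	"fmt"

	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
)

func main() {
	repo, err := git.PlainOpen(".") // any local repository
	if err != nil {
		panic(err)
	}

	// The closing "}" is deliberately missing, so the parser should now report
	// an invalid-revision error rather than keep consuming input.
	_, err = repo.ResolveRevision(plumbing.Revision("HEAD@{2025-04-09 10:00:00 +0200"))
	fmt.Println(err) // expected: an "invalid revision" style error (exact message may vary)
}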

View File

@@ -43,6 +43,11 @@ func tokenizeExpression(ch rune, tokenType token, check runeCategoryValidator, r
 	return tokenType, string(data), nil
 }
 
+// maxRevisionLength holds the maximum length that will be parsed for a
+// revision. Git itself doesn't enforce a max length, but rather leans on
+// the OS to enforce it via its ARG_MAX.
+const maxRevisionLength = 128 * 1024 // 128kb
+
 var zeroRune = rune(0)
 
 // scanner represents a lexical scanner.
@@ -52,7 +57,7 @@ type scanner struct {
 
 // newScanner returns a new instance of scanner.
 func newScanner(r io.Reader) *scanner {
-	return &scanner{r: bufio.NewReader(r)}
+	return &scanner{r: bufio.NewReader(io.LimitReader(r, maxRevisionLength))}
 }
 
 // Scan extracts tokens and their strings counterpart
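
The size cap relies on io.LimitReader, which simply stops yielding bytes once the limit is reached, so an oversized revision string can no longer keep the scanner busy. A small standalone sketch of that behaviour (the constant below is a stand-in, not go-git's):

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

func main() {
	const maxLen = 16 // stand-in for maxRevisionLength

	huge := strings.NewReader(strings.Repeat("a", 1000))
	r := bufio.NewReader(io.LimitReader(huge, maxLen))

	data, err := io.ReadAll(r)
	fmt.Println(len(data), err) // 16 <nil>: bytes past the limit are silently dropped
}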

View File

@@ -5,8 +5,10 @@ import (
 )
 
 var (
-	isSchemeRegExp   = regexp.MustCompile(`^[^:]+://`)
-	scpLikeUrlRegExp = regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5})(?:\/|:))?(?P<path>[^\\].*\/[^\\].*)$`)
+	isSchemeRegExp = regexp.MustCompile(`^[^:]+://`)
+
+	// Ref: https://github.com/git/git/blob/master/Documentation/urls.txt#L37
+	scpLikeUrlRegExp = regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5}):)?(?P<path>[^\\].*)$`)
 )
 
 // MatchesScheme returns true if the given string matches a URL-like
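
To see what the tightened pattern captures, it can be run against a typical scp-like remote; the URL below is just an example, not something from the commit:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the updated scpLikeUrlRegExp above.
	re := regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5}):)?(?P<path>[^\\].*)$`)

	m := re.FindStringSubmatch("git@github.com:jesseduffield/lazygit.git")
	for i, name := range re.SubexpNames() {
		if i > 0 && name != "" {
			fmt.Printf("%s=%q\n", name, m[i])
		}
	}
	// Output: user="git" host="github.com" port="" path="jesseduffield/lazygit.git"
}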