mirror of https://github.com/rclone/rclone.git synced 2025-11-23 21:44:49 +02:00

fix: comment typos

n4n5
2025-11-13 14:47:40 +01:00
committed by GitHub
parent cf94824426
commit 71138082ea
8 changed files with 9 additions and 9 deletions


@@ -51,7 +51,7 @@ type LifecycleRule struct {
 // ServerSideEncryption is a configuration object for B2 Server-Side Encryption
 type ServerSideEncryption struct {
     Mode           string `json:"mode"`
-    Algorithm      string `json:"algorithm"`      // Encryption algorith to use
+    Algorithm      string `json:"algorithm"`      // Encryption algorithm to use
     CustomerKey    string `json:"customerKey"`    // User provided Base64 encoded key that is used by the server to encrypt files
     CustomerKeyMd5 string `json:"customerKeyMd5"` // An MD5 hash of the decoded key
 }
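
The ServerSideEncryption struct touched above is B2's server-side encryption configuration object. As a rough, self-contained sketch of how its fields fit together for a customer-supplied key (SSE-C), the snippet below generates a key and fills the struct; the "SSE-C"/"AES256" values and the base64 encoding of the MD5 digest follow the Backblaze B2 documentation rather than this commit, so treat them as assumptions.

package main

import (
    "crypto/md5"
    "crypto/rand"
    "encoding/base64"
    "fmt"
)

// ServerSideEncryption mirrors the struct from the hunk above (comments
// shortened); it is redeclared here only to keep the example standalone.
type ServerSideEncryption struct {
    Mode           string `json:"mode"`
    Algorithm      string `json:"algorithm"`
    CustomerKey    string `json:"customerKey"`
    CustomerKeyMd5 string `json:"customerKeyMd5"`
}

func main() {
    // Generate a random 256-bit customer key.
    key := make([]byte, 32)
    if _, err := rand.Read(key); err != nil {
        panic(err)
    }
    // The key is sent base64 encoded; the MD5 is computed over the raw
    // (decoded) key bytes, matching the field comments in the diff.
    sum := md5.Sum(key)
    sse := ServerSideEncryption{
        Mode:           "SSE-C",  // assumed mode name, per B2 docs
        Algorithm:      "AES256", // assumed algorithm name, per B2 docs
        CustomerKey:    base64.StdEncoding.EncodeToString(key),
        CustomerKeyMd5: base64.StdEncoding.EncodeToString(sum[:]),
    }
    fmt.Printf("%+v\n", sse)
}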


@@ -75,7 +75,7 @@ func TestLinkValid(t *testing.T) {
             Expire: Time(time.Now().Add(time.Hour)),
         },
         expected: true,
-        desc:     "should fallback to Expire field when URL expire parameter is unparseable",
+        desc:     "should fallback to Expire field when URL expire parameter is unparsable",
     },
     {
         name: "invalid when both URL expire and Expire field are expired",


@@ -136,7 +136,7 @@ func (u *Uploader) UploadChunk(ctx context.Context, cnt int, options ...fs.OpenO
     size, err := u.upload.stream.Read(data)
     if err != nil {
-        fs.Errorf(u.fs, "Chunk %d: Error: Can not read from data strem: %v", cnt, err)
+        fs.Errorf(u.fs, "Chunk %d: Error: Can not read from data stream: %v", cnt, err)
         return err
     }


@@ -961,7 +961,7 @@ func (o *Object) setMetaData(info *api.ResourceInfoResponse) (err error) {
     return nil
 }
 
-// readMetaData reads ands sets the new metadata for a storage.Object
+// readMetaData reads and sets the new metadata for a storage.Object
 func (o *Object) readMetaData(ctx context.Context) (err error) {
     if o.hasMetaData {
         return nil


@@ -1,6 +1,6 @@
 //go:build !plan9
 
-// Package list inplements 'rclone archive list'
+// Package list implements 'rclone archive list'
 package list
 
 import (


@@ -23,7 +23,7 @@ func init() {
 var commandDefinition = &cobra.Command{
     Use:   "cryptcheck remote:path cryptedremote:path",
     Short: `Cryptcheck checks the integrity of an encrypted remote.`,
-    Long: `Checks a remote against a [crypted](/crypt/) remote. This is the equivalent
+    Long: `Checks a remote against an [encrypted](/crypt/) remote. This is the equivalent
 of running rclone [check](/commands/rclone_check/), but able to check the
 checksums of the encrypted remote.


@@ -68,7 +68,7 @@ func init() {
     Add(Call{
         Path:  "rc/panic",
         Fn:    rcPanic,
-        Title: "This returns an error by panicing",
+        Title: "This returns an error by panicking",
         Help: `
 This returns an error with the input as part of its error string.
 Useful for testing error handling.`,
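
The rc/panic entry above documents a call that produces its error by panicking, with the rc layer expected to turn the panic back into an ordinary error for the caller. A minimal, self-contained sketch of that recover-to-error pattern (an illustration of the general technique, not rclone's actual rc server code) could look like:

package main

import (
    "errors"
    "fmt"
)

// callAndRecover runs fn and converts any panic into a returned error,
// which is the general shape a dispatcher needs when a handler signals
// failure by panicking, as rc/panic does.
func callAndRecover(fn func() error) (err error) {
    defer func() {
        if r := recover(); r != nil {
            err = fmt.Errorf("panic recovered: %v", r)
        }
    }()
    return fn()
}

func main() {
    err := callAndRecover(func() error {
        // Mimic a handler that reports its input by panicking.
        panic(errors.New("arbitrary test error"))
    })
    fmt.Println(err) // panic recovered: arbitrary test error
}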


@@ -420,14 +420,14 @@ backends:
     fastlist: true
     ignore:
       # It just isn't possible to preserve the existing file with azure blob
-      # and make sure we don't leak uncomitted blocks.
+      # and make sure we don't leak uncommitted blocks.
       - TestMultithreadCopyAbort
   - backend: "azureblob"
     remote: "TestAzureBlob,directory_markers:"
     fastlist: true
     ignore:
       # It just isn't possible to preserve the existing file with azure blob
-      # and make sure we don't leak uncomitted blocks.
+      # and make sure we don't leak uncommitted blocks.
       - TestMultithreadCopyAbort
   - backend: "azurefiles"
     remote: "TestAzureFiles:"