
gcs: add support for automatic credentials

We can now also support implicit credentials using the Application
Default Credentials strategy
Nicola Murino
2020-02-19 09:41:15 +01:00
parent c8cc81cf4a
commit ae8ed75ae5
15 changed files with 215 additions and 115 deletions

View File

@@ -587,7 +587,7 @@ Other notes:
Each user can be mapped with a Google Cloud Storage bucket or a bucket virtual folder, this way the mapped bucket/virtual folder is exposed over SFTP/SCP. This backend is very similar to the S3 backend and it has the same limitations.
To connect SFTPGo to Google Cloud Storage you need a credentials file that you can obtain from the Google Cloud Console, take a look at the "Setting up authentication" section [here](https://cloud.google.com/storage/docs/reference/libraries) for details.
To connect SFTPGo to Google Cloud Storage you can use the Application Default Credentials (ADC) strategy to try to find your application's credentials automatically, or you can explicitly provide a JSON credentials file obtained from the Google Cloud Console; take a look [here](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application) for details.
You can optionally specify a [storage class](https://cloud.google.com/storage/docs/storage-classes) too, leave blank to use the default storage class.
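For context, the snippet below sketches how the two credential modes end up being used when the GCS client is created; it mirrors the `vfs` change later in this commit, but the package and helper names here are illustrative only, not SFTPGo code.

```go
package gcsexample

import (
	"context"

	"cloud.google.com/go/storage"
	"google.golang.org/api/option"
)

// newGCSClient sketches the credential selection added by this commit:
// with automatic credentials the client library discovers credentials via
// the ADC strategy (GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials
// or the metadata server on GCP), otherwise the given JSON file is used.
func newGCSClient(ctx context.Context, automaticCredentials int, credentialFile string) (*storage.Client, error) {
	if automaticCredentials > 0 {
		return storage.NewClient(ctx)
	}
	return storage.NewClient(ctx, option.WithCredentialsFile(credentialFile))
}
```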
@@ -622,6 +622,7 @@ Flags:
-S, --advertise-service Advertise SFTP service using multicast DNS (default true)
-d, --directory string Path to the directory to serve. This can be an absolute path or a path relative to the current directory (default ".")
-f, --fs-provider int 0 means local filesystem, 1 Amazon S3 compatible, 2 Google Cloud Storage
--gcs-automatic-credentials int 0 means explicit credentials using a JSON credentials file, 1 automatic (default 1)
--gcs-bucket string
--gcs-credentials-file string Google Cloud Storage JSON credentials file
--gcs-key-prefix string Allows to restrict access to the virtual folder identified by this prefix and its contents
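The flag help above is easy to misread: `--gcs-automatic-credentials` defaults to 1, but providing `--gcs-credentials-file` switches the portable user back to explicit credentials, as the `portable` command change below does. A minimal sketch of that decision, with illustrative names only:

```go
package portableexample

import (
	"encoding/base64"
	"fmt"
	"io/ioutil"
)

// resolveGCSCredentials is a hypothetical helper mirroring the portable-mode
// behaviour: automatic credentials (ADC) are the default, but a JSON
// credentials file, when given, is read, base64 encoded and forces explicit mode.
func resolveGCSCredentials(credentialsFile string) (credentialsB64 string, automaticCredentials int, err error) {
	automaticCredentials = 1 // matches the --gcs-automatic-credentials default
	if credentialsFile == "" {
		return "", automaticCredentials, nil
	}
	creds, err := ioutil.ReadFile(credentialsFile)
	if err != nil {
		return "", 0, fmt.Errorf("unable to read credentials file: %v", err)
	}
	return base64.StdEncoding.EncodeToString(creds), 0, nil
}
```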
@@ -696,6 +697,7 @@ For each account the following properties can be configured:
- `s3_key_prefix`, allows to restrict access to the virtual folder identified by this prefix and its contents
- `gcs_bucket`, required for GCS filesystem
- `gcs_credentials`, Google Cloud Storage JSON credentials base64 encoded
- `gcs_automatic_credentials`, integer. Set to 1 to use Application Default Credentials strategy or set to 0 to use explicit credentials via `gcs_credentials`
- `gcs_storage_class`
- `gcs_key_prefix`, allows to restrict access to the virtual folder identified by this prefix and its contents
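Taken together, a GCS user that relies on automatic credentials can omit `gcs_credentials` entirely: validation only requires credentials when `gcs_automatic_credentials` is 0. The sketch below shows the resulting JSON shape; the struct is a local illustration built from the json tags added in this commit (the `bucket` tag is assumed), not the SFTPGo type.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// gcsFsConfig reproduces the JSON shape of the gcsconfig section,
// for illustration purposes only.
type gcsFsConfig struct {
	Bucket               string `json:"bucket,omitempty"`
	KeyPrefix            string `json:"key_prefix,omitempty"`
	Credentials          string `json:"credentials,omitempty"`
	AutomaticCredentials int    `json:"automatic_credentials,omitempty"`
	StorageClass         string `json:"storage_class,omitempty"`
}

func main() {
	// With automatic credentials enabled no base64 credentials are needed.
	cfg := gcsFsConfig{
		Bucket:               "my-bucket",       // hypothetical bucket name
		KeyPrefix:            "somedir/subdir/", // optional: restrict to a virtual folder
		AutomaticCredentials: 1,                 // use the ADC strategy
	}
	out, _ := json.Marshal(cfg)
	fmt.Println(string(out))
	// prints: {"bucket":"my-bucket","key_prefix":"somedir/subdir/","automatic_credentials":1}
}
```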

View File

@@ -35,6 +35,7 @@ var (
portableS3KeyPrefix string
portableGCSBucket string
portableGCSCredentialsFile string
portableGCSAutoCredentials int
portableGCSStorageClass string
portableGCSKeyPrefix string
portableCmd = &cobra.Command{
@@ -48,12 +49,16 @@ Please take a look at the usage below to customize the serving parameters`,
Run: func(cmd *cobra.Command, args []string) {
portableDir := directoryToServe
if !filepath.IsAbs(portableDir) {
if portableFsProvider == 0 {
portableDir, _ = filepath.Abs(portableDir)
} else {
portableDir = os.TempDir()
}
}
permissions := make(map[string][]string)
permissions["/"] = portablePermissions
portableGCSCredentials := ""
if portableFsProvider == 2 {
if portableFsProvider == 2 && len(portableGCSCredentialsFile) > 0 {
fi, err := os.Stat(portableGCSCredentialsFile)
if err != nil {
fmt.Printf("Invalid GCS credentials file: %v\n", err)
@@ -69,6 +74,7 @@ Please take a look at the usage below to customize the serving parameters`,
fmt.Printf("Unable to read credentials file: %v\n", err)
}
portableGCSCredentials = base64.StdEncoding.EncodeToString(creds)
portableGCSAutoCredentials = 0
}
service := service.Service{
ConfigDir: defaultConfigDir,
@@ -102,6 +108,7 @@ Please take a look at the usage below to customize the serving parameters`,
GCSConfig: vfs.GCSFsConfig{
Bucket: portableGCSBucket,
Credentials: portableGCSCredentials,
AutomaticCredentials: portableGCSAutoCredentials,
StorageClass: portableGCSStorageClass,
KeyPrefix: portableGCSKeyPrefix,
},
@@ -147,5 +154,7 @@ func init() {
portableCmd.Flags().StringVar(&portableGCSKeyPrefix, "gcs-key-prefix", "", "Allows to restrict access to the virtual folder "+
"identified by this prefix and its contents")
portableCmd.Flags().StringVar(&portableGCSCredentialsFile, "gcs-credentials-file", "", "Google Cloud Storage JSON credentials file")
portableCmd.Flags().IntVar(&portableGCSAutoCredentials, "gcs-automatic-credentials", 1, "0 means explicit credentials using a JSON "+
"credentials file, 1 automatic")
rootCmd.AddCommand(portableCmd)
}

View File

@@ -813,6 +813,9 @@ func addCredentialsToUser(user *User) error {
if user.FsConfig.Provider != 2 {
return nil
}
if user.FsConfig.GCSConfig.AutomaticCredentials > 0 {
return nil
}
cred, err := ioutil.ReadFile(user.getGCSCredentialsFilePath())
if err != nil {
return err

View File

@@ -421,6 +421,7 @@ func (u *User) getACopy() User {
GCSConfig: vfs.GCSFsConfig{
Bucket: u.FsConfig.GCSConfig.Bucket,
CredentialFile: u.FsConfig.GCSConfig.CredentialFile,
AutomaticCredentials: u.FsConfig.GCSConfig.AutomaticCredentials,
StorageClass: u.FsConfig.GCSConfig.StorageClass,
KeyPrefix: u.FsConfig.GCSConfig.KeyPrefix,
},

View File

@@ -462,6 +462,9 @@ func compareUserFsConfig(expected *dataprovider.User, actual *dataprovider.User)
expected.FsConfig.GCSConfig.KeyPrefix+"/" != actual.FsConfig.GCSConfig.KeyPrefix {
return errors.New("GCS key prefix mismatch")
}
if expected.FsConfig.GCSConfig.AutomaticCredentials != actual.FsConfig.GCSConfig.AutomaticCredentials {
return errors.New("GCS automatic credentials mismatch")
}
return nil
}

View File

@@ -352,6 +352,7 @@ func TestAddUserInvalidFsConfig(t *testing.T) {
}
u.FsConfig.GCSConfig.KeyPrefix = "somedir/subdir/"
u.FsConfig.GCSConfig.Credentials = ""
u.FsConfig.GCSConfig.AutomaticCredentials = 0
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
if err != nil {
t.Errorf("unexpected error adding user with invalid fs config: %v", err)
@@ -519,6 +520,14 @@ func TestUserGCSConfig(t *testing.T) {
if err != nil {
t.Errorf("unable to add user: %v", err)
}
os.RemoveAll(credentialsPath)
os.MkdirAll(credentialsPath, 0700)
user.FsConfig.GCSConfig.Credentials = ""
user.FsConfig.GCSConfig.AutomaticCredentials = 1
user, _, err = httpd.UpdateUser(user, http.StatusOK)
if err != nil {
t.Errorf("unable to update user: %v", err)
}
user.FsConfig.Provider = 1
user.FsConfig.S3Config.Bucket = "test1"
user.FsConfig.S3Config.Region = "us-east-1"
@@ -1937,6 +1946,26 @@ func TestWebUserGCSMock(t *testing.T) {
if updateUser.FsConfig.GCSConfig.KeyPrefix != user.FsConfig.GCSConfig.KeyPrefix {
t.Error("GCS key prefix mismatch")
}
form.Set("gcs_auto_credentials", "on")
b, contentType, _ = getMultipartFormData(form, "", "")
req, _ = http.NewRequest(http.MethodPost, webUserPath+"/"+strconv.FormatInt(user.ID, 10), &b)
req.Header.Set("Content-Type", contentType)
rr = executeRequest(req)
checkResponseCode(t, http.StatusSeeOther, rr.Code)
req, _ = http.NewRequest(http.MethodGet, userPath+"?limit=1&offset=0&order=ASC&username="+user.Username, nil)
rr = executeRequest(req)
checkResponseCode(t, http.StatusOK, rr.Code)
err = render.DecodeJSON(rr.Body, &users)
if err != nil {
t.Errorf("Error decoding users: %v", err)
}
if len(users) != 1 {
t.Errorf("1 user is expected")
}
updateUser = users[0]
if updateUser.FsConfig.GCSConfig.AutomaticCredentials != 1 {
t.Error("GCS automatic credentials mismatch")
}
req, _ = http.NewRequest(http.MethodDelete, userPath+"/"+strconv.FormatInt(user.ID, 10), nil)
rr = executeRequest(req)
checkResponseCode(t, http.StatusOK, rr.Code)

View File

@@ -286,8 +286,13 @@ func TestCompareUserFsConfig(t *testing.T) {
t.Errorf("S3 key prefix does not match")
}
expected.FsConfig.S3Config.KeyPrefix = ""
}
func TestCompareUserGCSConfig(t *testing.T) {
expected := &dataprovider.User{}
actual := &dataprovider.User{}
expected.FsConfig.GCSConfig.KeyPrefix = "somedir/subdir"
err = compareUserFsConfig(expected, actual)
err := compareUserFsConfig(expected, actual)
if err == nil {
t.Errorf("GCS key prefix does not match")
}
@@ -304,6 +309,12 @@ func TestCompareUserFsConfig(t *testing.T) {
t.Errorf("GCS storage class does not match")
}
expected.FsConfig.GCSConfig.StorageClass = ""
expected.FsConfig.GCSConfig.AutomaticCredentials = 1
err = compareUserFsConfig(expected, actual)
if err == nil {
t.Errorf("GCS automatic credentials does not match")
}
expected.FsConfig.GCSConfig.AutomaticCredentials = 0
}
func TestGCSWebInvalidFormFile(t *testing.T) {

View File

@@ -2,7 +2,7 @@ openapi: 3.0.1
info:
title: SFTPGo
description: 'SFTPGo REST API'
version: 1.7.0
version: 1.8.0
servers:
- url: /api/v1
@@ -987,6 +987,16 @@ components:
type: string
format: byte
description: Google Cloud Storage JSON credentials base64 encoded. This field must be populated only when adding/updating a user. It will always be omitted, since it contains sensitive data, when you search/get users. The credentials will be stored in the configured "credentials_path"
automatic_credentials:
type: integer
nullable: true
enum:
- 0
- 1
description: >
Automatic credentials:
* `0` - disabled, explicit credentials, using a JSON credentials file, must be provided. This is the default value if the field is null
* `1` - enabled, we try to use the Application Default Credentials (ADC) strategy to find your application's credentials
storage_class:
type: string
key_prefix:

View File

@@ -246,6 +246,12 @@ func getFsConfigFromUserPostFields(r *http.Request) (dataprovider.Filesystem, er
fs.GCSConfig.Bucket = r.Form.Get("gcs_bucket")
fs.GCSConfig.StorageClass = r.Form.Get("gcs_storage_class")
fs.GCSConfig.KeyPrefix = r.Form.Get("gcs_key_prefix")
autoCredentials := r.Form.Get("gcs_auto_credentials")
if len(autoCredentials) > 0 {
fs.GCSConfig.AutomaticCredentials = 1
} else {
fs.GCSConfig.AutomaticCredentials = 0
}
credentials, _, err := r.FormFile("gcs_credential_file")
if err == http.ErrMissingFile {
return fs, nil
@@ -262,6 +268,7 @@ func getFsConfigFromUserPostFields(r *http.Request) (dataprovider.Filesystem, er
return fs, err
}
fs.GCSConfig.Credentials = base64.StdEncoding.EncodeToString(fileBytes)
fs.GCSConfig.AutomaticCredentials = 0
}
return fs, nil
}

View File

@@ -75,7 +75,8 @@ class SFTPGoApiRequests:
max_sessions=0, quota_size=0, quota_files=0, permissions={}, upload_bandwidth=0, download_bandwidth=0,
status=1, expiration_date=0, allowed_ip=[], denied_ip=[], fs_provider='local', s3_bucket='',
s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file=''):
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic'):
user = {'id':user_id, 'username':username, 'uid':uid, 'gid':gid,
'max_sessions':max_sessions, 'quota_size':quota_size, 'quota_files':quota_files,
'upload_bandwidth':upload_bandwidth, 'download_bandwidth':download_bandwidth,
@@ -95,7 +96,8 @@ class SFTPGoApiRequests:
user.update({'filters':self.buildFilters(allowed_ip, denied_ip)})
user.update({'filesystem':self.buildFsConfig(fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret,
s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket,
gcs_key_prefix, gcs_storage_class, gcs_credentials_file)})
gcs_key_prefix, gcs_storage_class, gcs_credentials_file,
gcs_automatic_credentials)})
return user
def buildPermissions(self, root_perms, subdirs_perms):
@@ -130,7 +132,8 @@ class SFTPGoApiRequests:
return filters
def buildFsConfig(self, fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret, s3_endpoint,
s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class, gcs_credentials_file):
s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials):
fs_config = {'provider':0}
if fs_provider == 'S3':
s3config = {'bucket':s3_bucket, 'region':s3_region, 'access_key':s3_access_key, 'access_secret':
@@ -139,9 +142,14 @@ class SFTPGoApiRequests:
fs_config.update({'provider':1, 's3config':s3config})
elif fs_provider == 'GCS':
gcsconfig = {'bucket':gcs_bucket, 'key_prefix':gcs_key_prefix, 'storage_class':gcs_storage_class}
if gcs_automatic_credentials == "automatic":
gcsconfig.update({'automatic_credentials':1})
else:
gcsconfig.update({'automatic_credentials':0})
if gcs_credentials_file:
with open(gcs_credentials_file) as creds:
gcsconfig.update({'credentials':base64.b64encode(creds.read().encode('UTF-8')).decode('UTF-8')})
gcsconfig.update({'credentials':base64.b64encode(creds.read().encode('UTF-8')).decode('UTF-8'),
'automatic_credentials':0})
fs_config.update({'provider':2, 'gcsconfig':gcsconfig})
return fs_config
@@ -158,12 +166,12 @@ class SFTPGoApiRequests:
quota_files=0, perms=[], upload_bandwidth=0, download_bandwidth=0, status=1, expiration_date=0,
subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local', s3_bucket='', s3_region='',
s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='', s3_key_prefix='', gcs_bucket='',
gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file=''):
gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='', gcs_automatic_credentials='automatic'):
u = self.buildUserObject(0, username, password, public_keys, home_dir, uid, gid, max_sessions,
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file)
gcs_credentials_file, gcs_automatic_credentials)
r = requests.post(self.userPath, json=u, auth=self.auth, verify=self.verify)
self.printResponse(r)
@@ -171,12 +179,13 @@ class SFTPGoApiRequests:
quota_size=0, quota_files=0, perms=[], upload_bandwidth=0, download_bandwidth=0, status=1,
expiration_date=0, subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local',
s3_bucket='', s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file=''):
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic'):
u = self.buildUserObject(user_id, username, password, public_keys, home_dir, uid, gid, max_sessions,
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file)
gcs_credentials_file, gcs_automatic_credentials)
r = requests.put(urlparse.urljoin(self.userPath, 'user/' + str(user_id)), json=u, auth=self.auth, verify=self.verify)
self.printResponse(r)
@@ -448,6 +457,8 @@ def addCommonUserArguments(parser):
' Default: %(default)s')
parser.add_argument('--gcs-storage-class', type=str, default='', help='Default: %(default)s')
parser.add_argument('--gcs-credentials-file', type=str, default='', help='Default: %(default)s')
parser.add_argument('--gcs-automatic-credentials', type=str, default='automatic', choices=['explicit', 'automatic'],
help='If you provide a credentials file, this argument will be set to "explicit". Default: %(default)s')
if __name__ == '__main__':
@@ -558,7 +569,7 @@ if __name__ == '__main__':
args.status, getDatetimeAsMillisSinceEpoch(args.expiration_date), args.subdirs_permissions, args.allowed_ip,
args.denied_ip, args.fs, args.s3_bucket, args.s3_region, args.s3_access_key, args.s3_access_secret,
args.s3_endpoint, args.s3_storage_class, args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix,
args.gcs_storage_class, args.gcs_credentials_file)
args.gcs_storage_class, args.gcs_credentials_file, args.gcs_automatic_credentials)
elif args.command == 'update-user':
api.updateUser(args.id, args.username, args.password, args.public_keys, args.home_dir, args.uid, args.gid,
args.max_sessions, args.quota_size, args.quota_files, args.permissions, args.upload_bandwidth,
@@ -566,7 +577,7 @@ if __name__ == '__main__':
args.subdirs_permissions, args.allowed_ip, args.denied_ip, args.fs, args.s3_bucket, args.s3_region,
args.s3_access_key, args.s3_access_secret, args.s3_endpoint, args.s3_storage_class,
args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix, args.gcs_storage_class,
args.gcs_credentials_file)
args.gcs_credentials_file, args.gcs_automatic_credentials)
elif args.command == 'delete-user':
api.deleteUser(args.id)
elif args.command == 'get-users':

View File

@@ -172,9 +172,10 @@ func (s *Service) StartPortableMode(sftpdPort int, enabledSSHCommands []string,
config.SetSFTPDConfig(sftpdConf)
err = s.Start()
if err == nil {
if err != nil {
return err
}
var mDNSService *zeroconf.Server
var err error
if advertiseService {
version := utils.GetAppVersion()
meta := []string{
@@ -215,9 +216,16 @@ func (s *Service) StartPortableMode(sftpdPort int, enabledSSHCommands []string,
}
s.Stop()
}()
var dirToServe string
if s.PortableUser.FsConfig.Provider == 1 {
dirToServe = s.PortableUser.FsConfig.S3Config.KeyPrefix
} else if s.PortableUser.FsConfig.Provider == 2 {
dirToServe = s.PortableUser.FsConfig.GCSConfig.KeyPrefix
} else {
dirToServe = s.PortableUser.HomeDir
}
logger.InfoToConsole("Portable mode ready, SFTP port: %v, user: %#v, password: %#v, public keys: %v, directory: %#v, "+
"permissions: %v, enabled ssh commands: %v", sftpdConf.BindPort, s.PortableUser.Username, s.PortableUser.Password,
s.PortableUser.PublicKeys, s.PortableUser.HomeDir, s.PortableUser.Permissions, sftpdConf.EnabledSSHCommands)
}
return err
s.PortableUser.PublicKeys, dirToServe, s.PortableUser.Permissions, sftpdConf.EnabledSSHCommands)
return nil
}

View File

@@ -203,13 +203,13 @@
</div>
<div class="form-group row s3">
<label for="idS3Bucket" class="col-sm-2 col-form-label">S3 Bucket</label>
<label for="idS3Bucket" class="col-sm-2 col-form-label">Bucket</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Bucket" name="s3_bucket" placeholder=""
value="{{.User.FsConfig.S3Config.Bucket}}" maxlength="255">
</div>
<div class="col-sm-2"></div>
<label for="idS3Region" class="col-sm-2 col-form-label">S3 Region</label>
<label for="idS3Region" class="col-sm-2 col-form-label">Region</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Region" name="s3_region" placeholder=""
value="{{.User.FsConfig.S3Config.Region}}" maxlength="255">
@@ -217,13 +217,13 @@
</div>
<div class="form-group row s3">
<label for="idS3AccessKey" class="col-sm-2 col-form-label">S3 Access Key</label>
<label for="idS3AccessKey" class="col-sm-2 col-form-label">Access Key</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3AccessKey" name="s3_access_key" placeholder=""
value="{{.User.FsConfig.S3Config.AccessKey}}" maxlength="255">
</div>
<div class="col-sm-2"></div>
<label for="idS3AccessSecret" class="col-sm-2 col-form-label">S3 Access Secret</label>
<label for="idS3AccessSecret" class="col-sm-2 col-form-label">Access Secret</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3AccessSecret" name="s3_access_secret" placeholder=""
value="{{.User.FsConfig.S3Config.AccessSecret}}" maxlength="1000">
@@ -231,13 +231,13 @@
</div>
<div class="form-group row s3">
<label for="idS3StorageClass" class="col-sm-2 col-form-label">S3 Storage Class</label>
<label for="idS3StorageClass" class="col-sm-2 col-form-label">Storage Class</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3StorageClass" name="s3_storage_class" placeholder=""
value="{{.User.FsConfig.S3Config.StorageClass}}" maxlength="1000">
</div>
<div class="col-sm-2"></div>
<label for="idS3Endpoint" class="col-sm-2 col-form-label">S3 Endpoint</label>
<label for="idS3Endpoint" class="col-sm-2 col-form-label">Endpoint</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Endpoint" name="s3_endpoint" placeholder=""
value="{{.User.FsConfig.S3Config.Endpoint}}" maxlength="255">
@@ -245,7 +245,7 @@
</div>
<div class="form-group row s3">
<label for="idS3KeyPrefix" class="col-sm-2 col-form-label">S3 Key Prefix</label>
<label for="idS3KeyPrefix" class="col-sm-2 col-form-label">Key Prefix</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="idS3KeyPrefix" name="s3_key_prefix" placeholder=""
value="{{.User.FsConfig.S3Config.KeyPrefix}}" maxlength="255" aria-describedby="S3KeyPrefixHelpBlock">
@@ -256,7 +256,7 @@
</div>
<div class="form-group row gcs">
<label for="idGCSBucket" class="col-sm-2 col-form-label">GCS Bucket</label>
<label for="idGCSBucket" class="col-sm-2 col-form-label">Bucket</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="idGCSBucket" name="gcs_bucket" placeholder=""
value="{{.User.FsConfig.GCSConfig.Bucket}}" maxlength="255">
@@ -264,7 +264,7 @@
</div>
<div class="form-group row gcs">
<label for="idGCSCredentialFile" class="col-sm-2 col-form-label">GCS Credentials file</label>
<label for="idGCSCredentialFile" class="col-sm-2 col-form-label">Credentials file</label>
<div class="col-sm-4">
<input type="file" class="form-control-file" id="idGCSCredentialFile" name="gcs_credential_file"
aria-describedby="GCSCredentialsHelpBlock">
@@ -273,15 +273,23 @@
</small>
</div>
<div class="col-sm-1"></div>
<label for="idGCSStorageClass" class="col-sm-2 col-form-label">GCS Storage Class</label>
<label for="idGCSStorageClass" class="col-sm-2 col-form-label">Storage Class</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idGCSStorageClass" name="gcs_storage_class" placeholder=""
value="{{.User.FsConfig.GCSConfig.StorageClass}}" maxlength="255">
</div>
</div>
<div class="form-group gcs">
<div class="form-check">
<input type="checkbox" class="form-check-input" id="idGCSAutoCredentials" name="gcs_auto_credentials"
{{if gt .User.FsConfig.GCSConfig.AutomaticCredentials 0}}checked{{end}}>
<label for="idGCSAutoCredentials" class="form-check-label">Automatic credentials</label>
</div>
</div>
<div class="form-group row gcs">
<label for="idGCSKeyPrefix" class="col-sm-2 col-form-label">GCS Key Prefix</label>
<label for="idGCSKeyPrefix" class="col-sm-2 col-form-label">Key Prefix</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="idGCSKeyPrefix" name="gcs_key_prefix" placeholder=""
value="{{.User.FsConfig.GCSConfig.KeyPrefix}}" maxlength="255" aria-describedby="GCSKeyPrefixHelpBlock">
@@ -341,12 +349,15 @@
function onFilesystemChanged(val){
if (val == '1'){
$('.form-group.row.gcs').hide();
$('.form-group.gcs').hide();
$('.form-group.row.s3').show();
} else if (val == '2'){
$('.form-group.row.gcs').show();
$('.form-group.gcs').show();
$('.form-group.row.s3').hide();
} else {
$('.form-group.row.gcs').hide();
$('.form-group.gcs').hide();
$('.form-group.row.s3').hide();
}
}

View File

@@ -21,10 +21,8 @@ import (
)
var (
// we cannot use attrs selection until this bug is fixed:
//
// https://github.com/googleapis/google-cloud-go/issues/1763
//
// we can use fields selection only when we don't need directory-like results
// with folders
gcsDefaultFieldsSelection = []string{"Name", "Size", "Deleted", "Updated"}
)
@@ -40,6 +38,7 @@ type GCSFsConfig struct {
KeyPrefix string `json:"key_prefix,omitempty"`
CredentialFile string `json:"-"`
Credentials string `json:"credentials,omitempty"`
AutomaticCredentials int `json:"automatic_credentials,omitempty"`
StorageClass string `json:"storage_class,omitempty"`
}
@@ -67,7 +66,11 @@ func NewGCSFs(connectionID, localTempDir string, config GCSFsConfig) (Fs, error)
return fs, err
}
ctx := context.Background()
if fs.config.AutomaticCredentials > 0 {
fs.svc, err = storage.NewClient(ctx)
} else {
fs.svc, err = storage.NewClient(ctx, option.WithCredentialsFile(fs.config.CredentialFile))
}
return fs, err
}
@@ -97,10 +100,6 @@ func (fs GCSFs) Stat(name string) (os.FileInfo, error) {
}
prefix := fs.getPrefixForStat(name)
query := &storage.Query{Prefix: prefix, Delimiter: "/"}
/*err = query.SetAttrSelection(gcsDefaultFieldsSelection)
if err != nil {
return result, err
}*/
ctx, cancelFn := context.WithDeadline(context.Background(), time.Now().Add(fs.ctxTimeout))
defer cancelFn()
bkt := fs.svc.Bucket(fs.config.Bucket)
@@ -299,7 +298,7 @@ func (GCSFs) Chtimes(name string, atime, mtime time.Time) error {
// a list of directory entries.
func (fs GCSFs) ReadDir(dirname string) ([]os.FileInfo, error) {
var result []os.FileInfo
// dirname deve essere già cleaned
// dirname must be already cleaned
prefix := ""
if len(dirname) > 0 && dirname != "." {
prefix = strings.TrimPrefix(dirname, "/")
@@ -308,10 +307,6 @@ func (fs GCSFs) ReadDir(dirname string) ([]os.FileInfo, error) {
}
}
query := &storage.Query{Prefix: prefix, Delimiter: "/"}
/*err := query.SetAttrSelection(gcsDefaultFieldsSelection)
if err != nil {
return result, err
}*/
ctx, cancelFn := context.WithDeadline(context.Background(), time.Now().Add(fs.ctxTimeout))
defer cancelFn()
bkt := fs.svc.Bucket(fs.config.Bucket)

View File

@@ -325,7 +325,7 @@ func (S3Fs) Chtimes(name string, atime, mtime time.Time) error {
// a list of directory entries.
func (fs S3Fs) ReadDir(dirname string) ([]os.FileInfo, error) {
var result []os.FileInfo
// dirname deve essere già cleaned
// dirname must be already cleaned
prefix := ""
if dirname != "/" && dirname != "." {
prefix = strings.TrimPrefix(dirname, "/")

View File

@@ -109,7 +109,7 @@ func ValidateGCSFsConfig(config *GCSFsConfig, credentialsFilePath string) error
config.KeyPrefix += "/"
}
}
if len(config.Credentials) == 0 {
if len(config.Credentials) == 0 && config.AutomaticCredentials == 0 {
fi, err := os.Stat(credentialsFilePath)
if err != nil {
return fmt.Errorf("invalid credentials %v", err)