mirror of https://github.com/pocketbase/pocketbase.git synced 2025-03-19 22:19:23 +02:00

[#114] simplified some code by returning early and added cap for slices

This commit is contained in:
Valley 2022-07-15 00:26:08 +08:00 committed by GitHub
parent 03a7bafa66
commit a16b0c9004
11 changed files with 124 additions and 147 deletions

View File

@@ -64,7 +64,7 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
}
// extract the id of the relations to expand
relIds := []string{}
relIds := make([]string, 0, len(records))
for _, record := range records {
relIds = append(relIds, record.GetStringSliceDataValue(relField.Name)...)
}
@@ -92,7 +92,7 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
for _, model := range records {
relIds := model.GetStringSliceDataValue(relField.Name)
validRels := []*models.Record{}
validRels := make([]*models.Record, 0, len(relIds))
for _, id := range relIds {
if rel, ok := indexedRels[id]; ok {
validRels = append(validRels, rel)
@@ -120,20 +120,18 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
// normalizeExpands normalizes expand strings and merges self containing paths
// (eg. ["a.b.c", "a.b", " test ", " ", "test"] -> ["a.b.c", "test"]).
func normalizeExpands(paths []string) []string {
result := []string{}
// normalize paths
normalized := []string{}
normalized := make([]string, 0, len(paths))
for _, p := range paths {
p := strings.ReplaceAll(p, " ", "") // replace spaces
p = strings.Trim(p, ".") // trim incomplete paths
if p == "" {
continue
p = strings.ReplaceAll(p, " ", "") // replace spaces
p = strings.Trim(p, ".") // trim incomplete paths
if p != "" {
normalized = append(normalized, p)
}
normalized = append(normalized, p)
}
// merge containing paths
result := make([]string, 0, len(normalized))
for i, p1 := range normalized {
var skip bool
for j, p2 := range normalized {
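
Both hunks above swap empty slice literals for make calls that reserve capacity up front, so append never has to grow the backing array inside the loop. A minimal, self-contained sketch of the pattern (the filterNonEmpty helper is hypothetical, not part of PocketBase):

```go
package main

import "fmt"

// filterNonEmpty keeps only the non-empty strings from paths.
// make([]string, 0, len(paths)) allocates the backing array once, so append
// never re-allocates; the trade-off is a possible over-allocation when many
// entries end up filtered out.
func filterNonEmpty(paths []string) []string {
	result := make([]string, 0, len(paths))
	for _, p := range paths {
		if p != "" {
			result = append(result, p)
		}
	}
	return result
}

func main() {
	fmt.Println(filterNonEmpty([]string{"a.b.c", "", "test"})) // [a.b.c test]
}
```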

View File

@@ -43,10 +43,10 @@ func (dao *Dao) LoadProfiles(users []*models.User) error {
}
// extract user ids
ids := []string{}
ids := make([]string, len(users))
usersMap := map[string]*models.User{}
for _, user := range users {
ids = append(ids, user.Id)
for i, user := range users {
ids[i] = user.Id
usersMap[user.Id] = user
}
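
The LoadProfiles hunk above uses the stricter variant: when every iteration produces exactly one element, the slice can be allocated at its final length and written by index instead of appended to. A small sketch with a stand-in User type (not the repository's models.User):

```go
package main

import "fmt"

// User is a stand-in type; only the Id field matters for this example.
type User struct {
	Id string
}

// userIDs collects the ID of every user. Because each user contributes
// exactly one element, the slice is allocated at its final length and
// filled by index, avoiding append bookkeeping entirely.
func userIDs(users []*User) []string {
	ids := make([]string, len(users))
	for i, u := range users {
		ids[i] = u.Id
	}
	return ids
}

func main() {
	fmt.Println(userIDs([]*User{{Id: "u1"}, {Id: "u2"}})) // [u1 u2]
}
```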

View File

@@ -150,76 +150,77 @@ func (form *RecordUpsert) LoadData(r *http.Request) error {
value := extendedData[key]
value = field.PrepareValue(value)
if field.Type == schema.FieldTypeFile {
options, _ := field.Options.(*schema.FileOptions)
oldNames := list.ToUniqueStringSlice(form.Data[key])
if field.Type != schema.FieldTypeFile {
form.Data[key] = value
continue
}
// delete previously uploaded file(s)
if options.MaxSelect == 1 {
// search for unset zero indexed key as a fallback
indexedKeyValue, hasIndexedKey := extendedData[key+".0"]
options, _ := field.Options.(*schema.FileOptions)
oldNames := list.ToUniqueStringSlice(form.Data[key])
if cast.ToString(value) == "" || (hasIndexedKey && cast.ToString(indexedKeyValue) == "") {
if len(oldNames) > 0 {
form.filesToDelete = append(form.filesToDelete, oldNames...)
}
form.Data[key] = nil
// delete previously uploaded file(s)
if options.MaxSelect == 1 {
// search for unset zero indexed key as a fallback
indexedKeyValue, hasIndexedKey := extendedData[key+".0"]
if cast.ToString(value) == "" || (hasIndexedKey && cast.ToString(indexedKeyValue) == "") {
if len(oldNames) > 0 {
form.filesToDelete = append(form.filesToDelete, oldNames...)
}
} else if options.MaxSelect > 1 {
// search for individual file index to delete (eg. "file.0")
keyExp, _ := regexp.Compile(`^` + regexp.QuoteMeta(key) + `\.\d+$`)
indexesToDelete := []int{}
for indexedKey := range extendedData {
if keyExp.MatchString(indexedKey) && cast.ToString(extendedData[indexedKey]) == "" {
index, indexErr := strconv.Atoi(indexedKey[len(key)+1:])
if indexErr != nil || index >= len(oldNames) {
continue
}
indexesToDelete = append(indexesToDelete, index)
}
}
// slice to fill only with the non-deleted indexes
nonDeleted := []string{}
for i, name := range oldNames {
// not marked for deletion
if !list.ExistInSlice(i, indexesToDelete) {
nonDeleted = append(nonDeleted, name)
form.Data[key] = nil
}
} else if options.MaxSelect > 1 {
// search for individual file index to delete (eg. "file.0")
keyExp, _ := regexp.Compile(`^` + regexp.QuoteMeta(key) + `\.\d+$`)
indexesToDelete := make([]int, 0, len(extendedData))
for indexedKey := range extendedData {
if keyExp.MatchString(indexedKey) && cast.ToString(extendedData[indexedKey]) == "" {
index, indexErr := strconv.Atoi(indexedKey[len(key)+1:])
if indexErr != nil || index >= len(oldNames) {
continue
}
// store the id to actually delete the file later
form.filesToDelete = append(form.filesToDelete, name)
indexesToDelete = append(indexesToDelete, index)
}
form.Data[key] = nonDeleted
}
// check if there are any new uploaded form files
files, err := rest.FindUploadedFiles(r, key)
if err != nil {
continue // skip invalid or missing file(s)
}
// refresh oldNames list
oldNames = list.ToUniqueStringSlice(form.Data[key])
if options.MaxSelect == 1 {
// delete previous file(s) before replacing
if len(oldNames) > 0 {
form.filesToDelete = list.ToUniqueStringSlice(append(form.filesToDelete, oldNames...))
// slice to fill only with the non-deleted indexes
nonDeleted := make([]string, 0, len(oldNames))
for i, name := range oldNames {
// not marked for deletion
if !list.ExistInSlice(i, indexesToDelete) {
nonDeleted = append(nonDeleted, name)
continue
}
form.filesToUpload = append(form.filesToUpload, files[0])
form.Data[key] = files[0].Name()
} else if options.MaxSelect > 1 {
// append the id of each uploaded file instance
form.filesToUpload = append(form.filesToUpload, files...)
for _, file := range files {
oldNames = append(oldNames, file.Name())
}
form.Data[key] = oldNames
// store the id to actually delete the file later
form.filesToDelete = append(form.filesToDelete, name)
}
} else {
form.Data[key] = value
form.Data[key] = nonDeleted
}
// check if there are any new uploaded form files
files, err := rest.FindUploadedFiles(r, key)
if err != nil {
continue // skip invalid or missing file(s)
}
// refresh oldNames list
oldNames = list.ToUniqueStringSlice(form.Data[key])
if options.MaxSelect == 1 {
// delete previous file(s) before replacing
if len(oldNames) > 0 {
form.filesToDelete = list.ToUniqueStringSlice(append(form.filesToDelete, oldNames...))
}
form.filesToUpload = append(form.filesToUpload, files[0])
form.Data[key] = files[0].Name()
} else if options.MaxSelect > 1 {
// append the id of each uploaded file instance
form.filesToUpload = append(form.filesToUpload, files...)
for _, file := range files {
oldNames = append(oldNames, file.Name())
}
form.Data[key] = oldNames
}
}
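
The LoadData hunk above flattens the file-field handling by dealing with non-file fields first and continuing, instead of wrapping the long file branch in if/else. A compact sketch of that guard-clause-in-a-loop shape (normalizeValues and its keys are made up for illustration):

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeValues copies plain values through unchanged and continues
// immediately, so the longer "file"-style handling that follows stays at
// the loop's top indentation level instead of inside an else branch.
func normalizeValues(in map[string]string) map[string]string {
	out := make(map[string]string, len(in))
	for key, value := range in {
		if !strings.HasPrefix(key, "file.") {
			out[key] = value
			continue // simple case handled; skip the file-specific logic
		}

		// file-specific handling (kept short here): strip the prefix and
		// lowercase the stored name.
		out[strings.TrimPrefix(key, "file.")] = strings.ToLower(value)
	}
	return out
}

func main() {
	fmt.Println(normalizeValues(map[string]string{
		"title":       "Hello",
		"file.avatar": "AVATAR.PNG",
	}))
}
```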

View File

@@ -98,35 +98,35 @@ func (form *UserOauth2Login) Submit() (*models.User, *auth.AuthUser, error) {
return nil, authData, err
}
}
} else {
if !config.AllowRegistrations {
// registration of new users is not allowed via the Oauth2 provider
return nil, authData, errors.New("Cannot find user with the authorized email.")
}
return user, authData, nil
}
if !config.AllowRegistrations {
// registration of new users is not allowed via the Oauth2 provider
return nil, authData, errors.New("Cannot find user with the authorized email.")
}
// create new user
user = &models.User{Verified: true}
upsertForm := NewUserUpsert(form.app, user)
upsertForm.Email = authData.Email
upsertForm.Password = security.RandomString(30)
upsertForm.PasswordConfirm = upsertForm.Password
// create new user
user = &models.User{Verified: true}
upsertForm := NewUserUpsert(form.app, user)
upsertForm.Email = authData.Email
upsertForm.Password = security.RandomString(30)
upsertForm.PasswordConfirm = upsertForm.Password
event := &core.UserOauth2RegisterEvent{
User: user,
AuthData: authData,
}
event := &core.UserOauth2RegisterEvent{
User: user,
AuthData: authData,
}
if err := form.app.OnUserBeforeOauth2Register().Trigger(event); err != nil {
return nil, authData, err
}
if err := form.app.OnUserBeforeOauth2Register().Trigger(event); err != nil {
return nil, authData, err
}
if err := upsertForm.Submit(); err != nil {
return nil, authData, err
}
if err := upsertForm.Submit(); err != nil {
return nil, authData, err
}
if err := form.app.OnUserAfterOauth2Register().Trigger(event); err != nil {
return nil, authData, err
}
if err := form.app.OnUserAfterOauth2Register().Trigger(event); err != nil {
return nil, authData, err
}
return user, authData, nil
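
In the Submit hunk above the else branch disappears because the found-user path returns early, letting the registration code sit at the function's top level. A rough, standalone sketch of the same shape (findOrRegister and its types are hypothetical, not the actual Submit logic):

```go
package main

import (
	"errors"
	"fmt"
)

type user struct{ Email string }

var errRegistrationsClosed = errors.New("cannot find user with the authorized email")

// findOrRegister returns an existing user immediately; only when no user is
// found does it fall through to the registration path, which therefore no
// longer needs to live inside an else block.
func findOrRegister(existing map[string]*user, email string, allowRegistrations bool) (*user, error) {
	if u, ok := existing[email]; ok {
		return u, nil // early return: the happy path exits here
	}
	if !allowRegistrations {
		return nil, errRegistrationsClosed
	}
	u := &user{Email: email}
	existing[email] = u
	return u, nil
}

func main() {
	users := map[string]*user{}
	u, err := findOrRegister(users, "test@example.com", true)
	fmt.Println(u, err)
}
```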

View File

@@ -318,12 +318,10 @@ func (validator *RecordDataValidator) checkFileValue(field *schema.SchemaField,
}
// extract the uploaded files
files := []*rest.UploadedFile{}
if len(validator.uploadedFiles) > 0 {
for _, file := range validator.uploadedFiles {
if list.ExistInSlice(file.Name(), names) {
files = append(files, file)
}
files := make([]*rest.UploadedFile, 0, len(validator.uploadedFiles))
for _, file := range validator.uploadedFiles {
if list.ExistInSlice(file.Name(), names) {
files = append(files, file)
}
}
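
The removed length check here was redundant: ranging over a nil or empty slice simply runs zero iterations, which is also why the resolveTokenizedExpr hunk further down can drop its len(lParams) and len(rParams) guards before ranging over those maps. A tiny demonstration:

```go
package main

import "fmt"

func main() {
	var files []string // nil slice

	// This guard is unnecessary: ranging over a nil or empty slice
	// executes the loop body zero times.
	if len(files) > 0 {
		for _, f := range files {
			fmt.Println(f)
		}
	}

	// Equivalent, with one less level of nesting.
	for _, f := range files {
		fmt.Println(f)
	}

	fmt.Println("done") // reached without ever entering either loop
}
```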

View File

@@ -68,10 +68,9 @@ func NewRecordFromNullStringMap(collection *Collection, data dbx.NullStringMap)
// NewRecordsFromNullStringMaps initializes a new Record model for
// each row in the provided NullStringMap slice.
func NewRecordsFromNullStringMaps(collection *Collection, rows []dbx.NullStringMap) []*Record {
result := []*Record{}
for _, row := range rows {
result = append(result, NewRecordFromNullStringMap(collection, row))
result := make([]*Record, len(rows))
for i, row := range rows {
result[i] = NewRecordFromNullStringMap(collection, row)
}
return result

View File

@@ -9,10 +9,10 @@ import (
var _ Model = (*User)(nil)
const (
// The name of the system user profiles collection.
// ProfileCollectionName is the name of the system user profiles collection.
ProfileCollectionName = "profiles"
// The name of the user field from the system user profiles collection.
// ProfileCollectionUserFieldName is the name of the user field from the system user profiles collection.
ProfileCollectionUserFieldName = "userId"
)

View File

@@ -168,7 +168,7 @@ func (r *RecordFieldResolver) Resolve(fieldName string) (resultName string, plac
return "", nil, fmt.Errorf("Failed to find field %q collection.", prop)
}
newCollectionName := relCollection.Name
newTableAlias := (currentTableAlias + "_" + field.Name)
newTableAlias := currentTableAlias + "_" + field.Name
r.addJoin(
newCollectionName,

View File

@@ -48,7 +48,7 @@ func (scenario *ApiScenario) Test(t *testing.T) {
recorder := httptest.NewRecorder()
req := httptest.NewRequest(scenario.Method, scenario.Url, scenario.Body)
// add middeware to timeout long running requests (eg. keep-alive routes)
// add middleware to timeout long-running requests (eg. keep-alive routes)
e.Pre(func(next echo.HandlerFunc) echo.HandlerFunc {
return func(c echo.Context) error {
ctx, cancelFunc := context.WithTimeout(c.Request().Context(), 100*time.Millisecond)

View File

@@ -89,15 +89,11 @@ func (f FilterData) resolveTokenizedExpr(expr fexpr.Expr, fieldResolver FieldRes
// merge both operands parameters (if any)
params := dbx.Params{}
if len(lParams) > 0 {
for k, v := range lParams {
params[k] = v
}
for k, v := range lParams {
params[k] = v
}
if len(rParams) > 0 {
for k, v := range rParams {
params[k] = v
}
for k, v := range rParams {
params[k] = v
}
switch expr.Op {
@@ -139,32 +135,26 @@ func (f FilterData) resolveTokenizedExpr(expr fexpr.Expr, fieldResolver FieldRes
}
func (f FilterData) resolveToken(token fexpr.Token, fieldResolver FieldResolver) (name string, params dbx.Params, err error) {
if token.Type == fexpr.TokenIdentifier {
switch token.Type {
case fexpr.TokenIdentifier:
name, params, err := fieldResolver.Resolve(token.Literal)
if name == "" || err != nil {
// if `null` field is missing, treat `null` identifier as NULL token
if strings.ToLower(token.Literal) == "null" {
return "NULL", nil, nil
m := map[string]string{
// if `null` field is missing, treat `null` identifier as NULL token
"null": "NULL",
// if `true` field is missing, treat `true` identifier as TRUE token
"true": "1",
// if `false` field is missing, treat `false` identifier as FALSE token
"false": "0",
}
// if `true` field is missing, treat `true` identifier as TRUE token
if strings.ToLower(token.Literal) == "true" {
return "1", nil, nil
if v, ok := m[strings.ToLower(token.Literal)]; ok {
return v, nil, nil
}
// if `false` field is missing, treat `false` identifier as FALSE token
if strings.ToLower(token.Literal) == "false" {
return "0", nil, nil
}
return "", nil, err
}
return name, params, err
}
if token.Type == fexpr.TokenNumber || token.Type == fexpr.TokenText {
case fexpr.TokenNumber, fexpr.TokenText:
placeholder := "t" + security.RandomString(7)
name := fmt.Sprintf("{:%s}", placeholder)
params := dbx.Params{placeholder: token.Literal}
@@ -177,10 +167,6 @@ func (f FilterData) resolveToken(token fexpr.Token, fieldResolver FieldResolver)
func (f FilterData) normalizeLikeParams(params dbx.Params) dbx.Params {
result := dbx.Params{}
if len(params) == 0 {
return result
}
for k, v := range params {
vStr := cast.ToString(v)
if !strings.Contains(vStr, "%") {
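
The resolveToken hunk above replaces an if chain with a switch over the token type plus a map of the special null/true/false identifiers. The map-lookup part in isolation (resolveIdentifier is an illustrative helper, not the repository's API):

```go
package main

import (
	"fmt"
	"strings"
)

// specialIdentifiers maps identifiers that have no matching field to the
// SQL literal they should resolve to.
var specialIdentifiers = map[string]string{
	"null":  "NULL",
	"true":  "1",
	"false": "0",
}

// resolveIdentifier reports whether literal is one of the special
// identifiers and, if so, what it resolves to.
func resolveIdentifier(literal string) (string, bool) {
	v, ok := specialIdentifiers[strings.ToLower(literal)]
	return v, ok
}

func main() {
	for _, lit := range []string{"NULL", "True", "title"} {
		if v, ok := resolveIdentifier(lit); ok {
			fmt.Println(lit, "->", v)
			continue
		}
		fmt.Println(lit, "-> regular field")
	}
}
```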

View File

@@ -66,10 +66,5 @@ func Decrypt(cipherText string, key string) ([]byte, error) {
}
nonce, cipherByteClean := cipherByte[:nonceSize], cipherByte[nonceSize:]
plainData, err := gcm.Open(nil, nonce, cipherByteClean, nil)
if err != nil {
return nil, err
}
return plainData, nil
return gcm.Open(nil, nonce, cipherByteClean, nil)
}
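
The final hunk returns gcm.Open's results directly instead of checking the error only to pass it through. A minimal sketch of why the two forms behave identically (open, decryptVerbose, and decrypt are placeholder names):

```go
package main

import (
	"errors"
	"fmt"
)

// open stands in for gcm.Open: it either succeeds or returns an error.
func open(data []byte) ([]byte, error) {
	if len(data) == 0 {
		return nil, errors.New("empty input")
	}
	return data, nil
}

// decryptVerbose checks the error only to hand both values back unchanged.
func decryptVerbose(data []byte) ([]byte, error) {
	plain, err := open(data)
	if err != nil {
		return nil, err
	}
	return plain, nil
}

// decrypt returns the call's results directly; the behavior is identical
// because both return values are propagated untouched.
func decrypt(data []byte) ([]byte, error) {
	return open(data)
}

func main() {
	fmt.Println(decryptVerbose(nil))   // [] empty input
	fmt.Println(decrypt([]byte("hi"))) // [104 105] <nil>
}
```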