1
0
mirror of https://github.com/pocketbase/pocketbase.git synced 2024-11-21 13:35:49 +02:00

added back relation filter reference support

This commit is contained in:
Gani Georgiev 2024-02-19 16:55:34 +02:00
parent 4743c1ce72
commit 4937acb3e2
18 changed files with 660 additions and 169 deletions

View File

@ -1,10 +1,16 @@
## (WIP) v0.22.0
- Admin UI improvements:
- Sync collection changes across multiple opened tabs.
- Fixed vertical image popup preview scrolling.
- Added options to export a subset of collections.
- Added option to import a subset of collections without deleting the others ([#3403](https://github.com/pocketbase/pocketbase/issues/3403)).
- Added support for back/indirect relation `filter`/`sort` (single and multiple).
The syntax to reference back relation fields is `yourCollection_via_yourRelField.*`.
⚠️ To avoid excessive joins, the nested relations resolver is now limited to max 6 level depth (similar to `expand`).
_Note that in the future there will be also more advanced and granular options to specify a subset of the fields that are filterable/sortable._
@todo add Admin UI autocomplete
@todo update "Working with relations" docs
- Added support for multiple back/indirect relation `expand` and updated the keys to use the `_via_` reference syntax (`yourCollection_via_yourRelField`).
_To minimize the breaking changes, the old parenthesis reference syntax (`yourCollection(yourRelField)`) will still continue to work but it is soft-deprecated and there will be a console log reminding you to change it to the new one._
- ⚠️ Collections and fields are no longer allowed to have `_via_` in their name to avoid collisions with the back/indirect relation reference syntax.
- Added `jsvm.Config.OnInit` optional config function to allow registering custom Go bindings to the JSVM.
@ -18,9 +24,15 @@
oauth2
```
- Upgraded to `aws-sdk-go-v2` and added special handling for GCS to workaround the previous [headers signature issue](https://github.com/pocketbase/pocketbase/issues/2231) that we had with v2.
- Upgraded to `aws-sdk-go-v2` and added special handling for GCS to workaround the previous [GCS headers signature issue](https://github.com/pocketbase/pocketbase/issues/2231) that we had with v2.
_This should also fix the SVG/JSON zero response when using Cloudflare R2 ([#4287](https://github.com/pocketbase/pocketbase/issues/4287#issuecomment-1925168142), [#2068](https://github.com/pocketbase/pocketbase/discussions/2068), [#2952](https://github.com/pocketbase/pocketbase/discussions/2952))._
_If you are using S3, please verify that you have a green check in the Admin UI for your S3 configuration (I've tested the new version with GCS, MinIO, Cloudflare R2 and Wasabi)._
_If you are using S3 for uploaded files or backups, please verify that you have a green check in the Admin UI for your S3 configuration (I've tested the new version with GCS, MinIO, Cloudflare R2 and Wasabi)._
- Admin UI improvements:
- Sync collection changes across multiple opened tabs.
- Fixed vertical image popup preview scrolling.
- Added options to export a subset of collections.
- Added option to import a subset of collections without deleting the others ([#3403](https://github.com/pocketbase/pocketbase/issues/3403)).
- Other minor improvements (updated the `ghupdate` plugin to use the configured executable name when printing to the console, fixed the error reporting of `admin update/delete` commands, etc.).

View File

@ -893,7 +893,8 @@ func TestCollectionUpdate(t *testing.T) {
{"type":"text","name":"password"},
{"type":"text","name":"passwordConfirm"},
{"type":"text","name":"oldPassword"}
]
],
"indexes": []
}`),
RequestHeaders: map[string]string{
"Authorization": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImV4cCI6MjIwODk4NTI2MX0.M1m--VOqGyv0d23eeUc0r9xE8ZzHaYVmVFw1VZW6gT8",

View File

@ -153,9 +153,11 @@ func TestFindCollectionReferences(t *testing.T) {
"rel_one_no_cascade",
"rel_one_no_cascade_required",
"rel_one_cascade",
"rel_one_unique",
"rel_many_no_cascade",
"rel_many_no_cascade_required",
"rel_many_cascade",
"rel_many_unique",
}
for col, fields := range result {
@ -756,7 +758,7 @@ func TestImportCollections(t *testing.T) {
"demo1": 15,
"demo2": 2,
"demo3": 2,
"demo4": 11,
"demo4": 13,
"demo5": 6,
"new_import": 1,
}
@ -774,37 +776,38 @@ func TestImportCollections(t *testing.T) {
},
}
for _, scenario := range scenarios {
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
for _, s := range scenarios {
t.Run(s.name, func(t *testing.T) {
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
importedCollections := []*models.Collection{}
importedCollections := []*models.Collection{}
// load data
loadErr := json.Unmarshal([]byte(scenario.jsonData), &importedCollections)
if loadErr != nil {
t.Fatalf("[%s] Failed to load data: %v", scenario.name, loadErr)
continue
}
// load data
loadErr := json.Unmarshal([]byte(s.jsonData), &importedCollections)
if loadErr != nil {
t.Fatalf("Failed to load data: %v", loadErr)
}
err := testApp.Dao().ImportCollections(importedCollections, scenario.deleteMissing, scenario.beforeRecordsSync)
err := testApp.Dao().ImportCollections(importedCollections, s.deleteMissing, s.beforeRecordsSync)
hasErr := err != nil
if hasErr != scenario.expectError {
t.Errorf("[%s] Expected hasErr to be %v, got %v (%v)", scenario.name, scenario.expectError, hasErr, err)
}
hasErr := err != nil
if hasErr != s.expectError {
t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
}
// check collections count
collections := []*models.Collection{}
if err := testApp.Dao().CollectionQuery().All(&collections); err != nil {
t.Fatal(err)
}
if len(collections) != scenario.expectCollectionsCount {
t.Errorf("[%s] Expected %d collections, got %d", scenario.name, scenario.expectCollectionsCount, len(collections))
}
// check collections count
collections := []*models.Collection{}
if err := testApp.Dao().CollectionQuery().All(&collections); err != nil {
t.Fatal(err)
}
if len(collections) != s.expectCollectionsCount {
t.Fatalf("Expected %d collections, got %d", s.expectCollectionsCount, len(collections))
}
if scenario.afterTestFunc != nil {
scenario.afterTestFunc(testApp, collections)
}
if s.afterTestFunc != nil {
s.afterTestFunc(testApp, collections)
}
})
}
}

View File

@ -198,8 +198,6 @@ func (dao *Dao) FindRecordsByIds(
return records, nil
}
// @todo consider deprecating as it may be easier to just use dao.RecordQuery()
//
// FindRecordsByExpr finds all records by the specified db expression.
//
// Returns all collection records if no expressions are provided.

View File

@ -1,7 +1,9 @@
package daos
import (
"errors"
"fmt"
"log"
"regexp"
"strings"
@ -9,13 +11,14 @@ import (
"github.com/pocketbase/pocketbase/models"
"github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tools/dbutils"
"github.com/pocketbase/pocketbase/tools/inflector"
"github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/security"
"github.com/pocketbase/pocketbase/tools/types"
)
// MaxExpandDepth specifies the max allowed nested expand depth path.
//
// @todo Consider eventually reusing resolvers.maxNestedRels
const MaxExpandDepth = 6
// ExpandFetchFunc defines the function that is used to fetch the expanded relation records.
@ -51,13 +54,15 @@ func (dao *Dao) ExpandRecords(records []*models.Record, expands []string, optFet
return failed
}
var indirectExpandRegex = regexp.MustCompile(`^(\w+)\((\w+)\)$`)
// Deprecated
var indirectExpandRegexOld = regexp.MustCompile(`^(\w+)\((\w+)\)$`)
var indirectExpandRegex = regexp.MustCompile(`^(\w+)_via_(\w+)$`)
// notes:
// - if fetchFunc is nil, dao.FindRecordsByIds will be used
// - all records are expected to be from the same collection
// - if MaxExpandDepth is reached, the function returns nil ignoring the remaining expand path
// - indirect expands are supported only with single relation fields
func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetchFunc ExpandFetchFunc, recursionLevel int) error {
if fetchFunc == nil {
// load a default fetchFunc
@ -77,7 +82,22 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
var relCollection *models.Collection
parts := strings.SplitN(expandPath, ".", 2)
matches := indirectExpandRegex.FindStringSubmatch(parts[0])
var matches []string
// @todo remove the old syntax support
if strings.Contains(parts[0], "(") {
matches = indirectExpandRegexOld.FindStringSubmatch(parts[0])
if len(matches) == 3 {
log.Printf(
"%s expand format is deprecated and will be removed in the future. Consider replacing it with %s_via_%s.\n",
matches[0],
matches[1],
matches[2],
)
}
} else {
matches = indirectExpandRegex.FindStringSubmatch(parts[0])
}
if len(matches) == 3 {
indirectRel, _ := dao.FindCollectionByNameOrId(matches[1])
@ -95,47 +115,47 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
if indirectRelFieldOptions == nil || indirectRelFieldOptions.CollectionId != mainCollection.Id {
return fmt.Errorf("Invalid indirect relation field path %q.", parts[0])
}
if indirectRelFieldOptions.IsMultiple() {
// for now don't allow multi-relation indirect fields expand
// due to eventual poor query performance with large data sets.
return fmt.Errorf("Multi-relation fields cannot be indirectly expanded in %q.", parts[0])
}
recordIds := make([]any, len(records))
for i, record := range records {
recordIds[i] = record.Id
}
// add the related id(s) as a dynamic relation field value to
// allow further expand checks at later stage in a more unified manner
prepErr := func() error {
q := dao.DB().Select("id").From(indirectRel.Name)
// @todo after the index optimizations consider allowing
// indirect expand for multi-relation fields
indirectRecords, err := dao.FindRecordsByExpr(
indirectRel.Id,
dbx.In(inflector.Columnify(matches[2]), recordIds...),
)
if err != nil {
return err
}
mappedIndirectRecordIds := make(map[string][]string, len(indirectRecords))
for _, indirectRecord := range indirectRecords {
recId := indirectRecord.GetString(matches[2])
if recId != "" {
mappedIndirectRecordIds[recId] = append(mappedIndirectRecordIds[recId], indirectRecord.Id)
if indirectRelFieldOptions.IsMultiple() {
q.AndWhere(dbx.Exists(dbx.NewExp(fmt.Sprintf(
"SELECT 1 FROM %s je WHERE je.value = {:id}",
dbutils.JsonEach(indirectRelField.Name),
))))
} else {
q.AndWhere(dbx.NewExp("[[" + indirectRelField.Name + "]] = {:id}"))
}
}
// add the indirect relation ids as a new relation field value
for _, record := range records {
relIds, ok := mappedIndirectRecordIds[record.Id]
if ok && len(relIds) > 0 {
record.Set(parts[0], relIds)
pq := q.Build().Prepare()
for _, record := range records {
var relIds []string
err := pq.Bind(dbx.Params{"id": record.Id}).Column(&relIds)
if err != nil {
return errors.Join(err, pq.Close())
}
if len(relIds) > 0 {
record.Set(parts[0], relIds)
}
}
return pq.Close()
}()
if prepErr != nil {
return prepErr
}
relFieldOptions = &schema.RelationOptions{
MaxSelect: nil,
CollectionId: indirectRel.Id,
}
if isRelFieldUnique(indirectRel, indirectRelField.Name) {
if dbutils.HasSingleColumnUniqueIndex(indirectRelField.Name, indirectRel.Indexes) {
relFieldOptions.MaxSelect = types.Pointer(1)
}
// indirect relation

View File

@ -163,7 +163,7 @@ func TestExpandRecords(t *testing.T) {
0,
},
{
"simple indirect expand",
"simple back single relation field expand (deprecated syntax)",
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{"demo4(rel_one_no_cascade_required)"},
@ -174,11 +174,22 @@ func TestExpandRecords(t *testing.T) {
0,
},
{
"nested indirect expand",
"simple back expand via single relation field",
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{"demo4_via_rel_one_no_cascade_required"},
func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
},
{
"nested back expand via single relation field",
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{
"demo4(rel_one_no_cascade_required).self_rel_many.self_rel_many.self_rel_one",
"demo4_via_rel_one_no_cascade_required.self_rel_many.self_rel_many.self_rel_one",
},
func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
@ -186,6 +197,19 @@ func TestExpandRecords(t *testing.T) {
5,
0,
},
{
"nested back expand via multiple relation field",
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{
"demo4_via_rel_many_no_cascade_required.self_rel_many.rel_many_no_cascade_required.demo4_via_rel_many_no_cascade_required",
},
func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
},
7,
0,
},
{
"expand multiple relations sharing a common path",
"demo4",
@ -332,7 +356,7 @@ func TestExpandRecord(t *testing.T) {
0,
},
{
"simple indirect expand",
"simple indirect expand via single relation field (deprecated syntax)",
"demo3",
"lcl9d87w22ml6jy",
[]string{"demo4(rel_one_no_cascade_required)"},
@ -343,7 +367,18 @@ func TestExpandRecord(t *testing.T) {
0,
},
{
"nested indirect expand",
"simple indirect expand via single relation field",
"demo3",
"lcl9d87w22ml6jy",
[]string{"demo4_via_rel_one_no_cascade_required"},
func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
},
{
"nested indirect expand via single relation field",
"demo3",
"lcl9d87w22ml6jy",
[]string{
@ -355,6 +390,19 @@ func TestExpandRecord(t *testing.T) {
5,
0,
},
{
"nested indirect expand via single relation field",
"demo3",
"lcl9d87w22ml6jy",
[]string{
"demo4_via_rel_many_no_cascade_required.self_rel_many.rel_many_no_cascade_required.demo4_via_rel_many_no_cascade_required",
},
func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
},
7,
0,
},
}
for _, s := range scenarios {
@ -388,21 +436,23 @@ func TestIndirectExpandSingeVsArrayResult(t *testing.T) {
// non-unique indirect expand
{
errs := app.Dao().ExpandRecord(record, []string{"demo4(rel_one_cascade)"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
errs := app.Dao().ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
})
if len(errs) > 0 {
t.Fatal(errs)
}
result, ok := record.Expand()["demo4(rel_one_cascade)"].([]*models.Record)
result, ok := record.Expand()["demo4_via_rel_one_cascade"].([]*models.Record)
if !ok {
t.Fatalf("Expected the expanded result to be a slice, got %v", result)
}
}
// mock a unique constraint for the rel_one_cascade field
// unique indirect expand
{
// mock a unique constraint for the rel_one_cascade field
// ---
demo4, err := app.Dao().FindCollectionByNameOrId("demo4")
if err != nil {
t.Fatal(err)
@ -413,18 +463,16 @@ func TestIndirectExpandSingeVsArrayResult(t *testing.T) {
if err := app.Dao().SaveCollection(demo4); err != nil {
t.Fatalf("Failed to mock unique constraint: %v", err)
}
}
// ---
// non-unique indirect expand
{
errs := app.Dao().ExpandRecord(record, []string{"demo4(rel_one_cascade)"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
errs := app.Dao().ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
return app.Dao().FindRecordsByIds(c.Id, ids, nil)
})
if len(errs) > 0 {
t.Fatal(errs)
}
result, ok := record.Expand()["demo4(rel_one_cascade)"].(*models.Record)
result, ok := record.Expand()["demo4_via_rel_one_cascade"].(*models.Record)
if !ok {
t.Fatalf("Expected the expanded result to be a single model, got %v", result)
}

View File

@ -5,6 +5,7 @@ import (
"fmt"
"regexp"
"strconv"
"strings"
validation "github.com/go-ozzo/ozzo-validation/v4"
"github.com/pocketbase/pocketbase/core"
@ -131,6 +132,7 @@ func (form *CollectionUpsert) Validate() error {
validation.Match(collectionNameRegex),
validation.By(form.ensureNoSystemNameChange),
validation.By(form.checkUniqueName),
validation.By(form.checkForVia),
),
// validates using the type's own validation rules + some collection's specifics
validation.Field(
@ -163,6 +165,19 @@ func (form *CollectionUpsert) Validate() error {
)
}
// checkForVia validates that the collection name doesn't contain the
// reserved "_via_" keyword (case-insensitively), since it would collide
// with the back/indirect relation reference syntax.
// Empty values are skipped (handled by the Required rule).
func (form *CollectionUpsert) checkForVia(value any) error {
	name, _ := value.(string)
	if name == "" {
		return nil
	}

	if !strings.Contains(strings.ToLower(name), "_via_") {
		return nil
	}

	return validation.NewError("validation_invalid_name", "The name of the collection cannot contain '_via_'.")
}
func (form *CollectionUpsert) checkUniqueName(value any) error {
v, _ := value.(string)

View File

@ -105,6 +105,17 @@ func TestCollectionUpsertValidateAndSubmit(t *testing.T) {
{"empty create (auth)", "", `{"type":"auth"}`, []string{"name"}},
{"empty create (view)", "", `{"type":"view"}`, []string{"name", "options"}},
{"empty update", "demo2", "{}", []string{}},
{
"collection and field with _via_ names",
"",
`{
"name": "a_via_b",
"schema": [
{"name":"c_via_d","type":"text"}
]
}`,
[]string{"name", "schema"},
},
{
"create failure",
"",

View File

@ -5,6 +5,7 @@ import (
"errors"
"regexp"
"strconv"
"strings"
validation "github.com/go-ozzo/ozzo-validation/v4"
"github.com/go-ozzo/ozzo-validation/v4/is"
@ -211,6 +212,7 @@ func (f SchemaField) Validate() error {
validation.Length(1, 255),
validation.Match(schemaFieldNameRegex),
validation.NotIn(list.ToInterfaceSlice(excludeNames)...),
validation.By(f.checkForVia),
),
validation.Field(&f.Type, validation.Required, validation.In(list.ToInterfaceSlice(FieldTypes())...)),
// currently file fields cannot be unique because a proper
@ -228,6 +230,20 @@ func (f *SchemaField) checkOptions(value any) error {
return v.Validate()
}
// @todo merge with the collections during the refactoring
//
// checkForVia validates that the field name doesn't contain the
// reserved "_via_" keyword (case-insensitively), since it would collide
// with the back/indirect relation reference syntax.
// Empty values are skipped (handled by the Required rule).
func (f *SchemaField) checkForVia(value any) error {
	name, _ := value.(string)
	if name == "" {
		return nil
	}

	if !strings.Contains(strings.ToLower(name), "_via_") {
		return nil
	}

	return validation.NewError("validation_invalid_name", "The name of the field cannot contain '_via_'.")
}
// InitOptions initializes the current field options based on its type.
//
// Returns error on unknown field type.

View File

@ -298,6 +298,15 @@ func TestSchemaFieldValidate(t *testing.T) {
},
[]string{"name"},
},
{
"name with _via_",
schema.SchemaField{
Type: schema.FieldTypeText,
Id: "1234567890",
Name: "a_via_b",
},
[]string{"name"},
},
{
"reserved name (null)",
schema.SchemaField{

View File

@ -3,18 +3,23 @@ package resolvers
import (
"encoding/json"
"fmt"
"regexp"
"strconv"
"strings"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/models"
"github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tools/dbutils"
"github.com/pocketbase/pocketbase/tools/inflector"
"github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/search"
"github.com/pocketbase/pocketbase/tools/security"
)
// maxNestedRels defines the max allowed nested relations depth.
const maxNestedRels = 6
// parseAndRun starts a new one-off RecordFieldResolver.Resolve execution.
func parseAndRun(fieldName string, resolver *RecordFieldResolver) (*search.ResolverResult, error) {
r := &runner{
@ -334,6 +339,8 @@ func (r *runner) processRequestInfoRelationField(dataField *schema.SchemaField)
return r.processActiveProps()
}
var viaRegex = regexp.MustCompile(`^(\w+)_via_(\w+)$`)
func (r *runner) processActiveProps() (*search.ResolverResult, error) {
totalProps := len(r.activeProps)
@ -393,12 +400,12 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
jePair := r.activeTableAlias + "." + cleanFieldName
result := &search.ResolverResult{
Identifier: jsonArrayLength(jePair),
Identifier: dbutils.JsonArrayLength(jePair),
}
if r.withMultiMatch {
jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
r.multiMatch.valueIdentifier = jsonArrayLength(jePair2)
r.multiMatch.valueIdentifier = dbutils.JsonArrayLength(jePair2)
result.MultiMatchSubQuery = r.multiMatch
}
@ -410,7 +417,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
if field.Type == schema.FieldTypeSelect && modifier == eachModifier {
jePair := r.activeTableAlias + "." + cleanFieldName
jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
r.resolver.registerJoin(jsonEach(jePair), jeAlias, nil)
r.resolver.registerJoin(dbutils.JsonEach(jePair), jeAlias, nil)
result := &search.ResolverResult{
Identifier: fmt.Sprintf("[[%s.value]]", jeAlias),
@ -431,7 +438,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName + "_je"
r.multiMatch.joins = append(r.multiMatch.joins, &join{
tableName: jsonEach(jePair2),
tableName: dbutils.JsonEach(jePair2),
tableAlias: jeAlias2,
})
r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.value]]", jeAlias2)
@ -458,9 +465,9 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
// (https://github.com/pocketbase/pocketbase/issues/4068)
if field.Type == schema.FieldTypeJson {
result.NoCoalesce = true
result.Identifier = jsonExtract(r.activeTableAlias+"."+cleanFieldName, "")
result.Identifier = dbutils.JsonExtract(r.activeTableAlias+"."+cleanFieldName, "")
if r.withMultiMatch {
r.multiMatch.valueIdentifier = jsonExtract(r.multiMatchActiveTableAlias+"."+cleanFieldName, "")
r.multiMatch.valueIdentifier = dbutils.JsonExtract(r.multiMatchActiveTableAlias+"."+cleanFieldName, "")
}
}
@ -468,23 +475,19 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
}
field := collection.Schema.GetFieldByName(prop)
if field == nil {
if r.nullifyMisingField {
return &search.ResolverResult{Identifier: "NULL"}, nil
}
return nil, fmt.Errorf("unknown field %q", prop)
}
// check if it is a json field
if field.Type == schema.FieldTypeJson {
// json field -> treat the rest of the props as json path
if field != nil && field.Type == schema.FieldTypeJson {
var jsonPath strings.Builder
for _, p := range r.activeProps[i+1:] {
for j, p := range r.activeProps[i+1:] {
if _, err := strconv.Atoi(p); err == nil {
jsonPath.WriteString("[")
jsonPath.WriteString(inflector.Columnify(p))
jsonPath.WriteString("]")
} else {
jsonPath.WriteString(".")
if j > 0 {
jsonPath.WriteString(".")
}
jsonPath.WriteString(inflector.Columnify(p))
}
}
@ -492,18 +495,130 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
result := &search.ResolverResult{
NoCoalesce: true,
Identifier: jsonExtract(r.activeTableAlias+"."+inflector.Columnify(prop), jsonPathStr),
Identifier: dbutils.JsonExtract(r.activeTableAlias+"."+inflector.Columnify(prop), jsonPathStr),
}
if r.withMultiMatch {
r.multiMatch.valueIdentifier = jsonExtract(r.multiMatchActiveTableAlias+"."+inflector.Columnify(prop), jsonPathStr)
r.multiMatch.valueIdentifier = dbutils.JsonExtract(r.multiMatchActiveTableAlias+"."+inflector.Columnify(prop), jsonPathStr)
result.MultiMatchSubQuery = r.multiMatch
}
return result, nil
}
// check if it is a relation field
if i >= maxNestedRels {
return nil, fmt.Errorf("max nested relations reached for field %q", prop)
}
// check for back relation (eg. yourCollection_via_yourRelField)
// -----------------------------------------------------------
if field == nil {
parts := viaRegex.FindStringSubmatch(prop)
if len(parts) != 3 {
if r.nullifyMisingField {
return &search.ResolverResult{Identifier: "NULL"}, nil
}
return nil, fmt.Errorf("unknown field %q", prop)
}
backCollection, err := r.resolver.loadCollection(parts[1])
if err != nil {
return nil, fmt.Errorf("failed to resolve field %q", prop)
}
backField := backCollection.Schema.GetFieldByName(parts[2])
if backField == nil || backField.Type != schema.FieldTypeRelation {
return nil, fmt.Errorf("invalid or missing back relation field %q", parts[2])
}
backField.InitOptions()
backFieldOptions, ok := backField.Options.(*schema.RelationOptions)
if !ok {
return nil, fmt.Errorf("failed to initialize back relation field %q options", backField.Name)
}
if backFieldOptions.CollectionId != collection.Id {
return nil, fmt.Errorf("invalid back relation field %q collection reference", backField.Name)
}
// join the back relation to the main query
// ---
cleanProp := inflector.Columnify(prop)
cleanBackFieldName := inflector.Columnify(backField.Name)
newTableAlias := r.activeTableAlias + "_" + cleanProp
newCollectionName := inflector.Columnify(backCollection.Name)
isBackRelMultiple := backFieldOptions.IsMultiple()
if !isBackRelMultiple {
// additionally check if the rel field has a single column unique index
isBackRelMultiple = !dbutils.HasSingleColumnUniqueIndex(backField.Name, backCollection.Indexes)
}
if !isBackRelMultiple {
r.resolver.registerJoin(
newCollectionName,
newTableAlias,
dbx.NewExp(fmt.Sprintf("[[%s.%s]] = [[%s.id]]", newTableAlias, cleanBackFieldName, r.activeTableAlias)),
)
} else {
jeAlias := r.activeTableAlias + "_" + cleanProp + "_je"
r.resolver.registerJoin(
newCollectionName,
newTableAlias,
dbx.NewExp(fmt.Sprintf(
"[[%s.id]] IN (SELECT [[%s.value]] FROM %s {{%s}})",
r.activeTableAlias,
jeAlias,
dbutils.JsonEach(newTableAlias+"."+cleanBackFieldName),
jeAlias,
)),
)
}
r.activeCollectionName = newCollectionName
r.activeTableAlias = newTableAlias
// ---
// join the back relation to the multi-match subquery
// ---
if isBackRelMultiple {
r.withMultiMatch = true // enable multimatch if not already
}
newTableAlias2 := r.multiMatchActiveTableAlias + "_" + cleanProp
if !isBackRelMultiple {
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
tableName: newCollectionName,
tableAlias: newTableAlias2,
on: dbx.NewExp(fmt.Sprintf("[[%s.%s]] = [[%s.id]]", newTableAlias2, cleanBackFieldName, r.multiMatchActiveTableAlias)),
},
)
} else {
jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanProp + "_je"
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
tableName: newCollectionName,
tableAlias: newTableAlias2,
on: dbx.NewExp(fmt.Sprintf(
"[[%s.id]] IN (SELECT [[%s.value]] FROM %s {{%s}})",
r.multiMatchActiveTableAlias,
jeAlias2,
dbutils.JsonEach(newTableAlias2+"."+cleanBackFieldName),
jeAlias2,
)),
},
)
}
r.multiMatchActiveTableAlias = newTableAlias2
// ---
continue
}
// -----------------------------------------------------------
// check for direct relation
if field.Type != schema.FieldTypeRelation {
return nil, fmt.Errorf("field %q is not a valid relation", prop)
}
@ -534,7 +649,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
)
} else {
jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
r.resolver.registerJoin(jsonEach(prefixedFieldName), jeAlias, nil)
r.resolver.registerJoin(dbutils.JsonEach(prefixedFieldName), jeAlias, nil)
r.resolver.registerJoin(
inflector.Columnify(newCollectionName),
newTableAlias,
@ -549,7 +664,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
// join the relation to the multi-match subquery
// ---
if options.IsMultiple() {
r.withMultiMatch = true
r.withMultiMatch = true // enable multimatch if not already
}
newTableAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName
@ -569,7 +684,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
tableName: jsonEach(prefixedFieldName2),
tableName: dbutils.JsonEach(prefixedFieldName2),
tableAlias: jeAlias2,
},
&join{
@ -587,34 +702,6 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
return nil, fmt.Errorf("failed to resolve field %q", r.fieldName)
}
// jsonArrayLength returns a SQL expression that computes the JSON array
// length of the specified table-column pair.
//
// The nested CASE normalizes value access for single and multiple
// relation values: valid JSON is used as-is, empty/NULL values become an
// empty json_array, and any other scalar is wrapped in a one-element array.
func jsonArrayLength(tableColumnPair string) string {
	col := fmt.Sprintf("[[%s]]", tableColumnPair)

	scalarFallback := "(CASE WHEN " + col + " = '' OR " + col + " IS NULL" +
		" THEN json_array() ELSE json_array(" + col + ") END)"

	return "json_array_length(CASE WHEN json_valid(" + col + ")" +
		" THEN " + col + " ELSE " + scalarFallback + " END)"
}
// jsonEach returns a SQL json_each expression over the specified
// table-column pair.
//
// The CASE normalizes value access for single and multiple relation
// values: valid JSON is iterated as-is, while scalar values are wrapped
// in a one-element json_array first.
func jsonEach(tableColumnPair string) string {
	col := fmt.Sprintf("[[%s]]", tableColumnPair)

	return "json_each(CASE WHEN json_valid(" + col + ")" +
		" THEN " + col +
		" ELSE json_array(" + col + ") END)"
}
// jsonExtract returns a SQL JSON_EXTRACT expression for the specified
// table-column pair and JSON path (path is appended after the "$" root).
//
// The extra json_object wrapping in the ELSE branch works around the
// cases where JSON_EXTRACT is used with non-json columns: the raw value
// is nested under a "pb" key and extracted via the "$.pb" prefix.
func jsonExtract(tableColumnPair string, path string) string {
	col := fmt.Sprintf("[[%s]]", tableColumnPair)

	return "(CASE WHEN json_valid(" + col + ")" +
		" THEN JSON_EXTRACT(" + col + ", '$" + path + "')" +
		" ELSE JSON_EXTRACT(json_object('pb', " + col + "), '$.pb" + path + "') END)"
}
func resolvableSystemFieldNames(collection *models.Collection) []string {
result := schema.BaseModelFieldNames()

View File

@ -144,7 +144,8 @@ func (r *RecordFieldResolver) UpdateQuery(query *dbx.SelectQuery) error {
// id
// someSelect.each
// project.screen.status
// @request.status
// @request.context
// @request.method
// @request.query.filter
// @request.headers.x_token
// @request.auth.someRelation.name

View File

@ -99,61 +99,103 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `demo4` `demo4_self_rel_one` ON [[demo4_self_rel_one.id]] = [[demo4.self_rel_one]] WHERE ([[demo4.title]] > 1 OR [[demo4_self_rel_one.title]] > 1)",
},
{
"nested incomplete rels (opt/any operator)",
"nested incomplete relations (opt/any operator)",
"demo4",
"self_rel_many.self_rel_one ?> true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] WHERE [[demo4_self_rel_many.self_rel_one]] > 1",
},
{
"nested incomplete rels (multi-match operator)",
"nested incomplete relations (multi-match operator)",
"demo4",
"self_rel_many.self_rel_one > true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] WHERE ((([[demo4_self_rel_many.self_rel_one]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo4_self_rel_many.self_rel_one]] as [[multiMatchValue]] FROM `demo4` `__mm_demo4` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo4.self_rel_many]]) THEN [[__mm_demo4.self_rel_many]] ELSE json_array([[__mm_demo4.self_rel_many]]) END) `__mm_demo4_self_rel_many_je` LEFT JOIN `demo4` `__mm_demo4_self_rel_many` ON [[__mm_demo4_self_rel_many.id]] = [[__mm_demo4_self_rel_many_je.value]] WHERE `__mm_demo4`.`id` = `demo4`.`id`) {{TEST}} WHERE ((NOT ([[TEST.multiMatchValue]] > 1)) OR ([[TEST.multiMatchValue]] IS NULL))))))",
},
{
"nested complete rels (opt/any operator)",
"nested complete relations (opt/any operator)",
"demo4",
"self_rel_many.self_rel_one.title ?> true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many.self_rel_one]] WHERE [[demo4_self_rel_many_self_rel_one.title]] > 1",
},
{
"nested complete rels (multi-match operator)",
"nested complete relations (multi-match operator)",
"demo4",
"self_rel_many.self_rel_one.title > true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many.self_rel_one]] WHERE ((([[demo4_self_rel_many_self_rel_one.title]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo4_self_rel_many_self_rel_one.title]] as [[multiMatchValue]] FROM `demo4` `__mm_demo4` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo4.self_rel_many]]) THEN [[__mm_demo4.self_rel_many]] ELSE json_array([[__mm_demo4.self_rel_many]]) END) `__mm_demo4_self_rel_many_je` LEFT JOIN `demo4` `__mm_demo4_self_rel_many` ON [[__mm_demo4_self_rel_many.id]] = [[__mm_demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `__mm_demo4_self_rel_many_self_rel_one` ON [[__mm_demo4_self_rel_many_self_rel_one.id]] = [[__mm_demo4_self_rel_many.self_rel_one]] WHERE `__mm_demo4`.`id` = `demo4`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > 1)) OR ([[__smTEST.multiMatchValue]] IS NULL))))))",
},
{
"repeated nested rels (opt/any operator)",
"repeated nested relations (opt/any operator)",
"demo4",
"self_rel_many.self_rel_one.self_rel_many.self_rel_one.title ?> true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many.self_rel_one]] LEFT JOIN json_each(CASE WHEN json_valid([[demo4_self_rel_many_self_rel_one.self_rel_many]]) THEN [[demo4_self_rel_many_self_rel_one.self_rel_many]] ELSE json_array([[demo4_self_rel_many_self_rel_one.self_rel_many]]) END) `demo4_self_rel_many_self_rel_one_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one_self_rel_many` ON [[demo4_self_rel_many_self_rel_one_self_rel_many.id]] = [[demo4_self_rel_many_self_rel_one_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many_self_rel_one_self_rel_many.self_rel_one]] WHERE [[demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.title]] > 1",
},
{
"repeated nested rels (multi-match operator)",
"repeated nested relations (multi-match operator)",
"demo4",
"self_rel_many.self_rel_one.self_rel_many.self_rel_one.title > true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many.self_rel_one]] LEFT JOIN json_each(CASE WHEN json_valid([[demo4_self_rel_many_self_rel_one.self_rel_many]]) THEN [[demo4_self_rel_many_self_rel_one.self_rel_many]] ELSE json_array([[demo4_self_rel_many_self_rel_one.self_rel_many]]) END) `demo4_self_rel_many_self_rel_one_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one_self_rel_many` ON [[demo4_self_rel_many_self_rel_one_self_rel_many.id]] = [[demo4_self_rel_many_self_rel_one_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one` ON [[demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.id]] = [[demo4_self_rel_many_self_rel_one_self_rel_many.self_rel_one]] WHERE ((([[demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.title]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.title]] as [[multiMatchValue]] FROM `demo4` `__mm_demo4` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo4.self_rel_many]]) THEN [[__mm_demo4.self_rel_many]] ELSE json_array([[__mm_demo4.self_rel_many]]) END) `__mm_demo4_self_rel_many_je` LEFT JOIN `demo4` `__mm_demo4_self_rel_many` ON [[__mm_demo4_self_rel_many.id]] = [[__mm_demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `__mm_demo4_self_rel_many_self_rel_one` ON [[__mm_demo4_self_rel_many_self_rel_one.id]] = [[__mm_demo4_self_rel_many.self_rel_one]] LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo4_self_rel_many_self_rel_one.self_rel_many]]) THEN [[__mm_demo4_self_rel_many_self_rel_one.self_rel_many]] ELSE 
json_array([[__mm_demo4_self_rel_many_self_rel_one.self_rel_many]]) END) `__mm_demo4_self_rel_many_self_rel_one_self_rel_many_je` LEFT JOIN `demo4` `__mm_demo4_self_rel_many_self_rel_one_self_rel_many` ON [[__mm_demo4_self_rel_many_self_rel_one_self_rel_many.id]] = [[__mm_demo4_self_rel_many_self_rel_one_self_rel_many_je.value]] LEFT JOIN `demo4` `__mm_demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one` ON [[__mm_demo4_self_rel_many_self_rel_one_self_rel_many_self_rel_one.id]] = [[__mm_demo4_self_rel_many_self_rel_one_self_rel_many.self_rel_one]] WHERE `__mm_demo4`.`id` = `demo4`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > 1)) OR ([[__smTEST.multiMatchValue]] IS NULL))))))",
},
{
"multiple rels (opt/any operators)",
"multiple relations (opt/any operators)",
"demo4",
"self_rel_many.title ?= 'test' || self_rel_one.json_object.a ?> true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_one` ON [[demo4_self_rel_one.id]] = [[demo4.self_rel_one]] WHERE ([[demo4_self_rel_many.title]] = {:TEST} OR (CASE WHEN json_valid([[demo4_self_rel_one.json_object]]) THEN JSON_EXTRACT([[demo4_self_rel_one.json_object]], '$.a') ELSE JSON_EXTRACT(json_object('pb', [[demo4_self_rel_one.json_object]]), '$.pb.a') END) > 1)",
},
{
"multiple rels (multi-match operators)",
"multiple relations (multi-match operators)",
"demo4",
"self_rel_many.title = 'test' || self_rel_one.json_object.a > true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] LEFT JOIN `demo4` `demo4_self_rel_one` ON [[demo4_self_rel_one.id]] = [[demo4.self_rel_one]] WHERE ((([[demo4_self_rel_many.title]] = {:TEST}) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo4_self_rel_many.title]] as [[multiMatchValue]] FROM `demo4` `__mm_demo4` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo4.self_rel_many]]) THEN [[__mm_demo4.self_rel_many]] ELSE json_array([[__mm_demo4.self_rel_many]]) END) `__mm_demo4_self_rel_many_je` LEFT JOIN `demo4` `__mm_demo4_self_rel_many` ON [[__mm_demo4_self_rel_many.id]] = [[__mm_demo4_self_rel_many_je.value]] WHERE `__mm_demo4`.`id` = `demo4`.`id`) {{__smTEST}} WHERE NOT ([[__smTEST.multiMatchValue]] = {:TEST})))) OR (CASE WHEN json_valid([[demo4_self_rel_one.json_object]]) THEN JSON_EXTRACT([[demo4_self_rel_one.json_object]], '$.a') ELSE JSON_EXTRACT(json_object('pb', [[demo4_self_rel_one.json_object]]), '$.pb.a') END) > 1)",
},
{
"back relations via single relation field (without unique index)",
"demo3",
"demo4_via_rel_one_cascade.id = true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_one_cascade` ON [[demo3.id]] IN (SELECT [[demo3_demo4_via_rel_one_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_one_cascade.rel_one_cascade]]) THEN [[demo3_demo4_via_rel_one_cascade.rel_one_cascade]] ELSE json_array([[demo3_demo4_via_rel_one_cascade.rel_one_cascade]]) END) {{demo3_demo4_via_rel_one_cascade_je}}) WHERE ((([[demo3_demo4_via_rel_one_cascade.id]] = 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo3_demo4_via_rel_one_cascade.id]] as [[multiMatchValue]] FROM `demo3` `__mm_demo3` LEFT JOIN `demo4` `__mm_demo3_demo4_via_rel_one_cascade` ON [[__mm_demo3.id]] IN (SELECT [[__mm_demo3_demo4_via_rel_one_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[__mm_demo3_demo4_via_rel_one_cascade.rel_one_cascade]]) THEN [[__mm_demo3_demo4_via_rel_one_cascade.rel_one_cascade]] ELSE json_array([[__mm_demo3_demo4_via_rel_one_cascade.rel_one_cascade]]) END) {{__mm_demo3_demo4_via_rel_one_cascade_je}}) WHERE `__mm_demo3`.`id` = `demo3`.`id`) {{__smTEST}} WHERE NOT ([[__smTEST.multiMatchValue]] = 1)))))",
},
{
"back relations via single relation field (with unique index)",
"demo3",
"demo4_via_rel_one_unique.id = true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_one_unique` ON [[demo3_demo4_via_rel_one_unique.rel_one_unique]] = [[demo3.id]] WHERE [[demo3_demo4_via_rel_one_unique.id]] = 1",
},
{
"back relations via multiple relation field (opt/any operators)",
"demo3",
"demo4_via_rel_many_cascade.id ?= true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_many_cascade` ON [[demo3.id]] IN (SELECT [[demo3_demo4_via_rel_many_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) THEN [[demo3_demo4_via_rel_many_cascade.rel_many_cascade]] ELSE json_array([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) END) {{demo3_demo4_via_rel_many_cascade_je}}) WHERE [[demo3_demo4_via_rel_many_cascade.id]] = 1",
},
{
"back relations via multiple relation field (multi-match operators)",
"demo3",
"demo4_via_rel_many_cascade.id = true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_many_cascade` ON [[demo3.id]] IN (SELECT [[demo3_demo4_via_rel_many_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) THEN [[demo3_demo4_via_rel_many_cascade.rel_many_cascade]] ELSE json_array([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) END) {{demo3_demo4_via_rel_many_cascade_je}}) WHERE ((([[demo3_demo4_via_rel_many_cascade.id]] = 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo3_demo4_via_rel_many_cascade.id]] as [[multiMatchValue]] FROM `demo3` `__mm_demo3` LEFT JOIN `demo4` `__mm_demo3_demo4_via_rel_many_cascade` ON [[__mm_demo3.id]] IN (SELECT [[__mm_demo3_demo4_via_rel_many_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[__mm_demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) THEN [[__mm_demo3_demo4_via_rel_many_cascade.rel_many_cascade]] ELSE json_array([[__mm_demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) END) {{__mm_demo3_demo4_via_rel_many_cascade_je}}) WHERE `__mm_demo3`.`id` = `demo3`.`id`) {{__smTEST}} WHERE NOT ([[__smTEST.multiMatchValue]] = 1)))))",
},
{
"back relations via unique multiple relation field (should be the same as multi-match)",
"demo3",
"demo4_via_rel_many_unique.id = true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_many_unique` ON [[demo3.id]] IN (SELECT [[demo3_demo4_via_rel_many_unique_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_many_unique.rel_many_unique]]) THEN [[demo3_demo4_via_rel_many_unique.rel_many_unique]] ELSE json_array([[demo3_demo4_via_rel_many_unique.rel_many_unique]]) END) {{demo3_demo4_via_rel_many_unique_je}}) WHERE ((([[demo3_demo4_via_rel_many_unique.id]] = 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo3_demo4_via_rel_many_unique.id]] as [[multiMatchValue]] FROM `demo3` `__mm_demo3` LEFT JOIN `demo4` `__mm_demo3_demo4_via_rel_many_unique` ON [[__mm_demo3.id]] IN (SELECT [[__mm_demo3_demo4_via_rel_many_unique_je.value]] FROM json_each(CASE WHEN json_valid([[__mm_demo3_demo4_via_rel_many_unique.rel_many_unique]]) THEN [[__mm_demo3_demo4_via_rel_many_unique.rel_many_unique]] ELSE json_array([[__mm_demo3_demo4_via_rel_many_unique.rel_many_unique]]) END) {{__mm_demo3_demo4_via_rel_many_unique_je}}) WHERE `__mm_demo3`.`id` = `demo3`.`id`) {{__smTEST}} WHERE NOT ([[__smTEST.multiMatchValue]] = 1)))))",
},
{
"recursive back relations",
"demo3",
"demo4_via_rel_many_cascade.rel_one_cascade.demo4_via_rel_many_cascade.id ?= true",
false,
"SELECT DISTINCT `demo3`.* FROM `demo3` LEFT JOIN `demo4` `demo3_demo4_via_rel_many_cascade` ON [[demo3.id]] IN (SELECT [[demo3_demo4_via_rel_many_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) THEN [[demo3_demo4_via_rel_many_cascade.rel_many_cascade]] ELSE json_array([[demo3_demo4_via_rel_many_cascade.rel_many_cascade]]) END) {{demo3_demo4_via_rel_many_cascade_je}}) LEFT JOIN `demo3` `demo3_demo4_via_rel_many_cascade_rel_one_cascade` ON [[demo3_demo4_via_rel_many_cascade_rel_one_cascade.id]] = [[demo3_demo4_via_rel_many_cascade.rel_one_cascade]] LEFT JOIN `demo4` `demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade` ON [[demo3_demo4_via_rel_many_cascade_rel_one_cascade.id]] IN (SELECT [[demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade_je.value]] FROM json_each(CASE WHEN json_valid([[demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade.rel_many_cascade]]) THEN [[demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade.rel_many_cascade]] ELSE json_array([[demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade.rel_many_cascade]]) END) {{demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade_je}}) WHERE [[demo3_demo4_via_rel_many_cascade_rel_one_cascade_demo4_via_rel_many_cascade.id]] = 1",
},
{
"@collection join (opt/any operators)",
"demo4",
@ -403,13 +445,28 @@ func TestRecordFieldResolverResolveSchemaFields(t *testing.T) {
{"self_rel_many.unknown", true, ""},
{"self_rel_many.title", false, "[[demo4_self_rel_many.title]]"},
{"self_rel_many.self_rel_one.self_rel_many.title", false, "[[demo4_self_rel_many_self_rel_one_self_rel_many.title]]"},
// max relations limit
{"self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.id", false, "[[demo4_self_rel_many_self_rel_many_self_rel_many_self_rel_many_self_rel_many_self_rel_many.id]]"},
{"self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.id", true, ""},
// back relations
{"rel_one_cascade.demo4_via_title.id", true, ""}, // non-relation via field
{"rel_one_cascade.demo4_via_rel_one_cascade.id", false, "[[demo4_rel_one_cascade_demo4_via_rel_one_cascade.id]]"},
{"rel_one_cascade.demo4_via_rel_one_cascade.rel_one_cascade.demo4_via_rel_one_cascade.id", false, "[[demo4_rel_one_cascade_demo4_via_rel_one_cascade_rel_one_cascade_demo4_via_rel_one_cascade.id]]"},
// json_extract
{"json_array.0", false, "(CASE WHEN json_valid([[demo4.json_array]]) THEN JSON_EXTRACT([[demo4.json_array]], '$[0]') ELSE JSON_EXTRACT(json_object('pb', [[demo4.json_array]]), '$.pb[0]') END)"},
{"json_object.a.b.c", false, "(CASE WHEN json_valid([[demo4.json_object]]) THEN JSON_EXTRACT([[demo4.json_object]], '$.a.b.c') ELSE JSON_EXTRACT(json_object('pb', [[demo4.json_object]]), '$.pb.a.b.c') END)"},
// @request.auth relation join:
// max relations limit shouldn't apply for json paths
{"json_object.a.b.c.e.f.g.h.i.j.k.l.m.n.o.p", false, "(CASE WHEN json_valid([[demo4.json_object]]) THEN JSON_EXTRACT([[demo4.json_object]], '$.a.b.c.e.f.g.h.i.j.k.l.m.n.o.p') ELSE JSON_EXTRACT(json_object('pb', [[demo4.json_object]]), '$.pb.a.b.c.e.f.g.h.i.j.k.l.m.n.o.p') END)"},
// @request.auth relation join
{"@request.auth.rel", false, "[[__auth_users.rel]]"},
{"@request.auth.rel.title", false, "[[__auth_users_rel.title]]"},
// @collection fieds:
// @collection fields
{"@collect", true, ""},
{"collection.demo4.title", true, ""},
{"@collection", true, ""},
@ -441,12 +498,12 @@ func TestRecordFieldResolverResolveSchemaFields(t *testing.T) {
}
if r.Identifier != s.expectName {
t.Fatalf("Expected r.Identifier %q, got %q", s.expectName, r.Identifier)
t.Fatalf("Expected r.Identifier\n%q\ngot\n%q", s.expectName, r.Identifier)
}
// params should be empty for non @request fields
if len(r.Params) != 0 {
t.Fatalf("Expected 0 r.Params, got %v", r.Params)
t.Fatalf("Expected 0 r.Params, got\n%v", r.Params)
}
})
}

Binary file not shown.

View File

@ -192,3 +192,16 @@ func ParseIndex(createIndexExpr string) Index {
return result
}
// HasSingleColumnUniqueIndex loosely checks whether the specified column has
// a single column unique index (WHERE statements are ignored).
//
// The column name comparison is case-insensitive.
func HasSingleColumnUniqueIndex(column string, indexes []string) bool {
	for _, idx := range indexes {
		// parse the raw CREATE INDEX expression to inspect its flags and columns
		parsed := ParseIndex(idx)
		if parsed.Unique && len(parsed.Columns) == 1 && strings.EqualFold(parsed.Columns[0].Name, column) {
			return true
		}
	}
	return false
}

View File

@ -3,6 +3,7 @@ package dbutils_test
import (
"bytes"
"encoding/json"
"fmt"
"testing"
"github.com/pocketbase/pocketbase/tools/dbutils"
@ -68,21 +69,23 @@ func TestParseIndex(t *testing.T) {
}
for i, s := range scenarios {
result := dbutils.ParseIndex(s.index)
t.Run(fmt.Sprintf("scenario_%d", i), func(t *testing.T) {
result := dbutils.ParseIndex(s.index)
resultRaw, err := json.Marshal(result)
if err != nil {
t.Fatalf("[%d] %v", i, err)
}
resultRaw, err := json.Marshal(result)
if err != nil {
t.Fatalf("Faild to marshalize parse result: %v", err)
}
expectedRaw, err := json.Marshal(s.expected)
if err != nil {
t.Fatalf("[%d] %v", i, err)
}
expectedRaw, err := json.Marshal(s.expected)
if err != nil {
t.Fatalf("Failed to marshalize expected index: %v", err)
}
if !bytes.Equal(resultRaw, expectedRaw) {
t.Errorf("[%d] Expected \n%s \ngot \n%s", i, expectedRaw, resultRaw)
}
if !bytes.Equal(resultRaw, expectedRaw) {
t.Errorf("Expected \n%s \ngot \n%s", expectedRaw, resultRaw)
}
})
}
}
@ -146,11 +149,12 @@ func TestIndexIsValid(t *testing.T) {
}
for _, s := range scenarios {
result := s.index.IsValid()
if result != s.expected {
t.Errorf("[%s] Expected %v, got %v", s.name, s.expected, result)
}
t.Run(s.name, func(t *testing.T) {
result := s.index.IsValid()
if result != s.expected {
t.Fatalf("Expected %v, got %v", s.expected, result)
}
})
}
}
@ -218,10 +222,93 @@ func TestIndexBuild(t *testing.T) {
}
for _, s := range scenarios {
result := s.index.Build()
if result != s.expected {
t.Errorf("[%s] Expected \n%v \ngot \n%v", s.name, s.expected, result)
}
t.Run(s.name, func(t *testing.T) {
result := s.index.Build()
if result != s.expected {
t.Fatalf("Expected \n%v \ngot \n%v", s.expected, result)
}
})
}
}
// TestHasSingleColumnUniqueIndex verifies the single column unique index
// detection for various index definitions (fixes the "columnd" scenario
// name typo).
func TestHasSingleColumnUniqueIndex(t *testing.T) {
	scenarios := []struct {
		name     string
		column   string
		indexes  []string
		expected bool
	}{
		{
			"empty indexes",
			"test",
			nil,
			false,
		},
		{
			"empty column",
			"",
			[]string{
				"CREATE UNIQUE INDEX `index1` ON `example` (`test`)",
			},
			false,
		},
		{
			"mismatched column",
			"test",
			[]string{
				"CREATE UNIQUE INDEX `index1` ON `example` (`test2`)",
			},
			false,
		},
		{
			"non unique index",
			"test",
			[]string{
				"CREATE INDEX `index1` ON `example` (`test`)",
			},
			false,
		},
		{
			"matching column and unique index",
			"test",
			[]string{
				"CREATE UNIQUE INDEX `index1` ON `example` (`test`)",
			},
			true,
		},
		{
			"multiple columns",
			"test",
			[]string{
				"CREATE UNIQUE INDEX `index1` ON `example` (`test`, `test2`)",
			},
			false,
		},
		{
			"multiple indexes",
			"test",
			[]string{
				"CREATE UNIQUE INDEX `index1` ON `example` (`test`, `test2`)",
				"CREATE UNIQUE INDEX `index2` ON `example` (`test`)",
			},
			true,
		},
		{
			// WHERE statements are ignored by the check
			"partial unique index",
			"test",
			[]string{
				"CREATE UNIQUE INDEX `index` ON `example` (`test`) where test != ''",
			},
			true,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			result := dbutils.HasSingleColumnUniqueIndex(s.column, s.indexes)
			if result != s.expected {
				t.Fatalf("Expected %v got %v", s.expected, result)
			}
		})
	}
}

47
tools/dbutils/json.go Normal file
View File

@ -0,0 +1,47 @@
package dbutils
import (
"fmt"
"strings"
)
// JsonEach returns a JSON_EACH SQLite string expression with
// some normalizations for non-json columns (non-json values are
// wrapped in a single element json array).
func JsonEach(column string) string {
	// %[1]s references the same column argument in every placeholder
	const expr = `json_each(CASE WHEN json_valid([[%[1]s]]) THEN [[%[1]s]] ELSE json_array([[%[1]s]]) END)`

	return fmt.Sprintf(expr, column)
}
// JsonArrayLength returns a JSON_ARRAY_LENGTH SQLite string expression
// with some normalizations for non-json columns.
//
// It works with both json and non-json column values.
//
// Returns 0 for empty string or NULL column values.
func JsonArrayLength(column string) string {
	// %[1]s references the same column argument in every placeholder
	const expr = `json_array_length(CASE WHEN json_valid([[%[1]s]]) THEN [[%[1]s]] ELSE (CASE WHEN [[%[1]s]] = '' OR [[%[1]s]] IS NULL THEN json_array() ELSE json_array([[%[1]s]]) END) END)`

	return fmt.Sprintf(expr, column)
}
// JsonExtract returns a JSON_EXTRACT SQLite string expression with
// some normalizations for non-json columns.
//
// Paths not starting with an array notation ("[") are automatically
// prefixed with a dot so that plain key paths like "a.b" resolve to "$.a.b".
func JsonExtract(column string, path string) string {
	if path != "" && !strings.HasPrefix(path, "[") {
		path = "." + path
	}

	// note: the extra object wrapping is needed to workaround the cases
	// where a json_extract is used with non-json columns.
	// (%[1]s is the column, %[2]s is the normalized path)
	return fmt.Sprintf(
		"(CASE WHEN json_valid([[%[1]s]]) THEN JSON_EXTRACT([[%[1]s]], '$%[2]s') ELSE JSON_EXTRACT(json_object('pb', [[%[1]s]]), '$.pb%[2]s') END)",
		column, path,
	)
}

View File

@ -0,0 +1,66 @@
package dbutils_test
import (
"testing"
"github.com/pocketbase/pocketbase/tools/dbutils"
)
// TestJsonEach checks the generated json_each expression for a sample column.
func TestJsonEach(t *testing.T) {
	const expected = "json_each(CASE WHEN json_valid([[a.b]]) THEN [[a.b]] ELSE json_array([[a.b]]) END)"

	if result := dbutils.JsonEach("a.b"); result != expected {
		t.Fatalf("Expected\n%v\ngot\n%v", expected, result)
	}
}
// TestJsonArrayLength checks the generated json_array_length expression
// for a sample column.
func TestJsonArrayLength(t *testing.T) {
	const expected = "json_array_length(CASE WHEN json_valid([[a.b]]) THEN [[a.b]] ELSE (CASE WHEN [[a.b]] = '' OR [[a.b]] IS NULL THEN json_array() ELSE json_array([[a.b]]) END) END)"

	if result := dbutils.JsonArrayLength("a.b"); result != expected {
		t.Fatalf("Expected\n%v\ngot\n%v", expected, result)
	}
}
// TestJsonExtract checks the generated JSON_EXTRACT expression for
// empty, array-leading and key-leading json paths.
func TestJsonExtract(t *testing.T) {
	scenarios := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "empty path",
			column:   "a.b",
			path:     "",
			expected: "(CASE WHEN json_valid([[a.b]]) THEN JSON_EXTRACT([[a.b]], '$') ELSE JSON_EXTRACT(json_object('pb', [[a.b]]), '$.pb') END)",
		},
		{
			name:     "starting with array index",
			column:   "a.b",
			path:     "[1].a[2]",
			expected: "(CASE WHEN json_valid([[a.b]]) THEN JSON_EXTRACT([[a.b]], '$[1].a[2]') ELSE JSON_EXTRACT(json_object('pb', [[a.b]]), '$.pb[1].a[2]') END)",
		},
		{
			name:     "starting with key",
			column:   "a.b",
			path:     "a.b[2].c",
			expected: "(CASE WHEN json_valid([[a.b]]) THEN JSON_EXTRACT([[a.b]], '$.a.b[2].c') ELSE JSON_EXTRACT(json_object('pb', [[a.b]]), '$.pb.a.b[2].c') END)",
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if got := dbutils.JsonExtract(s.column, s.path); got != s.expected {
				t.Fatalf("Expected\n%v\ngot\n%v", s.expected, got)
			}
		})
	}
}