package migratecmd

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/pocketbase/pocketbase/models"
)

const (
	TemplateLangJS = "js"
	TemplateLangGo = "go"

	// note: this usually should be configurable similar to the jsvm plugin,
	// but for simplicity it is kept static since users can easily change the
	// reference path if they use a custom dirs structure
	jsTypesDirective = `/// <reference path="../pb_data/types.d.ts" />` + "\n"
)

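// emptyTemplateErr is returned by the diff templates when there are no
// changes between the compared collection states.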
var emptyTemplateErr = errors.New("empty template")

// -------------------------------------------------------------------
// JavaScript templates
// -------------------------------------------------------------------

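// jsBlankTemplate returns a minimal JS migration scaffold with empty up and down callbacks.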
func (p *plugin) jsBlankTemplate() (string, error) {
	const template = jsTypesDirective + `migrate((db) => {
  // add up queries...
}, (db) => {
  // add down queries...
})
`

	return template, nil
}

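// jsSnapshotTemplate returns a JS migration that imports the provided
// collections snapshot on "up" and is a no-op on "down".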
func (p *plugin) jsSnapshotTemplate(collections []*models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collections, "  ", "  ")
	if err != nil {
		return "", fmt.Errorf("failed to serialize collections list: %w", err)
	}

	const template = jsTypesDirective + `migrate((db) => {
  const snapshot = %s;

  const collections = snapshot.map((item) => new Collection(item));

  return Dao(db).importCollections(collections, true, null);
}, (db) => {
  return null;
})
`

	return fmt.Sprintf(template, string(jsonData)), nil
}

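// jsCreateTemplate returns a JS migration that creates the provided collection
// on "up" and deletes it on "down".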
func (p *plugin) jsCreateTemplate(collection *models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collection, "  ", "  ")
	if err != nil {
		return "", fmt.Errorf("failed to serialize the collection: %w", err)
	}

	const template = jsTypesDirective + `migrate((db) => {
  const collection = new Collection(%s);

  return Dao(db).saveCollection(collection);
}, (db) => {
  const dao = new Dao(db);
  const collection = dao.findCollectionByNameOrId(%q);

  return dao.deleteCollection(collection);
})
`

	return fmt.Sprintf(template, string(jsonData), collection.Id), nil
}

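// jsDeleteTemplate returns a JS migration that deletes the provided collection
// on "up" and recreates it from its serialized state on "down".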
func (p *plugin) jsDeleteTemplate(collection *models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collection, "  ", "  ")
	if err != nil {
		return "", fmt.Errorf("failed to serialize the collection: %w", err)
	}

	const template = jsTypesDirective + `migrate((db) => {
  const dao = new Dao(db);
  const collection = dao.findCollectionByNameOrId(%q);

  return dao.deleteCollection(collection);
}, (db) => {
  const collection = new Collection(%s);

  return Dao(db).saveCollection(collection);
})
`

	return fmt.Sprintf(template, collection.Id, string(jsonData)), nil
}

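// jsDiffTemplate returns a JS migration with the changes (name, type, system flag,
// rules, options, indexes and schema fields) between the old and new collection states.
// If only one of the two states is provided it falls back to the create or delete template.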
func (p *plugin) jsDiffTemplate(new *models.Collection, old *models.Collection) (string, error) {
	if new == nil && old == nil {
		return "", errors.New("the diff template requires at least one of the collections to be non-nil")
	}

	if new == nil {
		return p.jsDeleteTemplate(old)
	}

	if old == nil {
		return p.jsCreateTemplate(new)
	}

	upParts := []string{}
	downParts := []string{}
	varName := "collection"

	if old.Name != new.Name {
		upParts = append(upParts, fmt.Sprintf("%s.name = %q", varName, new.Name))
		downParts = append(downParts, fmt.Sprintf("%s.name = %q", varName, old.Name))
	}

	if old.Type != new.Type {
		upParts = append(upParts, fmt.Sprintf("%s.type = %q", varName, new.Type))
		downParts = append(downParts, fmt.Sprintf("%s.type = %q", varName, old.Type))
	}

	if old.System != new.System {
		upParts = append(upParts, fmt.Sprintf("%s.system = %t", varName, new.System))
		downParts = append(downParts, fmt.Sprintf("%s.system = %t", varName, old.System))
	}

	// ---
	// note: strconv.Quote is used because %q converts the rule operators in unicode char codes
	// ---

	formatRule := func(typ string, rule *string) string {
		if rule == nil {
			return fmt.Sprintf("%s.%sRule = null", varName, typ)
		}

		return fmt.Sprintf("%s.%sRule = %s", varName, typ, strconv.Quote(*rule))
	}

	if old.ListRule != new.ListRule {
		oldRule := formatRule("list", old.ListRule)
		newRule := formatRule("list", new.ListRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.ViewRule != new.ViewRule {
		oldRule := formatRule("view", old.ViewRule)
		newRule := formatRule("view", new.ViewRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.CreateRule != new.CreateRule {
		oldRule := formatRule("create", old.CreateRule)
		newRule := formatRule("create", new.CreateRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.UpdateRule != new.UpdateRule {
		oldRule := formatRule("update", old.UpdateRule)
		newRule := formatRule("update", new.UpdateRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.DeleteRule != new.DeleteRule {
		oldRule := formatRule("delete", old.DeleteRule)
		newRule := formatRule("delete", new.DeleteRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	// Options
	rawNewOptions, err := marhshalWithoutEscape(new.Options, "  ", "  ")
	if err != nil {
		return "", err
	}
	rawOldOptions, err := marhshalWithoutEscape(old.Options, "  ", "  ")
	if err != nil {
		return "", err
	}
	if !bytes.Equal(rawNewOptions, rawOldOptions) {
		upParts = append(upParts, fmt.Sprintf("%s.options = %s", varName, rawNewOptions))
		downParts = append(downParts, fmt.Sprintf("%s.options = %s", varName, rawOldOptions))
	}

	// Indexes
	rawNewIndexes, err := marhshalWithoutEscape(new.Indexes, "  ", "  ")
	if err != nil {
		return "", err
	}
	rawOldIndexes, err := marhshalWithoutEscape(old.Indexes, "  ", "  ")
	if err != nil {
		return "", err
	}
	if !bytes.Equal(rawNewIndexes, rawOldIndexes) {
		upParts = append(upParts, fmt.Sprintf("%s.indexes = %s", varName, rawNewIndexes))
		downParts = append(downParts, fmt.Sprintf("%s.indexes = %s", varName, rawOldIndexes))
	}

	// ensure new line between regular and collection fields
	if len(upParts) > 0 {
		upParts[len(upParts)-1] += "\n"
	}
	if len(downParts) > 0 {
		downParts[len(downParts)-1] += "\n"
	}

	// Schema
	// -----------------------------------------------------------------

	// deleted fields
	for _, oldField := range old.Schema.Fields() {
		if new.Schema.GetFieldById(oldField.Id) != nil {
			continue // exist
		}

		rawOldField, err := marhshalWithoutEscape(oldField, "  ", "  ")
		if err != nil {
			return "", err
		}

		upParts = append(upParts, "// remove")
		upParts = append(upParts, fmt.Sprintf("%s.schema.removeField(%q)\n", varName, oldField.Id))

		downParts = append(downParts, "// add")
		downParts = append(downParts, fmt.Sprintf("%s.schema.addField(new SchemaField(%s))\n", varName, rawOldField))
	}

	// created fields
	for _, newField := range new.Schema.Fields() {
		if old.Schema.GetFieldById(newField.Id) != nil {
			continue // exist
		}

		rawNewField, err := marhshalWithoutEscape(newField, "  ", "  ")
		if err != nil {
			return "", err
		}

		upParts = append(upParts, "// add")
		upParts = append(upParts, fmt.Sprintf("%s.schema.addField(new SchemaField(%s))\n", varName, rawNewField))

		downParts = append(downParts, "// remove")
		downParts = append(downParts, fmt.Sprintf("%s.schema.removeField(%q)\n", varName, newField.Id))
	}

	// modified fields
	for _, newField := range new.Schema.Fields() {
		oldField := old.Schema.GetFieldById(newField.Id)
		if oldField == nil {
			continue
		}

		rawNewField, err := marhshalWithoutEscape(newField, "  ", "  ")
		if err != nil {
			return "", err
		}

		rawOldField, err := marhshalWithoutEscape(oldField, "  ", "  ")
		if err != nil {
			return "", err
		}

		if bytes.Equal(rawNewField, rawOldField) {
			continue // no change
		}

		upParts = append(upParts, "// update")
		upParts = append(upParts, fmt.Sprintf("%s.schema.addField(new SchemaField(%s))\n", varName, rawNewField))

		downParts = append(downParts, "// update")
		downParts = append(downParts, fmt.Sprintf("%s.schema.addField(new SchemaField(%s))\n", varName, rawOldField))
	}

	// -----------------------------------------------------------------

	if len(upParts) == 0 && len(downParts) == 0 {
		return "", emptyTemplateErr
	}

	up := strings.Join(upParts, "\n  ")
	down := strings.Join(downParts, "\n  ")

	const template = jsTypesDirective + `migrate((db) => {
  const dao = new Dao(db)
  const collection = dao.findCollectionByNameOrId(%q)

  %s

  return dao.saveCollection(collection)
}, (db) => {
  const dao = new Dao(db)
  const collection = dao.findCollectionByNameOrId(%q)

  %s

  return dao.saveCollection(collection)
})
`

	return fmt.Sprintf(
		template,
		old.Id, strings.TrimSpace(up),
		new.Id, strings.TrimSpace(down),
	), nil
}

// -------------------------------------------------------------------
// Go templates
// -------------------------------------------------------------------

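// goBlankTemplate returns a minimal Go migration scaffold with empty up and down functions.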
func (p *plugin) goBlankTemplate() (string, error) {
	const template = `package %s

import (
	"github.com/pocketbase/dbx"
	m "github.com/pocketbase/pocketbase/migrations"
)

func init() {
	m.Register(func(db dbx.Builder) error {
		// add up queries...

		return nil
	}, func(db dbx.Builder) error {
		// add down queries...

		return nil
	})
}
`

	return fmt.Sprintf(template, filepath.Base(p.config.Dir)), nil
}

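// goSnapshotTemplate returns a Go migration that imports the provided
// collections snapshot on "up" and is a no-op on "down".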
func (p *plugin) goSnapshotTemplate(collections []*models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collections, "\t\t", "\t")
	if err != nil {
		return "", fmt.Errorf("failed to serialize collections list: %w", err)
	}

	const template = `package %s

import (
	"encoding/json"

	"github.com/pocketbase/dbx"
	"github.com/pocketbase/pocketbase/daos"
	m "github.com/pocketbase/pocketbase/migrations"
	"github.com/pocketbase/pocketbase/models"
)

func init() {
	m.Register(func(db dbx.Builder) error {
		jsonData := ` + "`%s`" + `

		collections := []*models.Collection{}
		if err := json.Unmarshal([]byte(jsonData), &collections); err != nil {
			return err
		}

		return daos.New(db).ImportCollections(collections, true, nil)
	}, func(db dbx.Builder) error {
		return nil
	})
}
`
	return fmt.Sprintf(
		template,
		filepath.Base(p.config.Dir),
		escapeBacktick(string(jsonData)),
	), nil
}

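// goCreateTemplate returns a Go migration that creates the provided collection
// on "up" and deletes it on "down".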
func (p *plugin) goCreateTemplate(collection *models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collection, "\t\t", "\t")
	if err != nil {
		return "", fmt.Errorf("failed to serialize the collection: %w", err)
	}

	const template = `package %s

import (
	"encoding/json"

	"github.com/pocketbase/dbx"
	"github.com/pocketbase/pocketbase/daos"
	m "github.com/pocketbase/pocketbase/migrations"
	"github.com/pocketbase/pocketbase/models"
)

func init() {
	m.Register(func(db dbx.Builder) error {
		jsonData := ` + "`%s`" + `

		collection := &models.Collection{}
		if err := json.Unmarshal([]byte(jsonData), &collection); err != nil {
			return err
		}

		return daos.New(db).SaveCollection(collection)
	}, func(db dbx.Builder) error {
		dao := daos.New(db)

		collection, err := dao.FindCollectionByNameOrId(%q)
		if err != nil {
			return err
		}

		return dao.DeleteCollection(collection)
	})
}
`

	return fmt.Sprintf(
		template,
		filepath.Base(p.config.Dir),
		escapeBacktick(string(jsonData)),
		collection.Id,
	), nil
}

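// goDeleteTemplate returns a Go migration that deletes the provided collection
// on "up" and recreates it from its serialized state on "down".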
func (p *plugin) goDeleteTemplate(collection *models.Collection) (string, error) {
	jsonData, err := marhshalWithoutEscape(collection, "\t\t", "\t")
	if err != nil {
		return "", fmt.Errorf("failed to serialize the collection: %w", err)
	}

	const template = `package %s

import (
	"encoding/json"

	"github.com/pocketbase/dbx"
	"github.com/pocketbase/pocketbase/daos"
	m "github.com/pocketbase/pocketbase/migrations"
	"github.com/pocketbase/pocketbase/models"
)

func init() {
	m.Register(func(db dbx.Builder) error {
		dao := daos.New(db)

		collection, err := dao.FindCollectionByNameOrId(%q)
		if err != nil {
			return err
		}

		return dao.DeleteCollection(collection)
	}, func(db dbx.Builder) error {
		jsonData := ` + "`%s`" + `

		collection := &models.Collection{}
		if err := json.Unmarshal([]byte(jsonData), &collection); err != nil {
			return err
		}

		return daos.New(db).SaveCollection(collection)
	})
}
`

	return fmt.Sprintf(
		template,
		filepath.Base(p.config.Dir),
		collection.Id,
		escapeBacktick(string(jsonData)),
	), nil
}

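// goDiffTemplate is the Go counterpart of jsDiffTemplate: it returns a Go migration
// with the changes between the old and new collection states and generates only the
// import statements actually needed by the emitted code.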
func (p *plugin) goDiffTemplate(new *models.Collection, old *models.Collection) (string, error) {
	if new == nil && old == nil {
		return "", errors.New("the diff template requires at least one of the collections to be non-nil")
	}

	if new == nil {
		return p.goDeleteTemplate(old)
	}

	if old == nil {
		return p.goCreateTemplate(new)
	}

	upParts := []string{}
	downParts := []string{}
	varName := "collection"
	if old.Name != new.Name {
		upParts = append(upParts, fmt.Sprintf("%s.Name = %q\n", varName, new.Name))
		downParts = append(downParts, fmt.Sprintf("%s.Name = %q\n", varName, old.Name))
	}

	if old.Type != new.Type {
		upParts = append(upParts, fmt.Sprintf("%s.Type = %q\n", varName, new.Type))
		downParts = append(downParts, fmt.Sprintf("%s.Type = %q\n", varName, old.Type))
	}

	if old.System != new.System {
		upParts = append(upParts, fmt.Sprintf("%s.System = %t\n", varName, new.System))
		downParts = append(downParts, fmt.Sprintf("%s.System = %t\n", varName, old.System))
	}

	// ---
	// note: strconv.Quote is used because %q converts the rule operators in unicode char codes
	// ---

	formatRule := func(typ string, rule *string) string {
		if rule == nil {
			return fmt.Sprintf("%s.%sRule = nil\n", varName, typ)
		}

		return fmt.Sprintf("%s.%sRule = types.Pointer(%s)\n", varName, typ, strconv.Quote(*rule))
	}

	if old.ListRule != new.ListRule {
		oldRule := formatRule("List", old.ListRule)
		newRule := formatRule("List", new.ListRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.ViewRule != new.ViewRule {
		oldRule := formatRule("View", old.ViewRule)
		newRule := formatRule("View", new.ViewRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.CreateRule != new.CreateRule {
		oldRule := formatRule("Create", old.CreateRule)
		newRule := formatRule("Create", new.CreateRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.UpdateRule != new.UpdateRule {
		oldRule := formatRule("Update", old.UpdateRule)
		newRule := formatRule("Update", new.UpdateRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	if old.DeleteRule != new.DeleteRule {
		oldRule := formatRule("Delete", old.DeleteRule)
		newRule := formatRule("Delete", new.DeleteRule)

		if oldRule != newRule {
			upParts = append(upParts, newRule)
			downParts = append(downParts, oldRule)
		}
	}

	// Options
	rawNewOptions, err := marhshalWithoutEscape(new.Options, "\t\t", "\t")
	if err != nil {
		return "", err
	}
	rawOldOptions, err := marhshalWithoutEscape(old.Options, "\t\t", "\t")
	if err != nil {
		return "", err
	}
	if !bytes.Equal(rawNewOptions, rawOldOptions) {
		upParts = append(upParts, "options := map[string]any{}")
		upParts = append(upParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), &options)", escapeBacktick(string(rawNewOptions))))
		upParts = append(upParts, fmt.Sprintf("%s.SetOptions(options)\n", varName))
		// ---
		downParts = append(downParts, "options := map[string]any{}")
		downParts = append(downParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), &options)", escapeBacktick(string(rawOldOptions))))
		downParts = append(downParts, fmt.Sprintf("%s.SetOptions(options)\n", varName))
	}

	// Indexes
	rawNewIndexes, err := marhshalWithoutEscape(new.Indexes, "\t\t", "\t")
	if err != nil {
		return "", err
	}
	rawOldIndexes, err := marhshalWithoutEscape(old.Indexes, "\t\t", "\t")
	if err != nil {
		return "", err
	}
	if !bytes.Equal(rawNewIndexes, rawOldIndexes) {
		upParts = append(upParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), &%s.Indexes)\n", escapeBacktick(string(rawNewIndexes)), varName))
		// ---
		downParts = append(downParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), &%s.Indexes)\n", escapeBacktick(string(rawOldIndexes)), varName))
	}

	// Schema
	// ---------------------------------------------------------------
	// deleted fields
	for _, oldField := range old.Schema.Fields() {
		if new.Schema.GetFieldById(oldField.Id) != nil {
			continue // exist
		}

		rawOldField, err := marhshalWithoutEscape(oldField, "\t\t", "\t")
		if err != nil {
			return "", err
		}

		fieldVar := fmt.Sprintf("del_%s", oldField.Name)

		upParts = append(upParts, "// remove")
		upParts = append(upParts, fmt.Sprintf("%s.Schema.RemoveField(%q)\n", varName, oldField.Id))

		downParts = append(downParts, "// add")
		downParts = append(downParts, fmt.Sprintf("%s := &schema.SchemaField{}", fieldVar))
		downParts = append(downParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), %s)", escapeBacktick(string(rawOldField)), fieldVar))
		downParts = append(downParts, fmt.Sprintf("%s.Schema.AddField(%s)\n", varName, fieldVar))
	}

	// created fields
	for _, newField := range new.Schema.Fields() {
		if old.Schema.GetFieldById(newField.Id) != nil {
			continue // exist
		}

		rawNewField, err := marhshalWithoutEscape(newField, "\t\t", "\t")
		if err != nil {
			return "", err
		}

		fieldVar := fmt.Sprintf("new_%s", newField.Name)

		upParts = append(upParts, "// add")
		upParts = append(upParts, fmt.Sprintf("%s := &schema.SchemaField{}", fieldVar))
		upParts = append(upParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), %s)", escapeBacktick(string(rawNewField)), fieldVar))
		upParts = append(upParts, fmt.Sprintf("%s.Schema.AddField(%s)\n", varName, fieldVar))

		downParts = append(downParts, "// remove")
		downParts = append(downParts, fmt.Sprintf("%s.Schema.RemoveField(%q)\n", varName, newField.Id))
	}

	// modified fields
	for _, newField := range new.Schema.Fields() {
		oldField := old.Schema.GetFieldById(newField.Id)
		if oldField == nil {
			continue
		}

		rawNewField, err := marhshalWithoutEscape(newField, "\t\t", "\t")
		if err != nil {
			return "", err
		}

		rawOldField, err := marhshalWithoutEscape(oldField, "\t\t", "\t")
		if err != nil {
			return "", err
		}

		if bytes.Equal(rawNewField, rawOldField) {
			continue // no change
		}

		fieldVar := fmt.Sprintf("edit_%s", newField.Name)

		upParts = append(upParts, "// update")
		upParts = append(upParts, fmt.Sprintf("%s := &schema.SchemaField{}", fieldVar))
		upParts = append(upParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), %s)", escapeBacktick(string(rawNewField)), fieldVar))
		upParts = append(upParts, fmt.Sprintf("%s.Schema.AddField(%s)\n", varName, fieldVar))

		downParts = append(downParts, "// update")
		downParts = append(downParts, fmt.Sprintf("%s := &schema.SchemaField{}", fieldVar))
		downParts = append(downParts, fmt.Sprintf("json.Unmarshal([]byte(`%s`), %s)", escapeBacktick(string(rawOldField)), fieldVar))
		downParts = append(downParts, fmt.Sprintf("%s.Schema.AddField(%s)\n", varName, fieldVar))
	}
	// ---------------------------------------------------------------

	if len(upParts) == 0 && len(downParts) == 0 {
		return "", emptyTemplateErr
	}

	up := strings.Join(upParts, "\n\t\t")
	down := strings.Join(downParts, "\n\t\t")
	combined := up + down

	// generate imports
	// ---
	var imports string

	if strings.Contains(combined, "json.Unmarshal(") ||
		strings.Contains(combined, "json.Marshal(") {
		imports += "\n\t\"encoding/json\"\n"
	}

	imports += "\n\t\"github.com/pocketbase/dbx\""
	imports += "\n\t\"github.com/pocketbase/pocketbase/daos\""
	imports += "\n\tm \"github.com/pocketbase/pocketbase/migrations\""

	if strings.Contains(combined, "schema.SchemaField{") {
		imports += "\n\t\"github.com/pocketbase/pocketbase/models/schema\""
	}

	if strings.Contains(combined, "types.Pointer(") {
		imports += "\n\t\"github.com/pocketbase/pocketbase/tools/types\""
	}
	// ---

	const template = `package %s

import (%s
)

func init() {
	m.Register(func(db dbx.Builder) error {
		dao := daos.New(db)

		collection, err := dao.FindCollectionByNameOrId(%q)
		if err != nil {
			return err
		}

		%s

		return dao.SaveCollection(collection)
	}, func(db dbx.Builder) error {
		dao := daos.New(db)

		collection, err := dao.FindCollectionByNameOrId(%q)
		if err != nil {
			return err
		}

		%s

		return dao.SaveCollection(collection)
	})
}
`

	return fmt.Sprintf(
		template,
		filepath.Base(p.config.Dir),
		imports,
		old.Id, strings.TrimSpace(up),
		new.Id, strings.TrimSpace(down),
	), nil
}

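// marhshalWithoutEscape marshals v with json.MarshalIndent and then reverts the
// default \uXXXX escaping (e.g. of <, > and &) so that rule operators stay
// readable in the generated migration files.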
func marhshalWithoutEscape(v any, prefix string, indent string) ([]byte, error) {
	raw, err := json.MarshalIndent(v, prefix, indent)
	if err != nil {
		return nil, err
	}

	// unescape escaped unicode characters
	unescaped, err := strconv.Unquote(strings.ReplaceAll(strconv.Quote(string(raw)), `\\u`, `\u`))
	if err != nil {
		return nil, err
	}

	return []byte(unescaped), nil
}

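// escapeBacktick replaces every backtick in v with the concatenation expression
// ` + "`" + ` so that the value can be embedded inside a Go raw string literal
// in the generated migration.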
func escapeBacktick(v string) string {
	return strings.ReplaceAll(v, "`", "` + \"`\" + `")
}
|