You've already forked golang-saas-starter-kit
mirror of
https://github.com/raseels-repos/golang-saas-starter-kit.git
synced 2025-08-10 22:41:25 +02:00
moved example-project files back a directory
This commit is contained in:
149
tools/truss/cmd/dbtable2crud/db.go
Normal file
149
tools/truss/cmd/dbtable2crud/db.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package dbtable2crud
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/lib/pq"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// psqlColumn holds the definition of a single column of a PostgreSQL table
// as returned by descTable's catalog query. Pointer fields and pq.Int64Array
// fields correspond to catalog columns that may be NULL (e.g. a column that
// is not part of any constraint has no constraint name).
type psqlColumn struct {
	Table  string // table name (pg_class.relname)
	Column string // column name (pg_attribute.attname)
	// ColumnId is the 1-based attribute number of the column (pg_attribute.attnum).
	ColumnId int64
	NotNull  bool // true when the column has a NOT NULL constraint
	// DataTypeFull is the formatted type including modifiers, e.g. "varchar(36)".
	DataTypeFull string
	// DataTypeName is the bare type name from pg_type, e.g. "varchar".
	DataTypeName string
	// DataTypeLength is the declared length for sized types; nil when not applicable.
	DataTypeLength *int
	// NumericPrecision/NumericScale are set only for "numeric" columns.
	NumericPrecision *int
	NumericScale     *int
	IsPrimaryKey     bool
	PrimaryKeyName   *string // constraint name when IsPrimaryKey
	IsUniqueKey      bool
	UniqueKeyName    *string // constraint name when IsUniqueKey
	IsForeignKey     bool
	ForeignKeyName   *string // constraint name when IsForeignKey
	// ForeignKeyColumnId lists the referenced columns (pg_constraint.confkey).
	ForeignKeyColumnId pq.Int64Array
	ForeignKeyTable    *string // referenced table name when IsForeignKey
	// ForeignKeyLocalColumnId lists the local constrained columns (pg_constraint.conkey).
	ForeignKeyLocalColumnId pq.Int64Array
	// DefaultFull is the raw default expression, e.g. "'active'::project_status_t".
	DefaultFull *string
	// DefaultValue is DefaultFull with the trailing "::type" cast stripped.
	DefaultValue *string
	IsEnum       bool    // true when the column type is a Postgres enum
	EnumTypeId   *string // oid of the enum type when IsEnum
	// EnumValues lists the allowed enum labels, in enum sort order.
	EnumValues []string
}
|
||||
|
||||
// descTable lists all the columns for a table.
|
||||
func descTable(db *sqlx.DB, dbName, dbTable string) ([]psqlColumn, error) {
|
||||
|
||||
queryStr := fmt.Sprintf(`SELECT
|
||||
c.relname as table,
|
||||
f.attname as column,
|
||||
f.attnum as columnId,
|
||||
f.attnotnull as not_null,
|
||||
pg_catalog.format_type(f.atttypid,f.atttypmod) AS data_type_full,
|
||||
t.typname AS data_type_name,
|
||||
CASE WHEN f.atttypmod >= 0 AND t.typname <> 'numeric'THEN (f.atttypmod - 4) --first 4 bytes are for storing actual length of data
|
||||
END AS data_type_length,
|
||||
CASE WHEN t.typname = 'numeric' THEN (((f.atttypmod - 4) >> 16) & 65535)
|
||||
END AS numeric_precision,
|
||||
CASE WHEN t.typname = 'numeric' THEN ((f.atttypmod - 4)& 65535 )
|
||||
END AS numeric_scale,
|
||||
CASE WHEN p.contype = 'p' THEN true ELSE false
|
||||
END AS is_primary_key,
|
||||
CASE WHEN p.contype = 'p' THEN p.conname
|
||||
END AS primary_key_name,
|
||||
CASE WHEN p.contype = 'u' THEN true ELSE false
|
||||
END AS is_unique_key,
|
||||
CASE WHEN p.contype = 'u' THEN p.conname
|
||||
END AS unique_key_name,
|
||||
CASE WHEN p.contype = 'f' THEN true ELSE false
|
||||
END AS is_foreign_key,
|
||||
CASE WHEN p.contype = 'f' THEN p.conname
|
||||
END AS foreignkey_name,
|
||||
CASE WHEN p.contype = 'f' THEN p.confkey
|
||||
END AS foreign_key_columnid,
|
||||
CASE WHEN p.contype = 'f' THEN g.relname
|
||||
END AS foreign_key_table,
|
||||
CASE WHEN p.contype = 'f' THEN p.conkey
|
||||
END AS foreign_key_local_column_id,
|
||||
CASE WHEN f.atthasdef = 't' THEN d.adsrc
|
||||
END AS default_value,
|
||||
CASE WHEN t.typtype = 'e' THEN true ELSE false
|
||||
END AS is_enum,
|
||||
CASE WHEN t.typtype = 'e' THEN t.oid
|
||||
END AS enum_type_id
|
||||
FROM pg_attribute f
|
||||
JOIN pg_class c ON c.oid = f.attrelid
|
||||
JOIN pg_type t ON t.oid = f.atttypid
|
||||
LEFT JOIN pg_attrdef d ON d.adrelid = c.oid AND d.adnum = f.attnum
|
||||
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
LEFT JOIN pg_constraint p ON p.conrelid = c.oid AND f.attnum = ANY (p.conkey)
|
||||
LEFT JOIN pg_class AS g ON p.confrelid = g.oid
|
||||
WHERE c.relkind = 'r'::char
|
||||
AND f.attisdropped = false
|
||||
AND c.relname = '%s'
|
||||
AND f.attnum > 0
|
||||
ORDER BY f.attnum
|
||||
;`, dbTable) // AND n.nspname = '%s'
|
||||
|
||||
rows, err := db.Query(queryStr)
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "query - %s", queryStr)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// iterate over each row
|
||||
var resp []psqlColumn
|
||||
for rows.Next() {
|
||||
var c psqlColumn
|
||||
err = rows.Scan(&c.Table, &c.Column, &c.ColumnId, &c.NotNull, &c.DataTypeFull, &c.DataTypeName, &c.DataTypeLength, &c.NumericPrecision, &c.NumericScale, &c.IsPrimaryKey, &c.PrimaryKeyName, &c.IsUniqueKey, &c.UniqueKeyName, &c.IsForeignKey, &c.ForeignKeyName, &c.ForeignKeyColumnId, &c.ForeignKeyTable, &c.ForeignKeyLocalColumnId, &c.DefaultFull, &c.IsEnum, &c.EnumTypeId)
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "query - %s", queryStr)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if c.DefaultFull != nil {
|
||||
defaultValue := *c.DefaultFull
|
||||
|
||||
// "'active'::project_status_t"
|
||||
defaultValue = strings.Split(defaultValue, "::")[0]
|
||||
c.DefaultValue = &defaultValue
|
||||
}
|
||||
|
||||
resp = append(resp, c)
|
||||
}
|
||||
|
||||
for colIdx, dbCol := range resp {
|
||||
if !dbCol.IsEnum {
|
||||
continue
|
||||
}
|
||||
|
||||
queryStr := fmt.Sprintf(`SELECT e.enumlabel
|
||||
FROM pg_enum AS e
|
||||
WHERE e.enumtypid = '%s'
|
||||
ORDER BY e.enumsortorder`, *dbCol.EnumTypeId)
|
||||
|
||||
rows, err := db.Query(queryStr)
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "query - %s", queryStr)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
var v string
|
||||
err = rows.Scan(&v)
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "query - %s", queryStr)
|
||||
return nil, err
|
||||
}
|
||||
dbCol.EnumValues = append(dbCol.EnumValues, v)
|
||||
}
|
||||
|
||||
resp[colIdx] = dbCol
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
431
tools/truss/cmd/dbtable2crud/dbtable2crud.go
Normal file
431
tools/truss/cmd/dbtable2crud/dbtable2crud.go
Normal file
@@ -0,0 +1,431 @@
|
||||
package dbtable2crud
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"geeks-accelerator/oss/saas-starter-kit/example-project/internal/schema"
|
||||
"geeks-accelerator/oss/saas-starter-kit/example-project/tools/truss/internal/goparse"
|
||||
"github.com/dustin/go-humanize/english"
|
||||
"github.com/fatih/camelcase"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/sergi/go-diff/diffmatchpatch"
|
||||
)
|
||||
|
||||
// Run in the main entry point for the dbtable2crud cmd.
|
||||
func Run(db *sqlx.DB, log *log.Logger, dbName, dbTable, modelFile, modelName, templateDir, goSrcPath string, saveChanges bool) error {
|
||||
log.SetPrefix(log.Prefix() + " : dbtable2crud")
|
||||
|
||||
// Ensure the schema is up to date
|
||||
if err := schema.Migrate(db, log); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// When dbTable is empty, lower case the model name
|
||||
if dbTable == "" {
|
||||
dbTable = strings.Join(camelcase.Split(modelName), " ")
|
||||
dbTable = english.PluralWord(2, dbTable, "")
|
||||
dbTable = strings.Replace(dbTable, " ", "_", -1)
|
||||
dbTable = strings.ToLower(dbTable)
|
||||
}
|
||||
|
||||
// Parse the model file and load the specified model struct.
|
||||
model, err := parseModelFile(db, log, dbName, dbTable, modelFile, modelName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Basic lint of the model struct.
|
||||
err = validateModel(log, model)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tmplData := map[string]interface{}{
|
||||
"GoSrcPath": goSrcPath,
|
||||
}
|
||||
|
||||
// Update the model file with new or updated code.
|
||||
err = updateModel(log, model, modelFile, templateDir, tmplData, saveChanges)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Update the model crud file with new or updated code.
|
||||
err = updateModelCrud(db, log, dbName, dbTable, modelFile, modelName, templateDir, model, tmplData, saveChanges)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateModel performs a basic lint of the model struct to ensure
|
||||
// code gen output is correct.
|
||||
func validateModel(log *log.Logger, model *modelDef) error {
|
||||
for _, sf := range model.Fields {
|
||||
if sf.DbColumn == nil && sf.ColumnName != "-" {
|
||||
log.Printf("validateStruct : Unable to find struct field for db column %s\n", sf.ColumnName)
|
||||
}
|
||||
|
||||
var expectedType string
|
||||
switch sf.FieldName {
|
||||
case "ID":
|
||||
expectedType = "string"
|
||||
case "CreatedAt":
|
||||
expectedType = "time.Time"
|
||||
case "UpdatedAt":
|
||||
expectedType = "time.Time"
|
||||
case "ArchivedAt":
|
||||
expectedType = "pq.NullTime"
|
||||
}
|
||||
|
||||
if expectedType != "" && expectedType != sf.FieldType {
|
||||
log.Printf("validateStruct : Struct field %s should be of type %s not %s\n", sf.FieldName, expectedType, sf.FieldType)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// updateModel updates the parsed model file with the code produced by the
// "models.tmpl" template: existing objects (and their leading comments) are
// replaced in place, new objects are appended. When saveChanges is true the
// result is written back to modelFile; otherwise a colorized diff is printed.
func updateModel(log *log.Logger, model *modelDef, modelFile, templateDir string, tmplData map[string]interface{}, saveChanges bool) error {

	// Execute template and parse code to be used to compare against modelFile.
	tmplObjs, err := loadTemplateObjects(log, model, templateDir, "models.tmpl", tmplData)
	if err != nil {
		return err
	}

	// Store the current code as a string to produce a diff.
	curCode := model.String()

	// Comments/line breaks preceding a template object are buffered here and
	// emitted together with the object they belong to.
	objHeaders := []*goparse.GoObject{}

	for _, obj := range tmplObjs {
		if obj.Type == goparse.GoObjectType_Comment || obj.Type == goparse.GoObjectType_LineBreak {
			objHeaders = append(objHeaders, obj)
			continue
		}

		if model.HasType(obj.Name, obj.Type) {
			// The object already exists in the model file; replace it.
			cur := model.Objects().Get(obj.Name, obj.Type)

			newObjs := []*goparse.GoObject{}
			if len(objHeaders) > 0 {
				// Remove any comments and linebreaks before the existing object so updates can be added.
				removeObjs := []*goparse.GoObject{}
				for idx := cur.Index - 1; idx > 0; idx-- {
					prevObj := model.Objects().List()[idx]
					if prevObj.Type == goparse.GoObjectType_Comment || prevObj.Type == goparse.GoObjectType_LineBreak {
						removeObjs = append(removeObjs, prevObj)
					} else {
						break
					}
				}

				if len(removeObjs) > 0 {
					err := model.Objects().Remove(removeObjs...)
					if err != nil {
						err = errors.WithMessagef(err, "Failed to update object %s %s for %s", obj.Type, obj.Name, model.Name)
						return err
					}

					// Make sure the current index is correct.
					cur = model.Objects().Get(obj.Name, obj.Type)
				}

				// Append comments and line breaks before adding the object
				for _, c := range objHeaders {
					newObjs = append(newObjs, c)
				}
			}

			newObjs = append(newObjs, obj)

			// Do the object replacement.
			err := model.Objects().Replace(cur, newObjs...)
			if err != nil {
				err = errors.WithMessagef(err, "Failed to update object %s %s for %s", obj.Type, obj.Name, model.Name)
				return err
			}
		} else {
			// Append comments and line breaks before adding the object
			for _, c := range objHeaders {
				err := model.Objects().Add(c)
				if err != nil {
					err = errors.WithMessagef(err, "Failed to add object %s %s for %s", c.Type, c.Name, model.Name)
					return err
				}
			}

			err := model.Objects().Add(obj)
			if err != nil {
				err = errors.WithMessagef(err, "Failed to add object %s %s for %s", obj.Type, obj.Name, model.Name)
				return err
			}
		}

		// Reset the header buffer; the buffered comments were consumed above.
		objHeaders = []*goparse.GoObject{}
	}

	// Determine which additional imports the generated code requires.
	var hasEnum bool
	var hasPq bool
	for _, f := range model.Fields {
		if f.DbColumn != nil && f.DbColumn.IsEnum {
			hasEnum = true
		}
		if strings.HasPrefix(strings.Trim(f.FieldType, "*"), "pq.") {
			hasPq = true
		}
	}

	reqImports := []string{}
	if hasEnum {
		// Enum types generate Valuer/Scanner impls and custom validation.
		reqImports = append(reqImports, "database/sql/driver")
		reqImports = append(reqImports, "gopkg.in/go-playground/validator.v9")
		reqImports = append(reqImports, "github.com/pkg/errors")
	}

	if hasPq {
		reqImports = append(reqImports, "github.com/lib/pq")
	}

	for _, in := range reqImports {
		err := model.AddImport(goparse.GoImport{Name: in})
		if err != nil {
			err = errors.WithMessagef(err, "Failed to add import %s for %s", in, model.Name)
			return err
		}
	}

	if saveChanges {
		err = model.Save(modelFile)
		if err != nil {
			err = errors.WithMessagef(err, "Failed to save changes for %s to %s", model.Name, modelFile)
			return err
		}
	} else {
		// Produce a diff after the updates have been applied.
		dmp := diffmatchpatch.New()
		diffs := dmp.DiffMain(curCode, model.String(), true)
		fmt.Println(dmp.DiffPrettyText(diffs))
	}

	return nil
}
|
||||
|
||||
// updateModelCrud updates the generated CRUD code for the model: it collects
// the fields of every other struct in the base model file (request/response
// structs), exposes them to the templates as "StructFields", and then
// regenerates both the CRUD handlers file and its matching _test.go file
// alongside the model file.
func updateModelCrud(db *sqlx.DB, log *log.Logger, dbName, dbTable, modelFile, modelName, templateDir string, baseModel *modelDef, tmplData map[string]interface{}, saveChanges bool) error {

	// Load all the updated struct fields from the base model file.
	// The base model struct itself is skipped; only sibling structs are indexed.
	structFields := make(map[string]map[string]modelField)
	for _, obj := range baseModel.GoDocument.Objects().List() {
		if obj.Type != goparse.GoObjectType_Struct || obj.Name == baseModel.Name {
			continue
		}

		objFields, err := parseModelFields(baseModel.GoDocument, obj.Name, baseModel)
		if err != nil {
			return err
		}

		// Index the fields by field name for template lookups.
		structFields[obj.Name] = make(map[string]modelField)
		for _, f := range objFields {
			structFields[obj.Name][f.FieldName] = f
		}
	}

	// Append the struct fields to be used for template execution.
	if tmplData == nil {
		tmplData = make(map[string]interface{})
	}
	tmplData["StructFields"] = structFields

	// Get the dir to store crud methods and test files.
	modelDir := filepath.Dir(modelFile)

	// Process the CRUD handlers template and write to file.
	crudFilePath := filepath.Join(modelDir, FormatCamelLowerUnderscore(baseModel.Name)+".go")
	crudTmplFile := "model_crud.tmpl"
	err := updateModelCrudFile(db, log, dbName, dbTable, templateDir, crudFilePath, crudTmplFile, baseModel, tmplData, saveChanges)
	if err != nil {
		return err
	}

	// Process the CRUD test template and write to file.
	testFilePath := filepath.Join(modelDir, FormatCamelLowerUnderscore(baseModel.Name)+"_test.go")
	testTmplFile := "model_crud_test.tmpl"
	err = updateModelCrudFile(db, log, dbName, dbTable, templateDir, testFilePath, testTmplFile, baseModel, tmplData, saveChanges)
	if err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// updateModelCrudFile processes the input file.
|
||||
func updateModelCrudFile(db *sqlx.DB, log *log.Logger, dbName, dbTable, templateDir, crudFilePath, tmplFile string, baseModel *modelDef, tmplData map[string]interface{}, saveChanges bool) error {
|
||||
|
||||
// Execute template and parse code to be used to compare against modelFile.
|
||||
tmplObjs, err := loadTemplateObjects(log, baseModel, templateDir, tmplFile, tmplData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var crudDoc *goparse.GoDocument
|
||||
if _, err := os.Stat(crudFilePath); os.IsNotExist(err) {
|
||||
crudDoc, err = goparse.NewGoDocument(baseModel.Package)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// Parse the supplied model file.
|
||||
crudDoc, err = goparse.ParseFile(log, crudFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Store the current code as a string to produce a diff.
|
||||
curCode := crudDoc.String()
|
||||
|
||||
objHeaders := []*goparse.GoObject{}
|
||||
|
||||
for _, obj := range tmplObjs {
|
||||
if obj.Type == goparse.GoObjectType_Comment || obj.Type == goparse.GoObjectType_LineBreak {
|
||||
objHeaders = append(objHeaders, obj)
|
||||
continue
|
||||
}
|
||||
|
||||
if obj.Name == "" && (obj.Type == goparse.GoObjectType_Var || obj.Type == goparse.GoObjectType_Const) {
|
||||
var curDocObj *goparse.GoObject
|
||||
for _, subObj := range obj.Objects().List() {
|
||||
for _, do := range crudDoc.Objects().List() {
|
||||
if do.Name == "" && (do.Type == goparse.GoObjectType_Var || do.Type == goparse.GoObjectType_Const) {
|
||||
for _, subDocObj := range do.Objects().List() {
|
||||
if subDocObj.String() == subObj.String() && subObj.Type != goparse.GoObjectType_LineBreak {
|
||||
curDocObj = do
|
||||
break
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if curDocObj != nil {
|
||||
for _, subObj := range obj.Objects().List() {
|
||||
var hasSubObj bool
|
||||
for _, subDocObj := range curDocObj.Objects().List() {
|
||||
if subDocObj.String() == subObj.String() {
|
||||
hasSubObj = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !hasSubObj {
|
||||
curDocObj.Objects().Add(subObj)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to add object %s %s for %s", obj.Type, obj.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Append comments and line breaks before adding the object
|
||||
for _, c := range objHeaders {
|
||||
err := crudDoc.Objects().Add(c)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to add object %s %s for %s", c.Type, c.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
err := crudDoc.Objects().Add(obj)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to add object %s %s for %s", obj.Type, obj.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else if crudDoc.HasType(obj.Name, obj.Type) {
|
||||
cur := crudDoc.Objects().Get(obj.Name, obj.Type)
|
||||
|
||||
newObjs := []*goparse.GoObject{}
|
||||
if len(objHeaders) > 0 {
|
||||
// Remove any comments and linebreaks before the existing object so updates can be added.
|
||||
removeObjs := []*goparse.GoObject{}
|
||||
for idx := cur.Index - 1; idx > 0; idx-- {
|
||||
prevObj := crudDoc.Objects().List()[idx]
|
||||
if prevObj.Type == goparse.GoObjectType_Comment || prevObj.Type == goparse.GoObjectType_LineBreak {
|
||||
removeObjs = append(removeObjs, prevObj)
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(removeObjs) > 0 {
|
||||
err := crudDoc.Objects().Remove(removeObjs...)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to update object %s %s for %s", obj.Type, obj.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
|
||||
// Make sure the current index is correct.
|
||||
cur = crudDoc.Objects().Get(obj.Name, obj.Type)
|
||||
}
|
||||
|
||||
// Append comments and line breaks before adding the object
|
||||
for _, c := range objHeaders {
|
||||
newObjs = append(newObjs, c)
|
||||
}
|
||||
}
|
||||
|
||||
newObjs = append(newObjs, obj)
|
||||
|
||||
// Do the object replacement.
|
||||
err := crudDoc.Objects().Replace(cur, newObjs...)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to update object %s %s for %s", obj.Type, obj.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// Append comments and line breaks before adding the object
|
||||
for _, c := range objHeaders {
|
||||
err := crudDoc.Objects().Add(c)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to add object %s %s for %s", c.Type, c.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
err := crudDoc.Objects().Add(obj)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to add object %s %s for %s", obj.Type, obj.Name, baseModel.Name)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
objHeaders = []*goparse.GoObject{}
|
||||
}
|
||||
|
||||
if saveChanges {
|
||||
err = crudDoc.Save(crudFilePath)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to save changes for %s to %s", baseModel.Name, crudFilePath)
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// Produce a diff after the updates have been applied.
|
||||
dmp := diffmatchpatch.New()
|
||||
diffs := dmp.DiffMain(curCode, crudDoc.String(), true)
|
||||
fmt.Println(dmp.DiffPrettyText(diffs))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
229
tools/truss/cmd/dbtable2crud/models.go
Normal file
229
tools/truss/cmd/dbtable2crud/models.go
Normal file
@@ -0,0 +1,229 @@
|
||||
package dbtable2crud
|
||||
|
||||
import (
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"geeks-accelerator/oss/saas-starter-kit/example-project/tools/truss/internal/goparse"
|
||||
"github.com/fatih/structtag"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// modelDef defines info about the struct and associated db table. It embeds
// the parsed go document for the model file so document operations
// (Objects, HasType, Save, ...) are available directly on the model.
type modelDef struct {
	*goparse.GoDocument
	Name      string // struct name of the model
	TableName string // database table the model maps to
	// PrimaryField/PrimaryColumn/PrimaryType describe the primary key:
	// the struct field name, the db column name, and the Go field type.
	PrimaryField  string
	PrimaryColumn string
	PrimaryType   string
	Fields        []modelField // parsed struct fields with db column info attached
	FieldNames    []string     // field names, in declaration order
	ColumnNames   []string     // column names, parallel to FieldNames
}
|
||||
|
||||
// modelField defines a struct field and associated db column.
type modelField struct {
	ColumnName string      // db column name derived from the db/json tag or the field name
	DbColumn   *psqlColumn // matched database column; nil if no match was found
	FieldName  string      // Go struct field name
	FieldType  string      // Go type as written, including any leading "*"
	FieldIsPtr bool        // true when FieldType starts with "*"
	// Tags holds the parsed struct tags for the field; nil when untagged.
	Tags *structtag.Tags
	// ApiHide/ApiRead/ApiCreate/ApiUpdate are set from the "truss" tag
	// options; when no options are present create/read/update default true.
	ApiHide   bool
	ApiRead   bool
	ApiCreate bool
	ApiUpdate bool
	// DefaultValue is the db default rendered as a Go expression
	// (quoted string, or enum constant name for enum columns).
	DefaultValue string
}
|
||||
|
||||
// parseModelFile parses the entire model file and then loads the specified
// model struct, matching each struct field against the database table's
// columns (via descTable) to attach column metadata and default values.
func parseModelFile(db *sqlx.DB, log *log.Logger, dbName, dbTable, modelFile, modelName string) (*modelDef, error) {

	// Parse the supplied model file.
	doc, err := goparse.ParseFile(log, modelFile)
	if err != nil {
		return nil, err
	}

	// Init new modelDef.
	model := &modelDef{
		GoDocument: doc,
		Name:       modelName,
		TableName:  dbTable,
	}

	// Parse the struct fields for the model and append them to the model def.
	model.Fields, err = parseModelFields(doc, modelName, nil)
	if err != nil {
		return nil, err
	}

	// Record field/column names in declaration order for template use.
	for _, sf := range model.Fields {
		model.FieldNames = append(model.FieldNames, sf.FieldName)
		model.ColumnNames = append(model.ColumnNames, sf.ColumnName)
	}

	// Query the database for a table definition.
	dbCols, err := descTable(db, dbName, dbTable)
	if err != nil {
		return model, err
	}

	// Loop over all the database table columns and attach each to its
	// associated struct field. Database columns without a matching struct
	// field are silently skipped.
	for _, dbCol := range dbCols {
		for idx, sf := range model.Fields {
			if sf.ColumnName != dbCol.Column {
				continue
			}

			// Record the primary key details on the model.
			if dbCol.IsPrimaryKey {
				model.PrimaryColumn = sf.ColumnName
				model.PrimaryField = sf.FieldName
				model.PrimaryType = sf.FieldType
			}

			// Render the db default as a Go expression: enum defaults become
			// the generated enum constant; quoted defaults become Go strings.
			if dbCol.DefaultValue != nil {
				sf.DefaultValue = *dbCol.DefaultValue

				if dbCol.IsEnum {
					sf.DefaultValue = strings.Trim(sf.DefaultValue, "'")
					sf.DefaultValue = sf.FieldType + "_" + FormatCamel(sf.DefaultValue)
				} else if strings.HasPrefix(sf.DefaultValue, "'") {
					sf.DefaultValue = strings.Trim(sf.DefaultValue, "'")
					sf.DefaultValue = "\"" + sf.DefaultValue + "\""
				}
			}

			// Copy before taking the address: dbCol is the loop variable.
			c := dbCol
			sf.DbColumn = &c
			model.Fields[idx] = sf
		}
	}

	// Print out the model for debugging.
	//modelJSON, err := json.MarshalIndent(model, "", "    ")
	//if err != nil {
	//	return model, errors.WithStack(err )
	//}
	//log.Printf(string(modelJSON))

	return model, nil
}
|
||||
|
||||
// parseModelFields parses the fields from a struct.
|
||||
func parseModelFields(doc *goparse.GoDocument, modelName string, baseModel *modelDef) ([]modelField, error) {
|
||||
|
||||
// Ensure the model file has a struct with the model name supplied.
|
||||
if !doc.HasType(modelName, goparse.GoObjectType_Struct) {
|
||||
err := errors.Errorf("Struct with the name %s does not exist", modelName)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Load the struct from parsed go file.
|
||||
docModel := doc.Get(modelName, goparse.GoObjectType_Struct)
|
||||
|
||||
// Loop over all the objects contained between the struct definition start and end.
|
||||
// This should be a list of variables defined for model.
|
||||
resp := []modelField{}
|
||||
for _, l := range docModel.Objects().List() {
|
||||
|
||||
// Skip all lines that are not a var.
|
||||
if l.Type != goparse.GoObjectType_Line {
|
||||
log.Printf("parseModelFile : Model %s has line that is %s, not type line, skipping - %s\n", modelName, l.Type, l.String())
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract the var name, type and defined tags from the line.
|
||||
sv, err := goparse.ParseStructProp(l)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Init new modelField for the struct var.
|
||||
sf := modelField{
|
||||
FieldName: sv.Name,
|
||||
FieldType: sv.Type,
|
||||
FieldIsPtr: strings.HasPrefix(sv.Type, "*"),
|
||||
Tags: sv.Tags,
|
||||
}
|
||||
|
||||
// Extract the column name from the var tags.
|
||||
if sf.Tags != nil {
|
||||
// First try to get the column name from the db tag.
|
||||
dbt, err := sf.Tags.Get("db")
|
||||
if err != nil && !strings.Contains(err.Error(), "not exist") {
|
||||
err = errors.WithStack(err)
|
||||
return nil, err
|
||||
} else if dbt != nil {
|
||||
sf.ColumnName = dbt.Name
|
||||
}
|
||||
|
||||
// Second try to get the column name from the json tag.
|
||||
if sf.ColumnName == "" {
|
||||
jt, err := sf.Tags.Get("json")
|
||||
if err != nil && !strings.Contains(err.Error(), "not exist") {
|
||||
err = errors.WithStack(err)
|
||||
return nil, err
|
||||
} else if jt != nil && jt.Name != "-" {
|
||||
sf.ColumnName = jt.Name
|
||||
}
|
||||
}
|
||||
|
||||
var apiActionsSet bool
|
||||
tt, err := sf.Tags.Get("truss")
|
||||
if err != nil && !strings.Contains(err.Error(), "not exist") {
|
||||
err = errors.WithStack(err)
|
||||
return nil, err
|
||||
} else if tt != nil {
|
||||
if tt.Name == "api-create" || tt.HasOption("api-create") {
|
||||
sf.ApiCreate = true
|
||||
apiActionsSet = true
|
||||
}
|
||||
if tt.Name == "api-read" || tt.HasOption("api-read") {
|
||||
sf.ApiRead = true
|
||||
apiActionsSet = true
|
||||
}
|
||||
if tt.Name == "api-update" || tt.HasOption("api-update") {
|
||||
sf.ApiUpdate = true
|
||||
apiActionsSet = true
|
||||
}
|
||||
if tt.Name == "api-hide" || tt.HasOption("api-hide") {
|
||||
sf.ApiHide = true
|
||||
apiActionsSet = true
|
||||
}
|
||||
}
|
||||
|
||||
if !apiActionsSet {
|
||||
sf.ApiCreate = true
|
||||
sf.ApiRead = true
|
||||
sf.ApiUpdate = true
|
||||
}
|
||||
}
|
||||
|
||||
// Set the column name to the field name if empty and does not equal '-'.
|
||||
if sf.ColumnName == "" {
|
||||
sf.ColumnName = sf.FieldName
|
||||
}
|
||||
|
||||
// If a base model as already been parsed with the db columns,
|
||||
// append to the current field.
|
||||
if baseModel != nil {
|
||||
for _, baseSf := range baseModel.Fields {
|
||||
if baseSf.ColumnName == sf.ColumnName {
|
||||
sf.DefaultValue = baseSf.DefaultValue
|
||||
sf.DbColumn = baseSf.DbColumn
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Append the field the the model def.
|
||||
resp = append(resp, sf)
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
345
tools/truss/cmd/dbtable2crud/templates.go
Normal file
345
tools/truss/cmd/dbtable2crud/templates.go
Normal file
@@ -0,0 +1,345 @@
|
||||
package dbtable2crud
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"geeks-accelerator/oss/saas-starter-kit/example-project/tools/truss/internal/goparse"
|
||||
"github.com/dustin/go-humanize/english"
|
||||
"github.com/fatih/camelcase"
|
||||
"github.com/iancoleman/strcase"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// loadTemplateObjects executes a template file based on the given model struct and
|
||||
// returns the parsed go objects.
|
||||
func loadTemplateObjects(log *log.Logger, model *modelDef, templateDir, filename string, tmptData map[string]interface{}) ([]*goparse.GoObject, error) {
|
||||
|
||||
// Data used to execute all the of defined code sections in the template file.
|
||||
if tmptData == nil {
|
||||
tmptData = make(map[string]interface{})
|
||||
}
|
||||
tmptData["Model"] = model
|
||||
|
||||
// geeks-accelerator/oss/saas-starter-kit/example-project
|
||||
|
||||
// Read the template file from the local file system.
|
||||
tempFilePath := filepath.Join(templateDir, filename)
|
||||
dat, err := ioutil.ReadFile(tempFilePath)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to read template file %s", tempFilePath)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// New template with custom functions.
|
||||
baseTmpl := template.New("base")
|
||||
baseTmpl.Funcs(template.FuncMap{
|
||||
"Concat": func(vals ...string) string {
|
||||
return strings.Join(vals, "")
|
||||
},
|
||||
"JoinStrings": func(vals []string, sep string) string {
|
||||
return strings.Join(vals, sep)
|
||||
},
|
||||
"PrefixAndJoinStrings": func(vals []string, pre, sep string) string {
|
||||
l := []string{}
|
||||
for _, v := range vals {
|
||||
l = append(l, pre+v)
|
||||
}
|
||||
return strings.Join(l, sep)
|
||||
},
|
||||
"FmtAndJoinStrings": func(vals []string, fmtStr, sep string) string {
|
||||
l := []string{}
|
||||
for _, v := range vals {
|
||||
l = append(l, fmt.Sprintf(fmtStr, v))
|
||||
}
|
||||
return strings.Join(l, sep)
|
||||
},
|
||||
"FormatCamel": func(name string) string {
|
||||
return FormatCamel(name)
|
||||
},
|
||||
"FormatCamelTitle": func(name string) string {
|
||||
return FormatCamelTitle(name)
|
||||
},
|
||||
"FormatCamelLower": func(name string) string {
|
||||
if name == "ID" {
|
||||
return "id"
|
||||
}
|
||||
return FormatCamelLower(name)
|
||||
},
|
||||
"FormatCamelLowerTitle": func(name string) string {
|
||||
return FormatCamelLowerTitle(name)
|
||||
},
|
||||
"FormatCamelPluralTitle": func(name string) string {
|
||||
return FormatCamelPluralTitle(name)
|
||||
},
|
||||
"FormatCamelPluralTitleLower": func(name string) string {
|
||||
return FormatCamelPluralTitleLower(name)
|
||||
},
|
||||
"FormatCamelPluralCamel": func(name string) string {
|
||||
return FormatCamelPluralCamel(name)
|
||||
},
|
||||
"FormatCamelPluralLower": func(name string) string {
|
||||
return FormatCamelPluralLower(name)
|
||||
},
|
||||
"FormatCamelPluralUnderscore": func(name string) string {
|
||||
return FormatCamelPluralUnderscore(name)
|
||||
},
|
||||
"FormatCamelPluralLowerUnderscore": func(name string) string {
|
||||
return FormatCamelPluralLowerUnderscore(name)
|
||||
},
|
||||
"FormatCamelUnderscore": func(name string) string {
|
||||
return FormatCamelUnderscore(name)
|
||||
},
|
||||
"FormatCamelLowerUnderscore": func(name string) string {
|
||||
return FormatCamelLowerUnderscore(name)
|
||||
},
|
||||
"FieldTagHasOption": func(f modelField, tagName, optName string) bool {
|
||||
if f.Tags == nil {
|
||||
return false
|
||||
}
|
||||
ft, err := f.Tags.Get(tagName)
|
||||
if ft == nil || err != nil {
|
||||
return false
|
||||
}
|
||||
if ft.Name == optName || ft.HasOption(optName) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
},
|
||||
"FieldTag": func(f modelField, tagName string) string {
|
||||
if f.Tags == nil {
|
||||
return ""
|
||||
}
|
||||
ft, err := f.Tags.Get(tagName)
|
||||
if ft == nil || err != nil {
|
||||
return ""
|
||||
}
|
||||
return ft.String()
|
||||
},
|
||||
"FieldTagReplaceOrPrepend": func(f modelField, tagName, oldVal, newVal string) string {
|
||||
if f.Tags == nil {
|
||||
return ""
|
||||
}
|
||||
ft, err := f.Tags.Get(tagName)
|
||||
if ft == nil || err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
if ft.Name == oldVal || ft.Name == newVal {
|
||||
ft.Name = newVal
|
||||
} else if ft.HasOption(oldVal) {
|
||||
for idx, val := range ft.Options {
|
||||
if val == oldVal {
|
||||
ft.Options[idx] = newVal
|
||||
}
|
||||
}
|
||||
} else if !ft.HasOption(newVal) {
|
||||
if ft.Name == "" {
|
||||
ft.Name = newVal
|
||||
} else {
|
||||
ft.Options = append(ft.Options, newVal)
|
||||
}
|
||||
}
|
||||
|
||||
return ft.String()
|
||||
},
|
||||
"StringListHasValue": func(list []string, val string) bool {
|
||||
for _, v := range list {
|
||||
if v == val {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
},
|
||||
})
|
||||
|
||||
// Load the template file using the text/template package.
|
||||
tmpl, err := baseTmpl.Parse(string(dat))
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to parse template file %s", tempFilePath)
|
||||
log.Printf("loadTemplateObjects : %v\n%v", err, string(dat))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Generate a list of template names defined in the template file.
|
||||
tmplNames := []string{}
|
||||
for _, defTmpl := range tmpl.Templates() {
|
||||
tmplNames = append(tmplNames, defTmpl.Name())
|
||||
}
|
||||
|
||||
// Stupid hack to return template names the in order they are defined in the file.
|
||||
tmplNames, err = templateFileOrderedNames(tempFilePath, tmplNames)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Loop over all the defined templates, execute using the defined data, parse the
|
||||
// formatted code and append the parsed go objects to the result list.
|
||||
var resp []*goparse.GoObject
|
||||
for _, tmplName := range tmplNames {
|
||||
// Executed the defined template with the given data.
|
||||
var tpl bytes.Buffer
|
||||
if err := tmpl.Lookup(tmplName).Execute(&tpl, tmptData); err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to execute %s from template file %s", tmplName, tempFilePath)
|
||||
return resp, err
|
||||
}
|
||||
|
||||
// Format the source code to ensure its valid and code to parsed consistently.
|
||||
codeBytes, err := format.Source(tpl.Bytes())
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to format source for %s in template file %s", tmplName, filename)
|
||||
|
||||
dl := []string{}
|
||||
for idx, l := range strings.Split(tpl.String(), "\n") {
|
||||
dl = append(dl, fmt.Sprintf("%d -> ", idx)+l)
|
||||
}
|
||||
|
||||
log.Printf("loadTemplateObjects : %v\n%v", err, strings.Join(dl, "\n"))
|
||||
return resp, err
|
||||
}
|
||||
|
||||
// Remove extra white space from the code.
|
||||
codeStr := strings.TrimSpace(string(codeBytes))
|
||||
|
||||
// Split the code into a list of strings.
|
||||
codeLines := strings.Split(codeStr, "\n")
|
||||
|
||||
// Parse the code lines into a set of objects.
|
||||
objs, err := goparse.ParseLines(codeLines, 0)
|
||||
if err != nil {
|
||||
err = errors.WithMessagef(err, "Failed to parse %s in template file %s", tmplName, filename)
|
||||
log.Printf("loadTemplateObjects : %v\n%v", err, codeStr)
|
||||
return resp, err
|
||||
}
|
||||
|
||||
// Append the parsed objects to the return result list.
|
||||
for _, obj := range objs.List() {
|
||||
if obj.Name == "" && obj.Type != goparse.GoObjectType_Import && obj.Type != goparse.GoObjectType_Var && obj.Type != goparse.GoObjectType_Const && obj.Type != goparse.GoObjectType_Comment && obj.Type != goparse.GoObjectType_LineBreak {
|
||||
// All objects should have a name except for multiline var/const declarations and comments.
|
||||
err = errors.Errorf("Failed to parse name with type %s from lines: %v", obj.Type, obj.Lines())
|
||||
return resp, err
|
||||
} else if string(obj.Type) == "" {
|
||||
err = errors.Errorf("Failed to parse type for %s from lines: %v", obj.Name, obj.Lines())
|
||||
return resp, err
|
||||
}
|
||||
|
||||
resp = append(resp, obj)
|
||||
}
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
// FormatCamel formats Valdez mountain to ValdezMountain
|
||||
func FormatCamel(name string) string {
|
||||
return strcase.ToCamel(name)
|
||||
}
|
||||
|
||||
// FormatCamelLower formats ValdezMountain to valdezMountain (lower-camel
// case: only the leading letter is lowered; interior capitals remain).
func FormatCamelLower(name string) string {
	return strcase.ToLowerCamel(FormatCamel(name))
}
|
||||
|
||||
// FormatCamelTitle formats ValdezMountain to Valdez Mountain
|
||||
func FormatCamelTitle(name string) string {
|
||||
return strings.Join(camelcase.Split(name), " ")
|
||||
}
|
||||
|
||||
// FormatCamelLowerTitle formats ValdezMountain to valdez mountain
|
||||
func FormatCamelLowerTitle(name string) string {
|
||||
return strings.ToLower(FormatCamelTitle(name))
|
||||
}
|
||||
|
||||
// FormatCamelPluralTitle formats ValdezMountain to Valdez Mountains
|
||||
func FormatCamelPluralTitle(name string) string {
|
||||
pts := camelcase.Split(name)
|
||||
lastIdx := len(pts) - 1
|
||||
pts[lastIdx] = english.PluralWord(2, pts[lastIdx], "")
|
||||
return strings.Join(pts, " ")
|
||||
}
|
||||
|
||||
// FormatCamelPluralTitleLower formats ValdezMountain to valdez mountains
|
||||
func FormatCamelPluralTitleLower(name string) string {
|
||||
return strings.ToLower(FormatCamelPluralTitle(name))
|
||||
}
|
||||
|
||||
// FormatCamelPluralCamel formats ValdezMountain to ValdezMountains
|
||||
func FormatCamelPluralCamel(name string) string {
|
||||
return strcase.ToCamel(FormatCamelPluralTitle(name))
|
||||
}
|
||||
|
||||
// FormatCamelPluralLower formats ValdezMountain to valdezMountains
// (lower-camel case of the pluralized title; interior capitals remain).
func FormatCamelPluralLower(name string) string {
	return strcase.ToLowerCamel(FormatCamelPluralTitle(name))
}
|
||||
|
||||
// FormatCamelPluralUnderscore formats ValdezMountain to Valdez_Mountains
|
||||
func FormatCamelPluralUnderscore(name string) string {
|
||||
return strings.Replace(FormatCamelPluralTitle(name), " ", "_", -1)
|
||||
}
|
||||
|
||||
// FormatCamelPluralLowerUnderscore formats ValdezMountain to valdez_mountains
|
||||
func FormatCamelPluralLowerUnderscore(name string) string {
|
||||
return strings.ToLower(FormatCamelPluralUnderscore(name))
|
||||
}
|
||||
|
||||
// FormatCamelUnderscore formats ValdezMountain to Valdez_Mountain
|
||||
func FormatCamelUnderscore(name string) string {
|
||||
return strings.Replace(FormatCamelTitle(name), " ", "_", -1)
|
||||
}
|
||||
|
||||
// FormatCamelLowerUnderscore formats ValdezMountain to valdez_mountain
|
||||
func FormatCamelLowerUnderscore(name string) string {
|
||||
return strings.ToLower(FormatCamelUnderscore(name))
|
||||
}
|
||||
|
||||
// templateFileOrderedNames returns the template names the in order they are defined in the file.
|
||||
func templateFileOrderedNames(localPath string, names []string) (resp []string, err error) {
|
||||
file, err := os.Open(localPath)
|
||||
if err != nil {
|
||||
return resp, errors.WithStack(err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
idxList := []int{}
|
||||
idxNames := make(map[int]string)
|
||||
|
||||
idx := 0
|
||||
scanner := bufio.NewScanner(file)
|
||||
for scanner.Scan() {
|
||||
if !strings.HasPrefix(scanner.Text(), "{{") || !strings.Contains(scanner.Text(), "define ") {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, name := range names {
|
||||
if strings.Contains(scanner.Text(), "\""+name+"\"") {
|
||||
idxList = append(idxList, idx)
|
||||
idxNames[idx] = name
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
idx = idx + 1
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return resp, errors.WithStack(err)
|
||||
}
|
||||
|
||||
sort.Ints(idxList)
|
||||
|
||||
for _, idx := range idxList {
|
||||
resp = append(resp, idxNames[idx])
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
149
tools/truss/cmd/devops/README.md
Normal file
149
tools/truss/cmd/devops/README.md
Normal file
@@ -0,0 +1,149 @@
|
||||
|
||||
1. Create a new policy `saas-starter-kit-deploy` with the following permissions.
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "ServiceDeployPermissions",
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"acm:ListCertificates",
|
||||
"acm:RequestCertificate",
|
||||
"acm:DescribeCertificate",
|
||||
"ec2:DescribeSubnets",
|
||||
"ec2:DescribeSecurityGroups",
|
||||
"ec2:CreateSecurityGroup",
|
||||
"ec2:AuthorizeSecurityGroupIngress",
|
||||
"ec2:DescribeNetworkInterfaces",
|
||||
"ec2:DescribeVpcs",
|
||||
"ec2:CreateVpc",
|
||||
"ec2:CreateSubnet",
|
||||
"ec2:DescribeVpcs",
|
||||
"ec2:DescribeInternetGateways",
|
||||
"ec2:CreateInternetGateway",
|
||||
"ec2:CreateTags",
|
||||
"ec2:CreateRouteTable",
|
||||
"ec2:DescribeRouteTables",
|
||||
"ec2:CreateRoute",
|
||||
"ec2:AttachInternetGateway",
|
||||
"ec2:DescribeAccountAttributes",
|
||||
"elasticache:DescribeCacheClusters",
|
||||
"elasticache:CreateCacheCluster",
|
||||
"elasticache:DescribeCacheParameterGroups",
|
||||
"elasticache:CreateCacheParameterGroup",
|
||||
"elasticache:ModifyCacheCluster",
|
||||
"elasticache:ModifyCacheParameterGroup",
|
||||
"elasticloadbalancing:DescribeLoadBalancers",
|
||||
"elasticloadbalancing:CreateLoadBalancer",
|
||||
"elasticloadbalancing:CreateListener",
|
||||
"elasticloadbalancing:DescribeTargetGroups",
|
||||
"elasticloadbalancing:CreateTargetGroup",
|
||||
"elasticloadbalancing:DescribeListeners",
|
||||
"elasticloadbalancing:ModifyTargetGroupAttributes",
|
||||
"ecs:CreateCluster",
|
||||
"ecs:CreateService",
|
||||
"ecs:DeleteService",
|
||||
"ecs:DescribeClusters",
|
||||
"ecs:DescribeServices",
|
||||
"ecs:UpdateService",
|
||||
"ecs:RegisterTaskDefinition",
|
||||
"ecs:ListTaskDefinitions",
|
||||
"ecr:BatchCheckLayerAvailability",
|
||||
"ecr:BatchDeleteImage",
|
||||
"ecr:GetAuthorizationToken",
|
||||
"ecr:DescribeImages",
|
||||
"ecr:DescribeRepositories",
|
||||
"ecs:DescribeTasks",
|
||||
"ecr:CreateRepository",
|
||||
"ecr:ListImages",
|
||||
"ecs:ListTasks",
|
||||
"ecr:PutImage",
|
||||
"ecr:InitiateLayerUpload",
|
||||
"ecr:UploadLayerPart",
|
||||
"ecr:CompleteLayerUpload",
|
||||
"logs:DescribeLogGroups",
|
||||
"logs:CreateLogGroup",
|
||||
"lambda:ListFunctions",
|
||||
"lambda:CreateFunction",
|
||||
"lambda:UpdateFunctionCode",
|
||||
"lambda:UpdateFunctionConfiguration",
|
||||
"iam:GetRole",
|
||||
"iam:PassRole",
|
||||
"iam:CreateRole",
|
||||
"iam:CreateServiceLinkedRole",
|
||||
"iam:CreatePolicy",
|
||||
"iam:PutRolePolicy",
|
||||
"iam:TagRole",
|
||||
"iam:AttachRolePolicy",
|
||||
"iam:ListPolicies",
|
||||
"iam:GetPolicyVersion",
|
||||
"iam:CreatePolicyVersion",
|
||||
"logs:DescribeLogGroups",
|
||||
"logs:CreateLogGroup",
|
||||
"logs:DescribeLogStreams",
|
||||
"logs:CreateExportTask",
|
||||
"logs:DescribeExportTasks",
|
||||
"rds:CreateDBCluster",
|
||||
"rds:CreateDBInstance",
|
||||
"rds:DescribeDBClusters",
|
||||
"rds:DescribeDBInstances",
|
||||
"s3:CreateBucket",
|
||||
"s3:DeleteObject",
|
||||
"s3:DeleteObjectVersion",
|
||||
"s3:GetBucketPublicAccessBlock",
|
||||
"s3:GetBucketAcl",
|
||||
"s3:HeadBucket",
|
||||
"s3:ListObjects",
|
||||
"s3:ListBucket",
|
||||
"s3:GetObject",
|
||||
"s3:PutLifecycleConfiguration",
|
||||
"s3:PutBucketCORS",
|
||||
"s3:PutBucketPolicy",
|
||||
"s3:PutBucketPublicAccessBlock",
|
||||
"route53:CreateHostedZone",
|
||||
"route53:ChangeResourceRecordSets",
|
||||
"route53:ListHostedZones",
|
||||
"secretsmanager:CreateSecret",
|
||||
"secretsmanager:ListSecrets",
|
||||
"secretsmanager:GetSecretValue",
|
||||
"secretsmanager:UpdateSecret",
|
||||
"secretsmanager:RestoreSecret",
|
||||
"secretsmanager:DeleteSecret",
|
||||
"servicediscovery:ListNamespaces",
|
||||
"servicediscovery:CreatePrivateDnsNamespace",
|
||||
"servicediscovery:GetOperation",
|
||||
"servicediscovery:ListServices",
|
||||
"servicediscovery:CreateService",
|
||||
"servicediscovery:GetService"
|
||||
],
|
||||
"Resource": "*"
|
||||
},
|
||||
{
|
||||
"Action": "iam:CreateServiceLinkedRole",
|
||||
"Effect": "Allow",
|
||||
"Resource": "arn:aws:iam::*:role/aws-service-role/rds.amazonaws.com/AWSServiceRoleForRDS",
|
||||
"Condition": {
|
||||
"StringLike": {
|
||||
"iam:AWSServiceName":"rds.amazonaws.com"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
2. Create a new user `saas-starter-kit-deploy` with _Programmatic Access_ and _Attach existing policies directly_, selecting the `saas-starter-kit-deploy` policy created in step 1.
|
||||
|
||||
3. Try running the deploy
|
||||
```bash
|
||||
go run main.go deploy -service=web-api -env=dev
|
||||
```
|
||||
|
||||
Or
|
||||
```bash
|
||||
go run main.go deploy -service=web-api -env=dev -enable_https=true -primary_host=eproc.tech -host_names=www.eproc.tech -host_names=api.eproc.tech -recreate_service=false
|
||||
```
|
||||
|
||||
|
||||
|
155
tools/truss/cmd/devops/devops.go
Normal file
155
tools/truss/cmd/devops/devops.go
Normal file
@@ -0,0 +1,155 @@
|
||||
package devops
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// findProjectGoModFile finds the project root directory from the current working directory.
|
||||
func findProjectGoModFile() (string, error) {
|
||||
var err error
|
||||
projectRoot, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "failed to get current working directory")
|
||||
}
|
||||
|
||||
// Try to find the project root for looking for the go.mod file in a parent directory.
|
||||
var goModFile string
|
||||
testDir := projectRoot
|
||||
for i := 0; i < 3; i++ {
|
||||
if goModFile != "" {
|
||||
testDir = filepath.Join(testDir, "../")
|
||||
}
|
||||
goModFile = filepath.Join(testDir, "go.mod")
|
||||
ok, _ := exists(goModFile)
|
||||
if ok {
|
||||
projectRoot = testDir
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Verify the go.mod file was found.
|
||||
ok, err := exists(goModFile)
|
||||
if err != nil {
|
||||
return "", errors.WithMessagef(err, "failed to load go.mod for project using project root %s")
|
||||
} else if !ok {
|
||||
return "", errors.Errorf("failed to locate project go.mod in project root %s", projectRoot)
|
||||
}
|
||||
|
||||
return goModFile, nil
|
||||
}
|
||||
|
||||
// findServiceDockerFile finds the service directory.
|
||||
func findServiceDockerFile(projectRoot, targetService string) (string, error) {
|
||||
checkDirs := []string{
|
||||
filepath.Join(projectRoot, "cmd", targetService),
|
||||
filepath.Join(projectRoot, "tools", targetService),
|
||||
}
|
||||
|
||||
var dockerFile string
|
||||
for _, cd := range checkDirs {
|
||||
// Check to see if directory contains Dockerfile.
|
||||
tf := filepath.Join(cd, "Dockerfile")
|
||||
|
||||
ok, _ := exists(tf)
|
||||
if ok {
|
||||
dockerFile = tf
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if dockerFile == "" {
|
||||
return "", errors.Errorf("failed to locate Dockerfile for service %s", targetService)
|
||||
}
|
||||
|
||||
return dockerFile, nil
|
||||
}
|
||||
|
||||
// getTargetEnv looks up envName prefixed with the upper-cased target
// environment (e.g. DEV_HOST for targetEnv "dev"). When the prefixed
// variable is set, the un-prefixed variable is updated with its value and
// that value is returned; otherwise the un-prefixed variable's value is
// returned as-is.
func getTargetEnv(targetEnv, envName string) string {
	prefixedKey := fmt.Sprintf("%s_%s", strings.ToUpper(targetEnv), envName)

	v := os.Getenv(prefixedKey)
	if v == "" {
		return os.Getenv(envName)
	}

	// Mirror the prefixed value onto the plain env var so later
	// un-prefixed lookups agree with this one.
	os.Setenv(envName, v)
	return v
}
|
||||
|
||||
// loadGoModName parses out the module name from go.mod.
|
||||
func loadGoModName(goModFile string) (string, error) {
|
||||
ok, err := exists(goModFile)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "Failed to load go.mod for project")
|
||||
} else if !ok {
|
||||
return "", errors.Errorf("Failed to locate project go.mod at %s", goModFile)
|
||||
}
|
||||
|
||||
b, err := ioutil.ReadFile(goModFile)
|
||||
if err != nil {
|
||||
return "", errors.WithMessagef(err, "Failed to read go.mod at %s", goModFile)
|
||||
}
|
||||
|
||||
var name string
|
||||
lines := strings.Split(string(b), "\n")
|
||||
for _, l := range lines {
|
||||
if strings.HasPrefix(l, "module ") {
|
||||
name = strings.TrimSpace(strings.Split(l, " ")[1])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return name, nil
|
||||
}
|
||||
|
||||
// exists reports whether the given file path exists. A false result with a
// nil error means the path definitively does not exist; any other stat
// failure is returned to the caller (with true, matching the original
// contract of this helper).
func exists(path string) (bool, error) {
	_, err := os.Stat(path)
	switch {
	case err == nil:
		return true, nil
	case os.IsNotExist(err):
		return false, nil
	}
	return true, err
}
|
||||
|
||||
/*
|
||||
type EnvVars []string
|
||||
|
||||
// execCmds executes a set of commands.
|
||||
func execCmds(workDir string, envVars *EnvVars, cmds ...[]string) ([]string, error) {
|
||||
if envVars == nil {
|
||||
ev := EnvVars(os.Environ())
|
||||
envVars = &ev
|
||||
}
|
||||
|
||||
var results []string
|
||||
for _, cmdVals := range cmds {
|
||||
cmd := exec.Command(cmdVals[0], cmdVals[1:]...)
|
||||
cmd.Dir = workDir
|
||||
cmd.Env = *envVars
|
||||
out, err := cmd.CombinedOutput()
|
||||
|
||||
fmt.Println(string(out ))
|
||||
|
||||
if err != nil {
|
||||
return results, errors.WithMessagef(err, "failed to execute %s\n%s", strings.Join(cmdVals, " "), string(out))
|
||||
}
|
||||
results = append(results, string(out))
|
||||
|
||||
// Update the current env vars after command has been executed.
|
||||
ev := EnvVars(cmd.Env)
|
||||
envVars = &ev
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
*/
|
199
tools/truss/cmd/devops/models.go
Normal file
199
tools/truss/cmd/devops/models.go
Normal file
@@ -0,0 +1,199 @@
|
||||
package devops
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/aws/aws-sdk-go/service/ecr"
|
||||
"github.com/aws/aws-sdk-go/service/ecs"
|
||||
"github.com/aws/aws-sdk-go/service/elasticache"
|
||||
"github.com/aws/aws-sdk-go/service/elbv2"
|
||||
"github.com/aws/aws-sdk-go/service/iam"
|
||||
"github.com/aws/aws-sdk-go/service/rds"
|
||||
"github.com/aws/aws-sdk-go/service/s3"
|
||||
"github.com/aws/aws-sdk-go/service/servicediscovery"
|
||||
"github.com/iancoleman/strcase"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
// ServiceDeployFlags defines the flags used for executing a service deployment.
|
||||
type ServiceDeployFlags struct {
|
||||
// Required flags.
|
||||
ServiceName string `validate:"required" example:"web-api"`
|
||||
Env string `validate:"oneof=dev stage prod" example:"dev"`
|
||||
|
||||
// Optional flags.
|
||||
EnableHTTPS bool `validate:"omitempty" example:"false"`
|
||||
ServiceHostPrimary string `validate:"omitempty" example:"example-project.com"`
|
||||
ServiceHostNames cli.StringSlice `validate:"omitempty" example:"subdomain.example-project.com"`
|
||||
S3BucketPrivateName string `validate:"omitempty" example:"saas-example-project-private"`
|
||||
S3BucketPublicName string `validate:"omitempty" example:"saas-example-project-public"`
|
||||
|
||||
ProjectRoot string `validate:"omitempty" example:"."`
|
||||
ProjectName string ` validate:"omitempty" example:"example-project"`
|
||||
DockerFile string `validate:"omitempty" example:"./cmd/web-api/Dockerfile"`
|
||||
EnableLambdaVPC bool `validate:"omitempty" example:"false"`
|
||||
EnableEcsElb bool `validate:"omitempty" example:"false"`
|
||||
NoBuild bool `validate:"omitempty" example:"false"`
|
||||
NoDeploy bool `validate:"omitempty" example:"false"`
|
||||
NoCache bool `validate:"omitempty" example:"false"`
|
||||
NoPush bool `validate:"omitempty" example:"false"`
|
||||
RecreateService bool `validate:"omitempty" example:"false"`
|
||||
}
|
||||
|
||||
// serviceDeployRequest defines the details needed to execute a service
// deployment: the resolved project/service paths, AWS credentials, and the
// create-inputs for every AWS resource the deploy may provision.
type serviceDeployRequest struct {
	// Resolved project and service identity.
	ServiceName string `validate:"required"`
	ServiceDir  string `validate:"required"`
	Env         string `validate:"oneof=dev stage prod"`
	ProjectRoot string `validate:"required"`
	ProjectName string `validate:"required"`
	DockerFile  string `validate:"required"`
	GoModFile   string `validate:"required"`
	GoModName   string `validate:"required"`

	// HTTPS/hostname configuration; ServiceHostPrimary is required when
	// EnableHTTPS is set (see the required_with validation rule).
	EnableHTTPS        bool     `validate:"omitempty"`
	ServiceHostPrimary string   `validate:"omitempty,required_with=EnableHTTPS,fqdn"`
	ServiceHostNames   []string `validate:"omitempty,dive,fqdn"`

	// Static AWS credentials used for all service calls.
	AwsCreds awsCredentials `validate:"required,dive,required"`

	// ECR repository for the built image.
	EcrRepositoryName      string `validate:"required"`
	EcrRepository          *ecr.CreateRepositoryInput
	EcrRepositoryMaxImages int `validate:"omitempty"`

	// ECS cluster and service definitions.
	EcsClusterName string `validate:"required"`
	EcsCluster     *ecs.CreateClusterInput

	EcsServiceName                          string `validate:"required"`
	EcsServiceDesiredCount                  int64  `validate:"required"`
	EcsServiceMinimumHealthyPercent         *int64 `validate:"omitempty"`
	EcsServiceMaximumPercent                *int64 `validate:"omitempty"`
	// NOTE(review): "Esc" looks like a typo for "Ecs"; renaming the
	// exported field would break external references, so it is left as-is.
	EscServiceHealthCheckGracePeriodSeconds *int64 `validate:"omitempty"`

	// IAM roles/policies for task execution and the task itself.
	EcsExecutionRoleName       string `validate:"required"`
	EcsExecutionRole           *iam.CreateRoleInput
	EcsExecutionRolePolicyArns []string `validate:"required"`

	EcsTaskRoleName string `validate:"required"`
	EcsTaskRole     *iam.CreateRoleInput

	EcsTaskPolicyName     string `validate:"required"`
	EcsTaskPolicy         *iam.CreatePolicyInput
	EcsTaskPolicyDocument IamPolicyDocument

	// Networking and logging resources.
	Ec2SecurityGroupName string `validate:"required"`
	Ec2SecurityGroup     *ec2.CreateSecurityGroupInput

	CloudWatchLogGroupName string `validate:"required"`
	CloudWatchLogGroup     *cloudwatchlogs.CreateLogGroupInput

	// S3 buckets; the temp prefix is required when either bucket is named.
	S3BucketTempPrefix  string `validate:"required_with=S3BucketPrivateName S3BucketPublicName"`
	S3BucketPrivateName string `validate:"omitempty"`
	S3BucketPublicName  string `validate:"omitempty"`
	S3Buckets           []S3Bucket

	// Optional ELB load balancer / target group in front of the service.
	EnableEcsElb           bool   `validate:"omitempty"`
	ElbLoadBalancerName    string `validate:"omitempty"`
	ElbDeregistrationDelay *int   `validate:"omitempty"`
	ElbLoadBalancer        *elbv2.CreateLoadBalancerInput

	ElbTargetGroupName string `validate:"omitempty"`
	ElbTargetGroup     *elbv2.CreateTargetGroupInput

	// Public VPC and subnets to create when needed.
	VpcPublicName    string `validate:"omitempty"`
	VpcPublic        *ec2.CreateVpcInput
	VpcPublicSubnets []*ec2.CreateSubnetInput

	// Deploy behavior toggles (mirrors ServiceDeployFlags).
	EnableLambdaVPC bool `validate:"omitempty"`
	NoBuild         bool `validate:"omitempty"`
	NoDeploy        bool `validate:"omitempty"`
	NoCache         bool `validate:"omitempty"`
	NoPush          bool `validate:"omitempty"`
	RecreateService bool `validate:"omitempty"`

	// Service discovery namespace/service.
	// NOTE(review): "SDNamepsace" looks like a typo for "SDNamespace";
	// left unrenamed to avoid breaking external references.
	SDNamepsace *servicediscovery.CreatePrivateDnsNamespaceInput
	SDService   *servicediscovery.CreateServiceInput

	// Elasticache cluster and its parameter overrides.
	CacheCluster          *elasticache.CreateCacheClusterInput
	CacheClusterParameter []*elasticache.ParameterNameValue

	// RDS database cluster/instance.
	DBCluster  *rds.CreateDBClusterInput
	DBInstance *rds.CreateDBInstanceInput

	// Build outputs and internal state.
	ReleaseImage string
	BuildTags    []string
	flags        ServiceDeployFlags   // original flag values this request was built from
	_awsSession  *session.Session     // lazily created by awsSession(); do not access directly
}
|
||||
|
||||
// S3Bucket defines an S3 bucket to create along with the configuration
// applied to it (lifecycle, CORS, public-access block, and bucket policy).
type S3Bucket struct {
	Name              string `validate:"omitempty"`
	Input             *s3.CreateBucketInput
	LifecycleRules    []*s3.LifecycleRule
	CORSRules         []*s3.CORSRule
	PublicAccessBlock *s3.PublicAccessBlockConfiguration
	Policy            string // bucket policy document; presumably JSON — confirm against caller
}
|
||||
|
||||
// DB mimics the general info needed for services used to define placeholders.
type DB struct {
	Host       string // host (and presumably port) of the database server
	User       string
	Pass       string
	Database   string
	Driver     string // database driver name, e.g. as passed to sql.Open — confirm against caller
	DisableTLS bool
}
|
||||
|
||||
// projectNameCamel takes a project name and returns the camel cased version.
|
||||
func (r *serviceDeployRequest) ProjectNameCamel() string {
|
||||
s := strings.Replace(r.ProjectName, "_", " ", -1)
|
||||
s = strings.Replace(s, "-", " ", -1)
|
||||
s = strcase.ToCamel(s)
|
||||
return s
|
||||
}
|
||||
|
||||
// awsSession returns the current AWS session for the serviceDeployRequest.
|
||||
func (r *serviceDeployRequest) awsSession() *session.Session {
|
||||
if r._awsSession == nil {
|
||||
r._awsSession = r.AwsCreds.Session()
|
||||
}
|
||||
|
||||
return r._awsSession
|
||||
}
|
||||
|
||||
// awsCredentials defines AWS credentials used for deployment. Unable to use
// roles when deploying using the gitlab CI/CD pipeline, so static keys are
// required.
type awsCredentials struct {
	AccessKeyID     string `validate:"required"`
	SecretAccessKey string `validate:"required"`
	Region          string `validate:"required"`
}
|
||||
|
||||
// Session returns a new AWS Session used to access AWS services.
|
||||
func (creds awsCredentials) Session() *session.Session {
|
||||
return session.New(
|
||||
&aws.Config{
|
||||
Region: aws.String(creds.Region),
|
||||
Credentials: credentials.NewStaticCredentials(creds.AccessKeyID, creds.SecretAccessKey, ""),
|
||||
})
|
||||
}
|
||||
|
||||
// IamPolicyDocument defines an AWS IAM policy used for defining access for
// IAM roles, users, and groups. The JSON tags match the IAM policy document
// format ("Version"/"Statement").
type IamPolicyDocument struct {
	Version   string              `json:"Version"`
	Statement []IamStatementEntry `json:"Statement"`
}
|
||||
|
||||
// IamStatementEntry defines a single statement for an IAM policy.
type IamStatementEntry struct {
	Sid    string   `json:"Sid"`
	Effect string   `json:"Effect"` // "Allow" or "Deny" per the IAM policy grammar
	Action []string `json:"Action"`
	// Resource is interface{} because IAM accepts either a single ARN
	// string or a list of ARNs here.
	Resource interface{} `json:"Resource"`
}
|
3662
tools/truss/cmd/devops/service_deploy.go
Normal file
3662
tools/truss/cmd/devops/service_deploy.go
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user