Mirror of https://github.com/mattermost/focalboard.git

Import/export moved to server + image support + streaming (#2201)

Doug Lauder
2022-02-01 19:01:29 -05:00
committed by GitHub
parent 42c6ec5b61
commit 65c783c270
20 changed files with 684 additions and 494 deletions

View File

@@ -548,6 +548,8 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94 h1:+AIlO01SKT9sfWU5CLWi0cfHc7dQwgGz3FhFRzXLoMg=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94/go.mod h1:TcE3PIIkVWbP/HjhRAafgCjRKvDOi086iqp9VkNX/ng=
 github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4=
 github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g=
 github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=

View File

@@ -727,6 +727,8 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94 h1:+AIlO01SKT9sfWU5CLWi0cfHc7dQwgGz3FhFRzXLoMg=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94/go.mod h1:TcE3PIIkVWbP/HjhRAafgCjRKvDOi086iqp9VkNX/ng=
 github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4=
 github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g=
 github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=

View File

@@ -25,7 +25,6 @@ import (
 const (
 	HeaderRequestedWith    = "X-Requested-With"
 	HeaderRequestedWithXML = "XMLHttpRequest"
-	SingleUser             = "single-user"
 	UploadFormFileKey      = "file"
 )
@@ -76,9 +75,6 @@ func (a *API) RegisterRoutes(r *mux.Router) {
apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/{blockID}", a.sessionRequired(a.handlePatchBlock)).Methods("PATCH") apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/{blockID}", a.sessionRequired(a.handlePatchBlock)).Methods("PATCH")
apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/{blockID}/subtree", a.attachSession(a.handleGetSubTree, false)).Methods("GET") apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/{blockID}/subtree", a.attachSession(a.handleGetSubTree, false)).Methods("GET")
apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/export", a.sessionRequired(a.handleExport)).Methods("GET")
apiv1.HandleFunc("/workspaces/{workspaceID}/blocks/import", a.sessionRequired(a.handleImport)).Methods("POST")
apiv1.HandleFunc("/workspaces/{workspaceID}/sharing/{rootID}", a.sessionRequired(a.handlePostSharing)).Methods("POST") apiv1.HandleFunc("/workspaces/{workspaceID}/sharing/{rootID}", a.sessionRequired(a.handlePostSharing)).Methods("POST")
apiv1.HandleFunc("/workspaces/{workspaceID}/sharing/{rootID}", a.sessionRequired(a.handleGetSharing)).Methods("GET") apiv1.HandleFunc("/workspaces/{workspaceID}/sharing/{rootID}", a.sessionRequired(a.handleGetSharing)).Methods("GET")
@@ -109,6 +105,10 @@ func (a *API) RegisterRoutes(r *mux.Router) {
apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions", a.sessionRequired(a.handleCreateSubscription)).Methods("POST") apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions", a.sessionRequired(a.handleCreateSubscription)).Methods("POST")
apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions/{blockID}/{subscriberID}", a.sessionRequired(a.handleDeleteSubscription)).Methods("DELETE") apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions/{blockID}/{subscriberID}", a.sessionRequired(a.handleDeleteSubscription)).Methods("DELETE")
apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions/{subscriberID}", a.sessionRequired(a.handleGetSubscriptions)).Methods("GET") apiv1.HandleFunc("/workspaces/{workspaceID}/subscriptions/{subscriberID}", a.sessionRequired(a.handleGetSubscriptions)).Methods("GET")
// archives
apiv1.HandleFunc("/workspaces/{workspaceID}/archive/export", a.sessionRequired(a.handleArchiveExport)).Methods("GET")
apiv1.HandleFunc("/workspaces/{workspaceID}/archive/import", a.sessionRequired(a.handleArchiveImport)).Methods("POST")
} }
func (a *API) RegisterAdminRoutes(r *mux.Router) { func (a *API) RegisterAdminRoutes(r *mux.Router) {
@@ -336,7 +336,7 @@ func stampModificationMetadata(r *http.Request, blocks []model.Block, auditRec *
 	ctx := r.Context()
 	session := ctx.Value(sessionContextKey).(*model.Session)
 	userID := session.UserID
-	if userID == SingleUser {
+	if userID == model.SingleUser {
 		userID = ""
 	}
@@ -537,12 +537,12 @@ func (a *API) handleGetMe(w http.ResponseWriter, r *http.Request) {
 	auditRec := a.makeAuditRecord(r, "getMe", audit.Fail)
 	defer a.audit.LogRecord(audit.LevelRead, auditRec)
-	if session.UserID == SingleUser {
+	if session.UserID == model.SingleUser {
 		now := utils.GetMillis()
 		user = &model.User{
-			ID:       SingleUser,
-			Username: SingleUser,
-			Email:    SingleUser,
+			ID:       model.SingleUser,
+			Username: model.SingleUser,
+			Email:    model.SingleUser,
 			CreateAt: now,
 			UpdateAt: now,
 		}
@@ -860,183 +860,6 @@ func (a *API) handleGetSubTree(w http.ResponseWriter, r *http.Request) {
	auditRec.Success()
}
func (a *API) handleExport(w http.ResponseWriter, r *http.Request) {
// swagger:operation GET /api/v1/workspaces/{workspaceID}/blocks/export exportBlocks
//
// Returns all blocks
//
// ---
// produces:
// - application/json
// parameters:
// - name: workspaceID
// in: path
// description: Workspace ID
// required: true
// type: string
// security:
// - BearerAuth: []
// responses:
// '200':
// description: success
// schema:
// type: array
// items:
// "$ref": "#/definitions/Block"
// default:
// description: internal error
// schema:
// "$ref": "#/definitions/ErrorResponse"
query := r.URL.Query()
rootID := query.Get("root_id")
container, err := a.getContainer(r)
if err != nil {
a.noContainerErrorResponse(w, r.URL.Path, err)
return
}
auditRec := a.makeAuditRecord(r, "export", audit.Fail)
defer a.audit.LogRecord(audit.LevelRead, auditRec)
auditRec.AddMeta("rootID", rootID)
var blocks []model.Block
if rootID == "" {
blocks, err = a.app.GetAllBlocks(*container)
} else {
blocks, err = a.app.GetBlocksWithRootID(*container, rootID)
}
if err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
a.logger.Debug("raw blocks", mlog.Int("block_count", len(blocks)))
auditRec.AddMeta("rawCount", len(blocks))
blocks = filterOrphanBlocks(blocks)
a.logger.Debug("EXPORT filtered blocks", mlog.Int("block_count", len(blocks)))
auditRec.AddMeta("filteredCount", len(blocks))
json, err := json.Marshal(blocks)
if err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
jsonBytesResponse(w, http.StatusOK, json)
auditRec.Success()
}
func filterOrphanBlocks(blocks []model.Block) (ret []model.Block) {
queue := make([]model.Block, 0)
childrenOfBlockWithID := make(map[string]*[]model.Block)
// Build the trees from nodes
for _, block := range blocks {
if len(block.ParentID) == 0 {
// Queue root blocks to process first
queue = append(queue, block)
} else {
siblings := childrenOfBlockWithID[block.ParentID]
if siblings != nil {
*siblings = append(*siblings, block)
} else {
siblings := []model.Block{block}
childrenOfBlockWithID[block.ParentID] = &siblings
}
}
}
// Map the trees to an array, which skips orphaned nodes
blocks = make([]model.Block, 0)
for len(queue) > 0 {
block := queue[0]
queue = queue[1:] // dequeue
blocks = append(blocks, block)
children := childrenOfBlockWithID[block.ID]
if children != nil {
queue = append(queue, (*children)...)
}
}
return blocks
}
func (a *API) handleImport(w http.ResponseWriter, r *http.Request) {
// swagger:operation POST /api/v1/workspaces/{workspaceID}/blocks/import importBlocks
//
// Import blocks
//
// ---
// produces:
// - application/json
// parameters:
// - name: workspaceID
// in: path
// description: Workspace ID
// required: true
// type: string
// - name: Body
// in: body
// description: array of blocks to import
// required: true
// schema:
// type: array
// items:
// "$ref": "#/definitions/Block"
// security:
// - BearerAuth: []
// responses:
// '200':
// description: success
// default:
// description: internal error
// schema:
// "$ref": "#/definitions/ErrorResponse"
container, err := a.getContainer(r)
if err != nil {
a.noContainerErrorResponse(w, r.URL.Path, err)
return
}
requestBody, err := ioutil.ReadAll(r.Body)
if err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
var blocks []model.Block
err = json.Unmarshal(requestBody, &blocks)
if err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
auditRec := a.makeAuditRecord(r, "import", audit.Fail)
defer a.audit.LogRecord(audit.LevelModify, auditRec)
stampModificationMetadata(r, blocks, auditRec)
ctx := r.Context()
session := ctx.Value(sessionContextKey).(*model.Session)
_, err = a.app.InsertBlocks(*container, model.GenerateBlockIDs(blocks, a.logger), session.UserID, false)
if err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
jsonStringResponse(w, http.StatusOK, "{}")
a.logger.Debug("IMPORT Blocks", mlog.Int("block_count", len(blocks)))
auditRec.AddMeta("blockCount", len(blocks))
auditRec.Success()
}
// Sharing

func (a *API) handleGetSharing(w http.ResponseWriter, r *http.Request) {
@@ -1173,7 +996,7 @@ func (a *API) handlePostSharing(w http.ResponseWriter, r *http.Request) {
 	ctx := r.Context()
 	session := ctx.Value(sessionContextKey).(*model.Session)
 	userID := session.UserID
-	if userID == SingleUser {
+	if userID == model.SingleUser {
 		userID = ""
 	}

server/api/archive.go (new file, 144 lines)
View File

@@ -0,0 +1,144 @@
package api
import (
"fmt"
"net/http"
"time"
"github.com/mattermost/focalboard/server/model"
"github.com/mattermost/focalboard/server/services/audit"
)
func (a *API) handleArchiveExport(w http.ResponseWriter, r *http.Request) {
// swagger:operation GET /api/v1/workspaces/{workspaceID}/archive/export archiveExport
//
// Exports an archive of all blocks for one or more boards. If board_id is provided then
// only that board will be exported, otherwise all boards in the workspace are exported.
//
// ---
// produces:
// - application/json
// parameters:
// - name: workspaceID
// in: path
// description: Workspace ID
// required: true
// type: string
// - name: board_id
//   in: query
//   description: ID of board to export
//   required: false
//   type: string
// security:
// - BearerAuth: []
// responses:
// '200':
// description: success
// content:
// application/octet-stream:
// type: string
// format: binary
// default:
// description: internal error
// schema:
// "$ref": "#/definitions/ErrorResponse"
query := r.URL.Query()
boardID := query.Get("board_id")
container, err := a.getContainer(r)
if err != nil {
a.noContainerErrorResponse(w, r.URL.Path, err)
return
}
auditRec := a.makeAuditRecord(r, "archiveExport", audit.Fail)
defer a.audit.LogRecord(audit.LevelRead, auditRec)
auditRec.AddMeta("BoardID", boardID)
var boardIDs []string
if boardID != "" {
boardIDs = []string{boardID}
}
opts := model.ExportArchiveOptions{
WorkspaceID: container.WorkspaceID,
BoardIDs: boardIDs,
}
filename := fmt.Sprintf("archive-%s.focalboard", time.Now().Format("2006-01-02"))
w.Header().Set("Content-Type", "application/octet-stream")
w.Header().Set("Content-Disposition", "attachment; filename="+filename)
w.Header().Set("Content-Transfer-Encoding", "binary")
if err := a.app.ExportArchive(w, opts); err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
}
auditRec.Success()
}
func (a *API) handleArchiveImport(w http.ResponseWriter, r *http.Request) {
// swagger:operation POST /api/v1/workspaces/{workspaceID}/archive/import archiveImport
//
// Import an archive of boards.
//
// ---
// produces:
// - application/json
// consumes:
// - multipart/form-data
// parameters:
// - name: workspaceID
// in: path
// description: Workspace ID
// required: true
// type: string
// - name: file
// in: formData
// description: archive file to import
// required: true
// type: file
// security:
// - BearerAuth: []
// responses:
// '200':
// description: success
// default:
// description: internal error
// schema:
// "$ref": "#/definitions/ErrorResponse"
container, err := a.getContainer(r)
if err != nil {
a.noContainerErrorResponse(w, r.URL.Path, err)
return
}
ctx := r.Context()
session, _ := ctx.Value(sessionContextKey).(*model.Session)
userID := session.UserID
file, handle, err := r.FormFile(UploadFormFileKey)
if err != nil {
fmt.Fprintf(w, "%v", err)
return
}
defer file.Close()
auditRec := a.makeAuditRecord(r, "import", audit.Fail)
defer a.audit.LogRecord(audit.LevelModify, auditRec)
auditRec.AddMeta("filename", handle.Filename)
auditRec.AddMeta("size", handle.Size)
opt := model.ImportArchiveOptions{
WorkspaceID: container.WorkspaceID,
ModifiedBy: userID,
}
if err := a.app.ImportArchive(file, opt); err != nil {
a.errorResponse(w, r.URL.Path, http.StatusInternalServerError, "", err)
return
}
jsonStringResponse(w, http.StatusOK, "{}")
auditRec.Success()
}
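
For context, here is a minimal Go client sketch for the two new routes. It is not part of this commit: the base URL, workspace ID, and token are placeholders, and error handling is abbreviated.

// Illustrative client for the new archive endpoints; names and URLs are placeholders.
package main

import (
	"bytes"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

// exportArchive streams GET /archive/export into a local file.
func exportArchive(baseURL, workspaceID, token string) error {
	url := baseURL + "/api/v1/workspaces/" + workspaceID + "/archive/export"
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	out, err := os.Create("archive.focalboard")
	if err != nil {
		return err
	}
	defer out.Close()

	// The handler writes the zip directly to the response, so the archive
	// can be copied to disk without buffering it all in memory.
	_, err = io.Copy(out, resp.Body)
	return err
}

// importArchive uploads an archive via POST /archive/import as multipart
// form data under the "file" field (UploadFormFileKey on the server).
func importArchive(baseURL, workspaceID, token, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	var buf bytes.Buffer
	mw := multipart.NewWriter(&buf)
	fw, err := mw.CreateFormFile("file", path)
	if err != nil {
		return err
	}
	if _, err = io.Copy(fw, f); err != nil {
		return err
	}
	mw.Close()

	url := baseURL + "/api/v1/workspaces/" + workspaceID + "/archive/import"
	req, err := http.NewRequest(http.MethodPost, url, &buf)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", mw.FormDataContentType())

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	return resp.Body.Close()
}

func main() {
	_ = exportArchive("http://localhost:8000", "0", "<token>")
	_ = importArchive("http://localhost:8000", "0", "<token>", "archive.focalboard")
}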

View File

@@ -433,9 +433,9 @@ func (a *API) attachSession(handler func(w http.ResponseWriter, r *http.Request)
 			now := utils.GetMillis()
 			session := &model.Session{
-				ID:          SingleUser,
+				ID:          model.SingleUser,
 				Token:       token,
-				UserID:      SingleUser,
+				UserID:      model.SingleUser,
 				AuthService: a.authService,
 				Props:       map[string]interface{}{},
 				CreateAt:    now,

View File

@@ -145,6 +145,10 @@ func (a *App) GetAllBlocks(c store.Container) ([]model.Block, error) {
 	return a.store.GetAllBlocks(c)
 }

+func (a *App) GetBlockByID(c store.Container, blockID string) (*model.Block, error) {
+	return a.store.GetBlock(c, blockID)
+}
+
 func (a *App) DeleteBlock(c store.Container, blockID string, modifiedBy string) error {
 	block, err := a.store.GetBlock(c, blockID)
 	if err != nil {

server/app/export.go (new file, 208 lines)
View File

@@ -0,0 +1,208 @@
package app
import (
"archive/zip"
"encoding/json"
"fmt"
"io"
"github.com/mattermost/focalboard/server/model"
"github.com/mattermost/focalboard/server/services/store"
"github.com/wiggin77/merror"
"github.com/mattermost/mattermost-server/v6/shared/mlog"
)
var (
newline = []byte{'\n'}
)
func (a *App) ExportArchive(w io.Writer, opt model.ExportArchiveOptions) (errs error) {
container := store.Container{
WorkspaceID: opt.WorkspaceID,
}
boards, err := a.getBoardsForArchive(container, opt.BoardIDs)
if err != nil {
return err
}
merr := merror.New()
defer func() {
errs = merr.ErrorOrNil()
}()
// wrap the writer in a zip.
zw := zip.NewWriter(w)
defer func() {
merr.Append(zw.Close())
}()
if err := a.writeArchiveVersion(zw); err != nil {
merr.Append(err)
return
}
for _, board := range boards {
if err := a.writeArchiveBoard(zw, board, opt); err != nil {
merr.Append(fmt.Errorf("cannot export board %s: %w", board.ID, err))
return
}
}
return nil
}
// writeArchiveVersion writes a version file to the zip.
func (a *App) writeArchiveVersion(zw *zip.Writer) error {
archiveHeader := model.ArchiveHeader{
Version: archiveVersion,
Date: model.GetMillis(),
}
b, _ := json.Marshal(&archiveHeader)
w, err := zw.Create("version.json")
if err != nil {
return fmt.Errorf("cannot write archive header: %w", err)
}
if _, err := w.Write(b); err != nil {
return fmt.Errorf("cannot write archive header: %w", err)
}
return nil
}
// writeArchiveBoard writes a single board to the archive in a zip directory.
func (a *App) writeArchiveBoard(zw *zip.Writer, board model.Block, opt model.ExportArchiveOptions) error {
// create a directory per board
w, err := zw.Create(board.ID + "/board.jsonl")
if err != nil {
return err
}
// write the board block first
if err = a.writeArchiveBlockLine(w, board); err != nil {
return err
}
var files []string
container := store.Container{
WorkspaceID: opt.WorkspaceID,
}
// write the board's blocks
// TODO: paginate this
blocks, err := a.GetBlocksWithRootID(container, board.ID)
if err != nil {
return err
}
for _, block := range blocks {
if err = a.writeArchiveBlockLine(w, block); err != nil {
return err
}
if block.Type == "image" {
filename, err := extractImageFilename(block)
if err != nil {
return err
}
files = append(files, filename)
}
}
// write the files
for _, filename := range files {
if err := a.writeArchiveFile(zw, filename, board.ID, opt); err != nil {
return fmt.Errorf("cannot write file %s to archive: %w", filename, err)
}
}
return nil
}
// writeArchiveBlockLine writes a single block to the archive.
func (a *App) writeArchiveBlockLine(w io.Writer, block model.Block) error {
b, err := json.Marshal(&block)
if err != nil {
return err
}
line := model.ArchiveLine{
Type: "block",
Data: b,
}
b, err = json.Marshal(&line)
if err != nil {
return err
}
_, err = w.Write(b)
if err != nil {
return err
}
// jsonl files need a newline
_, err = w.Write(newline)
return err
}
// writeArchiveFile writes a single file to the archive.
func (a *App) writeArchiveFile(zw *zip.Writer, filename string, boardID string, opt model.ExportArchiveOptions) error {
dest, err := zw.Create(boardID + "/" + filename)
if err != nil {
return err
}
src, err := a.GetFileReader(opt.WorkspaceID, boardID, filename)
if err != nil {
// just log this; image file is missing but we'll still export an equivalent board
a.logger.Error("image file missing for export",
mlog.String("filename", filename),
mlog.String("workspace_id", opt.WorkspaceID),
mlog.String("board_id", boardID),
)
return nil
}
defer src.Close()
_, err = io.Copy(dest, src)
return err
}
// getBoardsForArchive fetches all the specified boards, or all boards in the workspace/team
// if `boardIDs` is empty.
func (a *App) getBoardsForArchive(container store.Container, boardIDs []string) ([]model.Block, error) {
if len(boardIDs) == 0 {
boards, err := a.GetBlocks(container, "", "board")
if err != nil {
return nil, fmt.Errorf("could not fetch all boards: %w", err)
}
return boards, nil
}
boards := make([]model.Block, 0, len(boardIDs))
for _, id := range boardIDs {
b, err := a.GetBlockByID(container, id)
if err != nil {
return nil, fmt.Errorf("could not fetch board %s: %w", id, err)
}
if b.Type != "board" {
return nil, fmt.Errorf("block %s is not a board: %w", b.ID, model.ErrInvalidBoardBlock)
}
boards = append(boards, *b)
}
return boards, nil
}
func extractImageFilename(imageBlock model.Block) (string, error) {
f, ok := imageBlock.Fields["fileId"]
if !ok {
return "", model.ErrInvalidImageBlock
}
filename, ok := f.(string)
if !ok {
return "", model.ErrInvalidImageBlock
}
return filename, nil
}
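
For reference, each line that writeArchiveBlockLine emits into board.jsonl is an ArchiveLine wrapping the marshaled block. A hypothetical board block (field set abbreviated) would serialize roughly as:

{"type":"block","data":{"id":"b1","rootId":"b1","type":"board","title":"Roadmap"}}

and version.json at the root of the zip holds the ArchiveHeader, e.g. {"version":2,"date":1643760000000}, with the export time in milliseconds.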

server/app/import.go (new file, 172 lines)
View File

@@ -0,0 +1,172 @@
package app
import (
"bufio"
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"path"
"path/filepath"
"github.com/krolaw/zipstream"
"github.com/mattermost/focalboard/server/model"
"github.com/mattermost/focalboard/server/services/store"
"github.com/mattermost/focalboard/server/utils"
"github.com/mattermost/mattermost-server/v6/shared/mlog"
)
const (
archiveVersion = 2
)
// ImportArchive imports an archive containing zero or more boards, plus all
// associated content, including cards, content blocks, views, and images.
//
// Archives are ZIP files containing a `version.json` file and zero or more
// directories, each containing a `board.jsonl` and zero or more image files.
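//
// For example, an archive with one board might be laid out as follows,
// where the directory name is the board ID:
//
//	version.json
//	<boardID>/board.jsonl
//	<boardID>/<image files referenced by the board's image blocks>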
func (a *App) ImportArchive(r io.Reader, opt model.ImportArchiveOptions) error {
zr := zipstream.NewReader(r)
boardMap := make(map[string]string) // maps old board ids to new
for {
hdr, err := zr.Next()
if err != nil {
if errors.Is(err, io.EOF) {
a.logger.Debug("import archive - done", mlog.Int("boards_imported", len(boardMap)))
return nil
}
return err
}
dir, filename := path.Split(hdr.Name)
dir = path.Clean(dir)
switch filename {
case "version.json":
ver, errVer := parseVersionFile(zr)
if errVer != nil {
return errVer
}
if ver != archiveVersion {
return model.NewErrUnsupportedArchiveVersion(ver, archiveVersion)
}
case "board.jsonl":
boardID, err := a.ImportBoardJSONL(zr, opt)
if err != nil {
return fmt.Errorf("cannot import board %s: %w", dir, err)
}
boardMap[dir] = boardID
default:
// import file/image; dir is the old board id
boardID, ok := boardMap[dir]
if !ok {
a.logger.Error("skipping orphan image in archive",
mlog.String("dir", dir),
mlog.String("filename", filename),
)
continue
}
// save file with original filename so it matches name in image block.
filePath := filepath.Join(opt.WorkspaceID, boardID, filename)
_, err := a.filesBackend.WriteFile(zr, filePath)
if err != nil {
return fmt.Errorf("cannot import file %s for board %s: %w", filename, dir, err)
}
}
a.logger.Trace("import archive file",
mlog.String("dir", dir),
mlog.String("filename", filename),
)
}
}
// ImportBoardJSONL imports a JSONL file containing blocks for one board. The resulting
// board id is returned.
func (a *App) ImportBoardJSONL(r io.Reader, opt model.ImportArchiveOptions) (string, error) {
// TODO: Stream this once `model.GenerateBlockIDs` can take a stream of blocks.
// We don't want to load the whole file in memory, even though it's a single board.
blocks := make([]model.Block, 0, 10)
lineReader := bufio.NewReader(r)
userID := opt.ModifiedBy
if userID == model.SingleUser {
userID = ""
}
now := utils.GetMillis()
lineNum := 1
for {
line, errRead := readLine(lineReader)
if len(line) != 0 {
var archiveLine model.ArchiveLine
err := json.Unmarshal(line, &archiveLine)
if err != nil {
return "", fmt.Errorf("error parsing archive line %d: %w", lineNum, err)
}
switch archiveLine.Type {
case "block":
var block model.Block
if err2 := json.Unmarshal(archiveLine.Data, &block); err2 != nil {
return "", fmt.Errorf("invalid block in archive line %d: %w", lineNum, err2)
}
block.ModifiedBy = userID
block.UpdateAt = now
blocks = append(blocks, block)
default:
return "", model.NewErrUnsupportedArchiveLineType(lineNum, archiveLine.Type)
}
}
if errRead != nil {
if errors.Is(errRead, io.EOF) {
break
}
return "", fmt.Errorf("error reading archive line %d: %w", lineNum, errRead)
}
lineNum++
}
container := store.Container{
WorkspaceID: opt.WorkspaceID,
}
var err error
blocks = model.GenerateBlockIDs(blocks, a.logger)
blocks, err = a.InsertBlocks(container, blocks, opt.ModifiedBy, false)
if err != nil {
return "", fmt.Errorf("error inserting archive blocks: %w", err)
}
// find new board id
for _, block := range blocks {
if block.Type == "board" {
return block.ID, nil
}
}
return "", fmt.Errorf("missing board in archive: %w", model.ErrInvalidBoardBlock)
}
func parseVersionFile(r io.Reader) (int, error) {
file, err := io.ReadAll(r)
if err != nil {
return 0, fmt.Errorf("cannot read version.json: %w", err)
}
var header model.ArchiveHeader
if err := json.Unmarshal(file, &header); err != nil {
return 0, fmt.Errorf("cannot parse version.json: %w", err)
}
return header.Version, nil
}
func readLine(r *bufio.Reader) ([]byte, error) {
line, err := r.ReadBytes('\n')
line = bytes.TrimSpace(line)
return line, err
}
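
For reference, a sketch of invoking ImportArchive directly from a file, assuming an initialized *App in the same package and an `os` import (names and IDs illustrative):

// Sketch only: assumes an initialized *App and suitable workspace/user IDs.
func importFromFile(a *App, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	opt := model.ImportArchiveOptions{
		WorkspaceID: "0",         // placeholder
		ModifiedBy:  "user-id-1", // placeholder
	}
	return a.ImportArchive(f, opt)
}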

View File

@@ -9,6 +9,7 @@ require (
 	github.com/golang/mock v1.5.0
 	github.com/gorilla/mux v1.8.0
 	github.com/gorilla/websocket v1.4.2
+	github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94
 	github.com/lib/pq v1.10.2
 	github.com/magiconair/properties v1.8.5 // indirect
 	github.com/mattermost/mattermost-plugin-api v0.0.21

View File

@@ -548,6 +548,8 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94 h1:+AIlO01SKT9sfWU5CLWi0cfHc7dQwgGz3FhFRzXLoMg=
+github.com/krolaw/zipstream v0.0.0-20180621105154-0a2661891f94/go.mod h1:TcE3PIIkVWbP/HjhRAafgCjRKvDOi086iqp9VkNX/ng=
 github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4=
 github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g=
 github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=

View File

@@ -108,25 +108,12 @@ type BlockPatchBatch struct {
 	BlockPatches []BlockPatch `json:"block_patches"`
 }

-// ArchiveHeader is the first line of any archive file.
-type ArchiveHeader struct {
-	Version int   `json:"version"`
-	Date    int64 `json:"date"`
-}
-
-// ArchiveLine is any non-header line in an archive.
-type ArchiveLine struct {
-	Type string          `json:"type"`
-	Data json.RawMessage `json:"data"`
-}
-
 // BlockModifier is a callback that can modify each block during an import.
 // A cache of arbitrary data will be passed for each call and any changes
 // to the cache will be preserved for the next call.
 // Return true to import the block or false to skip import.
 type BlockModifier func(block *Block, cache map[string]interface{}) bool

-// BlocksFromJSON creates a slice from blocks from a JSON stream, ignoring any errors.
 func BlocksFromJSON(data io.Reader) []Block {
 	var blocks []Block
 	_ = json.NewDecoder(data).Decode(&blocks)

View File

@@ -0,0 +1,85 @@
package model
import (
"encoding/json"
"errors"
"fmt"
)
var (
ErrInvalidImageBlock = errors.New("invalid image block")
)
// Archive is an import / export archive.
// TODO: remove once default templates are converted to new archive format.
type Archive struct {
Version int64 `json:"version"`
Date int64 `json:"date"`
Blocks []Block `json:"blocks"`
}
// ArchiveHeader is the content of the first file (`version.json`) within an archive.
type ArchiveHeader struct {
Version int `json:"version"`
Date int64 `json:"date"`
}
// ArchiveLine is any line in an archive.
type ArchiveLine struct {
Type string `json:"type"`
Data json.RawMessage `json:"data"`
}
// ExportArchiveOptions provides options when exporting one or more boards
// to an archive.
type ExportArchiveOptions struct {
WorkspaceID string
// BoardIDs is the list of boards to include in the archive.
// Empty slice means export all boards from workspace/team.
BoardIDs []string
}
// ImportArchiveOptions provides options when importing an archive.
type ImportArchiveOptions struct {
WorkspaceID string
ModifiedBy string
}
// ErrUnsupportedArchiveVersion is an error returned when trying to import an
// archive with a version that this server does not support.
type ErrUnsupportedArchiveVersion struct {
got int
want int
}
// NewErrUnsupportedArchiveVersion creates an ErrUnsupportedArchiveVersion error.
func NewErrUnsupportedArchiveVersion(got int, want int) ErrUnsupportedArchiveVersion {
return ErrUnsupportedArchiveVersion{
got: got,
want: want,
}
}
func (e ErrUnsupportedArchiveVersion) Error() string {
return fmt.Sprintf("unsupported archive version; got %d, want %d", e.got, e.want)
}
// ErrUnsupportedArchiveLineType is an error returned when trying to import an
// archive containing an unsupported line type.
type ErrUnsupportedArchiveLineType struct {
line int
got string
}
// NewErrUnsupportedArchiveLineType creates an ErrUnsupportedArchiveLineType error.
func NewErrUnsupportedArchiveLineType(line int, got string) ErrUnsupportedArchiveLineType {
return ErrUnsupportedArchiveLineType{
line: line,
got: got,
}
}
func (e ErrUnsupportedArchiveLineType) Error() string {
return fmt.Sprintf("unsupported archive line type; got %s, line %d", e.got, e.line)
}
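
Both error types are plain value types, so callers can distinguish them with errors.As; a brief sketch within this package (helper name hypothetical):

// classifyImportError maps known archive errors to user-facing messages.
func classifyImportError(err error) string {
	if err == nil {
		return ""
	}
	var verErr ErrUnsupportedArchiveVersion
	var lineErr ErrUnsupportedArchiveLineType
	switch {
	case errors.As(err, &verErr):
		return "archive version not supported by this server"
	case errors.As(err, &lineErr):
		return "archive contains an unsupported line type"
	default:
		return err.Error()
	}
}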

View File

@@ -5,6 +5,10 @@ import (
"io" "io"
) )
const (
SingleUser = "single-user"
)
// User is a user // User is a user
// swagger:model // swagger:model
type User struct { type User struct {

View File

@@ -2,10 +2,8 @@
 // See LICENSE.txt for license information.
 import {IAppWindow} from './types'

-import {ArchiveUtils, ArchiveHeader, ArchiveLine, BlockArchiveLine} from './blocks/archive'
 import {Block} from './blocks/block'
 import {Board} from './blocks/board'
-import {LineReader} from './lineReader'
 import mutator from './mutator'
 import {Utils} from './utils'
@@ -13,91 +11,48 @@ declare let window: IAppWindow
 class Archiver {
     static async exportBoardArchive(board: Board): Promise<void> {
-        const blocks = await mutator.exportArchive(board.id)
-        this.exportArchive(blocks)
+        this.exportArchive(mutator.exportArchive(board.id))
     }

     static async exportFullArchive(): Promise<void> {
-        const blocks = await mutator.exportArchive()
-        this.exportArchive(blocks)
+        this.exportArchive(mutator.exportArchive())
     }

-    private static exportArchive(blocks: readonly Block[]): void {
-        const content = ArchiveUtils.buildBlockArchive(blocks)
-        const date = new Date()
-        const filename = `archive-${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}.focalboard`
-        const link = document.createElement('a')
-        link.style.display = 'none'
-
-        // const file = new Blob([content], { type: "text/json" })
-        // link.href = URL.createObjectURL(file)
-
-        link.href = 'data:text/json,' + encodeURIComponent(content)
-        link.download = filename
-        document.body.appendChild(link) // FireFox support
-        link.click()
-
-        // TODO: Review if this is needed in the future, this is to fix the problem with linux webview links
-        if (window.openInNewBrowser) {
-            window.openInNewBrowser(link.href)
-        }
-
-        // TODO: Remove or reuse link
-    }
-
-    private static async importBlocksFromFile(file: File): Promise<void> {
-        let blockCount = 0
-        const maxBlocksPerImport = 1000
-        let blocks: Block[] = []
-        let isFirstLine = true
-        return new Promise<void>((resolve) => {
-            LineReader.readFile(file, async (line, completed) => {
-                if (completed) {
-                    if (blocks.length > 0) {
-                        await mutator.importFullArchive(blocks)
-                        blockCount += blocks.length
-                    }
-                    Utils.log(`Imported ${blockCount} blocks.`)
-                    resolve()
-                    return
-                }
-                if (isFirstLine) {
-                    isFirstLine = false
-                    const header = JSON.parse(line) as ArchiveHeader
-                    if (header.date && header.version >= 1) {
-                        const date = new Date(header.date)
-                        Utils.log(`Import archive, version: ${header.version}, date/time: ${date.toLocaleString()}.`)
-                    }
-                } else {
-                    const row = JSON.parse(line) as ArchiveLine
-                    if (!row || !row.type || !row.data) {
-                        Utils.logError('importFullArchive ERROR parsing line')
-                        return
-                    }
-                    switch (row.type) {
-                    case 'block': {
-                        const blockLine = row as BlockArchiveLine
-                        const block = blockLine.data
-                        if (Archiver.isValidBlock(block)) {
-                            blocks.push(block)
-                            if (blocks.length >= maxBlocksPerImport) {
-                                const blocksToSend = blocks
-                                blocks = []
-                                await mutator.importFullArchive(blocksToSend)
-                                blockCount += blocksToSend.length
-                            }
-                        }
-                        break
-                    }
-                    }
-                }
-            })
-        })
+    private static exportArchive(prom: Promise<Response>): void {
+        // TODO: don't download whole archive before presenting SaveAs dialog.
+        prom.then((response) => {
+            response.blob().
+                then((blob) => {
+                    const link = document.createElement('a')
+                    link.style.display = 'none'
+
+                    const date = new Date()
+                    const filename = `archive-${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}.focalboard`
+                    const file = new Blob([blob], {type: 'application/octet-stream'})
+                    link.href = URL.createObjectURL(file)
+                    link.download = filename
+                    document.body.appendChild(link) // FireFox support
+                    link.click()
+
+                    // TODO: Review if this is needed in the future, this is to fix the problem with linux webview links
+                    if (window.openInNewBrowser) {
+                        window.openInNewBrowser(link.href)
+                    }
+
+                    // TODO: Remove or reuse link and revokeObjectURL to avoid memory leak
+                })
+        })
+    }
+
+    private static async importArchiveFromFile(file: File): Promise<void> {
+        const response = await mutator.importFullArchive(file)
+        if (response.status !== 200) {
+            Utils.log('ERROR importing archive: ' + response.text())
+        }
     }

     static isValidBlock(block: Block): boolean {
         if (!block.id || !block.rootId) {
             return false
@@ -113,7 +68,7 @@ class Archiver {
 		input.onchange = async () => {
 			const file = input.files && input.files[0]
 			if (file) {
-				await Archiver.importBlocksFromFile(file)
+				await Archiver.importArchiveFromFile(file)
 			}
 			onComplete?.()

View File

@@ -1,24 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {TestBlockFactory} from '../test/testBlockFactory'
import {ArchiveUtils} from './archive'
import {Block} from './block'
test('archive: archive and unarchive', async () => {
const blocks: Block[] = []
const board = TestBlockFactory.createBoard()
blocks.push(board)
blocks.push(TestBlockFactory.createBoardView(board))
const card = TestBlockFactory.createCard(board)
blocks.push(card)
blocks.push(TestBlockFactory.createText(card))
blocks.push(TestBlockFactory.createDivider(card))
blocks.push(TestBlockFactory.createImage(card))
const archive = ArchiveUtils.buildBlockArchive(blocks)
const unarchivedBlocks = ArchiveUtils.parseBlockArchive(archive)
expect(unarchivedBlocks).toEqual(blocks)
})

View File

@@ -1,81 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {Block} from './block'
interface ArchiveHeader {
version: number
date: number
}
interface ArchiveLine {
type: string,
data: unknown,
}
// This schema allows the expansion of additional line types in the future
interface BlockArchiveLine extends ArchiveLine {
type: 'block',
data: Block
}
class ArchiveUtils {
static buildBlockArchive(blocks: readonly Block[]): string {
const header: ArchiveHeader = {
version: 1,
date: Date.now(),
}
const headerString = JSON.stringify(header)
let content = headerString + '\n'
for (const block of blocks) {
const line: BlockArchiveLine = {
type: 'block',
data: block,
}
const lineString = JSON.stringify(line)
content += lineString
content += '\n'
}
return content
}
static parseBlockArchive(contents: string): Block[] {
const blocks: Block[] = []
const allLineStrings = contents.split('\n')
if (allLineStrings.length >= 2) {
const headerString = allLineStrings[0]
const header = JSON.parse(headerString) as ArchiveHeader
if (header.date && header.version >= 1) {
const lineStrings = allLineStrings.slice(1)
let lineNum = 2
for (const lineString of lineStrings) {
if (!lineString) {
// Ignore empty lines, e.g. last line
continue
}
const line = JSON.parse(lineString) as ArchiveLine
if (!line || !line.type || !line.data) {
throw new Error(`ERROR parsing line ${lineNum}`)
}
switch (line.type) {
case 'block': {
const blockLine = line as BlockArchiveLine
const block = blockLine.data
blocks.push(block)
break
}
}
lineNum += 1
}
} else {
throw new Error('ERROR parsing header')
}
}
return blocks
}
}
export {ArchiveHeader, ArchiveLine, BlockArchiveLine, ArchiveUtils}

View File

@@ -1,77 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
class LineReader {
private static appendBuffer(buffer1: Uint8Array, buffer2: Uint8Array): Uint8Array {
const tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength)
tmp.set(buffer1, 0)
tmp.set(buffer2, buffer1.byteLength)
return tmp
}
private static arrayBufferIndexOf(buffer: Uint8Array, charCode: number): number {
for (let i = 0; i < buffer.byteLength; ++i) {
if (buffer[i] === charCode) {
return i
}
}
return -1
}
static readFile(file: File, callback: (line: string, completed: boolean) => Promise<void>): void {
let buffer = new Uint8Array(0)
const chunkSize = 1024 * 1000
let offset = 0
const fr = new FileReader()
const decoder = new TextDecoder()
fr.onload = async () => {
const chunk = new Uint8Array(fr.result as ArrayBuffer)
buffer = LineReader.appendBuffer(buffer, chunk)
const newlineChar = 10 // '\n'
let newlineIndex = LineReader.arrayBufferIndexOf(buffer, newlineChar)
while (newlineIndex >= 0) {
const result = decoder.decode(buffer.slice(0, newlineIndex))
buffer = buffer.slice(newlineIndex + 1)
// eslint-disable-next-line no-await-in-loop
await callback(result, false)
newlineIndex = LineReader.arrayBufferIndexOf(buffer, newlineChar)
}
offset += chunkSize
if (offset >= file.size) {
// Completed
if (buffer.byteLength > 0) {
// Handle last line
await callback(decoder.decode(buffer), false)
}
await callback('', true)
return
}
seek()
}
fr.onerror = () => {
callback('', true)
}
seek()
function seek() {
const slice = file.slice(offset, offset + chunkSize)
// Need to read as an ArrayBuffer (instead of text) to handle unicode boundaries
fr.readAsArrayBuffer(slice)
}
}
}
export {LineReader}

View File

@@ -809,13 +809,13 @@ class Mutator {
 // Other methods
 // Not a mutator, but convenient to put here since Mutator wraps OctoClient
-async exportArchive(boardID?: string): Promise<Block[]> {
+async exportArchive(boardID?: string): Promise<Response> {
     return octoClient.exportArchive(boardID)
 }

 // Not a mutator, but convenient to put here since Mutator wraps OctoClient
-async importFullArchive(blocks: readonly Block[]): Promise<Response> {
-    return octoClient.importFullArchive(blocks)
+async importFullArchive(file: File): Promise<Response> {
+    return octoClient.importFullArchive(file)
 }

 get canUndo(): boolean {

View File

@@ -28,8 +28,8 @@ test('OctoClient: get blocks', async () => {
 expect(boards.length).toBe(blocks.length)
 FetchMock.fn.mockReturnValueOnce(FetchMock.jsonResponse(JSON.stringify(blocks)))
-boards = await octoClient.exportArchive()
-expect(boards.length).toBe(blocks.length)
+const response = await octoClient.exportArchive()
+expect(response.status).toBe(200)

 FetchMock.fn.mockReturnValueOnce(FetchMock.jsonResponse(JSON.stringify(blocks)))
 const parentId = 'id1'
@@ -55,20 +55,6 @@ test('OctoClient: insert blocks', async () => {
         }))
 })

-test('OctoClient: importFullArchive', async () => {
-    const blocks = createBoards()
-    await octoClient.importFullArchive(blocks)
-    expect(FetchMock.fn).toBeCalledTimes(1)
-    expect(FetchMock.fn).toHaveBeenCalledWith(
-        expect.anything(),
-        expect.objectContaining({
-            method: 'POST',
-            body: JSON.stringify(blocks),
-        }))
-})
-
 function createBoards(): Block[] {
     const blocks = []

View File

@@ -186,27 +186,24 @@ class OctoClient {
     }

     // If no boardID is provided, it will export the entire archive
-    async exportArchive(boardID = ''): Promise<Block[]> {
-        const path = `${this.workspacePath()}/blocks/export?root_id=${boardID}`
-        const response = await fetch(this.getBaseURL() + path, {headers: this.headers()})
-        if (response.status !== 200) {
-            return []
-        }
-        const blocks = (await this.getJson(response, [])) as Block[]
-        return this.fixBlocks(blocks)
+    async exportArchive(boardID = ''): Promise<Response> {
+        const path = `${this.workspacePath()}/archive/export?board_id=${boardID}`
+        return fetch(this.getBaseURL() + path, {headers: this.headers()})
     }

-    async importFullArchive(blocks: readonly Block[]): Promise<Response> {
-        Utils.log(`importFullArchive: ${blocks.length} blocks(s)`)
-
-        // blocks.forEach((block) => {
-        //     Utils.log(`\t ${block.type}, ${block.id}`)
-        // })
-
-        const body = JSON.stringify(blocks)
-        return fetch(this.getBaseURL() + this.workspacePath() + '/blocks/import', {
+    async importFullArchive(file: File): Promise<Response> {
+        const formData = new FormData()
+        formData.append('file', file)
+
+        const headers = this.headers() as Record<string, string>
+
+        // TIP: Leave out Content-Type here, it will be automatically set by the browser
+        delete headers['Content-Type']
+
+        return fetch(this.getBaseURL() + this.workspacePath() + '/archive/import', {
             method: 'POST',
-            headers: this.headers(),
-            body,
+            headers,
+            body: formData,
         })
     }
 }