Mirror of https://github.com/postgrespro/pg_probackup.git (synced 2025-02-08 14:28:36 +02:00)

Fix merging external directories

Author: Sergey Cherkashin 2019-03-01 18:14:26 +03:00
parent 5b3a733b37
commit c94c6d06f0
4 changed files with 192 additions and 8 deletions

View File

@@ -383,6 +383,16 @@ pgFileComparePathDesc(const void *f1, const void *f2)
 	return -pgFileComparePath(f1, f2);
 }
 
+/*
+ * Compare two pgFile with their path and external_dir_num
+ * in descending order of ASCII code.
+ */
+int
+pgFileComparePathWithExternalDesc(const void *f1, const void *f2)
+{
+	return -pgFileComparePathWithExternal(f1, f2);
+}
+
 /* Compare two pgFile with their linked directory path. */
 int
 pgFileCompareLinked(const void *f1, const void *f2)
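
The hunk above adds only the descending wrapper; pgFileComparePathWithExternal(), the ascending comparator it negates, is not shown in this diff. Below is a minimal, self-contained sketch of what such an ordering plausibly looks like, assuming files are compared by path first and by external_dir_num on ties. The pgFile struct here is a hypothetical stand-in limited to the two fields the diff relies on, not the project's real definition.

#include <string.h>

/*
 * Hypothetical stand-in for pgFile, limited to the two fields the diff
 * relies on; the real struct has many more members.
 */
typedef struct
{
    char   *path;              /* path relative to the backup root */
    int     external_dir_num;  /* 0 = data directory, 1..N = external dirs */
} pgFile;

/*
 * Order by path first, then by external directory number, so two files with
 * the same relative path but from different external directories never
 * compare as equal. Array elements are assumed to be pointers to pgFile,
 * as with the parray lookups in the diff.
 */
int
compare_path_with_external(const void *f1, const void *f2)
{
    const pgFile *file1 = *(pgFile *const *) f1;
    const pgFile *file2 = *(pgFile *const *) f2;
    int           res = strcmp(file1->path, file2->path);

    if (res == 0)
    {
        if (file1->external_dir_num > file2->external_dir_num)
            return 1;
        if (file1->external_dir_num < file2->external_dir_num)
            return -1;
    }
    return res;
}

/* Descending variant, in the spirit of pgFileComparePathWithExternalDesc(). */
int
compare_path_with_external_desc(const void *f1, const void *f2)
{
    return -compare_path_with_external(f1, f2);
}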

View File

@@ -372,12 +372,14 @@ delete_source_backup:
 	for (i = 0; i < parray_num(to_files); i++)
 	{
 		pgFile	   *file = (pgFile *) parray_get(to_files, i);
-		char	   *dir_name = parray_get(to_external, file->external_dir_num - 1);
 
-		if (file->external_dir_num &&
-			backup_contains_external(dir_name, from_external))
-			/* Dir already removed*/
-			continue;
+		if (file->external_dir_num && to_external)
+		{
+			char	   *dir_name = parray_get(to_external, file->external_dir_num - 1);
+			if (backup_contains_external(dir_name, from_external))
+				/* Dir already removed*/
+				continue;
+		}
 
 		if (parray_bsearch(files, file, pgFileComparePathDesc) == NULL)
 		{

@@ -463,7 +465,7 @@
 				 i + 1, num_files, file->path);
 
 		res_file = parray_bsearch(argument->to_files, file,
-								  pgFileComparePathDesc);
+								  pgFileComparePathWithExternalDesc);
 		to_file = (res_file) ? *res_file : NULL;
 
 		/*

@@ -611,8 +613,6 @@
 				file->crc = pgFileGetCRC(to_file_path, true, true, NULL);
 			}
 		}
-		else if (strcmp(file->name, "pg_control") == 0)
-			copy_pgcontrol_file(argument->from_root, argument->to_root, file);
 		else if (file->external_dir_num)
 		{
 			char		from_root[MAXPGPATH];

@@ -630,6 +630,8 @@
 								   new_dir_num);
 			copy_file(from_root, to_root, file);
 		}
+		else if (strcmp(file->name, "pg_control") == 0)
+			copy_pgcontrol_file(argument->from_root, argument->to_root, file);
 		else
 			copy_file(argument->from_root, argument->to_root, file);
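
Why the comparator passed to parray_bsearch() was switched in the hunk above: a binary search only finds an entry if it is given the same ordering the array was sorted with. If the destination file list is ordered with external directories taken into account but searched by path alone, a file coming from an external directory can be missed, or matched against an unrelated file that shares its relative path, and then handled as if it were absent from the destination backup. The standalone toy below demonstrates the matching with the C library's qsort/bsearch rather than the project's parray wrappers; the struct, comparator, and file names are illustrative stand-ins, as in the sketch above.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for pgFile, as in the earlier sketch. */
typedef struct
{
    char   *path;
    int     external_dir_num;
} pgFile;

int
compare_path_with_external_desc(const void *f1, const void *f2)
{
    const pgFile *file1 = *(pgFile *const *) f1;
    const pgFile *file2 = *(pgFile *const *) f2;
    int           res = strcmp(file1->path, file2->path);

    if (res == 0)
        res = (file1->external_dir_num > file2->external_dir_num) -
              (file1->external_dir_num < file2->external_dir_num);
    return -res;                /* descending, like the *Desc comparators */
}

int
main(void)
{
    pgFile  a = {"base/1/1234", 0};
    pgFile  b = {"conf/app.conf", 1};   /* same relative path ...          */
    pgFile  c = {"conf/app.conf", 2};   /* ... but a different external dir */
    pgFile *files[] = {&a, &b, &c};
    pgFile  key = {"conf/app.conf", 2};
    pgFile *key_ptr = &key;
    pgFile **found;

    /* Sort and search with the same external-aware ordering. */
    qsort(files, 3, sizeof(pgFile *), compare_path_with_external_desc);
    found = bsearch(&key_ptr, files, 3, sizeof(pgFile *),
                    compare_path_with_external_desc);

    if (found)
        printf("matched %s from external dir %d\n",
               (*found)->path, (*found)->external_dir_num);
    return 0;
}

Because sorting and searching use the same external-aware comparator, the lookup lands on the entry from external directory 2 instead of colliding with the one from directory 1.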

View File

@@ -539,6 +539,7 @@ extern pg_crc32 pgFileGetCRC(const char *file_path, bool use_crc32c,
 extern int pgFileComparePath(const void *f1, const void *f2);
 extern int pgFileComparePathWithExternal(const void *f1, const void *f2);
 extern int pgFileComparePathDesc(const void *f1, const void *f2);
+extern int pgFileComparePathWithExternalDesc(const void *f1, const void *f2);
 extern int pgFileCompareLinked(const void *f1, const void *f2);
 extern int pgFileCompareSize(const void *f1, const void *f2);

View File

@@ -580,6 +580,177 @@ class ExternalTest(ProbackupTest, unittest.TestCase):
 
         self.compare_pgdata(pgdata, pgdata_restored)
 
+    # @unittest.expectedFailure
+    # @unittest.skip("skip")
+    def test_external_merge_single(self):
+        """"""
+        fname = self.id().split('.')[3]
+        backup_dir = os.path.join(self.tmp_path, module_name, fname, 'backup')
+        node = self.make_simple_node(
+            base_dir=os.path.join(module_name, fname, 'node'),
+            set_replication=True,
+            initdb_params=['--data-checksums'],
+            pg_options={
+                'max_wal_senders': '2',
+                'autovacuum': 'off'})
+
+        self.init_pb(backup_dir)
+        self.add_instance(backup_dir, 'node', node)
+        node.slow_start()
+
+        node.pgbench_init(scale=10)
+
+        # FULL backup
+        self.backup_node(
+            backup_dir, 'node', node, options=["-j", "4", "--stream"])
+
+        external_dir1_old = self.get_tblspace_path(node, 'external_dir1')
+        external_dir2_old = self.get_tblspace_path(node, 'external_dir2')
+
+        pgbench = node.pgbench(options=['-T', '30', '-c', '1', '--no-vacuum'])
+        pgbench.wait()
+
+        # FULL backup with changed data
+        backup_id = self.backup_node(
+            backup_dir, 'node', node,
+            options=["-j", "4", "--stream"])
+
+        # fill external directories with changed data
+        self.restore_node(
+            backup_dir, 'node', node,
+            data_dir=external_dir1_old, options=["-j", "4"])
+
+        self.restore_node(
+            backup_dir, 'node', node,
+            data_dir=external_dir2_old, options=["-j", "4"])
+
+        self.delete_pb(backup_dir, 'node', backup_id=backup_id)
+
+        # delta backup with external directories using new binary
+        backup_id = self.backup_node(
+            backup_dir, 'node', node, backup_type="delta",
+            options=[
+                "-j", "4", "--stream",
+                "-E", "{0}:{1}".format(
+                    external_dir1_old,
+                    external_dir2_old)])
+
+        self.merge_backup(backup_dir, 'node', backup_id=backup_id)
+
+        pgdata = self.pgdata_content(
+            node.base_dir, exclude_dirs=['logs'])
+
+        # RESTORE
+        node.cleanup()
+        shutil.rmtree(node.base_dir, ignore_errors=True)
+
+        external_dir1_new = self.get_tblspace_path(node, 'external_dir1')
+        external_dir2_new = self.get_tblspace_path(node, 'external_dir2')
+
+        self.restore_node(
+            backup_dir, 'node', node,
+            options=[
+                "-j", "4",
+                "--external-mapping={0}={1}".format(external_dir1_old, external_dir1_new),
+                "--external-mapping={0}={1}".format(external_dir2_old, external_dir2_new)])
+
+        pgdata_restored = self.pgdata_content(
+            node.base_dir, exclude_dirs=['logs'])
+        self.compare_pgdata(pgdata, pgdata_restored)
+
+    # @unittest.expectedFailure
+    # @unittest.skip("skip")
+    def test_external_merge_double(self):
+        """"""
+        fname = self.id().split('.')[3]
+        backup_dir = os.path.join(self.tmp_path, module_name, fname, 'backup')
+        node = self.make_simple_node(
+            base_dir=os.path.join(module_name, fname, 'node'),
+            set_replication=True,
+            initdb_params=['--data-checksums'],
+            pg_options={
+                'max_wal_senders': '2',
+                'autovacuum': 'off'})
+
+        self.init_pb(backup_dir)
+        self.add_instance(backup_dir, 'node', node)
+        node.slow_start()
+
+        node.pgbench_init(scale=10)
+
+        # FULL backup
+        self.backup_node(
+            backup_dir, 'node', node, options=["-j", "4", "--stream"])
+
+        external_dir1_old = self.get_tblspace_path(node, 'external_dir1')
+        external_dir2_old = self.get_tblspace_path(node, 'external_dir2')
+
+        pgbench = node.pgbench(options=['-T', '30', '-c', '1', '--no-vacuum'])
+        pgbench.wait()
+
+        # FULL backup
+        backup_id = self.backup_node(
+            backup_dir, 'node', node,
+            options=["-j", "4", "--stream"])
+
+        # fill external directories with changed data
+        self.restore_node(
+            backup_dir, 'node', node,
+            data_dir=external_dir1_old, options=["-j", "4"])
+
+        self.restore_node(
+            backup_dir, 'node', node,
+            data_dir=external_dir2_old, options=["-j", "4"])
+
+        self.delete_pb(backup_dir, 'node', backup_id=backup_id)
+
+        # delta backup with external directories
+        self.backup_node(
+            backup_dir, 'node', node, backup_type="delta",
+            options=[
+                "-j", "4", "--stream",
+                "-E", "{0}:{1}".format(
+                    external_dir1_old,
+                    external_dir2_old)])
+
+        # delta backup with external directories
+        backup_id = self.backup_node(
+            backup_dir, 'node', node, backup_type="delta",
+            options=[
+                "-j", "4", "--stream",
+                "-E", "{0}:{1}".format(
+                    external_dir1_old,
+                    external_dir2_old)])
+
+        pgdata = self.pgdata_content(
+            node.base_dir, exclude_dirs=['logs'])
+
+        shutil.rmtree(external_dir1_old, ignore_errors=True)
+        shutil.rmtree(external_dir2_old, ignore_errors=True)
+
+        # delta backup without external directories
+        self.merge_backup(backup_dir, 'node', backup_id=backup_id)
+
+        # RESTORE
+        node.cleanup()
+        shutil.rmtree(node.base_dir, ignore_errors=True)
+
+        external_dir1_new = self.get_tblspace_path(node, 'external_dir1')
+        external_dir2_new = self.get_tblspace_path(node, 'external_dir2')
+
+        self.restore_node(
+            backup_dir, 'node', node,
+            options=[
+                "-j", "4",
+                "--external-mapping={0}={1}".format(external_dir1_old, external_dir1_new),
+                "--external-mapping={0}={1}".format(external_dir2_old, external_dir2_new)])
+
+        pgdata_restored = self.pgdata_content(
+            node.base_dir, exclude_dirs=['logs'])
+        self.compare_pgdata(pgdata, pgdata_restored)
+
     # external directory contain symlink to file
     # external directory contain symlink to directory
     # latest page backup without external_dir