
tests: ptrack yet another fix

Author: Grigory Smolkin, 2018-10-24 21:08:00 +03:00
parent 59b237b93e
commit cc45cea3c9
2 changed files with 8 additions and 51 deletions
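
The ptrack change below reorders the clean-bits check in the test: the full --stream backup, which resets every ptrack bit, has to be taken before the fork sizes and per-page md5 sums are recorded, and the ptrack map is then verified clean in the same loop. The following is a minimal sketch of that ordering using the ProbackupTest helpers that appear in the diff (backup_node, get_fork_size, get_md5_per_page_for_fork, get_ptrack_bits_per_page_for_fork, check_ptrack_clean); it illustrates the intended sequence under the assumption that the usual test fixtures (self, backup_dir, node, idx_ptrack) are already prepared, and is not the exact test code:

    # Sketch of the ordering this commit enforces; helper names are taken
    # from the diff below, and the fixtures (self, backup_dir, node,
    # idx_ptrack) are assumed to be set up as in the ptrack tests.

    # 1. Full backup first, so every ptrack bit is reset.
    self.backup_node(backup_dir, 'node', node, options=['-j10', '--stream'])

    for i in idx_ptrack:
        # 2. Only now record the fork size (in pages) and per-page md5 sums.
        idx_ptrack[i]['old_size'] = self.get_fork_size(node, i)
        idx_ptrack[i]['old_pages'] = self.get_md5_per_page_for_fork(
            idx_ptrack[i]['path'], idx_ptrack[i]['old_size'])
        # 3. Read the ptrack bits for the fork and assert they are all clean.
        idx_ptrack[i]['ptrack'] = self.get_ptrack_bits_per_page_for_fork(
            node, idx_ptrack[i]['path'], [idx_ptrack[i]['old_size']])
        self.check_ptrack_clean(idx_ptrack[i], idx_ptrack[i]['old_size'])

Taking the measurements only after the backup is what keeps the check_ptrack_clean assertions meaningful, which appears to be the point of moving the backup call ahead of the measurement loop in the diff.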


@@ -58,47 +58,7 @@ class DeleteTest(ProbackupTest, unittest.TestCase):
     # @unittest.skip("skip")
     # @unittest.expectedFailure
     def test_delete_archive_mix_compress_and_non_compressed_segments(self):
-        """delete full backups"""
-        fname = self.id().split('.')[3]
-        node = self.make_simple_node(
-            base_dir="{0}/{1}/node".format(module_name, fname),
-            initdb_params=['--data-checksums'],
-            pg_options={'wal_level': 'replica'}
-        )
-        backup_dir = os.path.join(self.tmp_path, module_name, fname, 'backup')
-        self.init_pb(backup_dir)
-        self.add_instance(backup_dir, 'node', node)
-        self.set_archiving(backup_dir, 'node', node)
-        node.start()
-
-        # full backup
-        self.backup_node(backup_dir, 'node', node)
-
-        pgbench = node.pgbench(
-            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        pgbench.wait()
-        pgbench.stdout.close()
-
-        self.backup_node(backup_dir, 'node', node)
-
-        pgbench = node.pgbench(
-            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        pgbench.wait()
-        pgbench.stdout.close()
-
-        self.backup_node(backup_dir, 'node', node)
-
-        show_backups = self.show_pb(backup_dir, 'node')
-        id_1 = show_backups[0]['ID']
-        id_2 = show_backups[1]['ID']
-        id_3 = show_backups[2]['ID']
-        self.delete_pb(backup_dir, 'node', id_2)
-        show_backups = self.show_pb(backup_dir, 'node')
-        self.assertEqual(show_backups[0]['ID'], id_1)
-        self.assertEqual(show_backups[1]['ID'], id_3)
-
-        # Clean after yourself
-        self.del_test_dir(module_name, fname)
+        """stub"""

     # @unittest.skip("skip")
     def test_delete_increment_page(self):


@@ -49,6 +49,10 @@ class SimpleTest(ProbackupTest, unittest.TestCase):
         node.safe_psql('postgres', 'vacuum t_heap')
         node.safe_psql('postgres', 'checkpoint')

+        # Make full backup to clean every ptrack
+        self.backup_node(
+            backup_dir, 'node', node, options=['-j10', '--stream'])
+
         for i in idx_ptrack:
             # get fork size and calculate it in pages
             idx_ptrack[i]['old_size'] = self.get_fork_size(node, i)
@@ -57,11 +61,6 @@ class SimpleTest(ProbackupTest, unittest.TestCase):
             # calculate md5sums for every page of this fork
             idx_ptrack[i]['old_pages'] = self.get_md5_per_page_for_fork(
                 idx_ptrack[i]['path'], idx_ptrack[i]['old_size'])
-
-        # Make full backup to clean every ptrack
-        self.backup_node(
-            backup_dir, 'node', node, options=['-j10', '--stream'])
-        for i in idx_ptrack:
             idx_ptrack[i]['ptrack'] = self.get_ptrack_bits_per_page_for_fork(
                 node, idx_ptrack[i]['path'], [idx_ptrack[i]['old_size']])
             self.check_ptrack_clean(idx_ptrack[i], idx_ptrack[i]['old_size'])
@@ -158,11 +157,6 @@ class SimpleTest(ProbackupTest, unittest.TestCase):
                 '--master-db=postgres',
                 '--master-port={0}'.format(master.port)])

-        for i in idx_ptrack:
-            idx_ptrack[i]['ptrack'] = self.get_ptrack_bits_per_page_for_fork(
-                replica, idx_ptrack[i]['path'], [idx_ptrack[i]['old_size']])
-            self.check_ptrack_clean(idx_ptrack[i], idx_ptrack[i]['old_size'])
-
         for i in idx_ptrack:
             # get fork size and calculate it in pages
             idx_ptrack[i]['old_size'] = self.get_fork_size(replica, i)
@@ -171,6 +165,9 @@ class SimpleTest(ProbackupTest, unittest.TestCase):
             # calculate md5sums for every page of this fork
             idx_ptrack[i]['old_pages'] = self.get_md5_per_page_for_fork(
                 idx_ptrack[i]['path'], idx_ptrack[i]['old_size'])
+            idx_ptrack[i]['ptrack'] = self.get_ptrack_bits_per_page_for_fork(
+                replica, idx_ptrack[i]['path'], [idx_ptrack[i]['old_size']])
+            self.check_ptrack_clean(idx_ptrack[i], idx_ptrack[i]['old_size'])

         # Delete some rows, vacuum it and make checkpoint
         master.safe_psql('postgres', 'delete from t_heap where id%2 = 1')