mirror of https://github.com/postgrespro/pg_probackup.git synced 2025-02-09 14:33:17 +02:00

tests: pgdata_compare block-level granularity

Grigory Smolkin 2017-12-28 11:52:18 +03:00
parent 41de50c385
commit 0f85328399
2 changed files with 72 additions and 26 deletions
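
Note: pgdata_content() now records an MD5 per 8192-byte block for relation data files (files whose names consist only of digits), alongside the whole-file MD5, and compare_pgdata() reports mismatches per page as well as files that exist only in the restored cluster. A rough sketch of the resulting structure, inferred from the diff below; paths and hash values are placeholders, not repository output:

# Illustrative only: approximate shape of the dict built by pgdata_content()
# after this commit. Keys and hashes below are made-up placeholders.
example_pgdata_content = {
    'pgdata': '/path/to/node/data',
    'files': {
        # relation forks have all-digit file names, so they also get per-page hashes
        'base/16384/16385': {
            'is_datafile': True,
            'md5': '<md5 of the whole file>',
            'md5_per_page': {0: '<md5 of page 0>', 1: '<md5 of page 1>'},
        },
        # everything else keeps only the whole-file hash
        'PG_VERSION': {'is_datafile': False, 'md5': '<md5 of the whole file>'},
    },
}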


@@ -476,10 +476,10 @@ class ProbackupTest(object):
             raise ProbackupException(e.output.decode("utf-8"), self.cmd)
 
     def run_binary(self, command, async=False):
+        if self.verbose:
+            print([' '.join(map(str, command))])
         try:
             if async:
-                if self.verbose:
-                    print(command)
                 return subprocess.Popen(
                     command,
                     stdin=subprocess.PIPE,
@@ -488,8 +488,6 @@ class ProbackupTest(object):
                     env=self.test_env
                 )
             else:
-                if self.verbose:
-                    print(command)
                 self.output = subprocess.check_output(
                     command,
                     stderr=subprocess.STDOUT,
@@ -859,7 +857,7 @@ class ProbackupTest(object):
         ]
         suffixes_to_ignore = (
             '_ptrack', 'ptrack_control',
-            'pg_control', 'ptrack_init'
+            'pg_control', 'ptrack_init', 'backup_label'
         )
         directory_dict = {}
         directory_dict['pgdata'] = directory
@@ -867,38 +865,84 @@ class ProbackupTest(object):
         for root, dirs, files in os.walk(directory, followlinks=True):
             dirs[:] = [d for d in dirs if d not in dirs_to_ignore]
             for file in files:
-                if file in files_to_ignore or file.endswith(
-                    suffixes_to_ignore
+                if (
+                    file in files_to_ignore or
+                    file.endswith(suffixes_to_ignore)
                 ):
                     continue
-                file = os.path.join(root, file)
-                file_relpath = os.path.relpath(file, directory)
-                directory_dict['files'][file_relpath] = hashlib.md5(
-                    open(file, 'rb').read()).hexdigest()
+
+                file_fullpath = os.path.join(root, file)
+                file_relpath = os.path.relpath(file_fullpath, directory)
+                directory_dict['files'][file_relpath] = {'is_datafile': False}
+                directory_dict['files'][file_relpath]['md5'] = hashlib.md5(
+                    open(file_fullpath, 'rb').read()).hexdigest()
+                if file.isdigit():
+                    directory_dict['files'][file_relpath]['is_datafile'] = True
+                    size_in_pages = os.path.getsize(file_fullpath)/8192
+                    directory_dict['files'][file_relpath][
+                        'md5_per_page'] = self.get_md5_per_page_for_fork(
+                            file_fullpath, size_in_pages
+                        )
         return directory_dict
 
     def compare_pgdata(self, original_pgdata, restored_pgdata):
         """ return dict with directory content. DO IT BEFORE RECOVERY"""
         fail = False
         error_message = ''
+
+        for file in restored_pgdata['files']:
+            # File is present in RESTORED PGDATA
+            # but not present in ORIGINAL
+            if (
+                file not in original_pgdata['files'] and
+                not file.endswith('backup_label')
+            ):
+                fail = True
+                error_message += 'File is not present'
+                error_message += ' in original PGDATA:\n {0}'.format(
+                    os.path.join(restored_pgdata['pgdata'], file)
+                )
+
         for file in original_pgdata['files']:
             if file in restored_pgdata['files']:
                 if (
-                    original_pgdata['files'][file] !=
-                    restored_pgdata['files'][file]
+                    original_pgdata['files'][file]['md5'] !=
+                    restored_pgdata['files'][file]['md5']
                 ):
-                    error_message += '\nChecksumm mismatch.\n'
-                    ' File_old: {0}\n Checksumm_old: {1}\n'
-                    ' File_new: {2}\n Checksumm_new: {3}\n'.format(
+                    error_message += (
+                        '\nChecksumm mismatch.\n'
+                        ' File_old: {0}\n Checksumm_old: {1}\n'
+                        ' File_new: {2}\n Checksumm_new: {3}\n').format(
                         os.path.join(original_pgdata['pgdata'], file),
-                        original_pgdata['files'][file],
+                        original_pgdata['files'][file]['md5'],
                         os.path.join(restored_pgdata['pgdata'], file),
-                        restored_pgdata['files'][file]
+                        restored_pgdata['files'][file]['md5']
                     )
                     fail = True
+
+                if original_pgdata['files'][file]['is_datafile']:
+                    for page in original_pgdata['files'][
+                            file]['md5_per_page']:
+                        if original_pgdata['files'][file][
+                                'md5_per_page'][page] != restored_pgdata[
+                                'files'][file]['md5_per_page'][page]:
+                            error_message += (
+                                'PAGE: {0}\n'
+                                ' PAGE Checksumm_old: {1}\n'
+                                ' PAGE Checksumm_new: {2}\n'
+                            ).format(
+                                page,
+                                original_pgdata['files'][file][
+                                    'md5_per_page'][page],
+                                restored_pgdata['files'][file][
+                                    'md5_per_page'][page])
             else:
-                error_message += '\nFile dissappearance.'
-                ' File: {0}/{1}'.format(restored_pgdata['pgdata'], file)
+                error_message += (
+                    '\nFile dissappearance.'
+                    ' File: {0}').format(
+                    os.path.join(restored_pgdata['pgdata'], file)
+                )
                 fail = True
         self.assertFalse(fail, error_message)
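
Note: the per-page hashes come from self.get_md5_per_page_for_fork(), which the hunk above calls but does not include. A minimal sketch of what such a helper could look like, assuming it simply reads the fork in 8192-byte pages and hashes each one; this is an illustration, not the repository's implementation, and it is written as a plain function rather than a method to keep it self-contained:

import hashlib


def get_md5_per_page_for_fork(file_fullpath, size_in_pages):
    """Sketch: map page number -> MD5 of that 8192-byte page of a relation fork."""
    page_size = 8192
    md5_per_page = {}
    with open(file_fullpath, 'rb') as f:
        # size_in_pages comes from os.path.getsize(...)/8192 in the caller and
        # may be a float under true division, so cast before ranging over it.
        for page in range(int(size_in_pages)):
            data = f.read(page_size)
            if not data:
                break
            md5_per_page[page] = hashlib.md5(data).hexdigest()
    return md5_per_page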


@@ -175,9 +175,6 @@ class PtrackBackupTest(ProbackupTest, unittest.TestCase):
         node.safe_psql(
             "postgres",
             "update t_heap set id = 100500")
-        # print(node.safe_psql(
-        #     "postgres",
-        #     "select * from t_heap"))
 
         if not gdb.continue_execution():
             self.assertTrue(
@@ -189,16 +186,21 @@ class PtrackBackupTest(ProbackupTest, unittest.TestCase):
             backup_dir, 'node', node,
             backup_type='ptrack', options=['--stream']
         )
+        pgdata = self.pgdata_content(node.data_dir)
         result = node.safe_psql("postgres", "SELECT * FROM t_heap")
         node.cleanup()
 
         self.restore_node(backup_dir, 'node', node, options=["-j", "4"])
+        pgdata_restored = self.pgdata_content(node.data_dir)
         node.start()
 
+        # Logical comparison
         self.assertEqual(
            result,
            node.safe_psql("postgres", "SELECT * FROM t_heap")
         )
+        # Physical comparison
+        self.compare_pgdata(pgdata, pgdata_restored)
 
         # Clean after yourself
         # self.del_test_dir(module_name, fname)
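
Note: the ordering in the test above matters. The restored snapshot is taken after restore_node() but before node.start(), matching the "DO IT BEFORE RECOVERY" remark in the helper's docstring, so recovery cannot rewrite pages before the comparison. Condensed pattern, with node setup and the backup call omitted:

# Condensed from the test above; setup and backup steps omitted.
pgdata = self.pgdata_content(node.data_dir)            # content of the original cluster, right after the backup
node.cleanup()
self.restore_node(backup_dir, 'node', node, options=["-j", "4"])
pgdata_restored = self.pgdata_content(node.data_dir)   # captured before recovery runs
node.start()
self.compare_pgdata(pgdata, pgdata_restored)           # block-level physical comparison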