pg_probackup (mirror of https://github.com/postgrespro/pg_probackup.git)
tests: better directory collection in pgdata_content
There is no need to walk pgdata twice. We could delete parent directories instead of skipping them.
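Below is a minimal, standalone sketch of that idea, not the helper's actual code: a plain dictionary stands in for the real pgdata_content structure, and the function name collect_leaf_dirs plus the temporary tree are illustrative only. Because the default os.walk order is top-down, every parent directory is visited before its children, so deleting the parent entry as soon as one of its children is recorded leaves only the deepest directories in the result.

# Minimal sketch of the single-walk, delete-parent approach (illustrative only).
import os
import tempfile


def collect_leaf_dirs(pgdata):
    dirs_collected = {}
    # Default os.walk order is top-down, so every parent is visited before
    # its children; that is what makes the delete-parent trick sufficient.
    for root, dirs, files in os.walk(pgdata, followlinks=True):
        for directory in sorted(dirs):
            directory_relpath = os.path.relpath(
                os.path.join(root, directory), pgdata)
            parent = os.path.dirname(directory_relpath)
            # The parent was recorded on an earlier iteration; drop it now
            # that we know it is not a leaf.
            if parent in dirs_collected:
                del dirs_collected[parent]
            dirs_collected[directory_relpath] = {}
    return dirs_collected


if __name__ == '__main__':
    with tempfile.TemporaryDirectory() as tmp:
        os.makedirs(os.path.join(tmp, 'base', '1'))
        os.makedirs(os.path.join(tmp, 'base', '2'))
        os.makedirs(os.path.join(tmp, 'global'))
        # Only the leaves survive: the intermediate 'base' entry is deleted
        # when 'base/1' is recorded.
        print(sorted(collect_leaf_dirs(tmp)))   # ['base/1', 'base/2', 'global']

Compared with the old code, this replaces the substring scan over already-collected directories with a single dictionary lookup and deletion per directory, and it removes the need for a second walk over pgdata.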
@@ -1803,27 +1803,13 @@ class ProbackupTest(object):
                                 file_fullpath, size_in_pages
                             )
 
-        for root, dirs, files in os.walk(pgdata, topdown=False, followlinks=True):
-            for directory in dirs:
-                directory_path = os.path.join(root, directory)
-                directory_relpath = os.path.relpath(directory_path, pgdata)
-
-                found = False
-                for d in dirs_to_ignore:
-                    if d in directory_relpath:
-                        found = True
-                        break
-
-                # check if directory already here as part of larger directory
-                if not found:
-                    for d in directory_dict['dirs']:
-                        # print("OLD dir {0}".format(d))
-                        if directory_relpath in d:
-                            found = True
-                            break
-
-                if not found:
-                    directory_dict['dirs'][directory_relpath] = {}
+            for directory in sorted(dirs):
+                directory_path = os.path.join(root, directory)
+                directory_relpath = os.path.relpath(directory_path, pgdata)
+                parent = os.path.dirname(directory_relpath)
+                if parent in directory_dict['dirs']:
+                    del directory_dict['dirs'][parent]
+                directory_dict['dirs'][directory_relpath] = {}
 
         # get permissions for every file and directory
        for file in directory_dict['dirs']: