1
0
mirror of https://github.com/pgbackrest/pgbackrest.git synced 2025-03-03 14:52:21 +02:00

Add archive-get command multi-repo support.

Repositories will be searched in order for the requested archive file.

Errors will be reported as warnings as long as a valid copy of the archive file is found.
This commit is contained in:
David Steele 2021-02-23 15:34:28 -05:00
parent e28f6f11e9
commit bec3e20b2c
21 changed files with 1247 additions and 346 deletions

View File

@ -18,7 +18,7 @@ freebsd_12_task:
install_script: pkg install -y git postgresql-libpqxx pkgconf libxml2 gmake perl5 p5-YAML rsync
script:
- perl ${CIRRUS_WORKING_DIR}/test/test.pl --no-gen --make-cmd=gmake --vm=none --vm-max=2 --no-coverage --no-valgrind --module=command --test=backup --test=archive-get --test=archive-push
- perl ${CIRRUS_WORKING_DIR}/test/test.pl --no-gen --make-cmd=gmake --vm=none --vm-max=2 --no-coverage --no-valgrind --module=command --test=backup --test=archive-push
debug_script:
- ls -lah ${CIRRUS_WORKING_DIR}
@ -39,7 +39,7 @@ macos_catalina_task:
- cpanm --local-lib=/usr/local/opt/perl5 install YAML
script:
- ${CIRRUS_WORKING_DIR}/test/test.pl --no-gen --vm=none --vm-max=2 --no-coverage --no-valgrind --module=command --test=backup --test=archive-get --test=archive-push
- ${CIRRUS_WORKING_DIR}/test/test.pl --no-gen --vm=none --vm-max=2 --no-coverage --no-valgrind --module=command --test=backup --test=archive-push
debug_script:
- ls -lah ${CIRRUS_WORKING_DIR}

View File

@ -97,6 +97,9 @@ service: 101
# An error while attempting to execute a binary
execute: 102
# No valid repository could be found
repo-invalid: 103
# The command encountered one or more errors
command: 104

View File

@ -1273,7 +1273,7 @@
<command id="archive-get" name="Archive Get">
<summary>Get a WAL segment from the archive.</summary>
<text>WAL segments are required for restoring a <postgres/> cluster or maintaining a replica.</text>
<text>WAL segments are required for <postgres/> recovery or to maintain a replica.</text>
<command-example-list>
<command-example>

View File

@ -40,9 +40,11 @@
<release-item>
<commit subject="Enhance expire command multi-repo support."/>
<commit subject="Expire continues if an error occurs processing a repository."/>
<commit subject="Add archive-get command multi-repo support."/>
<release-item-contributor-list>
<release-item-contributor id="cynthia.shang"/>
<release-item-contributor id="david.steele"/>
</release-item-contributor-list>
<p>Partial multi-repository implementation.</p>

View File

@ -17,58 +17,99 @@ Archive Get File
#include "storage/helper.h"
/**********************************************************************************************************************************/
void
archiveGetFile(
const Storage *storage, const String *archiveFile, const String *walDestination, bool durable, CipherType cipherType,
const String *cipherPassArchive)
ArchiveGetFileResult archiveGetFile(
const Storage *storage, const String *request, const List *actualList, const String *walDestination)
{
FUNCTION_LOG_BEGIN(logLevelDebug);
FUNCTION_LOG_PARAM(STORAGE, storage);
FUNCTION_LOG_PARAM(STRING, archiveFile);
FUNCTION_LOG_PARAM(STRING, request);
FUNCTION_LOG_PARAM(LIST, actualList);
FUNCTION_LOG_PARAM(STRING, walDestination);
FUNCTION_LOG_PARAM(BOOL, durable);
FUNCTION_LOG_PARAM(ENUM, cipherType);
FUNCTION_TEST_PARAM(STRING, cipherPassArchive);
FUNCTION_LOG_END();
ASSERT(archiveFile != NULL);
ASSERT(request != NULL);
ASSERT(actualList != NULL && !lstEmpty(actualList));
ASSERT(walDestination != NULL);
// Is the file compressible during the copy?
bool compressible = true;
ArchiveGetFileResult result = {.warnList = strLstNew()};
// Test for stop file
lockStopTest();
MEM_CONTEXT_TEMP_BEGIN()
// Check all files in the actual list and return as soon as one is copied
bool copied = false;
for (unsigned int actualIdx = 0; actualIdx < lstSize(actualList); actualIdx++)
{
StorageWrite *destination = storageNewWriteP(
storage, walDestination, .noCreatePath = true, .noSyncFile = !durable, .noSyncPath = !durable, .noAtomic = !durable);
ArchiveGetFile *actual = lstGet(actualList, actualIdx);
// If there is a cipher then add the decrypt filter
if (cipherType != cipherTypeNone)
// Is the file compressible during the copy?
bool compressible = true;
TRY_BEGIN()
{
ioFilterGroupAdd(
ioWriteFilterGroup(storageWriteIo(destination)),
cipherBlockNew(cipherModeDecrypt, cipherType, BUFSTR(cipherPassArchive), NULL));
compressible = false;
MEM_CONTEXT_TEMP_BEGIN()
{
StorageWrite *destination = storageNewWriteP(
storage, walDestination, .noCreatePath = true, .noSyncFile = true, .noSyncPath = true, .noAtomic = true);
// If there is a cipher then add the decrypt filter
if (actual->cipherType != cipherTypeNone)
{
ioFilterGroupAdd(
ioWriteFilterGroup(storageWriteIo(destination)),
cipherBlockNew(cipherModeDecrypt, actual->cipherType, BUFSTR(actual->cipherPassArchive), NULL));
compressible = false;
}
// If file is compressed then add the decompression filter
CompressType compressType = compressTypeFromName(actual->file);
if (compressType != compressTypeNone)
{
ioFilterGroupAdd(ioWriteFilterGroup(storageWriteIo(destination)), decompressFilter(compressType));
compressible = false;
}
// Copy the file
storageCopyP(
storageNewReadP(
storageRepoIdx(actual->repoIdx), strNewFmt(STORAGE_REPO_ARCHIVE "/%s", strZ(actual->file)),
.compressible = compressible),
destination);
}
MEM_CONTEXT_TEMP_END();
// File was successfully copied
result.actualIdx = actualIdx;
copied = true;
}
// If file is compressed then add the decompression filter
CompressType compressType = compressTypeFromName(archiveFile);
if (compressType != compressTypeNone)
// Log errors as warnings and continue
CATCH_ANY()
{
ioFilterGroupAdd(ioWriteFilterGroup(storageWriteIo(destination)), decompressFilter(compressType));
compressible = false;
MEM_CONTEXT_PRIOR_BEGIN()
{
strLstAdd(
result.warnList,
strNewFmt(
"repo%u: %s [%s] %s", cfgOptionGroupIdxToKey(cfgOptGrpRepo, actual->repoIdx), strZ(actual->file),
errorTypeName(errorType()), errorMessage()));
}
MEM_CONTEXT_PRIOR_END();
}
TRY_END();
// Copy the file
storageCopyP(
storageNewReadP(storageRepo(), strNewFmt(STORAGE_REPO_ARCHIVE "/%s", strZ(archiveFile)), .compressible = compressible),
destination);
// Stop on success
if (copied)
break;
}
MEM_CONTEXT_TEMP_END();
FUNCTION_LOG_RETURN_VOID();
// If no file was successfully copied then error
if (!copied)
{
ASSERT(!strLstEmpty(result.warnList));
THROW_FMT(FileReadError, "unable to get %s:\n%s", strZ(request), strZ(strLstJoin(result.warnList, "\n")));
}
FUNCTION_LOG_RETURN_STRUCT(result);
}

View File

@ -12,8 +12,22 @@ Archive Get File
Functions
***********************************************************************************************************************************/
// Copy a file from the archive to the specified destination
void archiveGetFile(
const Storage *storage, const String *archiveFile, const String *walDestination, bool durable, CipherType cipherType,
const String *cipherPassArchive);
typedef struct ArchiveGetFile
{
const String *file; // File in the repo (with path, checksum, ext, etc.)
unsigned int repoIdx; // Repo idx
const String *archiveId; // Repo archive id
CipherType cipherType; // Repo cipher type
const String *cipherPassArchive; // Repo archive cipher pass
} ArchiveGetFile;
typedef struct ArchiveGetFileResult
{
unsigned int actualIdx; // Index of the file from actual list that was retrieved
StringList *warnList; // Warnings from a successful operation
} ArchiveGetFileResult;
ArchiveGetFileResult archiveGetFile(
const Storage *storage, const String *request, const List *actualList, const String *walDestination);
#endif

View File

@ -25,14 +25,16 @@ Archive Get Command
#include "protocol/helper.h"
#include "protocol/parallel.h"
#include "storage/helper.h"
#include "storage/write.intern.h"
/***********************************************************************************************************************************
Constants for log messages that are used multiple times to keep them consistent
***********************************************************************************************************************************/
#define FOUND_IN_ARCHIVE_MSG "found %s in the archive"
#define FOUND_IN_REPO_ARCHIVE_MSG "found %s in the repo%u:%s archive"
#define FOUND_IN_REPO_ARCHIVE_MSG "found %s in the repo%u: %s archive"
#define UNABLE_TO_FIND_IN_ARCHIVE_MSG "unable to find %s in the archive"
#define COULD_NOT_GET_FROM_REPO_ARCHIVE_MSG "could not get %s from the repo%u:%s archive (will be retried):"
#define UNABLE_TO_FIND_VALID_REPO_MSG "unable to find a valid repository"
#define REPO_INVALID_OR_ERR_MSG "some repositories were invalid or encountered errors"
/***********************************************************************************************************************************
Check for a list of archive files in the repository
@ -40,154 +42,315 @@ Check for a list of archive files in the repository
typedef struct ArchiveFileMap
{
const String *request; // Archive file requested by archive_command
const String *actual; // Actual file in the repo (with path, checksum, ext, etc.)
List *actualList; // Actual files in various repos/archiveIds
StringList *warnList; // Warnings that need to be reported by the async process
} ArchiveFileMap;
typedef struct ArchiveGetCheckResult
{
List *archiveFileMapList; // List of mapped archive files, i.e. found in the repo
const String *archiveId; // Repo archive id
CipherType cipherType; // Repo cipher type
const String *cipherPassArchive; // Repo archive cipher pass
// Global error that affects all repos
const ErrorType *errorType; // Error type if there was an error
const String *errorFile; // Error file if there was an error
const String *errorMessage; // Error message if there was an error
const StringList *warnList; // Warnings that need to be reported by the async process
} ArchiveGetCheckResult;
// Helper to find a single archive file in the repository using a cache to speed up the process and minimize storageListP() calls
typedef struct ArchiveGetFindCache
// Helper to add an error to an error list and warn if the error is not already in the list
static void
archiveGetErrorAdd(StringList *warnList, bool log, unsigned int repoIdx, const ErrorType *type, const String *message)
{
const String *path;
const StringList *fileList;
} ArchiveGetFindCache;
const String *warn = strNewFmt(
"repo%u: [%s] %s", cfgOptionGroupIdxToKey(cfgOptGrpRepo, repoIdx), errorTypeName(type), strZ(message));
if (!strLstExists(warnList, warn))
{
if (log)
LOG_WARN(strZ(warn));
strLstAdd(warnList, warn);
}
}
// Helper to find a single archive file in the repository using a cache to speed up the process and minimize storageListP() calls
typedef struct ArchiveGetFindCachePath
{
const String *path; // Cached path in the archiveId
const StringList *fileList; // List of files in the cache path
} ArchiveGetFindCachePath;
typedef struct ArchiveGetFindCacheArchive
{
const String *archiveId; // ArchiveId in the repo
List *pathList; // List of paths cached for archiveId
} ArchiveGetFindCacheArchive;
typedef struct ArchiveGetFindCacheRepo
{
unsigned int repoIdx;
CipherType cipherType; // Repo cipher type
const String *cipherPassArchive; // Repo archive cipher pass
List *archiveList; // Cached list of archiveIds and associated paths
StringList *warnList; // Track repo warnings so each is only reported once
} ArchiveGetFindCacheRepo;
static bool
archiveGetFind(
const String *archiveFileRequest, const String *archiveId, ArchiveGetCheckResult *getCheckResult, List *cache, bool single)
const String *archiveFileRequest, ArchiveGetCheckResult *getCheckResult, List *cacheRepoList, const StringList *warnList,
bool single)
{
FUNCTION_LOG_BEGIN(logLevelDebug);
FUNCTION_LOG_PARAM(STRING, archiveFileRequest);
FUNCTION_LOG_PARAM(STRING, archiveId);
FUNCTION_LOG_PARAM_P(VOID, getCheckResult);
FUNCTION_LOG_PARAM(LIST, cache);
FUNCTION_LOG_PARAM(LIST, cacheRepoList);
FUNCTION_LOG_PARAM(STRING_LIST, warnList);
FUNCTION_LOG_PARAM(BOOL, single);
FUNCTION_LOG_END();
ASSERT(archiveFileRequest != NULL);
ASSERT(archiveId != NULL);
ASSERT(getCheckResult != NULL);
ASSERT(cache != NULL);
ASSERT(cacheRepoList != NULL);
ArchiveFileMap archiveFileMap = {0};
bool result = false;
MEM_CONTEXT_TEMP_BEGIN()
{
// If a WAL segment search among the possible file names
if (walIsSegment(archiveFileRequest))
// Is the archive file a WAL segment?
bool isSegment = walIsSegment(archiveFileRequest);
// Get the WAL segment path
const String *path = isSegment ? strSubN(archiveFileRequest, 0, 16) : NULL;
// List to hold matches for the requested file
List *matchList = lstNewP(sizeof(ArchiveGetFile));
// List of file level warnings
StringList *fileWarnList = strLstDup(warnList);
// Errored repo total to track if all repos errored
unsigned int repoErrorTotal = 0;
// Check each repo
for (unsigned int repoCacheIdx = 0; repoCacheIdx < lstSize(cacheRepoList); repoCacheIdx++)
{
// Get the path
const String *path = strSubN(archiveFileRequest, 0, 16);
ArchiveGetFindCacheRepo *cacheRepo = lstGet(cacheRepoList, repoCacheIdx);
// List to hold matches for the requested file
StringList *matchList = NULL;
// If a single file is requested then optimize by adding a more restrictive expression to reduce network bandwidth
if (single)
TRY_BEGIN()
{
matchList = storageListP(
storageRepo(), strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(archiveId), strZ(path)),
.expression = strNewFmt(
"^%s%s-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(strSubN(archiveFileRequest, 0, 24)),
walIsPartial(archiveFileRequest) ? WAL_SEGMENT_PARTIAL_EXT : ""));
}
// Else multiple files will be requested so cache list results
else
{
// Partial files cannot be in a list with multiple requests
ASSERT(!walIsPartial(archiveFileRequest));
// If the path does not exist in the cache then fetch it
const ArchiveGetFindCache *cachePath = lstFind(cache, &path);
if (cachePath == NULL)
// Check each archiveId
for (unsigned int archiveCacheIdx = 0; archiveCacheIdx < lstSize(cacheRepo->archiveList); archiveCacheIdx++)
{
MEM_CONTEXT_BEGIN(lstMemContext(cache))
ArchiveGetFindCacheArchive *cacheArchive = lstGet(cacheRepo->archiveList, archiveCacheIdx);
// If a WAL segment then search among the possible file names
if (isSegment)
{
cachePath = lstAdd(
cache,
&(ArchiveGetFindCache)
StringList *segmentList = NULL;
// If a single file is requested then optimize by adding a restrictive expression to reduce bandwidth
if (single)
{
segmentList = storageListP(
storageRepoIdx(cacheRepo->repoIdx),
strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(cacheArchive->archiveId), strZ(path)),
.expression = strNewFmt(
"^%s%s-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(strSubN(archiveFileRequest, 0, 24)),
walIsPartial(archiveFileRequest) ? WAL_SEGMENT_PARTIAL_EXT : ""));
}
// Else multiple files will be requested so cache list results
else
{
// Partial files cannot be in a list with multiple requests
ASSERT(!walIsPartial(archiveFileRequest));
// If the path does not exist in the cache then fetch it
const ArchiveGetFindCachePath *cachePath = lstFind(cacheArchive->pathList, &path);
if (cachePath == NULL)
{
.path = strDup(path),
.fileList = storageListP(
storageRepo(), strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(archiveId), strZ(path)),
.expression = strNewFmt("^%s[0-F]{8}-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(path))),
});
MEM_CONTEXT_BEGIN(lstMemContext(cacheArchive->pathList))
{
cachePath = lstAdd(
cacheArchive->pathList,
&(ArchiveGetFindCachePath)
{
.path = strDup(path),
.fileList = storageListP(
storageRepoIdx(cacheRepo->repoIdx),
strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(cacheArchive->archiveId), strZ(path)),
.expression = strNewFmt(
"^%s[0-F]{8}-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(path))),
});
}
MEM_CONTEXT_END();
}
// Get a list of all WAL segments that match
segmentList = strLstNew();
for (unsigned int fileIdx = 0; fileIdx < strLstSize(cachePath->fileList); fileIdx++)
{
if (strBeginsWith(strLstGet(cachePath->fileList, fileIdx), archiveFileRequest))
strLstAdd(segmentList, strLstGet(cachePath->fileList, fileIdx));
}
}
// Add segments to match list
for (unsigned int segmentIdx = 0; segmentIdx < strLstSize(segmentList); segmentIdx++)
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
lstAdd(
matchList,
&(ArchiveGetFile)
{
.file = strNewFmt(
"%s/%s/%s", strZ(cacheArchive->archiveId), strZ(path),
strZ(strLstGet(segmentList, segmentIdx))),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
});
}
MEM_CONTEXT_END();
}
}
// Else if not a WAL segment, see if it exists in the archiveId path
else if (storageExistsP(
storageRepoIdx(cacheRepo->repoIdx), strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(cacheArchive->archiveId),
strZ(archiveFileRequest))))
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
lstAdd(
matchList,
&(ArchiveGetFile)
{
.file = strNewFmt("%s/%s", strZ(cacheArchive->archiveId), strZ(archiveFileRequest)),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
});
}
MEM_CONTEXT_END();
}
}
}
// Log errors as warnings and continue
CATCH_ANY()
{
repoErrorTotal++;
archiveGetErrorAdd(cacheRepo->warnList, true, cacheRepo->repoIdx, errorType(), STR(errorMessage()));
archiveGetErrorAdd(fileWarnList, false, cacheRepo->repoIdx, errorType(), STR(errorMessage()));
}
TRY_END();
}
// If all repos errored out then set the global error since processing cannot continue past this segment
ASSERT(repoErrorTotal <= lstSize(cacheRepoList));
if (repoErrorTotal == lstSize(cacheRepoList))
{
ASSERT(!strLstEmpty(fileWarnList));
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
getCheckResult->errorType = &RepoInvalidError;
getCheckResult->errorFile = strDup(archiveFileRequest);
getCheckResult->errorMessage = strNewFmt(UNABLE_TO_FIND_VALID_REPO_MSG);
getCheckResult->warnList = strLstMove(fileWarnList, memContextCurrent());
}
MEM_CONTEXT_END();
}
// Else if a file was found
else if (!lstEmpty(matchList))
{
bool error = false;
// If a segment match list is > 1 then check for duplicates
if (isSegment && lstSize(matchList) > 1)
{
// Count the number of unique hashes
StringList *hashList = strLstNew();
for (unsigned int matchIdx = 0; matchIdx < lstSize(matchList); matchIdx++)
strLstAddIfMissing(hashList, strSubN(((ArchiveGetFile *)lstGet(matchList, matchIdx))->file, 25, 40));
// If there is more than one unique hash then there are duplicates
if (strLstSize(hashList) > 1)
{
// Build list of duplicates
unsigned int repoKeyLast = 0;
String *message = strNew("");
bool first = true;
for (unsigned int matchIdx = 0; matchIdx < lstSize(matchList); matchIdx++)
{
ArchiveGetFile *file = lstGet(matchList, matchIdx);
unsigned int repoKey = cfgOptionGroupIdxToKey(cfgOptGrpRepo, file->repoIdx);
if (repoKey != repoKeyLast)
{
strCatFmt(message, "\nrepo%u:", repoKey);
repoKeyLast = repoKey;
first = true;
}
if (first)
first = false;
else
strCatChr(message, ',');
strCatFmt(message, " %s", strZ(file->file));
}
// Set as global error since processing cannot continue past this segment
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
getCheckResult->errorType = &ArchiveDuplicateError;
getCheckResult->errorFile = strDup(archiveFileRequest);
getCheckResult->errorMessage = strNewFmt(
"duplicates found for WAL segment %s:%s\n"
"HINT: are multiple primaries archiving to this stanza?",
strZ(archiveFileRequest), strZ(message));
getCheckResult->warnList = strLstMove(fileWarnList, memContextCurrent());
}
MEM_CONTEXT_END();
}
// Get a list of all WAL segments that match
matchList = strLstNew();
for (unsigned int fileIdx = 0; fileIdx < strLstSize(cachePath->fileList); fileIdx++)
{
if (strBeginsWith(strLstGet(cachePath->fileList, fileIdx), archiveFileRequest))
strLstAdd(matchList, strLstGet(cachePath->fileList, fileIdx));
error = true;
}
}
// If there is a single result then return it
if (strLstSize(matchList) == 1)
// Files are valid so add them to the map
if (!error)
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
archiveFileMap.actual = strNewFmt(
"%s/%s/%s", strZ(archiveId), strZ(path), strZ(strLstGet(matchList, 0)));
ArchiveFileMap map =
{
.request = strDup(archiveFileRequest),
.actualList = lstNewP(sizeof(ArchiveGetFile)),
.warnList = strLstMove(fileWarnList, memContextCurrent()),
};
for (unsigned int matchIdx = 0; matchIdx < lstSize(matchList); matchIdx++)
lstAdd(map.actualList, lstGet(matchList, matchIdx));
lstAdd(getCheckResult->archiveFileMapList, &map);
}
MEM_CONTEXT_END();
}
// Else error if there are multiple results
else if (strLstSize(matchList) > 1)
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
getCheckResult->errorType = &ArchiveDuplicateError;
getCheckResult->errorFile = strDup(archiveFileRequest);
getCheckResult->errorMessage = strNewFmt(
"duplicates found in the repo%u:%s archive for WAL segment %s: %s\n"
"HINT: are multiple primaries archiving to this stanza?",
cfgOptionGroupIdxToKey(cfgOptGrpRepo, cfgOptionGroupIdxDefault(cfgOptGrpRepo)), strZ(archiveId),
strZ(archiveFileRequest), strZ(strLstJoin(strLstSort(matchList, sortOrderAsc), ", ")));
}
MEM_CONTEXT_END();
}
}
// Else if not a WAL segment, see if it exists in the archive dir
else if (storageExistsP(storageRepo(), strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(archiveId), strZ(archiveFileRequest))))
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
archiveFileMap.actual = strNewFmt("%s/%s", strZ(archiveId), strZ(archiveFileRequest));
}
MEM_CONTEXT_END();
}
if (archiveFileMap.actual != NULL)
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
archiveFileMap.request = strDup(archiveFileRequest);
result = true;
}
MEM_CONTEXT_END();
lstAdd(getCheckResult->archiveFileMapList, &archiveFileMap);
}
}
MEM_CONTEXT_TEMP_END();
FUNCTION_LOG_RETURN(BOOL, archiveFileMap.actual != NULL);
FUNCTION_LOG_RETURN(BOOL, result);
}
static ArchiveGetCheckResult
@ -200,74 +363,151 @@ archiveGetCheck(const StringList *archiveRequestList)
ASSERT(archiveRequestList != NULL);
ASSERT(!strLstEmpty(archiveRequestList));
ArchiveGetCheckResult result = {.archiveFileMapList = lstNewP(sizeof(ArchiveFileMap))};
ArchiveGetCheckResult result = {.archiveFileMapList = lstNewP(sizeof(ArchiveFileMap), .comparator = lstComparatorStr)};
MEM_CONTEXT_TEMP_BEGIN()
{
// List of warnings
StringList *warnList = strLstNew();
// Get pg control info
PgControl controlInfo = pgControlFromFile(storagePg());
// Get the repo storage in case it is remote and encryption settings need to be pulled down
storageRepo();
// Build list of repos/archiveIds where WAL may be found
List *cacheRepoList = lstNewP(sizeof(ArchiveGetFindCacheRepo));
result.cipherType = cipherType(cfgOptionStr(cfgOptRepoCipherType));
// Attempt to load the archive info file
InfoArchive *info = infoArchiveLoadFile(
storageRepo(), INFO_ARCHIVE_PATH_FILE_STR, result.cipherType, cfgOptionStrNull(cfgOptRepoCipherPass));
// Loop through the pg history and determine which archiveId to use based on the first file in the list
bool found = false;
const String *archiveId = NULL;
List *cache = NULL;
for (unsigned int pgIdx = 0; pgIdx < infoPgDataTotal(infoArchivePg(info)); pgIdx++)
for (unsigned int repoIdx = 0; repoIdx < cfgOptionGroupIdxTotal(cfgOptGrpRepo); repoIdx++)
{
InfoPgData pgData = infoPgData(infoArchivePg(info), pgIdx);
// If a repo was specified then skip all other repos
if (cfgOptionTest(cfgOptRepo) && cfgOptionUInt(cfgOptRepo) != cfgOptionGroupIdxToKey(cfgOptGrpRepo, repoIdx))
continue;
// Only use the archive id if it matches the current cluster
if (pgData.systemId == controlInfo.systemId && pgData.version == controlInfo.version)
TRY_BEGIN()
{
archiveId = infoPgArchiveId(infoArchivePg(info), pgIdx);
cache = lstNewP(sizeof(ArchiveGetFindCache), .comparator = lstComparatorStr);
// Get the repo storage in case it is remote and encryption settings need to be pulled down
storageRepoIdx(repoIdx);
found = archiveGetFind(
strLstGet(archiveRequestList, 0), archiveId, &result, cache, strLstSize(archiveRequestList) == 1);
ArchiveGetFindCacheRepo cacheRepo =
{
.repoIdx = repoIdx,
.cipherType = cipherType(cfgOptionIdxStr(cfgOptRepoCipherType, repoIdx)),
.archiveList = lstNewP(sizeof(ArchiveGetFindCacheArchive)),
.warnList = strLstNew(),
};
// If the file was found then use this archiveId for the rest of the files
if (found)
// Attempt to load the archive info file
InfoArchive *info = infoArchiveLoadFile(
storageRepoIdx(repoIdx), INFO_ARCHIVE_PATH_FILE_STR, cacheRepo.cipherType,
cfgOptionIdxStrNull(cfgOptRepoCipherPass, repoIdx));
// Copy cipher pass into the result list context once rather than making a copy per candidate file later
MEM_CONTEXT_BEGIN(lstMemContext(result.archiveFileMapList))
{
cacheRepo.cipherPassArchive = strDup(infoArchiveCipherPass(info));
}
MEM_CONTEXT_END();
// Loop through pg history and determine which archiveIds to use
StringList *archivePathList = NULL;
for (unsigned int pgIdx = 0; pgIdx < infoPgDataTotal(infoArchivePg(info)); pgIdx++)
{
InfoPgData pgData = infoPgData(infoArchivePg(info), pgIdx);
// Only use the archive id if it matches the current cluster
if (pgData.systemId == controlInfo.systemId && pgData.version == controlInfo.version)
{
const String *archiveId = infoPgArchiveId(infoArchivePg(info), pgIdx);
bool found = true;
// If the archiveId is in the past make sure the path exists
if (pgIdx != 0)
{
// Get list of archiveId paths in the archive path
if (archivePathList == NULL)
archivePathList = storageListP(storageRepoIdx(repoIdx), STORAGE_REPO_ARCHIVE_STR);
if (!strLstExists(archivePathList, archiveId))
found = false;
}
// If the archiveId is most recent or has files then add it
if (found)
{
ArchiveGetFindCacheArchive cacheArchive =
{
.pathList = lstNewP(sizeof(ArchiveGetFindCachePath), .comparator = lstComparatorStr),
};
// Copy archiveId into the result list context once rather than making a copy per candidate file later
MEM_CONTEXT_BEGIN(lstMemContext(result.archiveFileMapList))
{
cacheArchive.archiveId = strDup(archiveId);
}
MEM_CONTEXT_END();
lstAdd(cacheRepo.archiveList, &cacheArchive);
}
}
}
// Error if no archive id was found -- this indicates a mismatch with the current cluster
if (lstEmpty(cacheRepo.archiveList))
{
archiveGetErrorAdd(
warnList, true, repoIdx, &ArchiveMismatchError,
strNewFmt(
"unable to retrieve the archive id for database version '%s' and system-id '%" PRIu64 "'",
strZ(pgVersionToStr(controlInfo.version)), controlInfo.systemId));
}
// Else add repo to list
else
lstAdd(cacheRepoList, &cacheRepo);
}
// Log errors as warnings and continue
CATCH_ANY()
{
archiveGetErrorAdd(warnList, true, repoIdx, errorType(), STR(errorMessage()));
}
TRY_END();
}
// Error if there are no repos to check
if (lstEmpty(cacheRepoList))
{
ASSERT(!strLstEmpty(warnList));
// Set as global error since processing cannot continue past this segment
MEM_CONTEXT_BEGIN(lstMemContext(result.archiveFileMapList))
{
result.errorType = &RepoInvalidError;
result.errorMessage = strNew(UNABLE_TO_FIND_VALID_REPO_MSG);
result.warnList = strLstMove(warnList, memContextCurrent());
}
MEM_CONTEXT_END();
}
else
{
// Any remaining errors will be reported as warnings since at least one repo is valid
MEM_CONTEXT_BEGIN(lstMemContext(result.archiveFileMapList))
{
result.warnList = strLstMove(warnList, memContextCurrent());
}
MEM_CONTEXT_END();
// Find files in the list
for (unsigned int archiveRequestIdx = 0; archiveRequestIdx < strLstSize(archiveRequestList); archiveRequestIdx++)
{
if (!archiveGetFind(
strLstGet(archiveRequestList, archiveRequestIdx), &result, cacheRepoList, warnList,
strLstSize(archiveRequestList) == 1))
{
break;
}
}
}
// Error if no archive id was found -- this indicates a mismatch with the current cluster
if (archiveId == NULL)
{
THROW_FMT(
ArchiveMismatchError, "unable to retrieve the archive id for database version '%s' and system-id '%" PRIu64 "'",
strZ(pgVersionToStr(controlInfo.version)), controlInfo.systemId);
}
// Copy repo data to result if the first file was found or on error
if (found || result.errorType != NULL)
{
MEM_CONTEXT_PRIOR_BEGIN()
{
result.archiveId = strDup(archiveId);
result.cipherPassArchive = strDup(infoArchiveCipherPass(info));
}
MEM_CONTEXT_PRIOR_END();
}
// Continue only if the first file was found
if (found)
{
// Find the rest of the files in the list
for (unsigned int archiveRequestIdx = 1; archiveRequestIdx < strLstSize(archiveRequestList); archiveRequestIdx++)
{
if (!archiveGetFind(strLstGet(archiveRequestList, archiveRequestIdx), archiveId, &result, cache, false))
break;
}
// Sort the list to make searching for files faster
lstSort(result.archiveFileMapList, sortOrderAsc);
}
}
MEM_CONTEXT_TEMP_END();
@ -329,9 +569,14 @@ queueNeed(const String *walSegment, bool found, uint64_t queueSize, size_t walSe
{
strLstAdd(keepQueue, file);
}
// Else delete it
else
// Else delete if it does not match an ok file for a WAL segment that has already been preserved. If an ok file exists
// in addition to the segment then it contains warnings which need to be preserved.
else if (
!strEndsWithZ(file, STATUS_EXT_OK) ||
!strLstExists(actualQueue, strSubN(file, 0, strSize(file) - STATUS_EXT_OK_SIZE)))
{
storageRemoveP(storageSpoolWrite(), strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s", strZ(file)), .errorOnMissing = true);
}
}
// Generate a list of the WAL that are needed by removing kept WAL from the ideal queue
@ -396,18 +641,20 @@ cmdArchiveGet(void)
do
{
// Check for errors or missing files. For archive-get ok indicates that the process succeeded but there is no WAL
// file to download.
// Check if the WAL segment is already in the queue
found = storageExistsP(storageSpool(), strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s", strZ(walSegment)));
// Check for errors or missing files. For archive-get ok indicates that the process succeeded but there is no WAL
// file to download, or that there was a warning.
if (archiveAsyncStatus(archiveModeGet, walSegment, throwOnError))
{
storageRemoveP(
storageSpoolWrite(), strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s" STATUS_EXT_OK, strZ(walSegment)),
.errorOnMissing = true);
break;
}
// Check if the WAL segment is already in the queue
found = storageExistsP(storageSpool(), strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s", strZ(walSegment)));
if (!found)
break;
}
// If found then move the WAL segment to the destination directory
if (found)
@ -507,7 +754,6 @@ cmdArchiveGet(void)
// Log that the file was not found
if (result == 1)
LOG_INFO_FMT(UNABLE_TO_FIND_IN_ARCHIVE_MSG " asynchronously", strZ(walSegment));
}
// Else perform synchronous get
else
@ -525,16 +771,25 @@ cmdArchiveGet(void)
// Get the archive file
if (!lstEmpty(checkResult.archiveFileMapList))
{
// There can only be one file mapping since only one file was requested
ASSERT(lstSize(checkResult.archiveFileMapList) == 1);
const ArchiveFileMap *fileMap = lstGet(checkResult.archiveFileMapList, 0);
archiveGetFile(
storageLocalWrite(), ((ArchiveFileMap *)lstGet(checkResult.archiveFileMapList, 0))->actual, walDestination,
false, checkResult.cipherType, checkResult.cipherPassArchive);
// Get the file
ArchiveGetFileResult fileResult = archiveGetFile(
storageLocalWrite(), fileMap->request, fileMap->actualList, walDestination);
// Output file warnings
for (unsigned int warnIdx = 0; warnIdx < strLstSize(fileResult.warnList); warnIdx++)
LOG_WARN(strZ(strLstGet(fileResult.warnList, warnIdx)));
// If there was no error then the file existed
ArchiveGetFile *file = lstGet(fileMap->actualList, fileResult.actualIdx);
ASSERT(file != NULL);
LOG_INFO_FMT(
FOUND_IN_REPO_ARCHIVE_MSG, strZ(walSegment),
cfgOptionGroupIdxToKey(cfgOptGrpRepo, cfgOptionGroupIdxDefault(cfgOptGrpRepo)), strZ(checkResult.archiveId));
FOUND_IN_REPO_ARCHIVE_MSG, strZ(walSegment), cfgOptionGroupIdxToKey(cfgOptGrpRepo, file->repoIdx),
strZ(file->archiveId));
result = 0;
}
@ -549,6 +804,12 @@ cmdArchiveGet(void)
}
/**********************************************************************************************************************************/
typedef struct ArchiveGetAsyncData
{
const List *const archiveFileMapList; // List of wal segments to process
unsigned int archiveFileIdx; // Current index in the list to be processed
} ArchiveGetAsyncData;
static ProtocolParallelJob *archiveGetAsyncCallback(void *data, unsigned int clientIdx)
{
FUNCTION_TEST_BEGIN();
@ -560,20 +821,29 @@ static ProtocolParallelJob *archiveGetAsyncCallback(void *data, unsigned int cli
(void)clientIdx;
// Get a new job if there are any left
ArchiveGetCheckResult *checkResult = data;
ArchiveGetAsyncData *jobData = data;
if (!lstEmpty(checkResult->archiveFileMapList))
if (jobData->archiveFileIdx < lstSize(jobData->archiveFileMapList))
{
const ArchiveFileMap archiveFileMap = *((ArchiveFileMap *)lstGet(checkResult->archiveFileMapList, 0));
lstRemoveIdx(checkResult->archiveFileMapList, 0);
const ArchiveFileMap *archiveFileMap = lstGet(jobData->archiveFileMapList, jobData->archiveFileIdx);
jobData->archiveFileIdx++;
ProtocolCommand *command = protocolCommandNew(PROTOCOL_COMMAND_ARCHIVE_GET_STR);
protocolCommandParamAdd(command, VARSTR(archiveFileMap.request));
protocolCommandParamAdd(command, VARSTR(archiveFileMap.actual));
protocolCommandParamAdd(command, VARUINT(checkResult->cipherType));
protocolCommandParamAdd(command, VARSTR(checkResult->cipherPassArchive));
protocolCommandParamAdd(command, VARSTR(archiveFileMap->request));
FUNCTION_TEST_RETURN(protocolParallelJobNew(VARSTR(archiveFileMap.request), command));
// Add actual files to get
for (unsigned int actualIdx = 0; actualIdx < lstSize(archiveFileMap->actualList); actualIdx++)
{
const ArchiveGetFile *actual = lstGet(archiveFileMap->actualList, actualIdx);
protocolCommandParamAdd(command, VARSTR(actual->file));
protocolCommandParamAdd(command, VARUINT(actual->repoIdx));
protocolCommandParamAdd(command, VARSTR(actual->archiveId));
protocolCommandParamAdd(command, VARUINT(actual->cipherType));
protocolCommandParamAdd(command, VARSTR(actual->cipherPassArchive));
}
FUNCTION_TEST_RETURN(protocolParallelJobNew(VARSTR(archiveFileMap->request), command));
}
FUNCTION_TEST_RETURN(NULL);
@ -614,8 +884,10 @@ cmdArchiveGetAsync(void)
if (!lstEmpty(checkResult.archiveFileMapList))
{
// Create the parallel executor
ArchiveGetAsyncData jobData = {.archiveFileMapList = checkResult.archiveFileMapList};
ProtocolParallel *parallelExec = protocolParallelNew(
cfgOptionUInt64(cfgOptProtocolTimeout) / 2, archiveGetAsyncCallback, &checkResult);
cfgOptionUInt64(cfgOptProtocolTimeout) / 2, archiveGetAsyncCallback, &jobData);
for (unsigned int processIdx = 1; processIdx <= cfgOptionUInt(cfgOptProcessMax); processIdx++)
protocolParallelClientAdd(parallelExec, protocolLocalGet(protocolStorageTypeRepo, 0, processIdx));
@ -627,33 +899,67 @@ cmdArchiveGetAsync(void)
for (unsigned int jobIdx = 0; jobIdx < completed; jobIdx++)
{
// Get the job and job key
// Get the job
ProtocolParallelJob *job = protocolParallelResult(parallelExec);
unsigned int processId = protocolParallelJobProcessId(job);
// Get wal segment name and archive file map
const String *walSegment = varStr(protocolParallelJobKey(job));
const ArchiveFileMap *fileMap = lstFind(checkResult.archiveFileMapList, &walSegment);
ASSERT(fileMap != NULL);
// Build warnings for status file
String *warning = strNew("");
if (!strLstEmpty(fileMap->warnList))
strCatFmt(warning, "%s", strZ(strLstJoin(fileMap->warnList, "\n")));
// The job was successful
if (protocolParallelJobErrorCode(job) == 0)
{
// Get the actual file retrieved
const VariantList *fileResult = varVarLst(protocolParallelJobResult(job));
ArchiveGetFile *file = lstGet(fileMap->actualList, varUIntForce(varLstGet(fileResult, 0)));
ASSERT(file != NULL);
// Output file warnings
StringList *fileWarnList = strLstNewVarLst(varVarLst(varLstGet(fileResult, 1)));
for (unsigned int warnIdx = 0; warnIdx < strLstSize(fileWarnList); warnIdx++)
LOG_WARN_PID(processId, strZ(strLstGet(fileWarnList, warnIdx)));
// Build file warnings for status file
if (!strLstEmpty(fileWarnList))
strCatFmt(warning, "%s%s", strSize(warning) == 0 ? "" : "\n", strZ(strLstJoin(fileWarnList, "\n")));
if (strSize(warning) != 0)
archiveAsyncStatusOkWrite(archiveModeGet, walSegment, warning);
LOG_DETAIL_PID_FMT(
processId,
FOUND_IN_REPO_ARCHIVE_MSG, strZ(walSegment),
cfgOptionGroupIdxToKey(cfgOptGrpRepo, cfgOptionGroupIdxDefault(cfgOptGrpRepo)),
strZ(checkResult.archiveId));
processId, FOUND_IN_REPO_ARCHIVE_MSG, strZ(walSegment),
cfgOptionGroupIdxToKey(cfgOptGrpRepo, file->repoIdx), strZ(file->archiveId));
// Rename temp WAL segment to actual name. This is done after the ok file is written so the ok file is
// guaranteed to exist before the foreground process finds the WAL segment.
storageMoveP(
storageSpoolWrite(),
storageNewReadP(
storageSpool(),
strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s." STORAGE_FILE_TEMP_EXT, strZ(walSegment))),
storageNewWriteP(storageSpoolWrite(), strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s", strZ(walSegment))));
}
// Else the job errored
else
{
LOG_WARN_PID_FMT(
processId,
COULD_NOT_GET_FROM_REPO_ARCHIVE_MSG " [%d] %s", strZ(walSegment),
cfgOptionGroupIdxToKey(cfgOptGrpRepo, cfgOptionGroupIdxDefault(cfgOptGrpRepo)),
strZ(checkResult.archiveId), protocolParallelJobErrorCode(job),
processId, "[%s] %s", errorTypeName(errorTypeFromCode(protocolParallelJobErrorCode(job))),
strZ(protocolParallelJobErrorMessage(job)));
archiveAsyncStatusErrorWrite(
archiveModeGet, walSegment, protocolParallelJobErrorCode(job),
protocolParallelJobErrorMessage(job));
strNewFmt(
"%s%s", strZ(protocolParallelJobErrorMessage(job)),
strSize(warning) == 0 ? "" : strZ(strNewFmt("\n%s", strZ(warning)))));
}
protocolParallelJobFree(job);
@ -666,19 +972,27 @@ cmdArchiveGetAsync(void)
// need to fetch as many valid files as possible before throwing an error.
if (checkResult.errorType != NULL)
{
LOG_WARN_FMT(
COULD_NOT_GET_FROM_REPO_ARCHIVE_MSG " [%d] %s", strZ(checkResult.errorFile),
cfgOptionGroupIdxToKey(cfgOptGrpRepo, cfgOptionGroupIdxDefault(cfgOptGrpRepo)),
strZ(checkResult.archiveId), errorTypeCode(checkResult.errorType), strZ(checkResult.errorMessage));
LOG_WARN_FMT("[%s] %s", errorTypeName(checkResult.errorType), strZ(checkResult.errorMessage));
String *message = strDup(checkResult.errorMessage);
if (!strLstEmpty(checkResult.warnList))
strCatFmt(message, "\n%s", strZ(strLstJoin(checkResult.warnList, "\n")));
archiveAsyncStatusErrorWrite(
archiveModeGet, checkResult.errorFile, errorTypeCode(checkResult.errorType), checkResult.errorMessage);
archiveModeGet, checkResult.errorFile, errorTypeCode(checkResult.errorType), message);
}
// Else log a warning if any files were missing
else if (archiveFileMissing != NULL)
{
LOG_DETAIL_FMT(UNABLE_TO_FIND_IN_ARCHIVE_MSG, strZ(archiveFileMissing));
archiveAsyncStatusOkWrite(archiveModeGet, archiveFileMissing, NULL);
String *message = NULL;
if (!strLstEmpty(checkResult.warnList))
message = strLstJoin(checkResult.warnList, "\n");
archiveAsyncStatusOkWrite(archiveModeGet, archiveFileMissing, message);
}
}
// On any global error write a single error file to cover all unprocessed files

View File

@ -11,6 +11,7 @@ Archive Get Protocol Handler
#include "common/memContext.h"
#include "config/config.h"
#include "storage/helper.h"
#include "storage/write.intern.h"
/***********************************************************************************************************************************
Constants
@ -36,16 +37,42 @@ archiveGetProtocol(const String *command, const VariantList *paramList, Protocol
{
if (strEq(command, PROTOCOL_COMMAND_ARCHIVE_GET_STR))
{
const String *archiveFileRequest = varStr(varLstGet(paramList, 0));
const String *archiveFileActual = varStr(varLstGet(paramList, 1));
const CipherType cipherType = (CipherType)varUIntForce(varLstGet(paramList, 2));
const String *cipherPassArchive = varStr(varLstGet(paramList, 3));
const String *request = varStr(varLstGet(paramList, 0));
archiveGetFile(
storageSpoolWrite(), archiveFileActual, strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s", strZ(archiveFileRequest)), true,
cipherType, cipherPassArchive);
const unsigned int paramFixed = 1; // Fixed params before the actual list
const unsigned int paramActual = 5; // Parameters in each index of the actual list
protocolServerResponse(server, NULL);
// Check that the correct number of list parameters were passed
CHECK((varLstSize(paramList) - paramFixed) % paramActual == 0);
// Build the actual list
List *actualList = lstNewP(sizeof(ArchiveGetFile));
unsigned int actualListSize = (varLstSize(paramList) - paramFixed) / paramActual;
for (unsigned int actualIdx = 0; actualIdx < actualListSize; actualIdx++)
{
lstAdd(
actualList,
&(ArchiveGetFile)
{
.file = varStr(varLstGet(paramList, paramFixed + (actualIdx * paramActual))),
.repoIdx = varUIntForce(varLstGet(paramList, paramFixed + (actualIdx * paramActual) + 1)),
.archiveId = varStr(varLstGet(paramList, paramFixed + (actualIdx * paramActual) + 2)),
.cipherType = (CipherType)varUIntForce(varLstGet(paramList, paramFixed + (actualIdx * paramActual) + 3)),
.cipherPassArchive = varStr(varLstGet(paramList, paramFixed + (actualIdx * paramActual) + 4)),
});
}
// Return result
ArchiveGetFileResult fileResult = archiveGetFile(
storageSpoolWrite(), request, actualList,
strNewFmt(STORAGE_SPOOL_ARCHIVE_IN "/%s." STORAGE_FILE_TEMP_EXT, strZ(request)));
VariantList *result = varLstNew();
varLstAdd(result, varNewUInt(fileResult.actualIdx));
varLstAdd(result, varNewVarLst(varLstNewStrLst(fileResult.warnList)));
protocolServerResponse(server, varNewVarLst(result));
}
else
found = false;

View File

@ -18,12 +18,11 @@ static const unsigned char helpDataPack[] =
pckTypeStr << 4 | 0x09, 0x23, // Summary
0x47, 0x65, 0x74, 0x20, 0x61, 0x20, 0x57, 0x41, 0x4C, 0x20, 0x73, 0x65, 0x67, 0x6D, 0x65, 0x6E, 0x74, 0x20, 0x66, 0x72,
0x6F, 0x6D, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2E,
pckTypeStr << 4 | 0x08, 0x56, // Description
pckTypeStr << 4 | 0x08, 0x4B, // Description
0x57, 0x41, 0x4C, 0x20, 0x73, 0x65, 0x67, 0x6D, 0x65, 0x6E, 0x74, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x72, 0x65, 0x71,
0x75, 0x69, 0x72, 0x65, 0x64, 0x20, 0x66, 0x6F, 0x72, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6F, 0x72, 0x69, 0x6E, 0x67, 0x20,
0x61, 0x20, 0x50, 0x6F, 0x73, 0x74, 0x67, 0x72, 0x65, 0x53, 0x51, 0x4C, 0x20, 0x63, 0x6C, 0x75, 0x73, 0x74, 0x65, 0x72,
0x20, 0x6F, 0x72, 0x20, 0x6D, 0x61, 0x69, 0x6E, 0x74, 0x61, 0x69, 0x6E, 0x69, 0x6E, 0x67, 0x20, 0x61, 0x20, 0x72, 0x65,
0x70, 0x6C, 0x69, 0x63, 0x61, 0x2E,
0x75, 0x69, 0x72, 0x65, 0x64, 0x20, 0x66, 0x6F, 0x72, 0x20, 0x50, 0x6F, 0x73, 0x74, 0x67, 0x72, 0x65, 0x53, 0x51, 0x4C,
0x20, 0x72, 0x65, 0x63, 0x6F, 0x76, 0x65, 0x72, 0x79, 0x20, 0x6F, 0x72, 0x20, 0x74, 0x6F, 0x20, 0x6D, 0x61, 0x69, 0x6E,
0x74, 0x61, 0x69, 0x6E, 0x20, 0x61, 0x20, 0x72, 0x65, 0x70, 0x6C, 0x69, 0x63, 0x61, 0x2E,
// archive-push command
// -------------------------------------------------------------------------------------------------------------------------

View File

@ -83,6 +83,7 @@ ERROR_DEFINE( 99, JsonFormatError, RuntimeError);
ERROR_DEFINE(100, KernelError, RuntimeError);
ERROR_DEFINE(101, ServiceError, RuntimeError);
ERROR_DEFINE(102, ExecuteError, RuntimeError);
ERROR_DEFINE(103, RepoInvalidError, RuntimeError);
ERROR_DEFINE(104, CommandError, RuntimeError);
ERROR_DEFINE(122, RuntimeError, RuntimeError);
ERROR_DEFINE(123, InvalidError, RuntimeError);
@ -170,6 +171,7 @@ static const ErrorType *errorTypeList[] =
&KernelError,
&ServiceError,
&ExecuteError,
&RepoInvalidError,
&CommandError,
&RuntimeError,
&InvalidError,

View File

@ -85,6 +85,7 @@ ERROR_DECLARE(JsonFormatError);
ERROR_DECLARE(KernelError);
ERROR_DECLARE(ServiceError);
ERROR_DECLARE(ExecuteError);
ERROR_DECLARE(RepoInvalidError);
ERROR_DECLARE(CommandError);
ERROR_DECLARE(RuntimeError);
ERROR_DECLARE(InvalidError);

View File

@ -70,8 +70,9 @@ cfgLoadUpdateOption(void)
// Make sure repo option is set for the default command role when it is not internal and more than one repo is configured or the
// first configured repo is not key 1. Filter out any commands where this does not apply.
if (!cfgCommandHelp() && cfgCommand() != cfgCmdInfo && cfgCommand() != cfgCmdExpire && cfgOptionValid(cfgOptRepo) &&
!cfgOptionTest(cfgOptRepo) && (cfgOptionGroupIdxTotal(cfgOptGrpRepo) > 1 || cfgOptionGroupIdxToKey(cfgOptGrpRepo, 0) != 1))
if (!cfgCommandHelp() && cfgOptionValid(cfgOptRepo) && !cfgOptionTest(cfgOptRepo) && cfgCommand() != cfgCmdArchiveGet &&
cfgCommand() != cfgCmdInfo && cfgCommand() != cfgCmdExpire &&
(cfgOptionGroupIdxTotal(cfgOptGrpRepo) > 1 || cfgOptionGroupIdxToKey(cfgOptGrpRepo, 0) != 1))
{
THROW_FMT(
OptionRequiredError,

View File

@ -16,14 +16,15 @@ P00 INFO: archive-push command end: aborted with exception [055]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-cipher-pass=<redacted> --repo1-cipher-type=aes-256-cbc --repo1-path=[TEST_PATH]/db-primary/repo --stanza=db
P00 ERROR: [055]: unable to load info file '[TEST_PATH]/db-primary/repo/archive/db/archive.info' or '[TEST_PATH]/db-primary/repo/archive/db/archive.info.copy':
P00 WARN: repo1: [FileMissingError] unable to load info file '[TEST_PATH]/db-primary/repo/archive/db/archive.info' or '[TEST_PATH]/db-primary/repo/archive/db/archive.info.copy':
FileMissingError: unable to open missing file '[TEST_PATH]/db-primary/repo/archive/db/archive.info' for read
FileMissingError: unable to open missing file '[TEST_PATH]/db-primary/repo/archive/db/archive.info.copy' for read
HINT: archive.info cannot be opened but is required to push/get WAL segments.
HINT: is archive_command configured correctly in postgresql.conf?
HINT: has a stanza-create been performed?
HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving scheme.
P00 INFO: archive-get command end: aborted with exception [055]
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
stanza-create db - stanza create (db-primary host)
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db --no-online stanza-create
@ -89,7 +90,7 @@ P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-cipher-pass=<redacted> --repo1-cipher-type=aes-256-cbc --repo1-path=[TEST_PATH]/db-primary/repo --stanza=db
P00 INFO: found 000000010000000100000001 in the repo1:9.4-1 archive
P00 INFO: found 000000010000000100000001 in the repo1: 9.4-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push --compress-type=lz4 --archive-async --process-max=2 [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002
@ -107,8 +108,9 @@ P00 INFO: archive-push command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-cipher-pass=<redacted> --repo1-cipher-type=aes-256-cbc --repo1-path=[TEST_PATH]/db-primary/repo --stanza=db
P00 ERROR: [044]: unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 INFO: archive-get command end: aborted with exception [044]
P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002
------------------------------------------------------------------------------------------------------------------------------------
@ -120,8 +122,9 @@ P00 INFO: archive-push command end: aborted with exception [044]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-cipher-pass=<redacted> --repo1-cipher-type=aes-256-cbc --repo1-path=[TEST_PATH]/db-primary/repo --stanza=db
P00 ERROR: [044]: unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 INFO: archive-get command end: aborted with exception [044]
P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
stop db stanza (db-primary host)
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db stop
@ -176,7 +179,7 @@ P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get --archive-async 00000002.history [TEST_PATH]/db-primary/db/base/pg_xlog/00000002.history
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [00000002.history, [TEST_PATH]/db-primary/db/base/pg_xlog/00000002.history] --archive-async --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-cipher-pass=<redacted> --repo1-cipher-type=aes-256-cbc --repo1-path=[TEST_PATH]/db-primary/repo --spool-path=[TEST_PATH]/db-primary/spool --stanza=db
P00 INFO: found 00000002.history in the repo1:9.4-1 archive
P00 INFO: found 00000002.history in the repo1: 9.4-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002.partial

View File

@ -16,14 +16,15 @@ P00 INFO: archive-push command end: aborted with exception [055]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --stanza=db
P00 ERROR: [055]: unable to load info file '/archive/db/archive.info' or '/archive/db/archive.info.copy':
P00 WARN: repo1: [FileMissingError] unable to load info file '/archive/db/archive.info' or '/archive/db/archive.info.copy':
FileMissingError: raised from remote-0 protocol on 'backup': unable to open '/archive/db/archive.info': No such file or directory
FileMissingError: raised from remote-0 protocol on 'backup': unable to open '/archive/db/archive.info.copy': No such file or directory
HINT: archive.info cannot be opened but is required to push/get WAL segments.
HINT: is archive_command configured correctly in postgresql.conf?
HINT: has a stanza-create been performed?
HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving scheme.
P00 INFO: archive-get command end: aborted with exception [055]
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
stanza-create db - stanza create (backup host)
> [CONTAINER-EXEC] backup [BACKREST-BIN] --config=[TEST_PATH]/backup/pgbackrest.conf --stanza=db --no-online stanza-create
@ -84,7 +85,7 @@ P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --stanza=db
P00 INFO: found 000000010000000100000001 in the repo1:9.4-1 archive
P00 INFO: found 000000010000000100000001 in the repo1: 9.4-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push --compress-type=zst --archive-async --process-max=2 [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002
@ -102,8 +103,9 @@ P00 INFO: archive-push command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --stanza=db
P00 ERROR: [044]: unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 INFO: archive-get command end: aborted with exception [044]
P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002
------------------------------------------------------------------------------------------------------------------------------------
@ -115,8 +117,9 @@ P00 INFO: archive-push command end: aborted with exception [044]
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000001 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000001, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --stanza=db
P00 ERROR: [044]: unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 INFO: archive-get command end: aborted with exception [044]
P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '9.4' and system-id '1000000000000000094'
P00 ERROR: [103]: unable to find a valid repository
P00 INFO: archive-get command end: aborted with exception [103]
stop db stanza (db-primary host)
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db stop
@ -171,7 +174,7 @@ P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get --archive-async 00000002.history [TEST_PATH]/db-primary/db/base/pg_xlog/00000002.history
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [00000002.history, [TEST_PATH]/db-primary/db/base/pg_xlog/00000002.history] --archive-async --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --spool-path=[TEST_PATH]/db-primary/spool --stanza=db
P00 INFO: found 00000002.history in the repo1:9.4-1 archive
P00 INFO: found 00000002.history in the repo1: 9.4-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000002.partial

View File

@ -293,7 +293,7 @@ backrest-checksum="[CHECKSUM]"
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000002 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000002, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-path=[TEST_PATH]/db-primary/repo --stanza=db
P00 INFO: found 000000010000000100000002 in the repo1:9.3-1 archive
P00 INFO: found 000000010000000100000002 in the repo1: 9.3-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --log-level-console=warn --archive-push-queue-max=33554432 --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000001

View File

@ -301,7 +301,7 @@ backrest-checksum="[CHECKSUM]"
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --stanza=db archive-get 000000010000000100000002 [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG
------------------------------------------------------------------------------------------------------------------------------------
P00 INFO: archive-get command begin [BACKREST-VERSION]: [000000010000000100000002, [TEST_PATH]/db-primary/db/base/pg_xlog/RECOVERYXLOG] --buffer-size=[BUFFER-SIZE] --compress-level-network=1 --config=[TEST_PATH]/db-primary/pgbackrest.conf --db-timeout=45 --exec-id=[EXEC-ID] --job-retry=0 --lock-path=[TEST_PATH]/db-primary/lock --log-level-console=detail --log-level-file=[LOG-LEVEL-FILE] --log-level-stderr=off --log-path=[TEST_PATH]/db-primary/log[] --no-log-timestamp --pg1-path=[TEST_PATH]/db-primary/db/base --protocol-timeout=60 --repo1-host=backup --repo1-host-cmd=[BACKREST-BIN] --repo1-host-config=[TEST_PATH]/backup/pgbackrest.conf --repo1-host-user=[USER-1] --stanza=db
P00 INFO: found 000000010000000100000002 in the repo1:9.3-1 archive
P00 INFO: found 000000010000000100000002 in the repo1: 9.3-1 archive
P00 INFO: archive-get command end: completed successfully
> [CONTAINER-EXEC] db-primary [BACKREST-BIN] --config=[TEST_PATH]/db-primary/pgbackrest.conf --log-level-console=warn --archive-push-queue-max=33554432 --stanza=db archive-push [TEST_PATH]/db-primary/db/base/pg_xlog/000000010000000100000001

View File

@ -38,6 +38,12 @@ use pgBackRestTest::Common::ExecuteTest;
use pgBackRestTest::Common::HostGroupTest;
use pgBackRestTest::Common::RunTest;
####################################################################################################################################
# Error constants
####################################################################################################################################
use constant ERROR_REPO_INVALID => 103;
push @EXPORT, qw(ERROR_REPO_INVALID);
####################################################################################################################################
# Latest backup link constant
####################################################################################################################################

View File

@ -139,7 +139,7 @@ sub run
$oHostDbPrimary->executeSimple(
$strCommandGet . " ${strSourceFile1} ${strWalPath}/RECOVERYXLOG",
{iExpectedExitStatus => ERROR_FILE_MISSING, oLogTest => $self->expect()});
{iExpectedExitStatus => ERROR_REPO_INVALID, oLogTest => $self->expect()});
#---------------------------------------------------------------------------------------------------------------------------
$oHostBackup->stanzaCreate('stanza create', {strOptionalParam => '--no-online'});
@ -278,7 +278,7 @@ sub run
$oHostDbPrimary->executeSimple(
$strCommandGet . " ${strSourceFile1} ${strWalPath}/RECOVERYXLOG",
{iExpectedExitStatus => ERROR_ARCHIVE_MISMATCH, oLogTest => $self->expect()});
{iExpectedExitStatus => ERROR_REPO_INVALID, oLogTest => $self->expect()});
# Restore the file to its original condition
$oHostBackup->infoRestore($oHostBackup->repoArchivePath(ARCHIVE_INFO_FILE));
@ -297,7 +297,7 @@ sub run
$oHostDbPrimary->executeSimple(
$strCommandGet . " ${strSourceFile1} ${strWalPath}/RECOVERYXLOG",
{iExpectedExitStatus => ERROR_ARCHIVE_MISMATCH, oLogTest => $self->expect()});
{iExpectedExitStatus => ERROR_REPO_INVALID, oLogTest => $self->expect()});
# Restore the file to its original condition
$oHostBackup->infoRestore($oHostBackup->repoArchivePath(ARCHIVE_INFO_FILE));

View File

@ -4,6 +4,7 @@ Storage Test Harness
#include <inttypes.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include "common/crypto/cipherBlock.h"
#include "common/debug.h"
@ -169,6 +170,24 @@ hrnStorageInfoListCallback(void *callbackData, const StorageInfo *info)
strCatZ(data->content, "}\n");
}
/**********************************************************************************************************************************/
void
testStorageGet(
    const int line, const Storage *const storage, const char *const file, const char *const expected, TestStorageGetParam param)
{
    // Log the calling test line and open a test result so failures point at the caller
    hrnTestLogPrefix(line, true);
    hrnTestResultBegin(__func__, line, false);

    // Resolve the file name against the storage root
    const String *const pathActual = storagePathP(storage, STR(file));

    printf("test content of '%s'\n", strZ(pathActual));
    fflush(stdout);

    // Read the entire file and compare it to the expected content
    const Buffer *const content = storageGetP(storageNewReadP(storage, pathActual));
    hrnTestResultZ(strZ(strNewBuf(content)), expected, harnessTestResultOperationEq);

    // Optionally clean up the file once verified -- missing file is an error here
    if (param.remove)
        storageRemoveP(storage, pathActual, .errorOnMissing = true);
}
/**********************************************************************************************************************************/
StringList *
hrnStorageList(const Storage *storage, const char *path, HrnStorageListParam param)
@ -197,6 +216,36 @@ hrnStorageListLog(const Storage *storage, const char *path, HrnStorageListParam
strZ(storagePathP(storage, STR(path)))));
}
/**********************************************************************************************************************************/
void
hrnStorageMode(const int line, const Storage *const storage, const char *const path, HrnStorageModeParam param)
{
    // Log the calling test line and open a test result so failures point at the caller
    hrnTestLogPrefix(line, true);
    hrnTestResultBegin(__func__, line, false);

    const char *const target = strZ(storagePathP(storage, STR(path)));

    // When the caller did not specify a mode, reset to the default for the file type
    if (param.mode == 0)
    {
        struct stat targetStat;

        THROW_ON_SYS_ERROR_FMT(stat(target, &targetStat) == -1, FileOpenError, "unable to stat '%s'", target);

        param.mode = S_ISDIR(targetStat.st_mode) ? STORAGE_MODE_PATH_DEFAULT : STORAGE_MODE_FILE_DEFAULT;
    }

    printf("chmod '%04o' on '%s'\n", param.mode, target);
    fflush(stdout);

    THROW_ON_SYS_ERROR_FMT(chmod(target, param.mode) == -1, FileModeError, "unable to set mode on '%s'", target);

    hrnTestResultEnd();
}
/**********************************************************************************************************************************/
void
hrnStoragePut(const Storage *storage, const char *file, const Buffer *buffer, HrnStoragePutParam param)

View File

@ -16,6 +16,23 @@ Check file exists
#define TEST_STORAGE_EXISTS(storage, file) \
TEST_RESULT_BOOL(storageExistsP(storage, STR(file)), true, "file exists '%s'", strZ(storagePathP(storage, STR(file))))
/***********************************************************************************************************************************
Get a file and test it against the specified content
***********************************************************************************************************************************/
// Optional parameters for TEST_STORAGE_GET()
typedef struct TestStorageGetParam
{
    VAR_PARAM_HEADER;
    bool remove;                                                    // Remove file after testing?
} TestStorageGetParam;

// Read a file from storage and assert that its content matches exactly
#define TEST_STORAGE_GET(storage, file, content, ...) \
    testStorageGet(__LINE__, storage, file, content, (TestStorageGetParam){VAR_PARAM_INIT, __VA_ARGS__})

// Shortcut for asserting that a file exists but is empty
#define TEST_STORAGE_GET_EMPTY(storage, file, ...) \
    TEST_STORAGE_GET(storage, file, "", __VA_ARGS__)

void testStorageGet(
    const int line, const Storage *const storage, const char *const file, const char *const expected, TestStorageGetParam param);
/***********************************************************************************************************************************
List files in a path and optionally remove them
***********************************************************************************************************************************/
@ -36,6 +53,20 @@ typedef struct HrnStorageListParam
StringList *hrnStorageList(const Storage *storage, const char *path, HrnStorageListParam param);
const char *hrnStorageListLog(const Storage *storage, const char *path, HrnStorageListParam param);
/***********************************************************************************************************************************
Change the mode of a path/file
***********************************************************************************************************************************/
// Optional parameters for HRN_STORAGE_MODE()
typedef struct HrnStorageModeParam
{
    VAR_PARAM_HEADER;
    mode_t mode;                                                    // Mode to set -- reset to default if not provided
} HrnStorageModeParam;

// Change the mode of a path/file, defaulting by file type when no mode is given
#define HRN_STORAGE_MODE(storage, path, ...) \
    hrnStorageMode(__LINE__, storage, path, (HrnStorageModeParam){VAR_PARAM_INIT, __VA_ARGS__})

void hrnStorageMode(const int line, const Storage *const storage, const char *const path, HrnStorageModeParam param);
/***********************************************************************************************************************************
Put a file with optional compression and/or encryption
***********************************************************************************************************************************/

View File

@ -69,18 +69,34 @@ testRun(void)
storageListP(storageSpoolWrite(), strNew(STORAGE_SPOOL_ARCHIVE_IN)), "0000000100000001000000FE\n", "check queue");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("pg >= 9.3 and ok/junk status files");
walSegmentSize = 1024 * 1024;
queueSize = walSegmentSize * 5;
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/junk", "JUNK");
// Bad OK file with wrong length (just to make sure this does not cause strSubN() issues)
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/AAA.ok", "0\nWARNING");
// OK file with warnings somehow left over from a prior run
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000A00000FFD.ok", "0\nWARNING");
// Valid queued WAL segments (one with an OK file containing warnings)
HRN_STORAGE_PUT(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000A00000FFE", walSegmentBuffer);
HRN_STORAGE_PUT(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000A00000FFF", walSegmentBuffer);
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000A00000FFF.ok", "0\nWARNING2");
// Empty OK file indicating a WAL segment not found at the end of the queue
HRN_STORAGE_PUT_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000B00000000.ok");
TEST_RESULT_STRLST_Z(
queueNeed(strNew("000000010000000A00000FFD"), true, queueSize, walSegmentSize, PG_VERSION_11),
"000000010000000B00000000\n000000010000000B00000001\n000000010000000B00000002\n", "queue has wal >= 9.3");
TEST_STORAGE_LIST(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000A00000FFE\n000000010000000A00000FFF\n");
TEST_STORAGE_LIST(
storageSpool(), STORAGE_SPOOL_ARCHIVE_IN,
"000000010000000A00000FFE\n000000010000000A00000FFF\n000000010000000A00000FFF.ok\n");
}
// *****************************************************************************************************************************
@ -105,7 +121,10 @@ testRun(void)
TEST_ERROR(cmdArchiveGetAsync(), HostInvalidError, "archive-get command must be run on the PostgreSQL host");
TEST_STORAGE_LIST(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "global.error\n", .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/global.error",
"72\narchive-get command must be run on the PostgreSQL host", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error on no segments");
@ -115,7 +134,10 @@ testRun(void)
TEST_ERROR(cmdArchiveGetAsync(), ParamInvalidError, "at least one wal segment is required");
TEST_STORAGE_LIST(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "global.error\n", .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/global.error", "96\nat least one wal segment is required",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("no segments to find");
@ -141,7 +163,32 @@ testRun(void)
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P00 DETAIL: unable to find 000000010000000100000001 in the archive");
TEST_STORAGE_LIST(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000001.ok\n", .remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error on path permission");
storagePathCreateP(storageRepoIdxWrite(0), STRDEF(STORAGE_REPO_ARCHIVE "/10-1"), .mode = 0400);
TEST_RESULT_VOID(cmdArchiveGetAsync(), "get async");
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "/archive/test2/10-1"
"/0000000100000001': [13] Permission denied\n"
"P00 WARN: [RepoInvalidError] unable to find a valid repository");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.error",
"103\n"
"unable to find a valid repository\n"
"repo1: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "/archive/test2/10-1/0000000100000001':"
" [13] Permission denied",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
HRN_STORAGE_MODE(storageRepoIdxWrite(0), STORAGE_REPO_ARCHIVE "/10-1");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error on invalid compressed segment");
@ -153,12 +200,18 @@ testRun(void)
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: could not get 000000010000000100000001 from the repo1:10-1 archive (will be retried):"
" [29] raised from local-1 protocol: unexpected eof in compressed data");
"P01 WARN: [FileReadError] raised from local-1 protocol: unable to get 000000010000000100000001:\n"
" repo1: 10-1/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data");
TEST_STORAGE_LIST(
storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000001.error\n000000010000000100000001.pgbackrest.tmp\n");
TEST_STORAGE_REMOVE(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.error");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.error",
"42\n"
"raised from local-1 protocol: unable to get 000000010000000100000001:\n"
"repo1: 10-1/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_LIST(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000001.pgbackrest.tmp\n");
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz");
@ -176,12 +229,89 @@ testRun(void)
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 DETAIL: found 000000010000000100000001 in the repo1:10-1 archive");
"P01 DETAIL: found 000000010000000100000001 in the repo1: 10-1 archive");
TEST_STORAGE_LIST(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000001\n", .remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("multiple segments where some are missing or errored");
TEST_TITLE("single segment with one invalid file");
HRN_INFO_PUT(
storageRepoWrite(), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=1\n"
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n"
"2={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-2/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz");
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data\n"
"P01 DETAIL: found 000000010000000100000001 in the repo1: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok",
"0\n"
"repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-2/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("single segment with one invalid file");
HRN_INFO_PUT(
storageRepoWrite(), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=1\n"
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n"
"2={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-2/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz");
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data\n"
"P01 DETAIL: found 000000010000000100000001 in the repo1: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok",
"0\n"
"repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-2/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("multiple segments where some are missing or errored and mismatched repo");
hrnCfgArgKeyRawZ(argBaseList, cfgOptRepoPath, 2, TEST_PATH_REPO "2");
argList = strLstDup(argBaseList);
strLstAddZ(argList, "0000000100000001000000FE");
@ -189,6 +319,14 @@ testRun(void)
strLstAddZ(argList, "000000010000000200000000");
harnessCfgLoadRole(cfgCmdArchiveGet, cfgCmdRoleAsync, argList);
HRN_INFO_PUT(
storageRepoIdxWrite(1), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=1\n"
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"11\"}\n");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/0000000100000001000000FE-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd");
@ -200,36 +338,194 @@ testRun(void)
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
#define TEST_WARN \
"repo2: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id" \
" '18072658121562454734'"
harnessLogResult(
"P00 INFO: get 3 WAL file(s) from archive: 0000000100000001000000FE...000000010000000200000000\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1:10-1 archive\n"
"P00 WARN: " TEST_WARN "\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1: 10-1 archive\n"
"P00 DETAIL: unable to find 0000000100000001000000FF in the archive");
TEST_STORAGE_LIST(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "0000000100000001000000FE\n0000000100000001000000FF.ok\n",
.remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE.ok", "0\n" TEST_WARN, .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FF.ok", "0\n" TEST_WARN, .remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
#undef TEST_WARN
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error on duplicates now that no segments are missing");
TEST_TITLE("error on duplicates now that no segments are missing, repo with bad perms");
// Fix repo 2 archive info but break archive path
HRN_INFO_PUT(
storageRepoIdxWrite(1), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=1\n"
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n");
storagePathCreateP(storageRepoIdxWrite(1), STRDEF(STORAGE_REPO_ARCHIVE "/10-1"), .mode = 0400);
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/0000000100000001000000FF-efefefefefefefefefefefefefefefefefefefef");
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
#define TEST_WARN1 \
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "2/archive/test2/10-1" \
"/0000000100000001': [13] Permission denied"
#define TEST_WARN2 \
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "2/archive/test2/10-1" \
"/0000000100000002': [13] Permission denied"
harnessLogResult(
"P00 INFO: get 3 WAL file(s) from archive: 0000000100000001000000FE...000000010000000200000000\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1:10-1 archive\n"
"P01 DETAIL: found 0000000100000001000000FF in the repo1:10-1 archive\n"
"P00 WARN: could not get 000000010000000200000000 from the repo1:10-1 archive (will be retried): "
"[45] duplicates found in the repo1:10-1 archive for WAL segment 000000010000000200000000: "
"000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, "
"000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"P00 WARN: " TEST_WARN1 "\n"
"P00 WARN: " TEST_WARN2 "\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1: 10-1 archive\n"
"P01 DETAIL: found 0000000100000001000000FF in the repo1: 10-1 archive\n"
"P00 WARN: [ArchiveDuplicateError] duplicates found for WAL segment 000000010000000200000000:\n"
" repo1: 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
", 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
" HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE", .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE.ok", "0\n" TEST_WARN1, .remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FF", .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FF.ok", "0\n" TEST_WARN1, .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.error",
"45\n"
"duplicates found for WAL segment 000000010000000200000000:\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, 10-1/0000000100000002"
"/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
"HINT: are multiple primaries archiving to this stanza?\n"
TEST_WARN2,
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
HRN_STORAGE_MODE(storageRepoIdxWrite(1), STORAGE_REPO_ARCHIVE "/10-1");
#undef TEST_WARN1
#undef TEST_WARN2
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error on duplicates");
argList = strLstDup(argBaseList);
strLstAddZ(argList, "000000010000000200000000");
harnessCfgLoadRole(cfgCmdArchiveGet, cfgCmdRoleAsync, argList);
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: [ArchiveDuplicateError] duplicates found for WAL segment 000000010000000200000000:\n"
" repo1: 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
", 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
" HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.error",
"45\n"
"duplicates found for WAL segment 000000010000000200000000:\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, 10-1/0000000100000002"
"/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
"HINT: are multiple primaries archiving to this stanza?",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb");
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("warn on invalid file");
hrnCfgArgKeyRawZ(argBaseList, cfgOptRepoPath, 3, TEST_PATH_REPO "3");
argList = strLstDup(argBaseList);
strLstAddZ(argList, "000000010000000200000000");
harnessCfgLoadRole(cfgCmdArchiveGet, cfgCmdRoleAsync, argList);
HRN_INFO_PUT(
storageRepoIdxWrite(2), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=1\n"
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"11\"}\n");
storagePathCreateP(storageRepoIdxWrite(2), STRDEF("10-1"), .mode = 0400);
HRN_STORAGE_PUT_EMPTY(
storageRepoIdxWrite(0),
STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz");
HRN_STORAGE_PUT_EMPTY(
storageRepoIdxWrite(1),
STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
#define TEST_WARN1 \
"repo3: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id" \
" '18072658121562454734'"
#define TEST_WARN2 \
"repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz" \
" [FormatError] unexpected eof in compressed data"
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: " TEST_WARN1 "\n"
"P01 WARN: " TEST_WARN2 "\n"
"P01 DETAIL: found 000000010000000200000000 in the repo2: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.ok", "0\n" TEST_WARN1 "\n" TEST_WARN2,
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
TEST_STORAGE_REMOVE(
storageRepoIdxWrite(1), STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("error with warnings");
HRN_STORAGE_PUT_EMPTY(
storageRepoIdxWrite(1),
STORAGE_REPO_ARCHIVE "/10-1/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz");
TEST_RESULT_VOID(cmdArchiveGetAsync(), "archive async");
#define TEST_WARN3 \
"repo2: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz" \
" [FormatError] unexpected eof in compressed data"
harnessLogResult(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: " TEST_WARN1 "\n"
"P01 WARN: [FileReadError] raised from local-1 protocol: unable to get 000000010000000200000000:\n"
" " TEST_WARN2 "\n"
" " TEST_WARN3);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.error",
"42\n"
"raised from local-1 protocol: unable to get 000000010000000200000000:\n"
TEST_WARN2 "\n"
TEST_WARN3 "\n"
TEST_WARN1,
.remove = true);
TEST_STORAGE_LIST(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN,
"0000000100000001000000FE\n0000000100000001000000FF\n000000010000000200000000.error\n", .remove = true);
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000200000000.pgbackrest.tmp\n", .remove = true);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("global error on invalid executable");
@ -310,19 +606,19 @@ testRun(void)
strLstAddZ(argList, TEST_PATH_PG "/pg_wal/RECOVERYXLOG");
harnessCfgLoadRaw(strLstSize(argList), strLstPtr(argList));
TEST_ERROR_FMT(
cmdArchiveGet(), FileMissingError,
"unable to load info file '%s/archive/test1/archive.info' or '%s/archive/test1/archive.info.copy':\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
"HINT: is archive_command configured correctly in postgresql.conf?\n"
"HINT: has a stanza-create been performed?\n"
"HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.",
strZ(cfgOptionStr(cfgOptRepoPath)), strZ(cfgOptionStr(cfgOptRepoPath)),
strZ(strNewFmt("%s/archive/test1/archive.info", strZ(cfgOptionStr(cfgOptRepoPath)))),
strZ(strNewFmt("%s/archive/test1/archive.info.copy", strZ(cfgOptionStr(cfgOptRepoPath)))));
TEST_ERROR_FMT(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
harnessLogResult(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH_REPO "/archive/test1/archive.info' or '"
TEST_PATH_REPO "/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "/archive/test1/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "/archive/test1/archive.info.copy' for"
" read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.");
// -------------------------------------------------------------------------------------------------------------------------
argList = strLstDup(argBaseList);
@ -331,19 +627,19 @@ testRun(void)
strLstAddZ(argList, "--archive-async");
harnessCfgLoadRaw(strLstSize(argList), strLstPtr(argList));
TEST_ERROR_FMT(
cmdArchiveGet(), FileMissingError,
"unable to load info file '%s/archive/test1/archive.info' or '%s/archive/test1/archive.info.copy':\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
"HINT: is archive_command configured correctly in postgresql.conf?\n"
"HINT: has a stanza-create been performed?\n"
"HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.",
strZ(cfgOptionStr(cfgOptRepoPath)), strZ(cfgOptionStr(cfgOptRepoPath)),
strZ(strNewFmt("%s/archive/test1/archive.info", strZ(cfgOptionStr(cfgOptRepoPath)))),
strZ(strNewFmt("%s/archive/test1/archive.info.copy", strZ(cfgOptionStr(cfgOptRepoPath)))));
TEST_ERROR_FMT(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
harnessLogResult(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH_REPO "/archive/test1/archive.info' or '"
TEST_PATH_REPO "/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "/archive/test1/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "/archive/test1/archive.info.copy' for"
" read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.");
// Make sure the process times out when there is nothing to get
// -------------------------------------------------------------------------------------------------------------------------
@ -390,11 +686,16 @@ testRun(void)
harnessCfgLoadRaw(strLstSize(argList), strLstPtr(argList));
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", "SHOULD-BE-A-REAL-WAL-FILE");
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok", "0\nwarning about x");
HRN_STORAGE_PUT_Z(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000002", "SHOULD-BE-A-REAL-WAL-FILE");
TEST_RESULT_INT(cmdArchiveGet(), 0, "successful get");
TEST_RESULT_VOID(harnessLogResult("P00 INFO: found 000000010000000100000001 in the archive asynchronously"), "check log");
TEST_RESULT_VOID(
harnessLogResult(
"P00 WARN: warning about x\n"
"P00 INFO: found 000000010000000100000001 in the archive asynchronously"),
"check log");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
@ -441,9 +742,11 @@ testRun(void)
strLstAddZ(argList, TEST_PATH_PG "/pg_wal/RECOVERYXLOG");
harnessCfgLoad(cfgCmdArchiveGet, argList);
TEST_ERROR(
cmdArchiveGet(), ArchiveMismatchError,
"unable to retrieve the archive id for database version '11' and system-id '18072658121562454734'");
TEST_ERROR(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
harnessLogResult(
"P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '11' and system-id"
" '18072658121562454734'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("pg version does not match archive.info");
@ -452,9 +755,11 @@ testRun(void)
storagePgWrite(), PG_PATH_GLOBAL "/" PG_FILE_PGCONTROL,
pgControlTestToBuffer((PgControl){.version = PG_VERSION_10, .systemId = 0x8888888888888888}));
TEST_ERROR(
cmdArchiveGet(), ArchiveMismatchError,
"unable to retrieve the archive id for database version '10' and system-id '9838263505978427528'");
TEST_ERROR(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
harnessLogResult(
"P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '9838263505978427528'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("file is missing");
@ -482,7 +787,7 @@ testRun(void)
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1:10-1 archive");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1: 10-1 archive");
TEST_RESULT_UINT(
storageInfoP(storageTest, STRDEF(TEST_PATH_PG "/pg_wal/RECOVERYXLOG")).size, 16 * 1024 * 1024, "check size");
@ -496,9 +801,9 @@ testRun(void)
TEST_ERROR(
cmdArchiveGet(), ArchiveDuplicateError,
"duplicates found in the repo1:10-1 archive for WAL segment 01ABCDEF01ABCDEF01ABCDEF:"
" 01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,"
" 01ABCDEF01ABCDEF01ABCDEF-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"duplicates found for WAL segment 01ABCDEF01ABCDEF01ABCDEF:\n"
"repo1: 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
", 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
"HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", NULL);
@ -515,12 +820,13 @@ testRun(void)
"\n"
"[db:history]\n"
"1={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n"
"2={\"db-id\":10000000000000000000,\"db-version\":\"11\"}\n"
"3={\"db-id\":18072658121562454734,\"db-version\":\"10\"}");
"2={\"db-id\":18072658121562454734,\"db-version\":\"10\"}\n"
"3={\"db-id\":10000000000000000000,\"db-version\":\"11\"}\n"
"4={\"db-id\":18072658121562454734,\"db-version\":\"10\"}");
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1:10-1 archive");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1: 10-1 archive");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
@ -528,17 +834,17 @@ testRun(void)
TEST_TITLE("get from current db-id");
HRN_STORAGE_PUT_EMPTY(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-3/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-4/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1:10-3 archive");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo1: 10-4 archive");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-1/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
TEST_STORAGE_REMOVE(
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-3/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
storageRepoWrite(), STORAGE_REPO_ARCHIVE "/10-4/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("get partial");
@ -549,7 +855,7 @@ testRun(void)
HRN_STORAGE_PUT(
storageRepoWrite(),
STORAGE_REPO_ARCHIVE "/10-3/000000010000000100000001.partial-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
STORAGE_REPO_ARCHIVE "/10-4/000000010000000100000001.partial-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
buffer);
argList = strLstDup(argBaseList);
@ -559,12 +865,12 @@ testRun(void)
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 000000010000000100000001.partial in the repo1:10-3 archive");
harnessLogResult("P00 INFO: found 000000010000000100000001.partial in the repo1: 10-4 archive");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
TEST_STORAGE_REMOVE(
storageRepoWrite(),
STORAGE_REPO_ARCHIVE "/10-3/000000010000000100000001.partial-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
STORAGE_REPO_ARCHIVE "/10-4/000000010000000100000001.partial-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("get missing history");
@ -587,13 +893,13 @@ testRun(void)
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 00000001.history in the repo1:10-1 archive");
harnessLogResult("P00 INFO: found 00000001.history in the repo1: 10-1 archive");
TEST_RESULT_UINT(storageInfoP(storageTest, STRDEF(TEST_PATH_PG "/pg_wal/RECOVERYHISTORY")).size, 7, "check size");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYHISTORY\n", .remove = true);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("get compressed and encrypted WAL segment");
TEST_TITLE("get compressed and encrypted WAL segment with invalid repo");
HRN_INFO_PUT(
storageRepoWrite(), INFO_ARCHIVE_PATH_FILE,
@ -614,9 +920,8 @@ testRun(void)
// Add encryption options
argList = strLstNew();
hrnCfgArgRawZ(argList, cfgOptPgPath, TEST_PATH_PG);
hrnCfgArgKeyRawZ(argList, cfgOptRepoPath, 1, "/repo-bogus");
hrnCfgArgKeyRawZ(argList, cfgOptRepoPath, 1, TEST_PATH_REPO "-bogus");
hrnCfgArgKeyRawFmt(argList, cfgOptRepoPath, 2, TEST_PATH_REPO);
hrnCfgArgRawZ(argList, cfgOptRepo, "2");
hrnCfgArgKeyRawZ(argList, cfgOptRepoCipherType, 2, CIPHER_TYPE_AES_256_CBC);
hrnCfgEnvKeyRawZ(cfgOptRepoCipherPass, 2, TEST_CIPHER_PASS);
hrnCfgArgRawZ(argList, cfgOptStanza, "test1");
@ -627,11 +932,108 @@ testRun(void)
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult("P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2:10-1 archive");
harnessLogResult(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH_REPO "-bogus/archive/test1/archive.info'"
" or '" TEST_PATH_REPO "-bogus/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "-bogus/archive/test1/archive.info'"
" for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH_REPO "-bogus/archive/test1/archive.info.copy'"
" for read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate"
" archiving scheme.\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n");
TEST_RESULT_UINT(
storageInfoP(storageTest, STRDEF(TEST_PATH_PG "/pg_wal/RECOVERYXLOG")).size, 16 * 1024 * 1024, "check size");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("repo1 has info but bad permissions");
HRN_INFO_PUT(
storageRepoIdxWrite(0), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
"db-id=2\n"
"\n"
"[db:history]\n"
"2={\"db-id\":18072658121562454734,\"db-version\":\"10\"}");
storagePathCreateP(storageRepoIdxWrite(0), STRDEF(STORAGE_REPO_ARCHIVE "/10-2"), .mode = 0400);
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult(
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "-bogus/archive/test1/10-2"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
TEST_STORAGE_LIST(storageTest, TEST_PATH_PG "/pg_wal", "RECOVERYXLOG\n", .remove = true);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("all repos have info but bad permissions");
HRN_STORAGE_MODE(storageRepoIdxWrite(1), STORAGE_REPO_ARCHIVE "/10-1", .mode = 0400);
TEST_ERROR(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
harnessLogResult(
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "-bogus/archive/test1/10-2"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"P00 WARN: repo2: [PathOpenError] unable to list file info for path '" TEST_PATH_REPO "/archive/test1/10-1"
"/01ABCDEF01ABCDEF': [13] Permission denied");
HRN_STORAGE_MODE(storageRepoIdxWrite(0), STORAGE_REPO_ARCHIVE "/10-2");
HRN_STORAGE_MODE(storageRepoIdxWrite(1), STORAGE_REPO_ARCHIVE "/10-1");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("unable to get from one repo");
HRN_STORAGE_PUT(
storageRepoIdxWrite(0),
STORAGE_REPO_ARCHIVE "/10-2/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz", NULL);
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult(
"P00 WARN: repo1: 10-2/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("unable to get from all repos");
HRN_STORAGE_MODE(
storageRepoIdxWrite(1),
STORAGE_REPO_ARCHIVE "/10-1/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz", .mode = 0200);
TEST_ERROR(
cmdArchiveGet(), FileReadError,
"unable to get 01ABCDEF01ABCDEF01ABCDEF:\n"
"repo1: 10-2/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz [FormatError]"
" unexpected eof in compressed data\n"
"repo2: 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz [FileOpenError]"
" unable to open file '" TEST_PATH_REPO "/archive/test1/10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF"
"-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz' for read: [13] Permission denied");
HRN_STORAGE_MODE(
storageRepoIdxWrite(1),
STORAGE_REPO_ARCHIVE "/10-1/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("repo is specified so invalid repo is skipped");
hrnCfgArgRawZ(argList, cfgOptRepo, "2");
hrnCfgEnvKeyRawZ(cfgOptRepoCipherPass, 2, TEST_CIPHER_PASS);
harnessCfgLoadRole(cfgCmdArchiveGet, cfgCmdRoleLocal, argList);
hrnCfgEnvKeyRemoveRaw(cfgOptRepoCipherPass, 2);
TEST_RESULT_INT(cmdArchiveGet(), 0, "get");
harnessLogResult(
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("call protocol function directly");
@ -658,14 +1060,17 @@ testRun(void)
varLstAdd(paramList, varNewStrZ("01ABCDEF01ABCDEF01ABCDEF"));
varLstAdd(
paramList, varNewStrZ("10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"));
varLstAdd(paramList, varNewUInt(1));
varLstAdd(paramList, varNewStrZ("10-1"));
varLstAdd(paramList, varNewUInt(cipherTypeAes256Cbc));
varLstAdd(paramList, varNewStrZ(TEST_CIPHER_PASS_ARCHIVE));
TEST_RESULT_BOOL(
archiveGetProtocol(PROTOCOL_COMMAND_ARCHIVE_GET_STR, paramList, server), true, "protocol archive get");
TEST_RESULT_STR_Z(strNewBuf(serverWrite), "{}\n", "check result");
TEST_STORAGE_LIST(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000002\n01ABCDEF01ABCDEF01ABCDEF\n");
TEST_RESULT_STR_Z(strNewBuf(serverWrite), "{\"out\":[0,[]]}\n", "check result");
TEST_STORAGE_LIST(
storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000002\n01ABCDEF01ABCDEF01ABCDEF.pgbackrest.tmp\n");
bufUsedSet(serverWrite, 0);