1
0
mirror of https://github.com/pgbackrest/pgbackrest.git synced 2025-01-20 04:59:25 +02:00

Reformat code with uncrustify.

uncrustify has been configured to be as close to the current format as possible but the following changes were required:

* Break long struct initializers out of function calls.
* Bit fields get extra spacing.
* Strings that continue from the previous line are no longer indented.
* Ternary operators that do not fit on a single line moved to the next line first.
* Align under parens for multi-line if statements.
* Macros in header #if blocks are no longer indented.
* Purposeful lack of function indentation in tests has been removed.

Currently uncrustify does not completely reflow the code so there are some edge cases that might not be caught. However, this still represents a huge improvement and the formatting can be refined going forward.

Support code for uncrustify will be in a followup commit.
This commit is contained in:
David Steele 2023-01-30 11:55:54 +07:00
parent b2202c36d9
commit d4070c9064
115 changed files with 2299 additions and 2169 deletions

View File

@ -190,20 +190,20 @@ bldCfgParseCommandList(Yaml *const yaml)
MEM_CONTEXT_BEGIN(lstMemContext(result))
{
lstAdd(
result,
&(BldCfgCommand)
{
.name = strDup(cmdRaw.name),
.internal = cmdRaw.internal,
.logFile = cmdRaw.logFile,
.logLevelDefault = strDup(cmdRaw.logLevelDefault),
.lockRequired = cmdRaw.lockRequired,
.lockRemoteRequired = cmdRaw.lockRemoteRequired,
.lockType = strDup(cmdRaw.lockType),
.parameterAllowed = cmdRaw.parameterAllowed,
.roleList = strLstDup(cmdRaw.roleList),
});
const BldCfgCommand bldCfgCommand =
{
.name = strDup(cmdRaw.name),
.internal = cmdRaw.internal,
.logFile = cmdRaw.logFile,
.logLevelDefault = strDup(cmdRaw.logLevelDefault),
.lockRequired = cmdRaw.lockRequired,
.lockRemoteRequired = cmdRaw.lockRemoteRequired,
.lockType = strDup(cmdRaw.lockType),
.parameterAllowed = cmdRaw.parameterAllowed,
.roleList = strLstDup(cmdRaw.roleList)
};
lstAdd(result, &bldCfgCommand);
}
MEM_CONTEXT_END();
@ -496,13 +496,12 @@ bldCfgParseDependReconcile(
THROW_FMT(FormatError, "dependency on undefined option '%s'", strZ(optDependRaw->option));
result = memNew(sizeof(BldCfgOptionDepend));
memcpy(
result,
&(BldCfgOptionDepend){
.option = optDepend, .defaultValue = strDup(optDependRaw->defaultValue),
.valueList = strLstDup(optDependRaw->valueList)},
sizeof(BldCfgOptionDepend));
*result = (BldCfgOptionDepend)
{
.option = optDepend,
.defaultValue = strDup(optDependRaw->defaultValue),
.valueList = strLstDup(optDependRaw->valueList)
};
}
return result;
@ -582,15 +581,14 @@ bldCfgParseOptionDeprecateReconcile(const List *const optDeprecateRawList)
for (unsigned int optDeprecateRawIdx = 0; optDeprecateRawIdx < lstSize(optDeprecateRawList); optDeprecateRawIdx++)
{
const BldCfgOptionDeprecateRaw *const optDeprecateRaw = lstGet(optDeprecateRawList, optDeprecateRawIdx);
const BldCfgOptionDeprecate bldCfgOptionDeprecate =
{
.name = strDup(optDeprecateRaw->name),
.indexed = optDeprecateRaw->indexed,
.unindexed = optDeprecateRaw->unindexed,
};
lstAdd(
result,
&(BldCfgOptionDeprecate)
{
.name = strDup(optDeprecateRaw->name),
.indexed = optDeprecateRaw->indexed,
.unindexed = optDeprecateRaw->unindexed,
});
lstAdd(result, &bldCfgOptionDeprecate);
}
}
@ -680,18 +678,18 @@ bldCfgParseOptionCommandList(Yaml *const yaml, const List *const optList)
MEM_CONTEXT_BEGIN(lstMemContext(optCmdRawList))
{
lstAdd(
optCmdRawList,
&(BldCfgOptionCommandRaw)
{
.name = strDup(optCmdRaw.name),
.internal = varDup(optCmdRaw.internal),
.required = varDup(optCmdRaw.required),
.defaultValue = strDup(optCmdRaw.defaultValue),
.depend = optCmdRaw.depend,
.allowList = strLstDup(optCmdRaw.allowList),
.roleList = strLstDup(optCmdRaw.roleList),
});
const BldCfgOptionCommandRaw bldCfgOptionCommandRaw =
{
.name = strDup(optCmdRaw.name),
.internal = varDup(optCmdRaw.internal),
.required = varDup(optCmdRaw.required),
.defaultValue = strDup(optCmdRaw.defaultValue),
.depend = optCmdRaw.depend,
.allowList = strLstDup(optCmdRaw.allowList),
.roleList = strLstDup(optCmdRaw.roleList),
};
lstAdd(optCmdRawList, &bldCfgOptionCommandRaw);
}
MEM_CONTEXT_END();
@ -900,26 +898,26 @@ bldCfgParseOptionList(Yaml *const yaml, const List *const cmdList, const List *c
MEM_CONTEXT_BEGIN(lstMemContext(result))
{
lstAdd(
result,
&(BldCfgOption)
{
.name = strDup(optRaw->name),
.type = strDup(optRaw->type),
.section = strDup(optRaw->section),
.internal = optRaw->internal,
.required = varBool(optRaw->required),
.negate = varBool(optRaw->negate),
.reset = optRaw->reset,
.defaultValue = strDup(optRaw->defaultValue),
.defaultLiteral = optRaw->defaultLiteral,
.group = strDup(optRaw->group),
.secure = optRaw->secure,
.allowList = strLstDup(optRaw->allowList),
.allowRangeMin = strDup(optRaw->allowRangeMin),
.allowRangeMax = strDup(optRaw->allowRangeMax),
.deprecateList = bldCfgParseOptionDeprecateReconcile(optRaw->deprecateList),
});
const BldCfgOption bldCfgOption =
{
.name = strDup(optRaw->name),
.type = strDup(optRaw->type),
.section = strDup(optRaw->section),
.internal = optRaw->internal,
.required = varBool(optRaw->required),
.negate = varBool(optRaw->negate),
.reset = optRaw->reset,
.defaultValue = strDup(optRaw->defaultValue),
.defaultLiteral = optRaw->defaultLiteral,
.group = strDup(optRaw->group),
.secure = optRaw->secure,
.allowList = strLstDup(optRaw->allowList),
.allowRangeMin = strDup(optRaw->allowRangeMin),
.allowRangeMax = strDup(optRaw->allowRangeMax),
.deprecateList = bldCfgParseOptionDeprecateReconcile(optRaw->deprecateList),
};
lstAdd(result, &bldCfgOption);
}
MEM_CONTEXT_END();
}
@ -981,18 +979,18 @@ bldCfgParseOptionList(Yaml *const yaml, const List *const cmdList, const List *c
MEM_CONTEXT_BEGIN(lstMemContext(cmdOptList))
{
lstAdd(
cmdOptList,
&(BldCfgOptionCommand)
{
.name = strDup(optCmd.name),
.internal = varBool(optCmd.internal),
.required = varBool(optCmd.required),
.defaultValue = strDup(optCmd.defaultValue),
.depend = bldCfgParseDependReconcile(optRaw, optCmd.depend, result),
.allowList = strLstDup(optCmd.allowList),
.roleList = strLstDup(optCmd.roleList),
});
BldCfgOptionCommand bldCfgOptionCommand =
{
.name = strDup(optCmd.name),
.internal = varBool(optCmd.internal),
.required = varBool(optCmd.required),
.defaultValue = strDup(optCmd.defaultValue),
.depend = bldCfgParseDependReconcile(optRaw, optCmd.depend, result),
.allowList = strLstDup(optCmd.allowList),
.roleList = strLstDup(optCmd.roleList),
};
lstAdd(cmdOptList, &bldCfgOptionCommand);
}
MEM_CONTEXT_END();
}

View File

@ -10,59 +10,59 @@ Parse Configuration Yaml
Command role constants
***********************************************************************************************************************************/
#define CMD_ROLE_ASYNC "async"
STRING_DECLARE(CMD_ROLE_ASYNC_STR);
STRING_DECLARE(CMD_ROLE_ASYNC_STR);
#define CMD_ROLE_LOCAL "local"
STRING_DECLARE(CMD_ROLE_LOCAL_STR);
STRING_DECLARE(CMD_ROLE_LOCAL_STR);
#define CMD_ROLE_MAIN "main"
STRING_DECLARE(CMD_ROLE_MAIN_STR);
STRING_DECLARE(CMD_ROLE_MAIN_STR);
#define CMD_ROLE_REMOTE "remote"
STRING_DECLARE(CMD_ROLE_REMOTE_STR);
STRING_DECLARE(CMD_ROLE_REMOTE_STR);
/***********************************************************************************************************************************
Command constants
***********************************************************************************************************************************/
#define CMD_HELP "help"
STRING_DECLARE(CMD_HELP_STR);
STRING_DECLARE(CMD_HELP_STR);
#define CMD_VERSION "version"
STRING_DECLARE(CMD_VERSION_STR);
STRING_DECLARE(CMD_VERSION_STR);
/***********************************************************************************************************************************
Option type constants
***********************************************************************************************************************************/
#define OPT_TYPE_BOOLEAN "boolean"
STRING_DECLARE(OPT_TYPE_BOOLEAN_STR);
STRING_DECLARE(OPT_TYPE_BOOLEAN_STR);
#define OPT_TYPE_HASH "hash"
STRING_DECLARE(OPT_TYPE_HASH_STR);
STRING_DECLARE(OPT_TYPE_HASH_STR);
#define OPT_TYPE_INTEGER "integer"
STRING_DECLARE(OPT_TYPE_INTEGER_STR);
STRING_DECLARE(OPT_TYPE_INTEGER_STR);
#define OPT_TYPE_LIST "list"
STRING_DECLARE(OPT_TYPE_LIST_STR);
STRING_DECLARE(OPT_TYPE_LIST_STR);
#define OPT_TYPE_PATH "path"
STRING_DECLARE(OPT_TYPE_PATH_STR);
STRING_DECLARE(OPT_TYPE_PATH_STR);
#define OPT_TYPE_SIZE "size"
STRING_DECLARE(OPT_TYPE_SIZE_STR);
STRING_DECLARE(OPT_TYPE_SIZE_STR);
#define OPT_TYPE_STRING "string"
STRING_DECLARE(OPT_TYPE_STRING_STR);
STRING_DECLARE(OPT_TYPE_STRING_STR);
#define OPT_TYPE_STRING_ID "string-id"
STRING_DECLARE(OPT_TYPE_STRING_ID_STR);
STRING_DECLARE(OPT_TYPE_STRING_ID_STR);
#define OPT_TYPE_TIME "time"
STRING_DECLARE(OPT_TYPE_TIME_STR);
STRING_DECLARE(OPT_TYPE_TIME_STR);
/***********************************************************************************************************************************
Option constants
***********************************************************************************************************************************/
#define OPT_STANZA "stanza"
STRING_DECLARE(OPT_STANZA_STR);
STRING_DECLARE(OPT_STANZA_STR);
/***********************************************************************************************************************************
Section constants
***********************************************************************************************************************************/
#define SECTION_COMMAND_LINE "command-line"
STRING_DECLARE(SECTION_COMMAND_LINE_STR);
STRING_DECLARE(SECTION_COMMAND_LINE_STR);
#define SECTION_GLOBAL "global"
STRING_DECLARE(SECTION_GLOBAL_STR);
STRING_DECLARE(SECTION_GLOBAL_STR);
#define SECTION_STANZA "stanza"
STRING_DECLARE(SECTION_STANZA_STR);
STRING_DECLARE(SECTION_STANZA_STR);
/***********************************************************************************************************************************
Types

View File

@ -1037,16 +1037,15 @@ bldCfgRenderParseAutoC(const Storage *const storageRepo, const BldCfg bldCfg, co
for (unsigned int deprecateIdx = 0; deprecateIdx < lstSize(opt->deprecateList); deprecateIdx++)
{
const BldCfgOptionDeprecate *const deprecate = lstGet(opt->deprecateList, deprecateIdx);
const BldCfgRenderOptionDeprecate bldCfgRenderOptionDeprecate =
{
.name = deprecate->name,
.option = opt,
.indexed = deprecate->indexed,
.unindexed = deprecate->unindexed,
};
lstAdd(
deprecateCombineList,
&(BldCfgRenderOptionDeprecate)
{
.name = deprecate->name,
.option = opt,
.indexed = deprecate->indexed,
.unindexed = deprecate->unindexed,
});
lstAdd(deprecateCombineList, &bldCfgRenderOptionDeprecate);
}
}
}

View File

@ -92,14 +92,14 @@ bldErrParseErrorList(Yaml *const yaml)
// Add to list
MEM_CONTEXT_BEGIN(lstMemContext(result))
{
lstAdd(
result,
&(BldErrError)
{
.name = strDup(errRaw.name),
.code = errRaw.code,
.fatal = errRaw.fatal,
});
const BldErrError bldErrError =
{
.name = strDup(errRaw.name),
.code = errRaw.code,
.fatal = errRaw.fatal,
};
lstAdd(result, &bldErrError);
}
MEM_CONTEXT_END();

View File

@ -31,15 +31,15 @@ bldHlpParseOption(XmlNodeList *const xmlOptList, List *const optList, const Stri
// Add option to list
MEM_CONTEXT_BEGIN(lstMemContext(optList))
{
lstAdd(
optList,
&(BldHlpOption)
{
.name = xmlNodeAttribute(xmlOpt, STRDEF("id")),
.section = strDup(section),
.summary = xmlNodeChild(xmlOpt, STRDEF("summary"), true),
.description = xmlNodeChild(xmlOpt, STRDEF("text"), true),
});
const BldHlpOption bldHlpOption =
{
.name = xmlNodeAttribute(xmlOpt, STRDEF("id")),
.section = strDup(section),
.summary = xmlNodeChild(xmlOpt, STRDEF("summary"), true),
.description = xmlNodeChild(xmlOpt, STRDEF("text"), true),
};
lstAdd(optList, &bldHlpOption);
}
MEM_CONTEXT_END();
}
@ -106,15 +106,15 @@ bldHlpParseCommandList(XmlNode *const xml)
// Add command to list
MEM_CONTEXT_BEGIN(lstMemContext(result))
{
lstAdd(
result,
&(BldHlpCommand)
{
.name = xmlNodeAttribute(xmlCmd, STRDEF("id")),
.summary = xmlNodeChild(xmlCmd, STRDEF("summary"), true),
.description = xmlNodeChild(xmlCmd, STRDEF("text"), true),
.optList = lstMove(cmdOptList, memContextCurrent()),
});
const BldHlpCommand bldHlpCommand =
{
.name = xmlNodeAttribute(xmlCmd, STRDEF("id")),
.summary = xmlNodeChild(xmlCmd, STRDEF("summary"), true),
.description = xmlNodeChild(xmlCmd, STRDEF("text"), true),
.optList = lstMove(cmdOptList, memContextCurrent()),
};
lstAdd(result, &bldHlpCommand);
}
MEM_CONTEXT_END();
}

View File

@ -62,13 +62,13 @@ bldPgVersionList(Yaml *const yaml)
// Add to list
MEM_CONTEXT_BEGIN(lstMemContext(result))
{
lstAdd(
result,
&(BldPgVersion)
{
.version = strDup(pgRaw.version),
.release = pgRaw.release,
});
const BldPgVersion bldPgVersion =
{
.version = strDup(pgRaw.version),
.release = pgRaw.release
};
lstAdd(result, &bldPgVersion);
}
MEM_CONTEXT_END();

View File

@ -35,10 +35,10 @@ STRING_EXTERN(WAL_TIMELINE_HISTORY_REGEXP_STR, WAL_TIMELINE
Global error file constant
***********************************************************************************************************************************/
#define STATUS_FILE_GLOBAL "global"
STRING_STATIC(STATUS_FILE_GLOBAL_STR, STATUS_FILE_GLOBAL);
STRING_STATIC(STATUS_FILE_GLOBAL_STR, STATUS_FILE_GLOBAL);
#define STATUS_FILE_GLOBAL_ERROR STATUS_FILE_GLOBAL STATUS_EXT_ERROR
STRING_STATIC(STATUS_FILE_GLOBAL_ERROR_STR, STATUS_FILE_GLOBAL_ERROR);
STRING_STATIC(STATUS_FILE_GLOBAL_ERROR_STR, STATUS_FILE_GLOBAL_ERROR);
/***********************************************************************************************************************************
Get the correct spool queue based on the archive mode
@ -341,8 +341,8 @@ walPath(const String *walFile, const String *pgPath, const String *command)
THROW_FMT(
OptionRequiredError,
"option '%s' must be specified when relative wal paths are used\n"
"HINT: is %%f passed to %s instead of %%p?\n"
"HINT: PostgreSQL may pass relative paths even with %%p depending on the environment.",
"HINT: is %%f passed to %s instead of %%p?\n"
"HINT: PostgreSQL may pass relative paths even with %%p depending on the environment.",
cfgOptionName(cfgOptPgPath), strZ(command));
}
@ -367,7 +367,7 @@ walPath(const String *walFile, const String *pgPath, const String *command)
THROW_FMT(
OptionInvalidValueError,
PG_NAME " working directory '%s' is not the same as option %s '%s'\n"
"HINT: is the " PG_NAME " data_directory configured the same as the %s option?",
"HINT: is the " PG_NAME " data_directory configured the same as the %s option?",
currentWorkDir, cfgOptionName(cfgOptPgPath), strZ(pgPath), cfgOptionName(cfgOptPgPath));
}
}
@ -446,7 +446,7 @@ walSegmentFind(const Storage *storage, const String *archiveId, const String *wa
THROW_FMT(
ArchiveDuplicateError,
"duplicates found in archive for WAL segment %s: %s\n"
"HINT: are multiple primaries archiving to this stanza?",
"HINT: are multiple primaries archiving to this stanza?",
strZ(walSegment), strZ(strLstJoin(strLstSort(list, sortOrderAsc), ", ")));
}
@ -467,9 +467,9 @@ walSegmentFind(const Storage *storage, const String *archiveId, const String *wa
THROW_FMT(
ArchiveTimeoutError,
"WAL segment %s was not archived before the %" PRIu64 "ms timeout\n"
"HINT: check the archive_command to ensure that all options are correct (especially --stanza).\n"
"HINT: check the PostgreSQL server log for errors.\n"
"HINT: run the 'start' command if the stanza was previously stopped.",
"HINT: check the archive_command to ensure that all options are correct (especially --stanza).\n"
"HINT: check the PostgreSQL server log for errors.\n"
"HINT: run the 'start' command if the stanza was previously stopped.",
strZ(walSegment), timeout);
}

View File

@ -43,24 +43,24 @@ WAL segment constants
// Match on a WAL segment without checksum appended
#define WAL_SEGMENT_REGEXP WAL_SEGMENT_PREFIX_REGEXP "$"
STRING_DECLARE(WAL_SEGMENT_REGEXP_STR);
STRING_DECLARE(WAL_SEGMENT_REGEXP_STR);
// Match on a WAL segment with partial allowed
#define WAL_SEGMENT_PARTIAL_REGEXP WAL_SEGMENT_PREFIX_REGEXP "(\\.partial){0,1}$"
STRING_DECLARE(WAL_SEGMENT_PARTIAL_REGEXP_STR);
STRING_DECLARE(WAL_SEGMENT_PARTIAL_REGEXP_STR);
// Defines the size of standard WAL segment name -- hopefully this won't change
#define WAL_SEGMENT_NAME_SIZE ((unsigned int)24)
// WAL segment directory/file
#define WAL_SEGMENT_DIR_REGEXP "^[0-F]{16}$"
STRING_DECLARE(WAL_SEGMENT_DIR_REGEXP_STR);
STRING_DECLARE(WAL_SEGMENT_DIR_REGEXP_STR);
#define WAL_SEGMENT_FILE_REGEXP "^[0-F]{24}-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$"
STRING_DECLARE(WAL_SEGMENT_FILE_REGEXP_STR);
STRING_DECLARE(WAL_SEGMENT_FILE_REGEXP_STR);
// Timeline history file
#define WAL_TIMELINE_HISTORY_REGEXP "^[0-F]{8}.history$"
STRING_DECLARE(WAL_TIMELINE_HISTORY_REGEXP_STR);
STRING_DECLARE(WAL_TIMELINE_HISTORY_REGEXP_STR);
/***********************************************************************************************************************************
Functions

View File

@ -171,17 +171,17 @@ archiveGetFind(
{
MEM_CONTEXT_BEGIN(lstMemContext(cacheArchive->pathList))
{
cachePath = lstAdd(
cacheArchive->pathList,
&(ArchiveGetFindCachePath)
{
.path = strDup(path),
.fileList = storageListP(
storageRepoIdx(cacheRepo->repoIdx),
strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(cacheArchive->archiveId), strZ(path)),
.expression = strNewFmt(
"^%s[0-F]{8}-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(path))),
});
const ArchiveGetFindCachePath archiveGetFindCachePath =
{
.path = strDup(path),
.fileList = storageListP(
storageRepoIdx(cacheRepo->repoIdx),
strNewFmt(STORAGE_REPO_ARCHIVE "/%s/%s", strZ(cacheArchive->archiveId), strZ(path)),
.expression = strNewFmt(
"^%s[0-F]{8}-[0-f]{40}" COMPRESS_TYPE_REGEXP "{0,1}$", strZ(path))),
};
cachePath = lstAdd(cacheArchive->pathList, &archiveGetFindCachePath);
}
MEM_CONTEXT_END();
}
@ -201,18 +201,18 @@ archiveGetFind(
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
lstAdd(
matchList,
&(ArchiveGetFile)
{
.file = strNewFmt(
"%s/%s/%s", strZ(cacheArchive->archiveId), strZ(path),
strZ(strLstGet(segmentList, segmentIdx))),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
});
const ArchiveGetFile archiveGetFile =
{
.file = strNewFmt(
"%s/%s/%s", strZ(cacheArchive->archiveId), strZ(path),
strZ(strLstGet(segmentList, segmentIdx))),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
};
lstAdd(matchList, &archiveGetFile);
}
MEM_CONTEXT_END();
}
@ -225,16 +225,16 @@ archiveGetFind(
{
MEM_CONTEXT_BEGIN(lstMemContext(getCheckResult->archiveFileMapList))
{
lstAdd(
matchList,
&(ArchiveGetFile)
{
.file = strNewFmt("%s/%s", strZ(cacheArchive->archiveId), strZ(archiveFileRequest)),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
});
const ArchiveGetFile archiveGetFile =
{
.file = strNewFmt("%s/%s", strZ(cacheArchive->archiveId), strZ(archiveFileRequest)),
.repoIdx = cacheRepo->repoIdx,
.archiveId = cacheArchive->archiveId,
.cipherType = cacheRepo->cipherType,
.cipherPassArchive = cacheRepo->cipherPassArchive,
};
lstAdd(matchList, &archiveGetFile);
}
MEM_CONTEXT_END();
}
@ -318,7 +318,7 @@ archiveGetFind(
getCheckResult->errorFile = strDup(archiveFileRequest);
getCheckResult->errorMessage = strNewFmt(
"duplicates found for WAL segment %s:%s\n"
"HINT: are multiple primaries archiving to this stanza?",
"HINT: are multiple primaries archiving to this stanza?",
strZ(archiveFileRequest), strZ(message));
getCheckResult->warnList = strLstMove(fileWarnList, memContextCurrent());
}

View File

@ -275,15 +275,15 @@ archivePushCheck(bool pgPathSet)
result.pgVersion = archiveInfo.version;
result.pgSystemId = archiveInfo.systemId;
lstAdd(
result.repoList,
&(ArchivePushFileRepoData)
{
.repoIdx = repoIdx,
.archiveId = strDup(archiveId),
.cipherType = repoCipherType,
.cipherPass = strDup(infoArchiveCipherPass(info)),
});
const ArchivePushFileRepoData archivePushFileRepoData =
{
.repoIdx = repoIdx,
.archiveId = strDup(archiveId),
.cipherType = repoCipherType,
.cipherPass = strDup(infoArchiveCipherPass(info)),
};
lstAdd(result.repoList, &archivePushFileRepoData);
}
MEM_CONTEXT_PRIOR_END();
}

View File

@ -507,8 +507,8 @@ backupResumeClean(
continue;
// Build the name used to lookup files in the manifest
const String *manifestName = manifestParentName != NULL ?
strNewFmt("%s/%s", strZ(manifestParentName), strZ(info.name)) : info.name;
const String *manifestName =
manifestParentName != NULL ? strNewFmt("%s/%s", strZ(manifestParentName), strZ(info.name)) : info.name;
// Build the backup path used to remove files/links/paths that are invalid
const String *const backupPath = strNewFmt("%s/%s", strZ(backupParentPath), strZ(info.name));
@ -720,7 +720,7 @@ backupResumeFind(const Manifest *manifest, const String *cipherPassBackup)
// Check compression. Compression can't be changed between backups so resume won't work either.
else if (
manifestResumeData->backupOptionCompressType !=
compressTypeEnum(cfgOptionStrId(cfgOptCompressType)))
compressTypeEnum(cfgOptionStrId(cfgOptCompressType)))
{
reason = strNewFmt(
"new compression '%s' does not match resumable compression '%s'",
@ -844,7 +844,7 @@ backupStart(BackupData *backupData)
THROW(
PgRunningError,
"--no-" CFGOPT_ONLINE " passed but " PG_FILE_POSTMTRPID " exists - looks like " PG_NAME " is running. Shut"
" down " PG_NAME " and try again, or use --force.");
" down " PG_NAME " and try again, or use --force.");
}
}
}
@ -1181,8 +1181,8 @@ backupJobResult(
MEM_CONTEXT_TEMP_BEGIN()
{
const unsigned int processId = protocolParallelJobProcessId(job);
const uint64_t bundleId = varType(protocolParallelJobKey(job)) == varTypeUInt64 ?
varUInt64(protocolParallelJobKey(job)) : 0;
const uint64_t bundleId =
varType(protocolParallelJobKey(job)) == varTypeUInt64 ? varUInt64(protocolParallelJobKey(job)) : 0;
PackRead *const jobResult = protocolParallelJobResult(job);
unsigned int percentComplete = 0;
@ -1218,8 +1218,8 @@ backupJobResult(
logProgress, "%s, %u.%02u%%", strZ(strSizeFormat(copySize)), percentComplete / 100, percentComplete % 100);
// Format log checksum
const String *const logChecksum = copySize != 0 ?
strNewFmt(" checksum %s", strZ(strNewEncode(encodingHex, copyChecksum))) : EMPTY_STR;
const String *const logChecksum =
copySize != 0 ? strNewFmt(" checksum %s", strZ(strNewEncode(encodingHex, copyChecksum))) : EMPTY_STR;
// If the file is in a prior backup and nothing changed, just log it
if (copyResult == backupCopyResultNoOp)
@ -1339,8 +1339,8 @@ backupJobResult(
file.checksumRepoSha1 = repoChecksum != NULL ? bufPtrConst(repoChecksum) : NULL;
file.reference = NULL;
file.checksumPageError = checksumPageError;
file.checksumPageErrorList = checksumPageErrorList != NULL ?
jsonFromVar(varNewVarLst(checksumPageErrorList)) : NULL;
file.checksumPageErrorList =
checksumPageErrorList != NULL ? jsonFromVar(varNewVarLst(checksumPageErrorList)) : NULL;
file.bundleId = bundleId;
file.bundleOffset = bundleOffset;
file.blockIncrMapSize = blockIncrMapSize;
@ -1691,8 +1691,8 @@ backupJobCallback(void *data, unsigned int clientIdx)
// Determine where to begin scanning the queue (we'll stop when we get back here). When copying from the primary during
// backup from standby only queue 0 will be used.
unsigned int queueOffset = jobData->backupStandby && clientIdx > 0 ? 1 : 0;
int queueIdx = jobData->backupStandby && clientIdx == 0 ?
0 : (int)(clientIdx % (lstSize(jobData->queueList) - queueOffset));
int queueIdx =
jobData->backupStandby && clientIdx == 0 ? 0 : (int)(clientIdx % (lstSize(jobData->queueList) - queueOffset));
int queueEnd = queueIdx;
// Create backup job
@ -1869,8 +1869,8 @@ backupProcess(
// Build expression to identify files that can be copied from the standby when standby backup is supported
.standbyExp = regExpNew(
strNewFmt(
"^((" MANIFEST_TARGET_PGDATA "/(" PG_PATH_BASE "|" PG_PATH_GLOBAL "|%s|" PG_PATH_PGMULTIXACT "))|"
MANIFEST_TARGET_PGTBLSPC ")/",
"^((" MANIFEST_TARGET_PGDATA "/(" PG_PATH_BASE "|" PG_PATH_GLOBAL "|%s|" PG_PATH_PGMULTIXACT "))"
"|" MANIFEST_TARGET_PGTBLSPC ")/",
strZ(pgXactPath(backupData->version)))),
};

View File

@ -188,10 +188,14 @@ blockMapWrite(const BlockMap *const this, IoWrite *const output)
ioWriteVarIntU64(output, blockMapItem->offset);
// Add reference to list
blockMapRef = lstAdd(
refList,
&(BlockMapRef){
.reference = blockMapItem->reference, .bundleId = blockMapItem->bundleId, .offset = blockMapItem->offset});
const BlockMapRef blockMapRefAdd =
{
.reference = blockMapItem->reference,
.bundleId = blockMapItem->bundleId,
.offset = blockMapItem->offset,
};
blockMapRef = lstAdd(refList, &blockMapRefAdd);
}
else
{

View File

@ -213,12 +213,15 @@ backupFile(
}
// Compress filter
IoFilter *const compress = repoFileCompressType != compressTypeNone ?
compressFilter(repoFileCompressType, repoFileCompressLevel) : NULL;
IoFilter *const compress =
repoFileCompressType != compressTypeNone ?
compressFilter(repoFileCompressType, repoFileCompressLevel) : NULL;
// Encrypt filter
IoFilter *const encrypt = cipherType != cipherTypeNone ?
cipherBlockNewP(cipherModeEncrypt, cipherType, BUFSTR(cipherPass), .raw = file->blockIncrSize != 0) : NULL;
IoFilter *const encrypt =
cipherType != cipherTypeNone ?
cipherBlockNewP(cipherModeEncrypt, cipherType, BUFSTR(cipherPass), .raw = file->blockIncrSize != 0) :
NULL;
// If block incremental then add the filter and pass compress/encrypt filters to it since each block is
// compressed/encrypted separately

View File

@ -154,8 +154,8 @@ checkStanzaInfoPg(
THROW(
FileInvalidError,
"backup and archive info files exist but do not match the database\n"
"HINT: is this the correct stanza?\n"
"HINT: did an error occur during stanza-upgrade?");
"HINT: is this the correct stanza?\n"
"HINT: did an error occur during stanza-upgrade?");
}
}
MEM_CONTEXT_TEMP_END();

View File

@ -1502,9 +1502,10 @@ infoRender(void)
KeyValue *backupLockKv = varKv(kvGet(lockKv, STATUS_KEY_LOCK_BACKUP_VAR));
bool backupLockHeld = varBool(kvGet(backupLockKv, STATUS_KEY_LOCK_BACKUP_HELD_VAR));
const Variant *const percentComplete = kvGet(backupLockKv, STATUS_KEY_LOCK_BACKUP_PERCENT_COMPLETE_VAR);
const String *const percentCompleteStr = percentComplete != NULL ?
strNewFmt(" - %u.%02u%% complete", varUInt(percentComplete) / 100, varUInt(percentComplete) % 100) :
EMPTY_STR;
const String *const percentCompleteStr =
percentComplete != NULL ?
strNewFmt(" - %u.%02u%% complete", varUInt(percentComplete) / 100, varUInt(percentComplete) % 100) :
EMPTY_STR;
if (statusCode != INFO_STANZA_STATUS_CODE_OK)
{

View File

@ -140,12 +140,15 @@ restoreFile(
// after restore.
if (info.timeModified != file->timeModified)
{
const struct utimbuf uTimeBuf =
{
.actime = file->timeModified,
.modtime = file->timeModified,
};
THROW_ON_SYS_ERROR_FMT(
utime(
fileName,
&((struct utimbuf){
.actime = file->timeModified, .modtime = file->timeModified})) == -1,
FileInfoError, "unable to set time for '%s'", fileName);
utime(fileName, &uTimeBuf) == -1, FileInfoError, "unable to set time for '%s'",
fileName);
}
fileResult->result = restoreResultPreserve;
@ -292,8 +295,8 @@ restoreFile(
// Size of delta map. If there is no delta map because the pg file does not exist then set to zero, which
// will force all blocks to be updated.
const unsigned int deltaMapSize = file->deltaMap == NULL ?
0 : (unsigned int)(bufUsed(file->deltaMap) / HASH_TYPE_SHA1_SIZE);
const unsigned int deltaMapSize =
file->deltaMap == NULL ? 0 : (unsigned int)(bufUsed(file->deltaMap) / HASH_TYPE_SHA1_SIZE);
// Find and write updated blocks
bool updateFound = false; // Is there a block list to be updated?
@ -344,8 +347,8 @@ restoreFile(
!bufEq(
BUF(blockMapItemNext->checksum, HASH_TYPE_SHA1_SIZE),
BUF(
bufPtrConst(file->deltaMap) + (blockMapIdx + 1) * HASH_TYPE_SHA1_SIZE,
HASH_TYPE_SHA1_SIZE))))
bufPtrConst(file->deltaMap) + (blockMapIdx + 1) * HASH_TYPE_SHA1_SIZE,
HASH_TYPE_SHA1_SIZE))))
{
continue;
}

View File

@ -32,7 +32,7 @@ Restore Command
Recovery constants
***********************************************************************************************************************************/
#define RESTORE_COMMAND "restore_command"
STRING_STATIC(RESTORE_COMMAND_STR, RESTORE_COMMAND);
STRING_STATIC(RESTORE_COMMAND_STR, RESTORE_COMMAND);
#define RECOVERY_TARGET "recovery_target"
#define RECOVERY_TARGET_LSN "recovery_target_lsn"
@ -48,7 +48,7 @@ Recovery constants
#define PAUSE_AT_RECOVERY_TARGET "pause_at_recovery_target"
#define STANDBY_MODE "standby_mode"
STRING_STATIC(STANDBY_MODE_STR, STANDBY_MODE);
STRING_STATIC(STANDBY_MODE_STR, STANDBY_MODE);
#define ARCHIVE_MODE "archive_mode"
@ -68,8 +68,8 @@ restorePathValidate(void)
THROW_FMT(
PgRunningError,
"unable to restore while PostgreSQL is running\n"
"HINT: presence of '" PG_FILE_POSTMTRPID "' in '%s' indicates PostgreSQL is running.\n"
"HINT: remove '" PG_FILE_POSTMTRPID "' only if PostgreSQL is not running.",
"HINT: presence of '" PG_FILE_POSTMTRPID "' in '%s' indicates PostgreSQL is running.\n"
"HINT: remove '" PG_FILE_POSTMTRPID "' only if PostgreSQL is not running.",
strZ(cfgOptionDisplay(cfgOptPgPath)));
}
@ -79,9 +79,9 @@ restorePathValidate(void)
{
LOG_WARN_FMT(
"--delta or --force specified but unable to find '" PG_FILE_PGVERSION "' or '" BACKUP_MANIFEST_FILE "' in '%s' to"
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.",
strZ(cfgOptionDisplay(cfgOptPgPath)));
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.",
strZ(cfgOptionDisplay(cfgOptPgPath)));
// Disable delta and force so restore will fail if the directories are not empty
cfgOptionSet(cfgOptDelta, cfgSourceDefault, VARBOOL(false));
@ -163,9 +163,18 @@ getEpoch(const String *targetTime)
// Set tm_isdst to -1 to force mktime to consider if DST. For example, if system time is America/New_York then
// 2019-09-14 20:02:49 was a time in DST so the Epoch value should be 1568505769 (and not 1568509369 which would be
// 2019-09-14 21:02:49 - an hour too late)
result = mktime(
&(struct tm){.tm_sec = dtSecond, .tm_min = dtMinute, .tm_hour = dtHour, .tm_mday = dtDay, .tm_mon = dtMonth - 1,
.tm_year = dtYear - 1900, .tm_isdst = -1});
struct tm time =
{
.tm_sec = dtSecond,
.tm_min = dtMinute,
.tm_hour = dtHour,
.tm_mday = dtDay,
.tm_mon = dtMonth - 1,
.tm_year = dtYear - 1900,
.tm_isdst = -1,
};
result = mktime(&time);
}
}
else
@ -174,7 +183,7 @@ getEpoch(const String *targetTime)
FormatError,
"automatic backup set selection cannot be performed with provided time '%s'\n"
"HINT: time format must be YYYY-MM-DD HH:MM:SS with optional msec and optional timezone (+/- HH or HHMM or HH:MM)"
" - if timezone is omitted, local time is assumed (for UTC use +00)",
" - if timezone is omitted, local time is assumed (for UTC use +00)",
strZ(targetTime));
}
}
@ -317,7 +326,7 @@ restoreBackupSet(void)
{
LOG_WARN_FMT(
"%s reached backup from prior version missing required LSN info before finding a match -- backup"
" auto-select has been disabled for this repo\n"
" auto-select has been disabled for this repo\n"
"HINT: you may specify a backup to restore using the --set option.",
cfgOptionGroupName(cfgOptGrpRepo, repoIdx));
@ -351,8 +360,8 @@ restoreBackupSet(void)
BackupSetInvalidError,
"the latest backup set found '%s' is from a prior version of " PG_NAME "\n"
"HINT: was a backup created after the stanza-upgrade?\n"
"HINT: specify --" CFGOPT_SET " or --" CFGOPT_TYPE "=time/lsn to restore from a prior version of "
PG_NAME ".",
"HINT: specify --" CFGOPT_SET " or --" CFGOPT_TYPE "=time/lsn to restore from a prior version of"
" " PG_NAME ".",
strZ(latestBackup.backupLabel));
}
@ -595,12 +604,15 @@ restoreManifestMap(Manifest *manifest)
// Add the link. Copy user/group from the base data directory.
const ManifestPath *const pathBase = manifestPathFind(manifest, MANIFEST_TARGET_PGDATA_STR);
const ManifestLink manifestLink =
{
.name = manifestName,
.destination = linkPath,
.group = pathBase->group,
.user = pathBase->user,
};
manifestLinkAdd(
manifest,
&(ManifestLink){
.name = manifestName, .destination = linkPath, .group = pathBase->group, .user = pathBase->user});
manifestLinkAdd(manifest, &manifestLink);
create = true;
}
// Else update target to new path
@ -1570,7 +1582,7 @@ restoreRecoveryOption(unsigned int pgVersion)
THROW_FMT(
OptionInvalidError,
"option '" CFGOPT_ARCHIVE_MODE "' is not supported on " PG_NAME " < " PG_VERSION_12_STR "\n"
"HINT: 'archive_mode' should be manually set to 'off' in postgresql.conf.");
"HINT: 'archive_mode' should be manually set to 'off' in postgresql.conf.");
}
// The only other valid option is off
@ -1784,8 +1796,8 @@ restoreRecoveryWriteAutoConf(unsigned int pgVersion, const String *restoreLabel)
regExpNew(
STRDEF(
"^[\t ]*(" RECOVERY_TARGET "|" RECOVERY_TARGET_ACTION "|" RECOVERY_TARGET_INCLUSIVE "|"
RECOVERY_TARGET_LSN "|" RECOVERY_TARGET_NAME "|" RECOVERY_TARGET_TIME "|" RECOVERY_TARGET_TIMELINE "|"
RECOVERY_TARGET_XID ")[\t ]*="));
RECOVERY_TARGET_LSN "|" RECOVERY_TARGET_NAME "|" RECOVERY_TARGET_TIME "|" RECOVERY_TARGET_TIMELINE "|"
RECOVERY_TARGET_XID ")[\t ]*="));
// Check each line for recovery settings
const StringList *contentList = strLstNewSplit(strNewBuf(autoConf), LF_STR);
@ -1835,7 +1847,7 @@ restoreRecoveryWriteAutoConf(unsigned int pgVersion, const String *restoreLabel)
OptionInvalidError,
"'" STANDBY_MODE "' setting is not valid for " PG_NAME " >= %s\n"
"HINT: use --" CFGOPT_TYPE "=" CFGOPTVAL_TYPE_STANDBY_Z " instead of --" CFGOPT_RECOVERY_OPTION "="
STANDBY_MODE "=on.",
STANDBY_MODE "=on.",
strZ(pgVersionToStr(PG_VERSION_RECOVERY_GUC)));
}
}
@ -1898,8 +1910,8 @@ restoreRecoveryWrite(const Manifest *manifest)
if (cfgOptionStrId(cfgOptType) == CFGOPTVAL_TYPE_PRESERVE)
{
// Determine which file recovery settings will be written to
const String *recoveryFile = pgVersion >= PG_VERSION_RECOVERY_GUC ?
PG_FILE_POSTGRESQLAUTOCONF_STR : PG_FILE_RECOVERYCONF_STR;
const String *const recoveryFile =
pgVersion >= PG_VERSION_RECOVERY_GUC ? PG_FILE_POSTGRESQLAUTOCONF_STR : PG_FILE_RECOVERYCONF_STR;
if (!storageExistsP(storagePg(), recoveryFile))
{

View File

@ -144,7 +144,7 @@ cmdStanzaCreate(void)
THROW_FMT(
FileMissingError,
"%s on %s\n"
"HINT: this may be a symptom of repository corruption!",
"HINT: this may be a symptom of repository corruption!",
(archiveInfoFileExists || archiveInfoFileCopyExists) ?
"archive.info exists but backup.info is missing" : "backup.info exists but archive.info is missing",
cfgOptionGroupName(cfgOptGrpRepo, repoIdx));

View File

@ -426,7 +426,7 @@ verifyManifestFile(
result = verifyManifestInfoCopy.manifest;
}
else if (verifyManifestInfo.errorCode == errorTypeCode(&FileMissingError) &&
verifyManifestInfoCopy.errorCode == errorTypeCode(&FileMissingError))
verifyManifestInfoCopy.errorCode == errorTypeCode(&FileMissingError))
{
backupResult->status = backupMissingManifest;
@ -465,7 +465,7 @@ verifyManifestFile(
{
LOG_INFO_FMT(
"'%s' may not be recoverable - PG data (id %u, version %s, system-id %" PRIu64 ") is not in the backup.info"
" history, skipping",
" history, skipping",
strZ(backupResult->backupLabel), manData->pgId, strZ(pgVersionToStr(manData->pgVersion)), manData->pgSystemId);
manifestFree(result);

View File

@ -10,45 +10,51 @@ Assert Routines
Asserts are used in test code to ensure that certain conditions are true. They are omitted from production builds.
***********************************************************************************************************************************/
#ifdef DEBUG
#define ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
// Skip inline asserts when coverage testing because they will not have branch coverage. Generally speaking inline assertions
// should be of the "this != NULL" variety which is also caught effectively by Valgrind.
#ifndef DEBUG_COVERAGE
#define ASSERT_INLINE(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
#else
#define ASSERT_INLINE(condition)
#endif
#define ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
// Used when execution reaches an invalid location rather than an invalid condition
#define ASSERT_MSG(message) \
THROW_FMT(AssertError, message);
// Skip inline asserts when coverage testing because they will not have branch coverage. Generally speaking inline assertions
// should be of the "this != NULL" variety which is also caught effectively by Valgrind.
#ifndef DEBUG_COVERAGE
// Declare variables that will be used by later assertions with the goal of making them easier to read and maintain
#define ASSERT_DECLARE(declaration) \
declaration
#define ASSERT_INLINE(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
// Add a parameter to a function that is only used by assertions
#define ASSERT_PARAM(param) \
, param
#else
#define ASSERT(condition)
#define ASSERT_INLINE(condition)
#define ASSERT_MSG(message)
#define ASSERT_DECLARE(declaration)
#define ASSERT_PARAM(param)
#define ASSERT_INLINE(condition)
#endif
// Used when execution reaches an invalid location rather than an invalid condition
#define ASSERT_MSG(message) \
THROW_FMT(AssertError, message);
// Declare variables that will be used by later assertions with the goal of making them easier to read and maintain
#define ASSERT_DECLARE(declaration) \
declaration
// Add a parameter to a function that is only used by assertions
#define ASSERT_PARAM(param) \
, param
#else
#define ASSERT(condition)
#define ASSERT_INLINE(condition)
#define ASSERT_MSG(message)
#define ASSERT_DECLARE(declaration)
#define ASSERT_PARAM(param)
#endif
/***********************************************************************************************************************************

View File

@ -42,9 +42,9 @@ static const struct CompressHelperLocal
IoFilter *(*compressNew)(int); // Function to create new compression filter
StringId decompressType; // Type of the decompression filter
IoFilter *(*decompressNew)(void); // Function to create new decompression filter
int levelDefault:8; // Default compression level
int levelMin:8; // Minimum compression level
int levelMax:8; // Maximum compression level
int levelDefault : 8; // Default compression level
int levelMin : 8; // Minimum compression level
int levelMax : 8; // Maximum compression level
} compressHelperLocal[] =
{
{

View File

@ -23,7 +23,7 @@ Developed against version r131 using the documentation in https://github.com/lz4
Older versions of lz4 do not define the max header size. This seems to be the max for any version.
***********************************************************************************************************************************/
#ifndef LZ4F_HEADER_SIZE_MAX
#define LZ4F_HEADER_SIZE_MAX 19
#define LZ4F_HEADER_SIZE_MAX 19
#endif
/***********************************************************************************************************************************

View File

@ -20,10 +20,10 @@ Hashes for zero-length files (i.e., starting hash)
***********************************************************************************************************************************/
#define HASH_TYPE_MD5_ZERO "d41d8cd98f00b204e9800998ecf8427e"
#define HASH_TYPE_SHA1_ZERO "da39a3ee5e6b4b0d3255bfef95601890afd80709"
BUFFER_DECLARE(HASH_TYPE_SHA1_ZERO_BUF);
BUFFER_DECLARE(HASH_TYPE_SHA1_ZERO_BUF);
#define HASH_TYPE_SHA256_ZERO \
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
BUFFER_DECLARE(HASH_TYPE_SHA256_ZERO_BUF);
BUFFER_DECLARE(HASH_TYPE_SHA256_ZERO_BUF);
/***********************************************************************************************************************************
Hash type sizes

View File

@ -18,45 +18,49 @@ The FUNCTION_AUDIT_*() macros can be used to annotate functions that do that fol
is returned and that is the only value created in the calling context.
***********************************************************************************************************************************/
#if defined(DEBUG_MEM) && defined(DEBUG_TEST_TRACE)
#include "common/macro.h"
#include "common/memContext.h"
// Begin the audit
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN() \
MemContextAuditState MEM_CONTEXT_AUDIT_param = {.memContext = memContextCurrent()}; \
memContextAuditBegin(&MEM_CONTEXT_AUDIT_param)
#include "common/macro.h"
#include "common/memContext.h"
// End the audit
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(returnType) \
memContextAuditEnd(&MEM_CONTEXT_AUDIT_param, returnType)
// Begin the audit
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN() \
MemContextAuditState MEM_CONTEXT_AUDIT_param = {.memContext = memContextCurrent()}; \
memContextAuditBegin(&MEM_CONTEXT_AUDIT_param)
// Allow any new mem contexts or allocations in the calling context. These should be fixed and this macro eventually removed.
#define FUNCTION_AUDIT_IF(condition) \
do \
{ \
if (!(condition)) \
MEM_CONTEXT_AUDIT_param.returnTypeAny = true; \
} \
while (0)
// End the audit
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(returnType) \
memContextAuditEnd(&MEM_CONTEXT_AUDIT_param, returnType)
// Callbacks are difficult to audit so ignore them. Eventually they should all be removed.
#define FUNCTION_AUDIT_CALLBACK() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
// Allow any new mem contexts or allocations in the calling context. These should be fixed and this macro eventually removed.
#define FUNCTION_AUDIT_IF(condition) \
do \
{ \
if (!(condition)) \
MEM_CONTEXT_AUDIT_param.returnTypeAny = true; \
} \
while (0)
// Helper function that creates new mem contexts or allocations in the calling context. These functions should be static (except
// for interface helpers) but it is not clear that anything else needs to be done.
#define FUNCTION_AUDIT_HELPER() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
// Callbacks are difficult to audit so ignore them. Eventually they should all be removed.
#define FUNCTION_AUDIT_CALLBACK() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
// Helper function that creates new mem contexts or allocations in the calling context. These functions should be static (except
// for interface helpers) but it is not clear that anything else needs to be done.
#define FUNCTION_AUDIT_HELPER() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
// Function returns a struct that has new mem contexts or allocations in the calling context. Find a way to fix these.
#define FUNCTION_AUDIT_STRUCT() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
// Function returns a struct that has new mem contexts or allocations in the calling context. Find a way to fix these.
#define FUNCTION_AUDIT_STRUCT() MEM_CONTEXT_AUDIT_param.returnTypeAny = true
#else
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN()
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(returnType)
#define FUNCTION_AUDIT_IF(condition)
#define FUNCTION_AUDIT_CALLBACK()
#define FUNCTION_AUDIT_HELPER()
#define FUNCTION_AUDIT_STRUCT()
#endif // DEBUG_TEST_TRACE_MACRO
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN()
#define FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(returnType)
#define FUNCTION_AUDIT_IF(condition)
#define FUNCTION_AUDIT_CALLBACK()
#define FUNCTION_AUDIT_HELPER()
#define FUNCTION_AUDIT_STRUCT()
#endif
/***********************************************************************************************************************************
Base function debugging macros
@ -68,30 +72,34 @@ level is set to debug or trace.
FUNCTION_LOG_logLevel
#ifdef DEBUG_TEST_TRACE
#define FUNCTION_LOG_BEGIN_BASE(logLevel) \
LogLevel FUNCTION_LOG_LEVEL() = STACK_TRACE_PUSH(logLevel); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
\
{ \
stackTraceParamLog(); \
stackTraceTestStop()
#define FUNCTION_LOG_END_BASE() \
stackTraceTestStart(); \
LOG_FMT(FUNCTION_LOG_LEVEL(), 0, "(%s)", stackTraceParam()); \
}
#define FUNCTION_LOG_BEGIN_BASE(logLevel) \
LogLevel FUNCTION_LOG_LEVEL() = STACK_TRACE_PUSH(logLevel); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
\
{ \
stackTraceParamLog(); \
stackTraceTestStop()
#define FUNCTION_LOG_END_BASE() \
stackTraceTestStart(); \
LOG_FMT(FUNCTION_LOG_LEVEL(), 0, "(%s)", stackTraceParam()); \
}
#else
#define FUNCTION_LOG_BEGIN_BASE(logLevel) \
LogLevel FUNCTION_LOG_LEVEL() = STACK_TRACE_PUSH(logLevel); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
\
if (logAny(FUNCTION_LOG_LEVEL())) \
{ \
stackTraceParamLog()
#define FUNCTION_LOG_END_BASE() \
LOG_FMT(FUNCTION_LOG_LEVEL(), 0, "(%s)", stackTraceParam()); \
}
#define FUNCTION_LOG_BEGIN_BASE(logLevel) \
LogLevel FUNCTION_LOG_LEVEL() = STACK_TRACE_PUSH(logLevel); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
\
if (logAny(FUNCTION_LOG_LEVEL())) \
{ \
stackTraceParamLog()
#define FUNCTION_LOG_END_BASE() \
LOG_FMT(FUNCTION_LOG_LEVEL(), 0, "(%s)", stackTraceParam()); \
}
#endif
/***********************************************************************************************************************************
@ -153,7 +161,7 @@ typedef void (*ObjToLogFormat)(const void *object, StringStatic *debugLog);
FN_EXTERN size_t objToLog(const void *object, ObjToLogFormat formatFunc, char *buffer, size_t bufferSize);
#define FUNCTION_LOG_OBJECT_FORMAT(object, formatFunc, buffer, bufferSize) \
#define FUNCTION_LOG_OBJECT_FORMAT(object, formatFunc, buffer, bufferSize) \
objToLog(object, (ObjToLogFormat)formatFunc, buffer, bufferSize)
// Convert object name to a zero-terminated string for logging
@ -343,7 +351,7 @@ Ignore DEBUG_TEST_TRACE_MACRO if DEBUG is not defined because the underlying fun
***********************************************************************************************************************************/
#ifdef DEBUG
#ifdef DEBUG_TEST_TRACE
#define DEBUG_TEST_TRACE_MACRO
#define DEBUG_TEST_TRACE_MACRO
#endif // DEBUG_TEST_TRACE
#endif // DEBUG
@ -351,135 +359,136 @@ Ignore DEBUG_TEST_TRACE_MACRO if DEBUG is not defined because the underlying fun
#define FUNCTION_TEST_NO_RETURN()
#ifdef DEBUG_TEST_TRACE_MACRO
#define FUNCTION_TEST_BEGIN() \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
#define FUNCTION_TEST_BEGIN() \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_BEGIN(); \
\
/* Ensure that FUNCTION_LOG_BEGIN() and FUNCTION_TEST_BEGIN() are not both used in a single function by declaring the */ \
/* same variable that FUNCTION_LOG_BEGIN() uses to track logging */ \
LogLevel FUNCTION_LOG_LEVEL(); \
(void)FUNCTION_LOG_LEVEL(); \
/* Ensure that FUNCTION_LOG_BEGIN() and FUNCTION_TEST_BEGIN() are not both used in a single function by declaring the */ \
/* same variable that FUNCTION_LOG_BEGIN() uses to track logging */ \
LogLevel FUNCTION_LOG_LEVEL(); \
(void)FUNCTION_LOG_LEVEL(); \
\
/* Ensure that FUNCTION_TEST_RETURN*() is not used with FUNCTION_LOG_BEGIN*() by declaring a variable that will be */ \
/* referenced in FUNCTION_TEST_RETURN*() */ \
bool FUNCTION_TEST_BEGIN_exists; \
/* Ensure that FUNCTION_TEST_RETURN*() is not used with FUNCTION_LOG_BEGIN*() by declaring a variable that will be */ \
/* referenced in FUNCTION_TEST_RETURN*() */ \
bool FUNCTION_TEST_BEGIN_exists; \
\
if (stackTraceTest()) \
{ \
STACK_TRACE_PUSH(logLevelDebug); \
stackTraceParamLog(); \
stackTraceTestStop()
if (stackTraceTest()) \
{ \
STACK_TRACE_PUSH(logLevelDebug); \
stackTraceParamLog(); \
stackTraceTestStop()
#define FUNCTION_TEST_PARAM(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM_P(typeName, param) \
FUNCTION_LOG_PARAM_P(typeName, param)
#define FUNCTION_TEST_PARAM_P(typeName, param) \
FUNCTION_LOG_PARAM_P(typeName, param)
#define FUNCTION_TEST_PARAM_PP(typeName, param) \
FUNCTION_LOG_PARAM_PP(typeName, param)
#define FUNCTION_TEST_PARAM_PP(typeName, param) \
FUNCTION_LOG_PARAM_PP(typeName, param)
#define FUNCTION_TEST_END() \
/* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
(void)FUNCTION_TEST_BEGIN_exists; \
#define FUNCTION_TEST_END() \
(void)FUNCTION_TEST_BEGIN_exists; /* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
\
stackTraceTestStart(); \
}
stackTraceTestStart(); \
}
#define FUNCTION_TEST_VOID() \
FUNCTION_TEST_BEGIN(); \
FUNCTION_TEST_END();
#define FUNCTION_TEST_VOID() \
FUNCTION_TEST_BEGIN(); \
FUNCTION_TEST_END();
#define FUNCTION_TEST_RETURN_TYPE_BASE(typePre, type, typePost, ...) \
do \
{ \
/* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
(void)FUNCTION_TEST_BEGIN_exists; \
#define FUNCTION_TEST_RETURN_TYPE_BASE(typePre, type, typePost, ...) \
do \
{ \
(void)FUNCTION_TEST_BEGIN_exists; /* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
\
typePre type typePost FUNCTION_TEST_result = __VA_ARGS__; \
typePre type typePost FUNCTION_TEST_result = __VA_ARGS__; \
\
STACK_TRACE_POP(true); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(STRINGIFY(type)); \
STACK_TRACE_POP(true); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_END(STRINGIFY(type)); \
\
return FUNCTION_TEST_result; \
} \
while (0)
return FUNCTION_TEST_result; \
} \
while (0)
#define FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(typePre, typeMacroPrefix, typePost, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(typePre, FUNCTION_LOG_##typeMacroPrefix##_TYPE, typePost, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(typePre, typeMacroPrefix, typePost, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(typePre, FUNCTION_LOG_##typeMacroPrefix##_TYPE, typePost, __VA_ARGS__)
#define FUNCTION_TEST_RETURN(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_P(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_PP(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST_P(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST_PP(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_P(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_PP(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(, typeMacroPrefix, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST_P(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_CONST_PP(typeMacroPrefix, ...) \
FUNCTION_TEST_RETURN_TYPE_MACRO_BASE(const, typeMacroPrefix, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_P(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_PP(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST_P(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST_PP(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_P(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_PP(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(, type, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, , __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST_P(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, *, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_TYPE_CONST_PP(type, ...) \
FUNCTION_TEST_RETURN_TYPE_BASE(const, type, **, __VA_ARGS__)
#define FUNCTION_TEST_RETURN_VOID() \
do \
{ \
/* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
(void)FUNCTION_TEST_BEGIN_exists; \
#define FUNCTION_TEST_RETURN_VOID() \
do \
{ \
(void)FUNCTION_TEST_BEGIN_exists; /* CHECK for presence of FUNCTION_TEST_BEGIN*() */ \
\
STACK_TRACE_POP(true); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_END("void"); \
return; \
} \
while (0)
STACK_TRACE_POP(true); \
FUNCTION_TEST_MEM_CONTEXT_AUDIT_END("void"); \
return; \
} \
while (0)
#else
#define FUNCTION_TEST_BEGIN()
#define FUNCTION_TEST_PARAM(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_TEST_END()
#define FUNCTION_TEST_VOID()
#define FUNCTION_TEST_RETURN(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_P(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_PP(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST_P(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST_PP(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_BEGIN()
#define FUNCTION_TEST_PARAM(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_TEST_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_TEST_END()
#define FUNCTION_TEST_VOID()
#define FUNCTION_TEST_RETURN_TYPE(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_P(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_PP(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST_P(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST_PP(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_P(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_PP(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST_P(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_CONST_PP(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_P(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_PP(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST_P(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_TYPE_CONST_PP(type, ...) \
return __VA_ARGS__
#define FUNCTION_TEST_RETURN_VOID() \
return
#define FUNCTION_TEST_RETURN_VOID() \
return
#endif // DEBUG_TEST_TRACE_MACRO
#endif

View File

@ -30,7 +30,7 @@ struct ErrorType
// Define test error
#ifdef DEBUG
ERROR_DEFINE(1, TestError, false, RuntimeError);
ERROR_DEFINE(1, TestError, false, RuntimeError);
#endif
// Include error type definitions
@ -91,7 +91,7 @@ situations.
The temp buffer is required because the error message being passed might be the error already stored in the message buffer.
***********************************************************************************************************************************/
#ifndef ERROR_MESSAGE_BUFFER_SIZE
#define ERROR_MESSAGE_BUFFER_SIZE 8192
#define ERROR_MESSAGE_BUFFER_SIZE 8192
#endif
static char messageBuffer[ERROR_MESSAGE_BUFFER_SIZE];

View File

@ -55,10 +55,10 @@ typedef struct ErrorType ErrorType;
// Declare test error
#ifdef DEBUG
ERROR_DECLARE(TestError);
ERROR_DECLARE(TestError);
#else
// Must always be defined since it might be needed to compile (though not used) during profiling
#define TestError AssertError
// Must always be defined since it might be needed to compile (though not used) during profiling
#define TestError AssertError
#endif
/***********************************************************************************************************************************
@ -234,65 +234,67 @@ Throw an error when a system call fails
// THROW*_ON*() calls that contain conditionals.
#ifdef DEBUG_COVERAGE
// The expression can't be passed directly to errorInternalThrowSys*() because we need to be sure it is evaluated before passing
// errno. Depending on optimization that might not happen.
#define THROW_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSys(error, errno, &errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
// The expression can't be passed directly to errorInternalThrowSys*() because we need to be sure it is evaluated before passing
// errno. Depending on optimization that might not happen.
#define THROW_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSys(error, errno, &errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROW_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSysFmt(error, errno, &errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROW_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSysFmt(error, errno, &errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROWP_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSys(error, errno, errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROWP_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSys(error, errno, errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROWP_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSysFmt(error, errno, errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROWP_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
bool error = expression; \
errorInternalThrowOnSysFmt(error, errno, errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
// Else define the normal macros which check for an error first
#else
#define THROW_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
if (expression) \
errorInternalThrowSys(errno, &errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROW_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
if (expression) \
errorInternalThrowSysFmt(errno, &errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROW_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
if (expression) \
errorInternalThrowSys(errno, &errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROWP_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
if (expression) \
errorInternalThrowSys(errno, errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROW_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
if (expression) \
errorInternalThrowSysFmt(errno, &errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROWP_ON_SYS_ERROR(expression, errorType, message) \
do \
{ \
if (expression) \
errorInternalThrowSys(errno, errorType, __FILE__, __func__, __LINE__, message); \
} while (0)
#define THROWP_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
if (expression) \
errorInternalThrowSysFmt(errno, errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#define THROWP_ON_SYS_ERROR_FMT(expression, errorType, ...) \
do \
{ \
if (expression) \
errorInternalThrowSysFmt(errno, errorType, __FILE__, __func__, __LINE__, __VA_ARGS__); \
} while (0)
#endif
/***********************************************************************************************************************************
@ -338,13 +340,13 @@ FN_EXTERN FN_NO_RETURN FN_PRINTF(6, 7) void errorInternalThrowSysFmt(
// Versions of the above for coverage testing which checks the error condition inside the function
#ifdef DEBUG_COVERAGE
FN_EXTERN void errorInternalThrowOnSys(
bool error, int errNo, const ErrorType *errorType, const char *fileName, const char *functionName, int fileLine,
const char *message);
FN_EXTERN void errorInternalThrowOnSys(
bool error, int errNo, const ErrorType *errorType, const char *fileName, const char *functionName, int fileLine,
const char *message);
FN_EXTERN FN_PRINTF(7, 8) void errorInternalThrowOnSysFmt(
bool error, int errNo, const ErrorType *errorType, const char *fileName, const char *functionName, int fileLine,
const char *format, ...);
FN_EXTERN FN_PRINTF(7, 8) void errorInternalThrowOnSysFmt(
bool error, int errNo, const ErrorType *errorType, const char *fileName, const char *functionName, int fileLine,
const char *format, ...);
#endif
/***********************************************************************************************************************************

View File

@ -29,15 +29,15 @@ typedef struct HttpClient HttpClient;
Statistics constants
***********************************************************************************************************************************/
#define HTTP_STAT_CLIENT "http.client" // Clients created
STRING_DECLARE(HTTP_STAT_CLIENT_STR);
STRING_DECLARE(HTTP_STAT_CLIENT_STR);
#define HTTP_STAT_CLOSE "http.close" // Closes forced by server
STRING_DECLARE(HTTP_STAT_CLOSE_STR);
STRING_DECLARE(HTTP_STAT_CLOSE_STR);
#define HTTP_STAT_REQUEST "http.request" // Requests (i.e. calls to httpRequestNew())
STRING_DECLARE(HTTP_STAT_REQUEST_STR);
STRING_DECLARE(HTTP_STAT_REQUEST_STR);
#define HTTP_STAT_RETRY "http.retry" // Request retries
STRING_DECLARE(HTTP_STAT_RETRY_STR);
STRING_DECLARE(HTTP_STAT_RETRY_STR);
#define HTTP_STAT_SESSION "http.session" // Sessions created
STRING_DECLARE(HTTP_STAT_SESSION_STR);
STRING_DECLARE(HTTP_STAT_SESSION_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -14,7 +14,9 @@ Convert the time using the format specified in https://tools.ietf.org/html/rfc72
only version we support).
***********************************************************************************************************************************/
static const char *const httpCommonMonthList[] =
{"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"};
{
"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
};
static const char *const httpCommonDayList[] = {"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"};
FN_EXTERN time_t

View File

@ -22,48 +22,48 @@ typedef struct HttpRequest HttpRequest;
HTTP Constants
***********************************************************************************************************************************/
#define HTTP_VERSION "HTTP/1.1"
STRING_DECLARE(HTTP_VERSION_STR);
STRING_DECLARE(HTTP_VERSION_STR);
#define HTTP_VERSION_10 "HTTP/1.0"
STRING_DECLARE(HTTP_VERSION_10_STR);
STRING_DECLARE(HTTP_VERSION_10_STR);
#define HTTP_VERB_DELETE "DELETE"
STRING_DECLARE(HTTP_VERB_DELETE_STR);
STRING_DECLARE(HTTP_VERB_DELETE_STR);
#define HTTP_VERB_GET "GET"
STRING_DECLARE(HTTP_VERB_GET_STR);
STRING_DECLARE(HTTP_VERB_GET_STR);
#define HTTP_VERB_HEAD "HEAD"
STRING_DECLARE(HTTP_VERB_HEAD_STR);
STRING_DECLARE(HTTP_VERB_HEAD_STR);
#define HTTP_VERB_POST "POST"
STRING_DECLARE(HTTP_VERB_POST_STR);
STRING_DECLARE(HTTP_VERB_POST_STR);
#define HTTP_VERB_PUT "PUT"
STRING_DECLARE(HTTP_VERB_PUT_STR);
STRING_DECLARE(HTTP_VERB_PUT_STR);
#define HTTP_HEADER_AUTHORIZATION "authorization"
STRING_DECLARE(HTTP_HEADER_AUTHORIZATION_STR);
STRING_DECLARE(HTTP_HEADER_AUTHORIZATION_STR);
#define HTTP_HEADER_CONTENT_LENGTH "content-length"
STRING_DECLARE(HTTP_HEADER_CONTENT_LENGTH_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_LENGTH_STR);
#define HTTP_HEADER_CONTENT_MD5 "content-md5"
STRING_DECLARE(HTTP_HEADER_CONTENT_MD5_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_MD5_STR);
#define HTTP_HEADER_CONTENT_RANGE "content-range"
STRING_DECLARE(HTTP_HEADER_CONTENT_RANGE_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_RANGE_STR);
#define HTTP_HEADER_CONTENT_TYPE "content-type"
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_STR);
#define HTTP_HEADER_CONTENT_TYPE_APP_FORM_URL "application/x-www-form-urlencoded"
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_APP_FORM_URL_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_APP_FORM_URL_STR);
#define HTTP_HEADER_CONTENT_TYPE_JSON "application/json"
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_JSON_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_JSON_STR);
#define HTTP_HEADER_CONTENT_TYPE_XML "application/xml"
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_XML_STR);
STRING_DECLARE(HTTP_HEADER_CONTENT_TYPE_XML_STR);
#define HTTP_HEADER_CONTENT_RANGE_BYTES "bytes"
#define HTTP_HEADER_DATE "date"
STRING_DECLARE(HTTP_HEADER_DATE_STR);
STRING_DECLARE(HTTP_HEADER_DATE_STR);
#define HTTP_HEADER_ETAG "etag"
STRING_DECLARE(HTTP_HEADER_ETAG_STR);
STRING_DECLARE(HTTP_HEADER_ETAG_STR);
#define HTTP_HEADER_HOST "host"
STRING_DECLARE(HTTP_HEADER_HOST_STR);
STRING_DECLARE(HTTP_HEADER_HOST_STR);
#define HTTP_HEADER_LAST_MODIFIED "last-modified"
STRING_DECLARE(HTTP_HEADER_LAST_MODIFIED_STR);
STRING_DECLARE(HTTP_HEADER_LAST_MODIFIED_STR);
#define HTTP_HEADER_RANGE "range"
STRING_DECLARE(HTTP_HEADER_RANGE_STR);
STRING_DECLARE(HTTP_HEADER_RANGE_STR);
#define HTTP_HEADER_RANGE_BYTES "bytes"
/***********************************************************************************************************************************

View File

@ -18,14 +18,14 @@ HTTP Response
HTTP constants
***********************************************************************************************************************************/
#define HTTP_HEADER_CONNECTION "connection"
STRING_STATIC(HTTP_HEADER_CONNECTION_STR, HTTP_HEADER_CONNECTION);
STRING_STATIC(HTTP_HEADER_CONNECTION_STR, HTTP_HEADER_CONNECTION);
#define HTTP_HEADER_TRANSFER_ENCODING "transfer-encoding"
STRING_STATIC(HTTP_HEADER_TRANSFER_ENCODING_STR, HTTP_HEADER_TRANSFER_ENCODING);
STRING_STATIC(HTTP_HEADER_TRANSFER_ENCODING_STR, HTTP_HEADER_TRANSFER_ENCODING);
#define HTTP_VALUE_CONNECTION_CLOSE "close"
STRING_STATIC(HTTP_VALUE_CONNECTION_CLOSE_STR, HTTP_VALUE_CONNECTION_CLOSE);
STRING_STATIC(HTTP_VALUE_CONNECTION_CLOSE_STR, HTTP_VALUE_CONNECTION_CLOSE);
#define HTTP_VALUE_TRANSFER_ENCODING_CHUNKED "chunked"
STRING_STATIC(HTTP_VALUE_TRANSFER_ENCODING_CHUNKED_STR, HTTP_VALUE_TRANSFER_ENCODING_CHUNKED);
STRING_STATIC(HTTP_VALUE_TRANSFER_ENCODING_CHUNKED_STR, HTTP_VALUE_TRANSFER_ENCODING_CHUNKED);
/***********************************************************************************************************************************
Object type
@ -405,7 +405,7 @@ httpResponseToLog(const HttpResponse *const this, StringStatic *const debugLog)
strStcFmt(
debugLog,
"{code: %u, reason: %s, contentChunked: %s, contentSize: %" PRIu64 ", contentRemaining: %" PRIu64 ", closeOnContentEof: %s"
", contentExists: %s, contentEof: %s, contentCached: %s}",
", contentExists: %s, contentEof: %s, contentCached: %s}",
httpResponseCode(this), strZ(httpResponseReason(this)), cvtBoolToConstZ(this->contentChunked), this->contentSize,
this->contentRemaining, cvtBoolToConstZ(this->closeOnContentEof), cvtBoolToConstZ(this->contentExists),
cvtBoolToConstZ(this->contentEof), cvtBoolToConstZ(this->content != NULL));

View File

@ -18,11 +18,11 @@ Io client type
Statistics constants
***********************************************************************************************************************************/
#define SOCKET_STAT_CLIENT "socket.client" // Clients created
STRING_DECLARE(SOCKET_STAT_CLIENT_STR);
STRING_DECLARE(SOCKET_STAT_CLIENT_STR);
#define SOCKET_STAT_RETRY "socket.retry" // Connection retries
STRING_DECLARE(SOCKET_STAT_RETRY_STR);
STRING_DECLARE(SOCKET_STAT_RETRY_STR);
#define SOCKET_STAT_SESSION "socket.session" // Sessions created
STRING_DECLARE(SOCKET_STAT_SESSION_STR);
STRING_DECLARE(SOCKET_STAT_SESSION_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -19,7 +19,7 @@ Io server type
Statistics constants
***********************************************************************************************************************************/
#define SOCKET_STAT_SERVER "socket.server" // Servers created
STRING_DECLARE(SOCKET_STAT_SERVER_STR);
STRING_DECLARE(SOCKET_STAT_SERVER_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -21,11 +21,11 @@ Io client type
Statistics constants
***********************************************************************************************************************************/
#define TLS_STAT_CLIENT "tls.client" // Clients created
STRING_DECLARE(TLS_STAT_CLIENT_STR);
STRING_DECLARE(TLS_STAT_CLIENT_STR);
#define TLS_STAT_RETRY "tls.retry" // Connection retries
STRING_DECLARE(TLS_STAT_RETRY_STR);
STRING_DECLARE(TLS_STAT_RETRY_STR);
#define TLS_STAT_SESSION "tls.session" // Sessions created
STRING_DECLARE(TLS_STAT_SESSION_STR);
STRING_DECLARE(TLS_STAT_SESSION_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -19,7 +19,7 @@ Io server type
Statistics constants
***********************************************************************************************************************************/
#define TLS_STAT_SERVER "tls.server" // Servers created
STRING_DECLARE(TLS_STAT_SERVER_STR);
STRING_DECLARE(TLS_STAT_SERVER_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -14,7 +14,7 @@ Log Handler
Max size allowed for a single log message including header
***********************************************************************************************************************************/
#ifndef LOG_BUFFER_SIZE
#define LOG_BUFFER_SIZE ((size_t)(32 * 1024))
#define LOG_BUFFER_SIZE ((size_t)(32 * 1024))
#endif
/***********************************************************************************************************************************
@ -52,10 +52,10 @@ be used directly. They are included for completeness and future usage.
// Define a macro to test logAny() that can be removed when performing coverage testing. Checking logAny() saves a function call
// for logging calls that won't be output anywhere, but since the macro contains a branch it causes coverage problems.
#ifdef DEBUG_COVERAGE
#define IF_LOG_ANY(logLevel)
#define IF_LOG_ANY(logLevel)
#else
#define IF_LOG_ANY(logLevel) \
if (logAny(logLevel))
#define IF_LOG_ANY(logLevel) \
if (logAny(logLevel))
#endif
#define LOG_INTERNAL(logLevel, logRangeMin, logRangeMax, processId, code, message) \

View File

@ -51,17 +51,21 @@ same kluge.
Adapted from PostgreSQL src/include/c.h.
***********************************************************************************************************************************/
#ifdef HAVE_STATIC_ASSERT
#define STATIC_ASSERT_STMT(condition, message) \
do {_Static_assert(condition, message);} while (0)
#define STATIC_ASSERT_EXPR(condition, message) \
((void)({STATIC_ASSERT_STMT(condition, message); true;}))
#define STATIC_ASSERT_STMT(condition, message) \
do {_Static_assert(condition, message);} while (0)
#define STATIC_ASSERT_EXPR(condition, message) \
((void)({STATIC_ASSERT_STMT(condition, message); true;}))
#else
#define STATIC_ASSERT_STMT(condition, message) \
((void)sizeof(struct {int static_assert_failure : (condition) ? 1 : -1;}))
#define STATIC_ASSERT_EXPR(condition, message) \
STATIC_ASSERT_STMT(condition, message)
#define STATIC_ASSERT_STMT(condition, message) \
((void)sizeof(struct {int static_assert_failure : (condition) ? 1 : -1;}))
#define STATIC_ASSERT_EXPR(condition, message) \
STATIC_ASSERT_STMT(condition, message)
#endif
/***********************************************************************************************************************************
@ -78,11 +82,11 @@ Note that this only works in function scope, not for global variables (it would
Adapted from PostgreSQL src/include/c.h.
***********************************************************************************************************************************/
#ifdef HAVE_BUILTIN_TYPES_COMPATIBLE_P
#define UNCONSTIFY(type, expression) \
(STATIC_ASSERT_EXPR(__builtin_types_compatible_p(__typeof(expression), const type), "invalid cast"), (type)(expression))
#define UNCONSTIFY(type, expression) \
(STATIC_ASSERT_EXPR(__builtin_types_compatible_p(__typeof(expression), const type), "invalid cast"), (type)(expression))
#else
#define UNCONSTIFY(type, expression) \
((type)(expression))
#define UNCONSTIFY(type, expression) \
((type)(expression))
#endif
/***********************************************************************************************************************************

View File

@ -18,8 +18,8 @@ header by doing some pointer arithmetic. This is much faster than searching thro
***********************************************************************************************************************************/
typedef struct MemContextAlloc
{
unsigned int allocIdx:32; // Index in the allocation list
unsigned int size:32; // Allocation size (4GB max)
unsigned int allocIdx : 32; // Index in the allocation list
unsigned int size : 32; // Allocation size (4GB max)
} MemContextAlloc;
// Get the allocation buffer pointer given the allocation header pointer
@ -53,15 +53,15 @@ struct MemContext
#ifdef DEBUG
const char *name; // Indicates what the context is being used for
uint64_t sequenceNew; // Sequence when this context was created (used for audit)
bool active:1; // Is the context currently active?
bool active : 1; // Is the context currently active?
#endif
MemQty childQty:2; // How many child contexts can this context have?
bool childInitialized:1; // Has the child context list been initialized?
MemQty allocQty:2; // How many allocations can this context have?
bool allocInitialized:1; // Has the allocation list been initialized?
MemQty callbackQty:2; // How many callbacks can this context have?
bool callbackInitialized:1; // Has the callback been initialized?
size_t allocExtra:16; // Size of extra allocation (1kB max)
MemQty childQty : 2; // How many child contexts can this context have?
bool childInitialized : 1; // Has the child context list been initialized?
MemQty allocQty : 2; // How many allocations can this context have?
bool allocInitialized : 1; // Has the allocation list been initialized?
MemQty callbackQty : 2; // How many callbacks can this context have?
bool callbackInitialized : 1; // Has the callback been initialized?
size_t allocExtra : 16; // Size of extra allocation (1kB max)
unsigned int contextParentIdx; // Index in the parent context list
MemContext *contextParent; // All contexts have a parent except top

View File

@ -43,26 +43,28 @@ Functions and macros to audit a mem context by detecting new child contexts/allo
are not the expected return type.
***********************************************************************************************************************************/
#if defined(DEBUG)
typedef struct MemContextAuditState
{
MemContext *memContext; // Mem context to audit
bool returnTypeAny; // Skip auditing for this mem context
uint64_t sequenceContextNew; // Max sequence for new contexts at beginning
} MemContextAuditState;
typedef struct MemContextAuditState
{
MemContext *memContext; // Mem context to audit
// Begin the audit
FN_EXTERN void memContextAuditBegin(MemContextAuditState *state);
bool returnTypeAny; // Skip auditing for this mem context
uint64_t sequenceContextNew; // Max sequence for new contexts at beginning
} MemContextAuditState;
// End the audit and make sure the return type is as expected
FN_EXTERN void memContextAuditEnd(const MemContextAuditState *state, const char *returnTypeDefault);
// Begin the audit
FN_EXTERN void memContextAuditBegin(MemContextAuditState *state);
// Rename a mem context using the extra allocation pointer
#define MEM_CONTEXT_AUDIT_ALLOC_EXTRA_NAME(this, name) memContextAuditAllocExtraName(this, #name)
// End the audit and make sure the return type is as expected
FN_EXTERN void memContextAuditEnd(const MemContextAuditState *state, const char *returnTypeDefault);
// Rename a mem context using the extra allocation pointer
#define MEM_CONTEXT_AUDIT_ALLOC_EXTRA_NAME(this, name) memContextAuditAllocExtraName(this, #name)
FN_EXTERN void *memContextAuditAllocExtraName(void *allocExtra, const char *name);
FN_EXTERN void *memContextAuditAllocExtraName(void *allocExtra, const char *name);
#else
#define MEM_CONTEXT_AUDIT_ALLOC_EXTRA_NAME(this, name) this
#define MEM_CONTEXT_AUDIT_ALLOC_EXTRA_NAME(this, name) this
#endif
/***********************************************************************************************************************************
@ -97,12 +99,10 @@ MEM_CONTEXT_END();
#define MEM_CONTEXT_BEGIN(memContext) \
do \
{ \
/* Switch to the new memory context */ \
memContextSwitch(memContext);
memContextSwitch(memContext); /* Switch to the new memory context */
#define MEM_CONTEXT_END() \
/* Switch back to the prior context */ \
memContextSwitchBack(); \
memContextSwitchBack(); /* Switch back to the prior context */ \
} \
while (0)
@ -245,11 +245,11 @@ typedef struct MemContextNewParam
#define MEM_CONTEXT_QTY_MAX UINT8_MAX
#ifdef DEBUG
#define memContextNewP(name, ...) \
memContextNew(name, (MemContextNewParam){VAR_PARAM_INIT, __VA_ARGS__})
#define memContextNewP(name, ...) \
memContextNew(name, (MemContextNewParam){VAR_PARAM_INIT, __VA_ARGS__})
#else
#define memContextNewP(name, ...) \
memContextNew((MemContextNewParam){VAR_PARAM_INIT, __VA_ARGS__})
#define memContextNewP(name, ...) \
memContextNew((MemContextNewParam){VAR_PARAM_INIT, __VA_ARGS__})
#endif
FN_EXTERN MemContext *memContextNew(
@ -312,7 +312,7 @@ FN_EXTERN MemContext *memContextTop(void);
// Get total size of mem context and all children
#ifdef DEBUG
FN_EXTERN size_t memContextSize(const MemContext *this);
FN_EXTERN size_t memContextSize(const MemContext *this);
#endif // DEBUG
/***********************************************************************************************************************************

View File

@ -21,27 +21,29 @@ Macros to access internal functions
stackTracePush(__FILE__, __func__, logLevel)
#ifdef DEBUG
#define STACK_TRACE_POP(test) \
stackTracePop(__FILE__, __func__, test);
#define STACK_TRACE_POP(test) \
stackTracePop(__FILE__, __func__, test);
#else
#define STACK_TRACE_POP(test) \
stackTracePop();
#define STACK_TRACE_POP(test) \
stackTracePop();
#endif
/***********************************************************************************************************************************
Internal Functions
***********************************************************************************************************************************/
#ifdef DEBUG
// Enable/disable test function logging
FN_EXTERN void stackTraceTestStart(void);
FN_EXTERN void stackTraceTestStop(void);
FN_EXTERN bool stackTraceTest(void);
// Set line number for the current function on the stack
FN_EXTERN void stackTraceTestFileLineSet(unsigned int fileLine);
// Enable/disable test function logging
FN_EXTERN void stackTraceTestStart(void);
FN_EXTERN void stackTraceTestStop(void);
FN_EXTERN bool stackTraceTest(void);
// Set line number for the current function on the stack
FN_EXTERN void stackTraceTestFileLineSet(unsigned int fileLine);
#else
// Must always be valid since it might be needed to compile (though not used) during profiling
#define stackTraceTestFileLineSet(fileLine)
// Must always be valid since it might be needed to compile (though not used) during profiling
#define stackTraceTestFileLineSet(fileLine)
#endif
// Push a new function onto the trace stack
@ -49,9 +51,9 @@ FN_EXTERN LogLevel stackTracePush(const char *fileName, const char *functionName
// Pop a function from the trace stack
#ifdef DEBUG
FN_EXTERN void stackTracePop(const char *fileName, const char *functionName, bool test);
FN_EXTERN void stackTracePop(const char *fileName, const char *functionName, bool test);
#else
FN_EXTERN void stackTracePop(void);
FN_EXTERN void stackTracePop(void);
#endif
// Generate the stack trace

View File

@ -96,7 +96,7 @@ tzPartsValid(int tzHour, int tzMinute)
// ??? This is only a sanity check for basic validity of timezone offset of 15 minute intervals until the timezone
// database is implemented.
if (!(((tzHour > -12 && tzHour < 14) && (tzMinute % 15 == 0)) || (tzHour == -12 && tzMinute == 0) ||
(tzHour == 14 && tzMinute == 0)))
(tzHour == 14 && tzMinute == 0)))
{
THROW_FMT(FormatError, "invalid timezone %02d%02d", tzHour, tzMinute);
}

View File

@ -17,7 +17,7 @@ memory requirements for large numbers of zero-terminated strings.
Size of blocks allocated for blob data
***********************************************************************************************************************************/
#ifndef BLOB_BLOCK_SIZE
#define BLOB_BLOCK_SIZE (64 * 1024)
#define BLOB_BLOCK_SIZE (64 * 1024)
#endif
/***********************************************************************************************************************************

View File

@ -94,7 +94,7 @@ cumulative field ID. At the end of a container the numbering will continue from
Minimum number of extra bytes to allocate for packs that are growing or are likely to grow
***********************************************************************************************************************************/
#ifndef PACK_EXTRA_MIN
#define PACK_EXTRA_MIN 128
#define PACK_EXTRA_MIN 128
#endif
/***********************************************************************************************************************************

View File

@ -29,7 +29,7 @@ old context and then back. Below is a simplified example:
Minimum number of extra bytes to allocate for strings that are growing or are likely to grow
***********************************************************************************************************************************/
#ifndef STRING_EXTRA_MIN
#define STRING_EXTRA_MIN 64
#define STRING_EXTRA_MIN 64
#endif
/***********************************************************************************************************************************
@ -83,8 +83,8 @@ Getters/setters
***********************************************************************************************************************************/
typedef struct StringPub
{
uint64_t size:32; // Actual size of the string
uint64_t extra:32; // Extra space allocated for expansion
uint64_t size : 32; // Actual size of the string
uint64_t extra : 32; // Extra space allocated for expansion
char *buffer; // String buffer
} StringPub;

View File

@ -149,7 +149,7 @@ cfgLoadUpdateOption(void)
THROW_FMT(
OptionInvalidValueError,
"'%s' is not valid for '" CFGOPT_PROTOCOL_TIMEOUT "' option\nHINT '" CFGOPT_PROTOCOL_TIMEOUT "' option (%s)"
" should be greater than '" CFGOPT_DB_TIMEOUT "' option (%s).",
" should be greater than '" CFGOPT_DB_TIMEOUT "' option (%s).",
strZ(cfgOptionDisplay(cfgOptProtocolTimeout)), strZ(cfgOptionDisplay(cfgOptProtocolTimeout)),
strZ(cfgOptionDisplay(cfgOptDbTimeout)));
}
@ -283,10 +283,10 @@ cfgLoadUpdateOption(void)
{
THROW_FMT(
OptionInvalidValueError,
"'%s' is not valid for option '%s'"
"\nHINT: RFC-2818 forbids dots in wildcard matches."
"\nHINT: TLS/SSL verification cannot proceed with this bucket name."
"\nHINT: remove dots from the bucket name.",
"'%s' is not valid for option '%s'\n"
"HINT: RFC-2818 forbids dots in wildcard matches.\n"
"HINT: TLS/SSL verification cannot proceed with this bucket name.\n"
"HINT: remove dots from the bucket name.",
strZ(cfgOptionIdxDisplay(cfgOptRepoS3Bucket, repoIdx)), cfgOptionIdxName(cfgOptRepoS3Bucket, repoIdx));
}
}

View File

@ -46,7 +46,7 @@ typedef enum
Standard config file name and old default path and name
***********************************************************************************************************************************/
#define PGBACKREST_CONFIG_ORIG_PATH_FILE "/etc/" PROJECT_CONFIG_FILE
STRING_STATIC(PGBACKREST_CONFIG_ORIG_PATH_FILE_STR, PGBACKREST_CONFIG_ORIG_PATH_FILE);
STRING_STATIC(PGBACKREST_CONFIG_ORIG_PATH_FILE_STR, PGBACKREST_CONFIG_ORIG_PATH_FILE);
/***********************************************************************************************************************************
Prefix for environment variables
@ -63,13 +63,13 @@ Define how a command is parsed
typedef struct ParseRuleCommand
{
const char *name; // Name
unsigned int commandRoleValid:CFG_COMMAND_ROLE_TOTAL; // Valid for the command role?
bool lockRequired:1; // Is an immediate lock required?
bool lockRemoteRequired:1; // Is a lock required on the remote?
unsigned int lockType:2; // Lock type required
bool logFile:1; // Will the command log to a file?
unsigned int logLevelDefault:4; // Default log level
bool parameterAllowed:1; // Command-line parameters are allowed
unsigned int commandRoleValid : CFG_COMMAND_ROLE_TOTAL; // Valid for the command role?
bool lockRequired : 1; // Is an immediate lock required?
bool lockRemoteRequired : 1; // Is a lock required on the remote?
unsigned int lockType : 2; // Lock type required
bool logFile : 1; // Will the command log to a file?
unsigned int logLevelDefault : 4; // Default log level
bool parameterAllowed : 1; // Command-line parameters are allowed
} ParseRuleCommand;
// Macros used to define parse rules in parse.auto.c.inc in a format that diffs well
@ -124,17 +124,17 @@ Define how an option is parsed and interacts with other options
typedef struct ParseRuleOption
{
const char *name; // Name
unsigned int type:4; // e.g. string, int, boolean
bool negate:1; // Can the option be negated on the command line?
bool reset:1; // Can the option be reset on the command line?
bool required:1; // Is the option required?
unsigned int section:2; // e.g. global, stanza, cmd-line
bool secure:1; // Needs to be redacted in logs and cmd-line?
bool multi:1; // Can be specified multiple times?
bool group:1; // In a group?
unsigned int groupId:1; // Id if in a group
bool deprecateMatch:1; // Does a deprecated name exactly match the option name?
unsigned int packSize:7; // Size of optional data in pack format
unsigned int type : 4; // e.g. string, int, boolean
bool negate : 1; // Can the option be negated on the command line?
bool reset : 1; // Can the option be reset on the command line?
bool required : 1; // Is the option required?
unsigned int section : 2; // e.g. global, stanza, cmd-line
bool secure : 1; // Needs to be redacted in logs and cmd-line?
bool multi : 1; // Can be specified multiple times?
bool group : 1; // In a group?
unsigned int groupId : 1; // Id if in a group
bool deprecateMatch : 1; // Does a deprecated name exactly match the option name?
unsigned int packSize : 7; // Size of optional data in pack format
uint32_t commandRoleValid[CFG_COMMAND_ROLE_TOTAL]; // Valid for the command role?
const unsigned char *pack; // Optional data in pack format
@ -273,10 +273,10 @@ Struct to hold options parsed from the command line
***********************************************************************************************************************************/
typedef struct ParseOptionValue
{
bool found:1; // Was the option found?
bool negate:1; // Was the option negated on the command line?
bool reset:1; // Was the option reset on the command line?
unsigned int source:2; // Where was the option found?
bool found : 1; // Was the option found?
bool negate : 1; // Was the option negated on the command line?
bool reset : 1; // Was the option reset on the command line?
unsigned int source : 2; // Where was the option found?
StringList *valueList; // List of values found
} ParseOptionValue;
@ -2049,8 +2049,9 @@ configParse(const Storage *storage, unsigned int argListSize, const char *argLis
unsigned optionKeyIdx = optionGroup ? config->optionGroup[optionGroupId].indexMap[optionListIdx] : 0;
// Get the parsed value using the key index. Provide a default structure when the value was not found.
ParseOptionValue *parseOptionValue = optionKeyIdx < parseOptionList[optionId].indexListTotal ?
&parseOptionList[optionId].indexList[optionKeyIdx] : &(ParseOptionValue){0};
ParseOptionValue *parseOptionValue =
optionKeyIdx < parseOptionList[optionId].indexListTotal ?
&parseOptionList[optionId].indexList[optionKeyIdx] : &(ParseOptionValue){0};
// Get the location where the value will be stored in the configuration
ConfigOptionValue *configOptionValue = &config->option[optionId].index[optionListIdx];
@ -2371,8 +2372,9 @@ configParse(const Storage *storage, unsigned int argListSize, const char *argLis
// Else error if option is required and help was not requested
else
{
const bool required = cfgParseOptionalRule(
&optionalRules, parseRuleOptionalTypeRequired, config->command, optionId) ?
const bool required =
cfgParseOptionalRule(
&optionalRules, parseRuleOptionalTypeRequired, config->command, optionId) ?
optionalRules.required : parseRuleOption[optionId].required;
if (required && !config->help)

View File

@ -287,8 +287,8 @@ dbOpen(Db *this)
THROW(
DbQueryError,
"unable to select some rows from pg_settings\n"
"HINT: is the backup running as the postgres user?\n"
"HINT: is the pg_read_all_settings role assigned for " PG_NAME " >= " PG_VERSION_10_STR "?");
"HINT: is the backup running as the postgres user?\n"
"HINT: is the pg_read_all_settings role assigned for " PG_NAME " >= " PG_VERSION_10_STR "?");
}
}
@ -418,7 +418,7 @@ dbBackupStart(Db *const this, const bool startFast, const bool stopAuto, const b
{
LOG_WARN_FMT(
CFGOPT_START_FAST " is disabled and " CFGOPT_DB_TIMEOUT " (%" PRIu64 "s) is smaller than the " PG_NAME
" checkpoint_timeout (%" PRIu64 "s) - timeout may occur before the backup starts",
" checkpoint_timeout (%" PRIu64 "s) - timeout may occur before the backup starts",
dbDbTimeout(this) / MSEC_PER_SEC, dbCheckpointTimeout(this) / MSEC_PER_SEC);
}
@ -597,7 +597,7 @@ dbList(Db *this)
this, pgClientQueryResultAny,
STRDEF(
"select oid::oid, datname::text, (select oid::oid from pg_catalog.pg_database where datname = 'template0')"
" as datlastsysoid from pg_catalog.pg_database")));
" as datlastsysoid from pg_catalog.pg_database")));
}
/**********************************************************************************************************************************/

View File

@ -278,7 +278,7 @@ infoSaveSection(InfoSave *const infoSaveData, const char *const section, const S
FUNCTION_TEST_RETURN(
BOOL,
(infoSaveData->sectionLast == NULL || strCmpZ(infoSaveData->sectionLast, section) < 0) &&
(sectionNext == NULL || strCmpZ(sectionNext, section) > 0));
(sectionNext == NULL || strCmpZ(sectionNext, section) > 0));
}
/**********************************************************************************************************************************/

View File

@ -22,9 +22,9 @@ Archive info filename
#define REGEX_ARCHIVE_DIR_DB_VERSION "^[0-9]+(\\.[0-9]+)*-[0-9]+$"
#define INFO_ARCHIVE_PATH_FILE STORAGE_REPO_ARCHIVE "/" INFO_ARCHIVE_FILE
STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_STR);
STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_STR);
#define INFO_ARCHIVE_PATH_FILE_COPY INFO_ARCHIVE_PATH_FILE INFO_COPY_EXT
STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_COPY_STR);
STRING_DECLARE(INFO_ARCHIVE_PATH_FILE_COPY_STR);
/***********************************************************************************************************************************
Constructors

View File

@ -469,8 +469,8 @@ infoBackupDataAdd(const InfoBackup *this, const Manifest *manifest)
.optionArchiveCheck = manData->backupOptionArchiveCheck,
.optionArchiveCopy = manData->backupOptionArchiveCopy,
.optionBackupStandby = manData->backupOptionStandby != NULL ? varBool(manData->backupOptionStandby) : false,
.optionChecksumPage = manData->backupOptionChecksumPage != NULL ?
varBool(manData->backupOptionChecksumPage) : false,
.optionChecksumPage =
manData->backupOptionChecksumPage != NULL ? varBool(manData->backupOptionChecksumPage) : false,
.optionCompress = manData->backupOptionCompressType != compressTypeNone,
.optionHardlink = manData->backupOptionHardLink,
.optionOnline = manData->backupOptionOnline,

View File

@ -34,9 +34,9 @@ Constants
#define INFO_BACKUP_FILE "backup.info"
#define INFO_BACKUP_PATH_FILE STORAGE_REPO_BACKUP "/" INFO_BACKUP_FILE
STRING_DECLARE(INFO_BACKUP_PATH_FILE_STR);
STRING_DECLARE(INFO_BACKUP_PATH_FILE_STR);
#define INFO_BACKUP_PATH_FILE_COPY INFO_BACKUP_PATH_FILE INFO_COPY_EXT
STRING_DECLARE(INFO_BACKUP_PATH_FILE_COPY_STR);
STRING_DECLARE(INFO_BACKUP_PATH_FILE_COPY_STR);
/***********************************************************************************************************************************
Information about an existing backup

View File

@ -247,8 +247,9 @@ manifestFilePack(const Manifest *const manifest, const ManifestFile *const file)
const size_t nameSize = strSize(file->name) + 1;
uint8_t *const result = memNew(
sizeof(StringPub) + nameSize + bufferPos + (file->checksumPageErrorList != NULL ?
ALIGN_OFFSET(StringPub, nameSize + bufferPos) + sizeof(StringPub) + strSize(file->checksumPageErrorList) + 1 : 0));
sizeof(StringPub) + nameSize + bufferPos +
(file->checksumPageErrorList != NULL ?
ALIGN_OFFSET(StringPub, nameSize + bufferPos) + sizeof(StringPub) + strSize(file->checksumPageErrorList) + 1 : 0));
// Create string object for the file name
*(StringPub *)result = (StringPub){.size = (unsigned int)strSize(file->name), .buffer = (char *)result + sizeof(StringPub)};
@ -266,9 +267,12 @@ manifestFilePack(const Manifest *const manifest, const ManifestFile *const file)
resultPos += bufferPos + ALIGN_OFFSET(StringPub, nameSize + bufferPos);
*(StringPub *)(result + resultPos) = (StringPub)
{.size = (unsigned int)strSize(file->checksumPageErrorList), .buffer = (char *)result + resultPos + sizeof(StringPub)};
resultPos += sizeof(StringPub);
{
.size = (unsigned int)strSize(file->checksumPageErrorList),
.buffer = (char *)result + resultPos + sizeof(StringPub),
};
resultPos += sizeof(StringPub);
memcpy(result + resultPos, (uint8_t *)strZ(file->checksumPageErrorList), strSize(file->checksumPageErrorList) + 1);
}
@ -765,12 +769,12 @@ static struct ManifestBuildBlockIncrSizeMap
} manifestBuildBlockIncrSizeMap[] =
{
{.fileSize = 1024 * 1024 * 1024, .blockSize = 1024 * 1024},
{.fileSize = 256 * 1024 * 1024, .blockSize = 768 * 1024},
{.fileSize = 64 * 1024 * 1024, .blockSize = 512 * 1024},
{.fileSize = 16 * 1024 * 1024, .blockSize = 384 * 1024},
{.fileSize = 4 * 1024 * 1024, .blockSize = 256 * 1024},
{.fileSize = 2 * 1024 * 1024, .blockSize = 192 * 1024},
{.fileSize = 128 * 1024, .blockSize = 128 * 1024},
{.fileSize = 256 * 1024 * 1024, .blockSize = 768 * 1024},
{.fileSize = 64 * 1024 * 1024, .blockSize = 512 * 1024},
{.fileSize = 16 * 1024 * 1024, .blockSize = 384 * 1024},
{.fileSize = 4 * 1024 * 1024, .blockSize = 256 * 1024},
{.fileSize = 2 * 1024 * 1024, .blockSize = 192 * 1024},
{.fileSize = 128 * 1024, .blockSize = 128 * 1024},
};
// File age to block multiplier map
@ -782,7 +786,7 @@ static struct ManifestBuildBlockIncrTimeMap
{
{.fileAge = 4 * 7 * 86400, .blockMultiplier = 0},
{.fileAge = 2 * 7 * 86400, .blockMultiplier = 4},
{.fileAge = 7 * 86400, .blockMultiplier = 2},
{.fileAge = 7 * 86400, .blockMultiplier = 2},
};
static uint64_t
@ -999,7 +1003,7 @@ manifestBuildInfo(
// the likelihood of needing the regexp should be very small.
if (dbPath && strBeginsWithZ(info->name, PG_FILE_PGINTERNALINIT) &&
(strSize(info->name) == sizeof(PG_FILE_PGINTERNALINIT) - 1 ||
regExpMatchOne(STRDEF("\\.[0-9]+"), strSub(info->name, sizeof(PG_FILE_PGINTERNALINIT) - 1))))
regExpMatchOne(STRDEF("\\.[0-9]+"), strSub(info->name, sizeof(PG_FILE_PGINTERNALINIT) - 1))))
{
FUNCTION_TEST_RETURN_VOID();
}
@ -1009,9 +1013,9 @@ manifestBuildInfo(
{
// Skip recovery files
if (((strEqZ(info->name, PG_FILE_RECOVERYSIGNAL) || strEqZ(info->name, PG_FILE_STANDBYSIGNAL)) &&
pgVersion >= PG_VERSION_12) ||
pgVersion >= PG_VERSION_12) ||
((strEqZ(info->name, PG_FILE_RECOVERYCONF) || strEqZ(info->name, PG_FILE_RECOVERYDONE)) &&
pgVersion < PG_VERSION_12) ||
pgVersion < PG_VERSION_12) ||
// Skip temp file for safely writing postgresql.auto.conf
(strEqZ(info->name, PG_FILE_POSTGRESQLAUTOCONFTMP) && pgVersion >= PG_VERSION_94) ||
// Skip backup_label in versions where non-exclusive backup is supported
@ -1020,7 +1024,7 @@ manifestBuildInfo(
strEqZ(info->name, PG_FILE_BACKUPLABELOLD) ||
// Skip backup_manifest/tmp in versions where it is created
((strEqZ(info->name, PG_FILE_BACKUPMANIFEST) || strEqZ(info->name, PG_FILE_BACKUPMANIFEST_TMP)) &&
pgVersion >= PG_VERSION_13) ||
pgVersion >= PG_VERSION_13) ||
// Skip running process options
strEqZ(info->name, PG_FILE_POSTMTROPTS) ||
// Skip process id file to avoid confusing postgres after restore
@ -1528,7 +1532,7 @@ manifestBuildValidate(Manifest *this, bool delta, time_t copyStart, CompressType
{
LOG_WARN_FMT(
"file '%s' has timestamp (%" PRId64 ") in the future (relative to copy start %" PRId64 "), enabling delta"
" checksum",
" checksum",
strZ(manifestPathPg(file.name)), (int64_t)file.timestamp, (int64_t)copyStart);
this->pub.data.backupOptionDelta = BOOL_TRUE_VAR;
@ -1613,7 +1617,7 @@ manifestBuildIncr(Manifest *this, const Manifest *manifestPrior, BackupType type
{
LOG_WARN_FMT(
"file '%s' has timestamp earlier than prior backup (prior %" PRId64 ", current %" PRId64 "), enabling"
" delta checksum",
" delta checksum",
strZ(manifestPathPg(file.name)), (int64_t)filePrior.timestamp, (int64_t)file.timestamp);
this->pub.data.backupOptionDelta = BOOL_TRUE_VAR;
@ -1625,7 +1629,7 @@ manifestBuildIncr(Manifest *this, const Manifest *manifestPrior, BackupType type
{
LOG_WARN_FMT(
"file '%s' has same timestamp (%" PRId64 ") as prior but different size (prior %" PRIu64 ", current"
" %" PRIu64 "), enabling delta checksum",
" %" PRIu64 "), enabling delta checksum",
strZ(manifestPathPg(file.name)), (int64_t)file.timestamp, filePrior.size, file.size);
this->pub.data.backupOptionDelta = BOOL_TRUE_VAR;
@ -1866,9 +1870,9 @@ manifestBuildComplete(
// multiple structs since most of the fields are the same and the size shouldn't be more than 4/8 bytes.
typedef struct ManifestLoadFound
{
bool group:1;
bool mode:1;
bool user:1;
bool group : 1;
bool mode : 1;
bool user : 1;
} ManifestLoadFound;
typedef struct ManifestLoadData

View File

@ -17,15 +17,15 @@ Constants
***********************************************************************************************************************************/
#define BACKUP_MANIFEST_EXT ".manifest"
#define BACKUP_MANIFEST_FILE "backup" BACKUP_MANIFEST_EXT
STRING_DECLARE(BACKUP_MANIFEST_FILE_STR);
STRING_DECLARE(BACKUP_MANIFEST_FILE_STR);
#define MANIFEST_PATH_BUNDLE "bundle"
STRING_DECLARE(MANIFEST_PATH_BUNDLE_STR);
STRING_DECLARE(MANIFEST_PATH_BUNDLE_STR);
#define MANIFEST_TARGET_PGDATA "pg_data"
STRING_DECLARE(MANIFEST_TARGET_PGDATA_STR);
STRING_DECLARE(MANIFEST_TARGET_PGDATA_STR);
#define MANIFEST_TARGET_PGTBLSPC "pg_tblspc"
STRING_DECLARE(MANIFEST_TARGET_PGTBLSPC_STR);
STRING_DECLARE(MANIFEST_TARGET_PGTBLSPC_STR);
/***********************************************************************************************************************************
Object type
@ -102,11 +102,11 @@ File type
typedef struct ManifestFile
{
const String *name; // File name (must be first member in struct)
bool copy:1; // Should the file be copied (backup only)?
bool delta:1; // Verify checksum in PGDATA before copying (backup only)?
bool resume:1; // Is the file being resumed (backup only)?
bool checksumPage:1; // Does this file have page checksums?
bool checksumPageError:1; // Is there an error in the page checksum?
bool copy : 1; // Should the file be copied (backup only)?
bool delta : 1; // Verify checksum in PGDATA before copying (backup only)?
bool resume : 1; // Is the file being resumed (backup only)?
bool checksumPage : 1; // Does this file have page checksums?
bool checksumPageError : 1; // Is there an error in the page checksum?
mode_t mode; // File mode
const uint8_t *checksumSha1; // SHA1 checksum
const uint8_t *checksumRepoSha1; // SHA1 checksum as stored in repo (including compression, etc.)

View File

@ -200,7 +200,7 @@ pgControlFromBuffer(const Buffer *controlFile)
THROW_FMT(
VersionNotSupportedError,
"unexpected control version = %u and catalog version = %u\n"
"HINT: is this version of PostgreSQL supported?",
"HINT: is this version of PostgreSQL supported?",
controlCommon->controlVersion, controlCommon->catalogVersion);
}
@ -297,7 +297,7 @@ pgWalFromBuffer(const Buffer *walBuffer)
THROW_FMT(
VersionNotSupportedError,
"unexpected WAL magic %u\n"
"HINT: is this version of PostgreSQL supported?",
"HINT: is this version of PostgreSQL supported?",
((const PgWalCommon *)bufPtrConst(walBuffer))->magic);
}

View File

@ -22,27 +22,27 @@ Defines for various Postgres paths and files
#define PG_FILE_PGFILENODEMAP "pg_filenode.map"
#define PG_FILE_PGINTERNALINIT "pg_internal.init"
#define PG_FILE_PGVERSION "PG_VERSION"
STRING_DECLARE(PG_FILE_PGVERSION_STR);
STRING_DECLARE(PG_FILE_PGVERSION_STR);
#define PG_FILE_POSTGRESQLAUTOCONF "postgresql.auto.conf"
STRING_DECLARE(PG_FILE_POSTGRESQLAUTOCONF_STR);
STRING_DECLARE(PG_FILE_POSTGRESQLAUTOCONF_STR);
#define PG_FILE_POSTGRESQLAUTOCONFTMP "postgresql.auto.conf.tmp"
#define PG_FILE_POSTMTROPTS "postmas""ter.opts"
#define PG_FILE_POSTMTRPID "postmas""ter.pid"
STRING_DECLARE(PG_FILE_POSTMTRPID_STR);
STRING_DECLARE(PG_FILE_POSTMTRPID_STR);
#define PG_FILE_RECOVERYCONF "recovery.conf"
STRING_DECLARE(PG_FILE_RECOVERYCONF_STR);
STRING_DECLARE(PG_FILE_RECOVERYCONF_STR);
#define PG_FILE_RECOVERYDONE "recovery.done"
STRING_DECLARE(PG_FILE_RECOVERYDONE_STR);
STRING_DECLARE(PG_FILE_RECOVERYDONE_STR);
#define PG_FILE_RECOVERYSIGNAL "recovery.signal"
STRING_DECLARE(PG_FILE_RECOVERYSIGNAL_STR);
STRING_DECLARE(PG_FILE_RECOVERYSIGNAL_STR);
#define PG_FILE_STANDBYSIGNAL "standby.signal"
STRING_DECLARE(PG_FILE_STANDBYSIGNAL_STR);
STRING_DECLARE(PG_FILE_STANDBYSIGNAL_STR);
#define PG_FILE_TABLESPACEMAP "tablespace_map"
#define PG_PATH_ARCHIVE_STATUS "archive_status"
#define PG_PATH_BASE "base"
#define PG_PATH_GLOBAL "global"
STRING_DECLARE(PG_PATH_GLOBAL_STR);
STRING_DECLARE(PG_PATH_GLOBAL_STR);
#define PG_PATH_PGMULTIXACT "pg_multixact"
#define PG_PATH_PGDYNSHMEM "pg_dynshmem"
#define PG_PATH_PGNOTIFY "pg_notify"
@ -56,9 +56,9 @@ Defines for various Postgres paths and files
#define PG_PREFIX_PGSQLTMP "pgsql_tmp"
#define PG_NAME_WAL "wal"
STRING_DECLARE(PG_NAME_WAL_STR);
STRING_DECLARE(PG_NAME_WAL_STR);
#define PG_NAME_XLOG "xlog"
STRING_DECLARE(PG_NAME_XLOG_STR);
STRING_DECLARE(PG_NAME_XLOG_STR);
/***********************************************************************************************************************************
Define default page size

View File

@ -140,7 +140,7 @@ protocolClientNew(const String *name, const String *service, IoRead *read, IoWri
THROW_FMT(
ProtocolError,
"expected value '%s' for greeting key '%s' but got '%s'\n"
"HINT: is the same version of " PROJECT_NAME " installed on the local and remote host?",
"HINT: is the same version of " PROJECT_NAME " installed on the local and remote host?",
expected[expectedIdx].value, strZ(strIdToStr(expected[expectedIdx].key)), strZ(actualValue));
}
}

View File

@ -22,9 +22,9 @@ typedef enum
Constants
***********************************************************************************************************************************/
#define PROTOCOL_SERVICE_LOCAL "local"
STRING_DECLARE(PROTOCOL_SERVICE_LOCAL_STR);
STRING_DECLARE(PROTOCOL_SERVICE_LOCAL_STR);
#define PROTOCOL_SERVICE_REMOTE "remote"
STRING_DECLARE(PROTOCOL_SERVICE_REMOTE_STR);
STRING_DECLARE(PROTOCOL_SERVICE_REMOTE_STR);
/***********************************************************************************************************************************
Getters/Setters

View File

@ -205,8 +205,8 @@ storageAzureRequestAsync(StorageAzure *this, const String *verb, StorageAzureReq
param.path = param.path == NULL ? this->pathPrefix : strNewFmt("%s%s", strZ(this->pathPrefix), strZ(param.path));
// Create header list and add content length
HttpHeader *requestHeader = param.header == NULL ?
httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
HttpHeader *requestHeader =
param.header == NULL ? httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
// Set content length
httpHeaderAdd(
@ -747,8 +747,9 @@ storageAzureNew(
.account = strDup(account),
.blockSize = blockSize,
.host = uriStyle == storageAzureUriStyleHost ? strNewFmt("%s.%s", strZ(account), strZ(endpoint)) : strDup(endpoint),
.pathPrefix = uriStyle == storageAzureUriStyleHost ?
strNewFmt("/%s", strZ(container)) : strNewFmt("/%s/%s", strZ(account), strZ(container)),
.pathPrefix =
uriStyle == storageAzureUriStyleHost ?
strNewFmt("/%s", strZ(container)) : strNewFmt("/%s/%s", strZ(account), strZ(container)),
};
// Store shared key or parse sas query

View File

@ -16,11 +16,11 @@ typedef struct StorageAzure StorageAzure;
Azure query tokens
***********************************************************************************************************************************/
#define AZURE_QUERY_COMP "comp"
STRING_DECLARE(AZURE_QUERY_COMP_STR);
STRING_DECLARE(AZURE_QUERY_COMP_STR);
#define AZURE_QUERY_RESTYPE "restype"
STRING_DECLARE(AZURE_QUERY_RESTYPE_STR);
STRING_DECLARE(AZURE_QUERY_RESTYPE_STR);
#define AZURE_QUERY_VALUE_CONTAINER "container"
STRING_DECLARE(AZURE_QUERY_VALUE_CONTAINER_STR);
STRING_DECLARE(AZURE_QUERY_VALUE_CONTAINER_STR);
/***********************************************************************************************************************************
Perform an Azure Request

View File

@ -178,8 +178,10 @@ storageWriteAzure(THIS_VOID, const Buffer *buffer)
do
{
// Copy as many bytes as possible into the block buffer
size_t bytesNext = bufRemains(this->blockBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->blockBuffer);
const size_t bytesNext =
bufRemains(this->blockBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->blockBuffer);
bufCatSub(this->blockBuffer, buffer, bytesTotal, bytesNext);
bytesTotal += bytesNext;

View File

@ -52,19 +52,19 @@ VARIANT_STRDEF_STATIC(GCS_JSON_ERROR_VAR, "error");
VARIANT_STRDEF_STATIC(GCS_JSON_ERROR_DESCRIPTION_VAR, "error_description");
VARIANT_STRDEF_STATIC(GCS_JSON_EXPIRES_IN_VAR, "expires_in");
#define GCS_JSON_ITEMS "items"
VARIANT_STRDEF_STATIC(GCS_JSON_ITEMS_VAR, GCS_JSON_ITEMS);
VARIANT_STRDEF_STATIC(GCS_JSON_ITEMS_VAR, GCS_JSON_ITEMS);
VARIANT_STRDEF_EXTERN(GCS_JSON_MD5_HASH_VAR, GCS_JSON_MD5_HASH);
VARIANT_STRDEF_EXTERN(GCS_JSON_NAME_VAR, GCS_JSON_NAME);
#define GCS_JSON_NEXT_PAGE_TOKEN "nextPageToken"
VARIANT_STRDEF_STATIC(GCS_JSON_NEXT_PAGE_TOKEN_VAR, GCS_JSON_NEXT_PAGE_TOKEN);
VARIANT_STRDEF_STATIC(GCS_JSON_NEXT_PAGE_TOKEN_VAR, GCS_JSON_NEXT_PAGE_TOKEN);
#define GCS_JSON_PREFIXES "prefixes"
VARIANT_STRDEF_STATIC(GCS_JSON_PREFIXES_VAR, GCS_JSON_PREFIXES);
VARIANT_STRDEF_STATIC(GCS_JSON_PREFIXES_VAR, GCS_JSON_PREFIXES);
VARIANT_STRDEF_STATIC(GCS_JSON_PRIVATE_KEY_VAR, "private_key");
VARIANT_STRDEF_EXTERN(GCS_JSON_SIZE_VAR, GCS_JSON_SIZE);
VARIANT_STRDEF_STATIC(GCS_JSON_TOKEN_TYPE_VAR, "token_type");
VARIANT_STRDEF_STATIC(GCS_JSON_TOKEN_URI_VAR, "token_uri");
#define GCS_JSON_UPDATED "updated"
VARIANT_STRDEF_STATIC(GCS_JSON_UPDATED_VAR, GCS_JSON_UPDATED);
VARIANT_STRDEF_STATIC(GCS_JSON_UPDATED_VAR, GCS_JSON_UPDATED);
// Fields required when listing files
#define GCS_FIELD_LIST \
@ -183,7 +183,7 @@ storageGcsAuthJwt(StorageGcs *this, time_t timeBegin)
BUFSTR(
strNewFmt(
"{\"iss\":\"%s\",\"scope\":\"https://www.googleapis.com/auth/devstorage.read%s\",\"aud\":\"%s\""
",\"exp\":%" PRIu64 ",\"iat\":%" PRIu64 "}",
",\"exp\":%" PRIu64 ",\"iat\":%" PRIu64 "}",
strZ(this->credential), this->write ? "_write" : "_only", strZ(httpUrl(this->authUrl)),
(uint64_t)timeBegin + 3600, (uint64_t)timeBegin)));
@ -348,8 +348,9 @@ storageGcsAuth(StorageGcs *this, HttpHeader *httpHeader)
// If the current token has expired then request a new one
if (timeBegin >= this->tokenTimeExpire)
{
StorageGcsAuthTokenResult tokenResult = this->keyType == storageGcsKeyTypeAuto ?
storageGcsAuthAuto(this, timeBegin) : storageGcsAuthService(this, timeBegin);
StorageGcsAuthTokenResult tokenResult =
this->keyType == storageGcsKeyTypeAuto ?
storageGcsAuthAuto(this, timeBegin) : storageGcsAuthService(this, timeBegin);
MEM_CONTEXT_OBJ_BEGIN(this)
{
@ -408,8 +409,8 @@ storageGcsRequestAsync(StorageGcs *this, const String *verb, StorageGcsRequestAs
strCatFmt(path, "/%s", strZ(httpUriEncode(strSub(param.object, 1), false)));
// Create header list and add content length
HttpHeader *requestHeader = param.header == NULL ?
httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
HttpHeader *requestHeader =
param.header == NULL ? httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
// Set host
httpHeaderPut(requestHeader, HTTP_HEADER_HOST_STR, this->endpoint);

View File

@ -16,29 +16,29 @@ typedef struct StorageGcs StorageGcs;
HTTP headers
***********************************************************************************************************************************/
#define GCS_HEADER_UPLOAD_ID "x-guploader-uploadid"
STRING_DECLARE(GCS_HEADER_UPLOAD_ID_STR);
STRING_DECLARE(GCS_HEADER_UPLOAD_ID_STR);
/***********************************************************************************************************************************
Query tokens
***********************************************************************************************************************************/
#define GCS_QUERY_FIELDS "fields"
STRING_DECLARE(GCS_QUERY_FIELDS_STR);
STRING_DECLARE(GCS_QUERY_FIELDS_STR);
#define GCS_QUERY_MEDIA "media"
STRING_DECLARE(GCS_QUERY_MEDIA_STR);
STRING_DECLARE(GCS_QUERY_MEDIA_STR);
#define GCS_QUERY_NAME "name"
STRING_DECLARE(GCS_QUERY_NAME_STR);
STRING_DECLARE(GCS_QUERY_NAME_STR);
#define GCS_QUERY_UPLOAD_ID "upload_id"
STRING_DECLARE(GCS_QUERY_UPLOAD_ID_STR);
STRING_DECLARE(GCS_QUERY_UPLOAD_ID_STR);
/***********************************************************************************************************************************
JSON tokens
***********************************************************************************************************************************/
#define GCS_JSON_MD5_HASH "md5Hash"
VARIANT_DECLARE(GCS_JSON_MD5_HASH_VAR);
VARIANT_DECLARE(GCS_JSON_MD5_HASH_VAR);
#define GCS_JSON_NAME "name"
VARIANT_DECLARE(GCS_JSON_NAME_VAR);
VARIANT_DECLARE(GCS_JSON_NAME_VAR);
#define GCS_JSON_SIZE "size"
VARIANT_DECLARE(GCS_JSON_SIZE_VAR);
VARIANT_DECLARE(GCS_JSON_SIZE_VAR);
/***********************************************************************************************************************************
Perform a GCS Request

View File

@ -249,8 +249,10 @@ storageWriteGcs(THIS_VOID, const Buffer *buffer)
}
// Copy as many bytes as possible into the chunk buffer
size_t bytesNext = bufRemains(this->chunkBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->chunkBuffer);
const size_t bytesNext =
bufRemains(this->chunkBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->chunkBuffer);
bufCatSub(this->chunkBuffer, buffer, bytesTotal, bytesNext);
bytesTotal += bytesNext;
}

View File

@ -10,21 +10,21 @@ Storage Helper
Storage path constants
***********************************************************************************************************************************/
#define STORAGE_SPOOL_ARCHIVE "<SPOOL:ARCHIVE>"
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_STR);
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_STR);
#define STORAGE_SPOOL_ARCHIVE_IN "<SPOOL:ARCHIVE:IN>"
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_IN_STR);
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_IN_STR);
#define STORAGE_SPOOL_ARCHIVE_OUT "<SPOOL:ARCHIVE:OUT>"
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_OUT_STR);
STRING_DECLARE(STORAGE_SPOOL_ARCHIVE_OUT_STR);
#define STORAGE_REPO_ARCHIVE "<REPO:ARCHIVE>"
STRING_DECLARE(STORAGE_REPO_ARCHIVE_STR);
STRING_DECLARE(STORAGE_REPO_ARCHIVE_STR);
#define STORAGE_REPO_BACKUP "<REPO:BACKUP>"
STRING_DECLARE(STORAGE_REPO_BACKUP_STR);
STRING_DECLARE(STORAGE_REPO_BACKUP_STR);
#define STORAGE_PATH_ARCHIVE "archive"
STRING_DECLARE(STORAGE_PATH_ARCHIVE_STR);
STRING_DECLARE(STORAGE_PATH_ARCHIVE_STR);
#define STORAGE_PATH_BACKUP "backup"
STRING_DECLARE(STORAGE_PATH_BACKUP_STR);
STRING_DECLARE(STORAGE_PATH_BACKUP_STR);
/***********************************************************************************************************************************
Functions

View File

@ -24,7 +24,7 @@ Posix Storage
Define PATH_MAX if it is not defined
***********************************************************************************************************************************/
#ifndef PATH_MAX
#define PATH_MAX (4 * 1024)
#define PATH_MAX (4 * 1024)
#endif
/***********************************************************************************************************************************
@ -234,14 +234,14 @@ storagePosixList(THIS_VOID, const String *const path, const StorageInfoLevel lev
// stat() and is therefore relatively slow
if (level == storageInfoLevelExists)
{
storageLstAdd(
result,
&(StorageInfo)
{
.name = STR(dirEntry->d_name),
.level = storageInfoLevelExists,
.exists = true,
});
const StorageInfo storageInfo =
{
.name = STR(dirEntry->d_name),
.level = storageInfoLevelExists,
.exists = true,
};
storageLstAdd(result, &storageInfo);
}
// Else more info is required which requires a call to stat()
else

View File

@ -69,8 +69,8 @@ storageS3Helper(const unsigned int repoIdx, const bool write, StoragePathExpress
{
THROW_FMT(
OptionInvalidError,
"option '%s' is '" CFGOPTVAL_REPO_S3_KEY_TYPE_WEB_ID_Z "' but '" S3_ENV_AWS_ROLE_ARN "' and '"
S3_ENV_AWS_WEB_IDENTITY_TOKEN_FILE "' are not set",
"option '%s' is '" CFGOPTVAL_REPO_S3_KEY_TYPE_WEB_ID_Z "' but '" S3_ENV_AWS_ROLE_ARN "' and"
" '" S3_ENV_AWS_WEB_IDENTITY_TOKEN_FILE "' are not set",
cfgOptionIdxName(cfgOptRepoS3KeyType, repoIdx));
}

View File

@ -56,7 +56,7 @@ STRING_STATIC(S3_XML_TAG_IS_TRUNCATED_STR, "IsTruncated
STRING_STATIC(S3_XML_TAG_KEY_STR, "Key");
STRING_STATIC(S3_XML_TAG_LAST_MODIFIED_STR, "LastModified");
#define S3_XML_TAG_NEXT_CONTINUATION_TOKEN "NextContinuationToken"
STRING_STATIC(S3_XML_TAG_NEXT_CONTINUATION_TOKEN_STR, S3_XML_TAG_NEXT_CONTINUATION_TOKEN);
STRING_STATIC(S3_XML_TAG_NEXT_CONTINUATION_TOKEN_STR, S3_XML_TAG_NEXT_CONTINUATION_TOKEN);
STRING_STATIC(S3_XML_TAG_OBJECT_STR, "Object");
STRING_STATIC(S3_XML_TAG_PREFIX_STR, "Prefix");
STRING_STATIC(S3_XML_TAG_QUIET_STR, "Quiet");
@ -66,10 +66,10 @@ STRING_STATIC(S3_XML_TAG_SIZE_STR, "Size");
AWS authentication v4 constants
***********************************************************************************************************************************/
#define S3 "s3"
BUFFER_STRDEF_STATIC(S3_BUF, S3);
BUFFER_STRDEF_STATIC(S3_BUF, S3);
#define AWS4 "AWS4"
#define AWS4_REQUEST "aws4_request"
BUFFER_STRDEF_STATIC(AWS4_REQUEST_BUF, AWS4_REQUEST);
BUFFER_STRDEF_STATIC(AWS4_REQUEST_BUF, AWS4_REQUEST);
#define AWS4_HMAC_SHA256 "AWS4-HMAC-SHA256"
/***********************************************************************************************************************************
@ -315,7 +315,7 @@ storageS3AuthAuto(StorageS3 *const this, HttpHeader *const header)
THROW(
ProtocolError,
"role to retrieve temporary credentials not found\n"
"HINT: is a valid IAM role associated with this instance?");
"HINT: is a valid IAM role associated with this instance?");
}
// Else an error that we can't handle
else if (!httpResponseCodeOk(response))
@ -340,7 +340,7 @@ storageS3AuthAuto(StorageS3 *const this, HttpHeader *const header)
THROW_FMT(
ProtocolError,
"role '%s' not found\n"
"HINT: is '%s' a valid IAM role associated with this instance?",
"HINT: is '%s' a valid IAM role associated with this instance?",
strZ(this->credRole), strZ(this->credRole));
}
// Else an error that we can't handle
@ -463,8 +463,8 @@ storageS3RequestAsync(StorageS3 *this, const String *verb, const String *path, S
MEM_CONTEXT_TEMP_BEGIN()
{
HttpHeader *requestHeader = param.header == NULL ?
httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
HttpHeader *requestHeader =
param.header == NULL ? httpHeaderNew(this->headerRedactList) : httpHeaderDup(param.header, this->headerRedactList);
// Set content length
httpHeaderAdd(
@ -1145,8 +1145,8 @@ storageS3New(
.partSize = partSize,
.deleteMax = STORAGE_S3_DELETE_MAX,
.uriStyle = uriStyle,
.bucketEndpoint = uriStyle == storageS3UriStyleHost ?
strNewFmt("%s.%s", strZ(bucket), strZ(endPoint)) : strDup(endPoint),
.bucketEndpoint =
uriStyle == storageS3UriStyleHost ? strNewFmt("%s.%s", strZ(bucket), strZ(endPoint)) : strDup(endPoint),
// Force the signing key to be generated on the first run
.signingKeyDate = YYYYMMDD_STR,

View File

@ -178,8 +178,10 @@ storageWriteS3(THIS_VOID, const Buffer *buffer)
do
{
// Copy as many bytes as possible into the part buffer
size_t bytesNext = bufRemains(this->partBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->partBuffer);
const size_t bytesNext =
bufRemains(this->partBuffer) > bufUsed(buffer) - bytesTotal ?
bufUsed(buffer) - bytesTotal : bufRemains(this->partBuffer);
bufCatSub(this->partBuffer, buffer, bytesTotal, bytesNext);
bytesTotal += bytesNext;

View File

@ -95,7 +95,7 @@ storageNew(
CHECK(
AssertError,
(!storageFeature(this, storageFeatureSymLink) && !storageFeature(this, storageFeatureHardLink)) ||
interface.linkCreate != NULL,
interface.linkCreate != NULL,
"linkCreate required");
FUNCTION_LOG_RETURN(STORAGE, this);

View File

@ -253,14 +253,13 @@ testDefParseModuleList(Yaml *const yaml, List *const moduleList)
for (unsigned int harnessIdx = 0; harnessIdx < lstSize(globalHarnessList); harnessIdx++)
{
const TestDefHarness *const globalHarness = lstGet(globalHarnessList, harnessIdx);
const TestDefHarness testDefHarness =
{
.name = strDup(globalHarness->name),
.includeList = strLstDup(globalHarness->includeList),
};
lstAdd(
harnessList,
&(TestDefHarness)
{
.name = strDup(globalHarness->name),
.includeList = strLstDup(globalHarness->includeList),
});
lstAdd(harnessList, &testDefHarness);
}
}
MEM_CONTEXT_OBJ_END();
@ -275,14 +274,13 @@ testDefParseModuleList(Yaml *const yaml, List *const moduleList)
for (unsigned int shimIdx = 0; shimIdx < lstSize(globalShimList); shimIdx++)
{
const TestDefShim *const globalShim = lstGet(globalShimList, shimIdx);
const TestDefShim testDefShim =
{
.name = strDup(globalShim->name),
.functionList = strLstDup(globalShim->functionList),
};
lstAdd(
shimList,
&(TestDefShim)
{
.name = strDup(globalShim->name),
.functionList = strLstDup(globalShim->functionList),
});
lstAdd(shimList, &testDefShim);
}
}
MEM_CONTEXT_OBJ_END();

View File

@ -154,8 +154,9 @@ cmdTest(
}
// Remove old coverage data. Note that coverage can be in different paths depending on the meson version.
const String *const pathCoverage = storagePathExistsP(storageUnitBuild, STRDEF("test-unit.p")) ?
STRDEF("test-unit.p") : STRDEF("test-unit@exe");
const String *const pathCoverage =
storagePathExistsP(storageUnitBuild, STRDEF("test-unit.p")) ?
STRDEF("test-unit.p") : STRDEF("test-unit@exe");
StorageIterator *const storageItr = storageNewItrP(
storageUnitBuild, pathCoverage, .expression = STRDEF("\\.gcda$"));

View File

@ -5,68 +5,72 @@ C Debug Harness
#define TEST_COMMON_HARNESS_DEBUG_H
#ifdef HRN_FEATURE_DEBUG
#include "common/debug.h"
// Set line numer of the current function in the stack trace. This is used to give more detailed info about which test macro
// caused an error.
#ifdef DEBUG
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo) \
stackTraceTestFileLineSet((unsigned int)lineNo)
#else
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo)
#endif
#include "common/debug.h"
#define FUNCTION_HARNESS_BEGIN() \
STACK_TRACE_PUSH(logLevelDebug); \
stackTraceParamLog()
#define FUNCTION_HARNESS_PARAM(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_P(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_PP(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_VOID() \
FUNCTION_HARNESS_BEGIN(); \
FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "function harness assertion '%s' failed", #condition); \
} \
while (0)
#define FUNCTION_HARNESS_RETURN(typeMacroPrefix, ...) \
do \
{ \
STACK_TRACE_POP(false); \
return __VA_ARGS__; \
} \
while (0)
#define FUNCTION_HARNESS_RETURN_VOID() \
STACK_TRACE_POP(false);
// Set line numer of the current function in the stack trace. This is used to give more detailed info about which test macro caused
// an error.
#ifdef DEBUG
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo) \
stackTraceTestFileLineSet((unsigned int)lineNo)
#else
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo)
#define FUNCTION_HARNESS_BEGIN()
#define FUNCTION_HARNESS_PARAM(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_VOID()
#define FUNCTION_HARNESS_ASSERT(condition)
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo)
#endif
#define FUNCTION_HARNESS_RETURN(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_HARNESS_BEGIN() \
STACK_TRACE_PUSH(logLevelDebug); \
stackTraceParamLog()
#define FUNCTION_HARNESS_PARAM(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_P(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_PP(typeMacroPrefix, param) \
FUNCTION_LOG_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_VOID() \
FUNCTION_HARNESS_BEGIN(); \
FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "function harness assertion '%s' failed", #condition); \
} \
while (0)
#define FUNCTION_HARNESS_RETURN(typeMacroPrefix, ...) \
do \
{ \
STACK_TRACE_POP(false); \
return __VA_ARGS__; \
} \
while (0)
#define FUNCTION_HARNESS_RETURN_VOID() \
STACK_TRACE_POP(false);
#else
#define FUNCTION_HARNESS_STACK_TRACE_LINE_SET(lineNo)
#define FUNCTION_HARNESS_BEGIN()
#define FUNCTION_HARNESS_PARAM(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_P(typeMacroPrefix, param)
#define FUNCTION_HARNESS_PARAM_PP(typeMacroPrefix, param)
#define FUNCTION_HARNESS_END()
#define FUNCTION_HARNESS_VOID()
#define FUNCTION_HARNESS_ASSERT(condition)
#define FUNCTION_HARNESS_RETURN(typeMacroPrefix, ...) \
return __VA_ARGS__
#define FUNCTION_HARNESS_RETURN_VOID();
#define FUNCTION_HARNESS_RETURN_VOID();
#endif
#endif

View File

@ -92,51 +92,55 @@ IoRead/IoWrite buffer internally using both the interfaces and the file descript
buffers are known to be empty, e.g. ioWriteFlush() has been called.
***********************************************************************************************************************************/
#ifdef HRN_FEATURE_IO
#define HRN_FORK_CHILD_READ() \
HRN_FORK_ioReadChild
#define HRN_FORK_CHILD_WRITE() \
HRN_FORK_ioWriteChild
#define HRN_FORK_CHILD_READ() \
HRN_FORK_ioReadChild
#define HRN_FORK_PARENT_READ(processIdx) \
HRN_FORK_ioReadParent[processIdx]
#define HRN_FORK_CHILD_WRITE() \
HRN_FORK_ioWriteChild
#define HRN_FORK_PARENT_READ(processIdx) \
HRN_FORK_ioReadParent[processIdx]
#define HRN_FORK_PARENT_WRITE(processIdx) \
HRN_FORK_ioWriteParent[processIdx]
#define HRN_FORK_PARENT_WRITE(processIdx) \
HRN_FORK_ioWriteParent[processIdx]
#endif
/***********************************************************************************************************************************
Get/put notify messages. These macros allow the parent and child process to synchronize which is useful, e.g. releasing locks.
***********************************************************************************************************************************/
#ifdef HRN_FEATURE_IO
// General notify get macro used by parent/child
#define HRN_FORK_NOTIFY_GET(read) \
ioReadLine(read)
// General notify put macro used by parent/child
#define HRN_FORK_NOTIFY_PUT(write) \
do \
{ \
ioWriteStrLine(write, EMPTY_STR); \
ioWriteFlush(write); \
} \
while (0)
// General notify get macro used by parent/child
#define HRN_FORK_NOTIFY_GET(read) \
ioReadLine(read)
// Put notification to parent from child
#define HRN_FORK_CHILD_NOTIFY_GET() \
HRN_FORK_NOTIFY_GET(HRN_FORK_CHILD_READ())
// General notify put macro used by parent/child
#define HRN_FORK_NOTIFY_PUT(write) \
do \
{ \
ioWriteStrLine(write, EMPTY_STR); \
ioWriteFlush(write); \
} \
while (0)
// Get notification from parent to child
#define HRN_FORK_CHILD_NOTIFY_PUT() \
HRN_FORK_NOTIFY_PUT(HRN_FORK_CHILD_WRITE())
// Put notification to parent from child
#define HRN_FORK_CHILD_NOTIFY_GET() \
HRN_FORK_NOTIFY_GET(HRN_FORK_CHILD_READ())
// Put notification to child from parent
#define HRN_FORK_PARENT_NOTIFY_GET(processIdx) \
HRN_FORK_NOTIFY_GET(HRN_FORK_PARENT_READ(processIdx))
// Get notification from parent to child
#define HRN_FORK_CHILD_NOTIFY_PUT() \
HRN_FORK_NOTIFY_PUT(HRN_FORK_CHILD_WRITE())
// Put notification to child from parent
#define HRN_FORK_PARENT_NOTIFY_GET(processIdx) \
HRN_FORK_NOTIFY_GET(HRN_FORK_PARENT_READ(processIdx))
// Get notification from child to parent
#define HRN_FORK_PARENT_NOTIFY_PUT(processIdx) \
HRN_FORK_NOTIFY_PUT(HRN_FORK_PARENT_WRITE(processIdx))
// Get notification from child to parent
#define HRN_FORK_PARENT_NOTIFY_PUT(processIdx) \
HRN_FORK_NOTIFY_PUT(HRN_FORK_PARENT_WRITE(processIdx))
#endif
/***********************************************************************************************************************************
@ -163,8 +167,7 @@ typedef struct HrnForkParam
{ \
HrnForkParam param = {VAR_PARAM_INIT, __VA_ARGS__}; \
\
/* Set timeout default */ \
if (param.timeout == 0) \
if (param.timeout == 0) /* Set timeout default */ \
param.timeout = HRN_FORK_TIMEOUT; \
\
unsigned int HRN_FORK_PROCESS_TOTAL() = 0; \
@ -192,19 +195,21 @@ typedef struct HrnForkChildParam
// Declare/assign IoRead/IoWrite
#ifdef HRN_FEATURE_IO
#include "common/io/fdRead.h"
#include "common/io/fdWrite.h"
#include "common/type/string.h"
#define HRN_FORK_CHILD_IO() \
IoRead *HRN_FORK_CHILD_READ() = ioFdReadNewOpen( \
strNewFmt("%s %u read", paramChild.prefix, HRN_FORK_PROCESS_IDX()), HRN_FORK_CHILD_READ_FD(), paramChild.timeout); \
(void)HRN_FORK_CHILD_READ(); \
IoWrite *HRN_FORK_CHILD_WRITE() = ioFdWriteNewOpen( \
strNewFmt("%s %u write", paramChild.prefix, HRN_FORK_PROCESS_IDX()), HRN_FORK_CHILD_WRITE_FD(), paramChild.timeout); \
(void)HRN_FORK_CHILD_WRITE()
#include "common/io/fdRead.h"
#include "common/io/fdWrite.h"
#include "common/type/string.h"
#define HRN_FORK_CHILD_IO() \
IoRead *HRN_FORK_CHILD_READ() = ioFdReadNewOpen( \
strNewFmt("%s %u read", paramChild.prefix, HRN_FORK_PROCESS_IDX()), HRN_FORK_CHILD_READ_FD(), paramChild.timeout); \
(void)HRN_FORK_CHILD_READ(); \
IoWrite *HRN_FORK_CHILD_WRITE() = ioFdWriteNewOpen( \
strNewFmt("%s %u write", paramChild.prefix, HRN_FORK_PROCESS_IDX()), HRN_FORK_CHILD_WRITE_FD(), paramChild.timeout); \
(void)HRN_FORK_CHILD_WRITE()
#else
#define HRN_FORK_CHILD_IO()
#define HRN_FORK_CHILD_IO()
#endif
#define HRN_FORK_CHILD_BEGIN(...) \
@ -212,44 +217,36 @@ typedef struct HrnForkChildParam
{ \
HrnForkChildParam paramChild = {VAR_PARAM_INIT, __VA_ARGS__}; \
\
/* Set prefix default */ \
if (paramChild.prefix == NULL) \
if (paramChild.prefix == NULL) /* Set prefix default */ \
paramChild.prefix = "child"; \
\
/* Set timeout default */ \
if (paramChild.timeout == 0) \
if (paramChild.timeout == 0) /* Set timeout default */ \
paramChild.timeout = param.timeout; \
\
HRN_FORK_CHILD_EXPECTED_EXIT_STATUS(HRN_FORK_PROCESS_TOTAL()) = paramChild.expectedExitStatus; \
\
/* Create pipe for parent/child communication */ \
THROW_ON_SYS_ERROR_FMT( \
\
THROW_ON_SYS_ERROR_FMT( /* Create pipe for parent/child communication */ \
pipe(HRN_FORK_PIPE(HRN_FORK_PROCESS_TOTAL())[0]) == -1, KernelError, \
"unable to create read pipe for child process %u", HRN_FORK_PROCESS_TOTAL()); \
THROW_ON_SYS_ERROR_FMT( \
pipe(HRN_FORK_PIPE(HRN_FORK_PROCESS_TOTAL())[1]) == -1, KernelError, \
"unable to create write pipe for child process %u", HRN_FORK_PROCESS_TOTAL()); \
\
/* Fork child process */ \
HRN_FORK_PROCESS_ID(HRN_FORK_PROCESS_TOTAL()) = fork(); \
\
if (HRN_FORK_PROCESS_ID(HRN_FORK_PROCESS_TOTAL()) == 0) \
{ \
unsigned int HRN_FORK_PROCESS_IDX() = HRN_FORK_PROCESS_TOTAL(); \
\
/* Declare/assign IoRead/IoWrite */ \
HRN_FORK_CHILD_IO(); \
HRN_FORK_CHILD_IO(); /* Declare/assign IoRead/IoWrite */ \
hrnLogProcessIdSet(HRN_FORK_PROCESS_IDX() + 1); /* Change log process id to aid in debugging */ \
\
/* Change log process id to aid in debugging */ \
hrnLogProcessIdSet(HRN_FORK_PROCESS_IDX() + 1); \
\
/* Close parent side of pipe */ \
close(HRN_FORK_PARENT_READ_FD(HRN_FORK_PROCESS_IDX())); \
close(HRN_FORK_PARENT_READ_FD(HRN_FORK_PROCESS_IDX())); /* Close parent side of pipe */ \
close(HRN_FORK_PARENT_WRITE_FD(HRN_FORK_PROCESS_IDX())); \
#define HRN_FORK_CHILD_END() \
/* Close child side of pipe */ \
close(HRN_FORK_CHILD_READ_FD()); \
close(HRN_FORK_CHILD_READ_FD()); /* Close child side of pipe */ \
close(HRN_FORK_CHILD_WRITE_FD()); \
\
exit(0); \
@ -275,20 +272,22 @@ typedef struct HrnForkParentParam
// Declare IoRead/IoWrite
#ifdef HRN_FEATURE_IO
#define HRN_FORK_PARENT_IO_DECLARE() \
IoRead *HRN_FORK_PARENT_READ(HRN_FORK_CHILD_MAX) = {0}; \
(void)HRN_FORK_PARENT_READ(0); \
IoWrite *HRN_FORK_PARENT_WRITE(HRN_FORK_CHILD_MAX) = {0}; \
(void)HRN_FORK_PARENT_WRITE(0)
#define HRN_FORK_PARENT_IO_ASSIGN(processIdx) \
HRN_FORK_PARENT_READ(processIdx) = ioFdReadNewOpen( \
strNewFmt("%s %u read", paramParent.prefix, processIdx), HRN_FORK_PARENT_READ_FD(processIdx), paramParent.timeout); \
HRN_FORK_PARENT_WRITE(processIdx) = ioFdWriteNewOpen( \
strNewFmt("%s %u write", paramParent.prefix, processIdx), HRN_FORK_PARENT_WRITE_FD(processIdx), paramParent.timeout)
#define HRN_FORK_PARENT_IO_DECLARE() \
IoRead *HRN_FORK_PARENT_READ(HRN_FORK_CHILD_MAX) = {0}; \
(void)HRN_FORK_PARENT_READ(0); \
IoWrite *HRN_FORK_PARENT_WRITE(HRN_FORK_CHILD_MAX) = {0}; \
(void)HRN_FORK_PARENT_WRITE(0)
#define HRN_FORK_PARENT_IO_ASSIGN(processIdx) \
HRN_FORK_PARENT_READ(processIdx) = ioFdReadNewOpen( \
strNewFmt("%s %u read", paramParent.prefix, processIdx), HRN_FORK_PARENT_READ_FD(processIdx), paramParent.timeout); \
HRN_FORK_PARENT_WRITE(processIdx) = ioFdWriteNewOpen( \
strNewFmt("%s %u write", paramParent.prefix, processIdx), HRN_FORK_PARENT_WRITE_FD(processIdx), paramParent.timeout)
#else
#define HRN_FORK_PARENT_IO_DECLARE()
#define HRN_FORK_PARENT_IO_ASSIGN(processIdx)
#define HRN_FORK_PARENT_IO_DECLARE()
#define HRN_FORK_PARENT_IO_ASSIGN(processIdx)
#endif
#define HRN_FORK_PARENT_BEGIN(...) \
@ -296,32 +295,26 @@ typedef struct HrnForkParentParam
{ \
HrnForkParentParam paramParent = {VAR_PARAM_INIT, __VA_ARGS__}; \
\
/* Set prefix default */ \
if (paramParent.prefix == NULL) \
if (paramParent.prefix == NULL) /* Set prefix default */ \
paramParent.prefix = "parent"; \
\
/* Set timeout default */ \
if (paramParent.timeout == 0) \
if (paramParent.timeout == 0) /* Set timeout default */ \
paramParent.timeout = param.timeout; \
\
/* Declare IoRead/IoWrite */ \
HRN_FORK_PARENT_IO_DECLARE(); \
HRN_FORK_PARENT_IO_DECLARE(); /* Declare IoRead/IoWrite */ \
\
for (unsigned int processIdx = 0; processIdx < HRN_FORK_PROCESS_TOTAL(); processIdx++) \
{ \
/* Close child side of pipe */ \
close(HRN_FORK_PIPE(processIdx)[1][0]); \
close(HRN_FORK_PIPE(processIdx)[1][0]); /* Close child side of pipe */ \
close(HRN_FORK_PIPE(processIdx)[0][1]); \
\
/* Assign IoRead/IoWrite */ \
HRN_FORK_PARENT_IO_ASSIGN(processIdx); \
HRN_FORK_PARENT_IO_ASSIGN(processIdx); /* Assign IoRead/IoWrite */ \
}
#define HRN_FORK_PARENT_END() \
for (unsigned int processIdx = 0; processIdx < HRN_FORK_PROCESS_TOTAL(); processIdx++) \
{ \
/* Close parent side of pipe */ \
close(HRN_FORK_PARENT_READ_FD(processIdx)); \
close(HRN_FORK_PARENT_READ_FD(processIdx)); /* Close parent side of pipe */ \
close(HRN_FORK_PARENT_WRITE_FD(processIdx)); \
} \
} \

View File

@ -39,9 +39,9 @@ void harnessLogLevelSet(LogLevel logLevel);
// Set the process id used for logging. Ignore the request if the logging module is not active yet.
#ifdef HRN_FEATURE_LOG
void hrnLogProcessIdSet(unsigned int processId);
void hrnLogProcessIdSet(unsigned int processId);
#else
#define hrnLogProcessIdSet(processId)
#define hrnLogProcessIdSet(processId)
#endif
/***********************************************************************************************************************************

View File

@ -28,11 +28,11 @@ Add file to manifest
typedef struct HrnManifestFile
{
const char *name; // See ManifestFile for comments
bool copy:1;
bool delta:1;
bool resume:1;
bool checksumPage:1;
bool checksumPageError:1;
bool copy : 1;
bool delta : 1;
bool resume : 1;
bool checksumPage : 1;
bool checksumPageError : 1;
mode_t mode;
const char *checksumSha1;
const char *checksumRepoSha1;

View File

@ -192,8 +192,8 @@ hrnPgControlToBuffer(PgControl pgControl)
// Set defaults if values are not passed
pgControl.pageSize = pgControl.pageSize == 0 ? PG_PAGE_SIZE_DEFAULT : pgControl.pageSize;
pgControl.walSegmentSize = pgControl.walSegmentSize == 0 ? PG_WAL_SEGMENT_SIZE_DEFAULT : pgControl.walSegmentSize;
pgControl.catalogVersion = pgControl.catalogVersion == 0 ?
hrnPgInterfaceVersion(pgControl.version)->catalogVersion() : pgControl.catalogVersion;
pgControl.catalogVersion =
pgControl.catalogVersion == 0 ? hrnPgInterfaceVersion(pgControl.version)->catalogVersion() : pgControl.catalogVersion;
pgControl.systemId = pgControl.systemId < 100 ? hrnPgSystemId(pgControl.version) + pgControl.systemId : pgControl.systemId;
pgControl.checkpoint = pgControl.checkpoint == 0 ? 1 : pgControl.checkpoint;
pgControl.timeline = pgControl.timeline == 0 ? 1 : pgControl.timeline;

View File

@ -488,8 +488,9 @@ Macros for defining groups of functions that implement various queries and comma
{.session = sessionParam, .function = HRNPQ_CLEAR}, \
{.session = sessionParam, .function = HRNPQ_GETRESULT, .resultNull = true}
#define HRNPQ_MACRO_REPLAY_TARGET_REACHED( \
sessionParam, walNameParam, lsnNameParam, targetLsnParam, targetReachedParam, replayLsnParam) \
#define \
HRNPQ_MACRO_REPLAY_TARGET_REACHED( \
sessionParam, walNameParam, lsnNameParam, targetLsnParam, targetReachedParam, replayLsnParam) \
{.session = sessionParam, \
.function = HRNPQ_SENDQUERY, \
.param = zNewFmt( \
@ -516,8 +517,9 @@ Macros for defining groups of functions that implement various queries and comma
#define HRNPQ_MACRO_REPLAY_TARGET_REACHED_GE_10(sessionParam, targetLsnParam, targetReachedParam, reachedLsnParam) \
HRNPQ_MACRO_REPLAY_TARGET_REACHED(sessionParam, "wal", "lsn", targetLsnParam, targetReachedParam, reachedLsnParam)
#define HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED( \
sessionParam, lsnNameParam, targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam) \
#define \
HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED( \
sessionParam, lsnNameParam, targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam) \
{.session = sessionParam, \
.function = HRNPQ_SENDQUERY, \
.param = zNewFmt( \
@ -542,8 +544,9 @@ Macros for defining groups of functions that implement various queries and comma
HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED( \
sessionParam, "location", targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam)
#define HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED_GE_10( \
sessionParam, targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam) \
#define \
HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED_GE_10( \
sessionParam, targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam) \
HRNPQ_MACRO_CHECKPOINT_TARGET_REACHED(sessionParam, "lsn", targetLsnParam, targetReachedParam, checkpointLsnParam, sleepParam)
#define HRNPQ_MACRO_REPLAY_WAIT_LE_95(sessionParam, targetLsnParam) \

View File

@ -7,6 +7,6 @@ Functions
***********************************************************************************************************************************/
// Install/uninstall shim to return true from stackTraceBackCallback() to indicate that no backtrace data was found
#ifdef HAVE_LIBBACKTRACE
void hrnStackTraceBackShimInstall(void);
void hrnStackTraceBackShimUninstall(void);
void hrnStackTraceBackShimInstall(void);
void hrnStackTraceBackShimUninstall(void);
#endif

View File

@ -57,8 +57,8 @@ static struct HarnessTestLocal
Extern functions
***********************************************************************************************************************************/
#ifdef HRN_FEATURE_LOG
void harnessLogInit(void);
void harnessLogFinal(void);
void harnessLogInit(void);
void harnessLogFinal(void);
#endif
/***********************************************************************************************************************************
@ -388,9 +388,13 @@ hrnTestResultBegin(const char *const statement, const bool result)
FUNCTION_HARNESS_STACK_TRACE_LINE_SET(harnessTestLocal.logLastLineNo);
// Set info to report if an error is thrown
harnessTestLocal.result =
(struct HarnessTestResult){
.running = true, .statement = statement, .lineNo = harnessTestLocal.logLastLineNo, .result = result};
harnessTestLocal.result = (struct HarnessTestResult)
{
.running = true,
.statement = statement,
.lineNo = harnessTestLocal.logLastLineNo,
.result = result,
};
// Reset line number so it is not used by another test
harnessTestLocal.logLastLineNo = 0;

View File

@ -21,15 +21,17 @@ Constants
Make sure ASSERT() always exists for tests to use, even when DEBUG is disabled for performance
***********************************************************************************************************************************/
#ifdef HRN_FEATURE_ASSERT
#undef ASSERT
#define ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
#undef ASSERT
#define ASSERT(condition) \
do \
{ \
if (!(condition)) \
THROW_FMT(AssertError, "assertion '%s' failed", #condition); \
} \
while (0)
#endif
/***********************************************************************************************************************************
@ -71,13 +73,15 @@ Test that an expected error is actually thrown and error when it isn't
***********************************************************************************************************************************/
// Wrap the error in a temp mem context (when available) to free memory and execute callbacks after CATCH_FATAL()
#ifdef HRN_FEATURE_MEMCONTEXT
#include "common/memContext.h"
#define TEST_ERROR_MEM_CONTEXT_BEGIN() MEM_CONTEXT_TEMP_BEGIN()
#define TEST_ERROR_MEM_CONTEXT_END() MEM_CONTEXT_TEMP_END()
#include "common/memContext.h"
#define TEST_ERROR_MEM_CONTEXT_BEGIN() MEM_CONTEXT_TEMP_BEGIN()
#define TEST_ERROR_MEM_CONTEXT_END() MEM_CONTEXT_TEMP_END()
#else
#define TEST_ERROR_MEM_CONTEXT_BEGIN()
#define TEST_ERROR_MEM_CONTEXT_END()
#define TEST_ERROR_MEM_CONTEXT_BEGIN()
#define TEST_ERROR_MEM_CONTEXT_END()
#endif
#define TEST_ERROR(statement, errorTypeExpected, errorMessageExpected) \

View File

@ -23,7 +23,7 @@ testRun(void)
TEST_RESULT_STR_Z(
bldCfgRenderLabel(STRDEF("line"), true, STRDEF("label")),
"line "
" // label",
" // label",
"render");
// -------------------------------------------------------------------------------------------------------------------------
@ -33,10 +33,10 @@ testRun(void)
bldCfgRenderLabel(
STRDEF(
"12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"
"12345678901234567890"),
"12345678901234567890"),
true, STRDEF("label")),
"1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678"
"901234567890",
"901234567890",
"render");
// -------------------------------------------------------------------------------------------------------------------------

View File

@ -68,8 +68,8 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.5\"}\n",
.comment = "write backup info - stanza1, repo1");
// Add backup manifest

View File

@ -214,13 +214,13 @@ testRun(void)
TEST_ERROR(
walPath(STRDEF("relative/path"), pgPathLink, STRDEF("test")), OptionInvalidValueError,
"PostgreSQL working directory '/' is not the same as option pg1-path '" TEST_PATH "/pg-link'\n"
"HINT: is the PostgreSQL data_directory configured the same as the pg1-path option?");
"HINT: is the PostgreSQL data_directory configured the same as the pg1-path option?");
TEST_ERROR(
walPath(STRDEF("relative/path"), NULL, STRDEF("test")), OptionRequiredError,
"option 'pg1-path' must be specified when relative wal paths are used\n"
"HINT: is %f passed to test instead of %p?\n"
"HINT: PostgreSQL may pass relative paths even with %p depending on the environment.");
"HINT: is %f passed to test instead of %p?\n"
"HINT: PostgreSQL may pass relative paths even with %p depending on the environment.");
}
// *****************************************************************************************************************************
@ -284,9 +284,9 @@ testRun(void)
walSegmentFind(storageRepo(), STRDEF("9.6-2"), STRDEF("123456781234567812345678"), 0),
ArchiveDuplicateError,
"duplicates found in archive for WAL segment 123456781234567812345678:"
" 123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
", 123456781234567812345678-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.gz"
"\nHINT: are multiple primaries archiving to this stanza?");
" 123456781234567812345678-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
", 123456781234567812345678-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.gz\n"
"HINT: are multiple primaries archiving to this stanza?");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("partial not found");

View File

@ -170,7 +170,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo/archive/test2/10-1"
"/0000000100000001': [13] Permission denied\n"
"/0000000100000001': [13] Permission denied\n"
"P00 WARN: [RepoInvalidError] unable to find a valid repository");
TEST_STORAGE_GET(
@ -178,7 +178,7 @@ testRun(void)
"103\n"
"unable to find a valid repository\n"
"repo1: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo/archive/test2/10-1/0000000100000001':"
" [13] Permission denied",
" [13] Permission denied",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -196,14 +196,14 @@ testRun(void)
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: [FileReadError] raised from local-1 shim protocol: unable to get 000000010000000100000001:\n"
" repo1: 10-1/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data");
" [FormatError] unexpected eof in compressed data");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.error",
"42\n"
"raised from local-1 shim protocol: unable to get 000000010000000100000001:\n"
"repo1: 10-1/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_LIST(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000100000001.pgbackrest.tmp\n");
@ -251,14 +251,14 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"P01 DETAIL: found 000000010000000100000001 in the repo1: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok",
"0\n"
"repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -289,14 +289,14 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000100000001\n"
"P01 WARN: repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"P01 DETAIL: found 000000010000000100000001 in the repo1: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001.ok",
"0\n"
"repo1: 10-2/0000000100000001/000000010000000100000001-abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz [FormatError]"
" unexpected eof in compressed data",
" unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000100000001", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -338,7 +338,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 3 WAL file(s) from archive: 0000000100000001000000FE...000000010000000200000000\n"
"P00 WARN: repo2: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1: 10-1 archive\n"
"P00 DETAIL: unable to find 0000000100000001000000FF in the archive");
@ -379,14 +379,14 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 3 WAL file(s) from archive: 0000000100000001000000FE...000000010000000200000000\n"
"P00 WARN: repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test2/10-1"
"/0000000100000001': [13] Permission denied\n"
"/0000000100000001': [13] Permission denied\n"
"P00 WARN: repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test2/10-1"
"/0000000100000002': [13] Permission denied\n"
"/0000000100000002': [13] Permission denied\n"
"P01 DETAIL: found 0000000100000001000000FE in the repo1: 10-1 archive\n"
"P01 DETAIL: found 0000000100000001000000FF in the repo1: 10-1 archive\n"
"P00 WARN: [ArchiveDuplicateError] duplicates found for WAL segment 000000010000000200000000:\n"
" repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
", 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
", 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
" HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE", .remove = true);
@ -394,24 +394,24 @@ testRun(void)
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FE.ok",
"0\n"
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test2/10-1/0000000100000001':"
" [13] Permission denied",
" [13] Permission denied",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FF", .remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/0000000100000001000000FF.ok",
"0\n"
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test2/10-1/0000000100000001':"
" [13] Permission denied",
" [13] Permission denied",
.remove = true);
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.error",
"45\n"
"duplicates found for WAL segment 000000010000000200000000:\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, 10-1/0000000100000002"
"/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"HINT: are multiple primaries archiving to this stanza?\n"
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test2/10-1" \
"/0000000100000002': [13] Permission denied",
"/0000000100000002': [13] Permission denied",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -430,7 +430,7 @@ testRun(void)
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: [ArchiveDuplicateError] duplicates found for WAL segment 000000010000000200000000:\n"
" repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
", 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
", 10-1/0000000100000002/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
" HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_GET(
@ -438,7 +438,7 @@ testRun(void)
"45\n"
"duplicates found for WAL segment 000000010000000200000000:\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, 10-1/0000000100000002"
"/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"/000000010000000200000000-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"HINT: are multiple primaries archiving to this stanza?",
.remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -480,18 +480,18 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: repo3: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
"P01 WARN: repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"P01 DETAIL: found 000000010000000200000000 in the repo2: 10-1 archive");
TEST_STORAGE_GET(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000.ok",
"0\n"
"repo3: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data",
" [FormatError] unexpected eof in compressed data",
.remove = true);
TEST_STORAGE_GET_EMPTY(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN "/000000010000000200000000", .remove = true);
TEST_STORAGE_LIST_EMPTY(storageSpool(), STORAGE_SPOOL_ARCHIVE_IN);
@ -514,12 +514,12 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: get 1 WAL file(s) from archive: 000000010000000200000000\n"
"P00 WARN: repo3: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
" '" HRN_PG_SYSTEMID_10_Z "'\n"
"P01 WARN: [FileReadError] raised from local-1 shim protocol: unable to get 000000010000000200000000:\n"
" repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
" repo2: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
" [FileReadError] on retry after 0ms");
TEST_STORAGE_GET(
@ -527,12 +527,12 @@ testRun(void)
"42\n"
"raised from local-1 shim protocol: unable to get 000000010000000200000000:\n"
"repo1: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"repo2: 10-1/0000000100000002/000000010000000200000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"[FileReadError] on retry after 0ms\n"
"repo3: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_Z "'",
" '" HRN_PG_SYSTEMID_10_Z "'",
.remove = true);
TEST_STORAGE_LIST(
storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "000000010000000200000000.pgbackrest.tmp\n", .remove = true);
@ -563,8 +563,8 @@ testRun(void)
TEST_RESULT_STR_Z(
strNewBuf(storageGetP(storageNewReadP(storageSpool(), STRDEF(STORAGE_SPOOL_ARCHIVE_IN "/global.error")))),
"102\nlocal-1 process terminated unexpectedly [102]: unable to execute 'pgbackrest-bogus': "
"[2] No such file or directory",
"102\nlocal-1 process terminated unexpectedly [102]: unable to execute 'pgbackrest-bogus':"
" [2] No such file or directory",
"check global error");
TEST_STORAGE_LIST(storageSpoolWrite(), STORAGE_SPOOL_ARCHIVE_IN, "global.error\n", .remove = true);
@ -618,16 +618,16 @@ testRun(void)
TEST_ERROR(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
TEST_RESULT_LOG(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/test1/archive.info' or '"
TEST_PATH "/repo/archive/test1/archive.info.copy':\n"
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/test1/archive.info' or"
" '" TEST_PATH "/repo/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info.copy' for"
" read\n"
" read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.");
" scheme.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("no valid repo - async");
@ -641,16 +641,16 @@ testRun(void)
TEST_ERROR(cmdArchiveGet(), RepoInvalidError, "unable to find a valid repository");
TEST_RESULT_LOG(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/test1/archive.info' or '"
TEST_PATH "/repo/archive/test1/archive.info.copy':\n"
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/test1/archive.info' or"
" '" TEST_PATH "/repo/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info.copy' for"
" read\n"
" read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate archiving"
" scheme.");
" scheme.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("WAL not found - timeout");
@ -793,7 +793,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '11' and system-id"
" '" HRN_PG_SYSTEMID_11_Z "'");
" '" HRN_PG_SYSTEMID_11_Z "'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("pg system id does not match archive.info");
@ -804,7 +804,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: [ArchiveMismatchError] unable to retrieve the archive id for database version '10' and system-id"
" '" HRN_PG_SYSTEMID_10_1_Z "'");
" '" HRN_PG_SYSTEMID_10_1_Z "'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("file is missing");
@ -845,7 +845,7 @@ testRun(void)
cmdArchiveGet(), ArchiveDuplicateError,
"duplicates found for WAL segment 01ABCDEF01ABCDEF01ABCDEF:\n"
"repo1: 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
", 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
", 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n"
"HINT: are multiple primaries archiving to this stanza?");
TEST_STORAGE_LIST(storagePg(), "pg_wal", NULL);
@ -979,16 +979,16 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: [FileMissingError] unable to load info file '" TEST_PATH "/repo-bogus/archive/test1/archive.info'"
" or '" TEST_PATH "/repo-bogus/archive/test1/archive.info.copy':\n"
" or '" TEST_PATH "/repo-bogus/archive/test1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo-bogus/archive/test1/archive.info'"
" for read\n"
" for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo-bogus/archive/test1/archive.info.copy'"
" for read\n"
" for read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate"
" archiving scheme.\n"
" archiving scheme.\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
TEST_RESULT_UINT(storageInfoP(storagePg(), STRDEF("pg_wal/RECOVERYXLOG")).size, 16 * 1024 * 1024, "check size");
@ -1011,7 +1011,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo-bogus/archive/test1/10-2"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
TEST_STORAGE_LIST(storagePgWrite(), "pg_wal", "RECOVERYXLOG\n", .remove = true);
@ -1025,9 +1025,9 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo-bogus/archive/test1/10-2"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"/01ABCDEF01ABCDEF': [13] Permission denied\n"
"P00 WARN: repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo/archive/test1/10-1"
"/01ABCDEF01ABCDEF': [13] Permission denied");
"/01ABCDEF01ABCDEF': [13] Permission denied");
HRN_STORAGE_MODE(storageRepoIdxWrite(0), STORAGE_REPO_ARCHIVE "/10-2");
HRN_STORAGE_MODE(storageRepoIdxWrite(1), STORAGE_REPO_ARCHIVE "/10-1");
@ -1043,7 +1043,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: 10-2/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz"
" [FormatError] unexpected eof in compressed data\n"
" [FormatError] unexpected eof in compressed data\n"
"P00 INFO: found 01ABCDEF01ABCDEF01ABCDEF in the repo2: 10-1 archive");
// -------------------------------------------------------------------------------------------------------------------------
@ -1057,10 +1057,10 @@ testRun(void)
cmdArchiveGet(), FileReadError,
"unable to get 01ABCDEF01ABCDEF01ABCDEF:\n"
"repo1: 10-2/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz [FormatError]"
" unexpected eof in compressed data\n"
" unexpected eof in compressed data\n"
"repo2: 10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz [FileOpenError]"
" unable to open file '" TEST_PATH "/repo/archive/test1/10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF"
"-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz' for read: [13] Permission denied");
" unable to open file '" TEST_PATH "/repo/archive/test1/10-1/01ABCDEF01ABCDEF/01ABCDEF01ABCDEF01ABCDEF"
"-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.gz' for read: [13] Permission denied");
HRN_STORAGE_MODE(
storageRepoIdxWrite(1),

View File

@ -119,8 +119,8 @@ testRun(void)
archivePushCheck(true), RepoInvalidError,
"unable to find a valid repository:\n"
"repo1: [ArchiveMismatchError] PostgreSQL version 9.6, system-id " HRN_PG_SYSTEMID_96_Z " do not match repo1 stanza"
" version 9.4, system-id 5555555555555555555"
"\nHINT: are you archiving to the correct stanza?");
" version 9.4, system-id 5555555555555555555\n"
"HINT: are you archiving to the correct stanza?");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("mismatched pg_control and archive.info - system-id");
@ -138,8 +138,8 @@ testRun(void)
archivePushCheck(true), RepoInvalidError,
"unable to find a valid repository:\n"
"repo1: [ArchiveMismatchError] PostgreSQL version 9.6, system-id " HRN_PG_SYSTEMID_96_Z " do not match repo1 stanza"
" version 9.6, system-id 5555555555555555555"
"\nHINT: are you archiving to the correct stanza?");
" version 9.6, system-id 5555555555555555555\n"
"HINT: are you archiving to the correct stanza?");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("pg_control and archive.info match");
@ -199,7 +199,7 @@ testRun(void)
TEST_RESULT_STRLST_Z(
result.errorList,
"repo4: [ArchiveMismatchError] repo2 stanza version 9.6, system-id " HRN_PG_SYSTEMID_96_Z " do not match repo4 stanza"
" version 9.4, system-id 5555555555555555555\n"
" version 9.4, system-id 5555555555555555555\n"
"HINT: are you archiving to the correct stanza?\n",
"check error list");
@ -310,7 +310,7 @@ testRun(void)
TEST_ERROR(
cmdArchivePush(), ArchiveMismatchError,
"WAL file '" TEST_PATH "/pg/pg_wal/000000010000000100000001' version 10, system-id " HRN_PG_SYSTEMID_10_Z " do not"
" match stanza version 11, system-id " HRN_PG_SYSTEMID_11_Z "");
" match stanza version 11, system-id " HRN_PG_SYSTEMID_11_Z "");
memset(bufPtr(walBuffer1), 0, bufSize(walBuffer1));
hrnPgWalToBuffer((PgWal){.version = PG_VERSION_11, .systemId = 1}, walBuffer1);
@ -321,7 +321,7 @@ testRun(void)
TEST_ERROR(
cmdArchivePush(), ArchiveMismatchError,
"WAL file '" TEST_PATH "/pg/pg_wal/000000010000000100000001' version 11, system-id " HRN_PG_SYSTEMID_11_1_Z " do not"
" match stanza version 11, system-id " HRN_PG_SYSTEMID_11_Z);
" match stanza version 11, system-id " HRN_PG_SYSTEMID_11_Z);
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("push by ignoring the invalid header");
@ -522,7 +522,7 @@ testRun(void)
cmdArchivePush(), CommandError,
"archive-push command encountered error(s):\n"
"repo2: [FileOpenError] unable to open file '" TEST_PATH "/repo2/archive/test/11-1/0000000100000001"
"/000000010000000100000002-%s' for write: [13] Permission denied",
"/000000010000000100000002-%s' for write: [13] Permission denied",
walBuffer2Sha1);
TEST_STORAGE_LIST_EMPTY(storageTest, "repo2/archive/test/11-1/0000000100000001", .comment = "check repo2 for no WAL file");
@ -571,10 +571,10 @@ testRun(void)
cmdArchivePush(), CommandError,
"archive-push command encountered error(s):\n"
"repo2: [FileOpenError] unable to load info file '" TEST_PATH "/repo2/archive/test/archive.info' or"
" '" TEST_PATH "/repo2/archive/test/archive.info.copy':\n"
" '" TEST_PATH "/repo2/archive/test/archive.info.copy':\n"
"FileOpenError: unable to open file '" TEST_PATH "/repo2/archive/test/archive.info' for read: [13] Permission denied\n"
"FileOpenError: unable to open file '" TEST_PATH "/repo2/archive/test/archive.info.copy' for read:"
" [13] Permission denied\n"
" [13] Permission denied\n"
"HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
"HINT: is archive_command configured correctly in postgresql.conf?\n"
"HINT: has a stanza-create been performed?\n"
@ -596,7 +596,7 @@ testRun(void)
cmdArchivePush(), CommandError,
"archive-push command encountered error(s):\n"
"repo2: [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/archive/test/11-1/0000000100000001':"
" [13] Permission denied");
" [13] Permission denied");
// Make sure WAL got pushed to repo3
TEST_STORAGE_EXISTS(
@ -837,7 +837,7 @@ testRun(void)
" HINT: this is valid in some recovery scenarios but may also indicate a problem.\n"
"P01 DETAIL: pushed WAL file '000000010000000100000001' to the archive\n"
"P01 WARN: could not push WAL file '000000010000000100000002' to the archive (will be retried): "
"[55] raised from local-1 shim protocol: " STORAGE_ERROR_READ_MISSING "\n"
"[55] raised from local-1 shim protocol: " STORAGE_ERROR_READ_MISSING "\n"
" [FileMissingError] on retry after 0ms",
TEST_PATH "/pg/pg_xlog/000000010000000100000002");

View File

@ -956,35 +956,69 @@ testRun(void)
BlockMap *blockMap = NULL;
TEST_ASSIGN(blockMap, blockMapNew(), "new");
BlockMapItem blockMapItem1 = {
.reference = 128, .size = 3,
.checksum = {255, 128, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem1 =
{
.reference = 128,
.size = 3,
.checksum = {255, 128, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem1)->reference, 128, "add");
TEST_RESULT_UINT(blockMapGet(blockMap, 0)->reference, 128, "get");
BlockMapItem blockMapItem2 = {
.reference = 0, .bundleId = 56, .offset = 200000000, .size = 127,
.checksum = {255, 0, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem2 =
{
.reference = 0,
.bundleId = 56,
.offset = 200000000,
.size = 127,
.checksum = {255, 0, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem2)->reference, blockMapItem2.reference, "add");
BlockMapItem blockMapItem5 = {
.reference = 1024, .bundleId = 1024, .offset = 1024, .size = 1024,
.checksum = {255, 102, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem5 =
{
.reference = 1024,
.bundleId = 1024,
.offset = 1024,
.size = 1024,
.checksum = {255, 102, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem5)->reference, blockMapItem5.reference, "add");
BlockMapItem blockMapItem3 = {
.reference = blockMapItem1.reference, .bundleId = blockMapItem1.bundleId, .offset = blockMapItem1.offset + 129,
.size = 9, .checksum = {255, 129, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem3 =
{
.reference = blockMapItem1.reference,
.bundleId = blockMapItem1.bundleId,
.offset = blockMapItem1.offset + 129,
.size = 9,
.checksum = {255, 129, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem3)->reference, blockMapItem3.reference, "add");
BlockMapItem blockMapItem4 = {
.reference = blockMapItem2.reference, .bundleId = blockMapItem2.bundleId, .offset = blockMapItem2.offset + 129,
.size = 10, .checksum = {255, 1, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem4 =
{
.reference = blockMapItem2.reference,
.bundleId = blockMapItem2.bundleId,
.offset = blockMapItem2.offset + 129,
.size = 10,
.checksum = {255, 1, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem4)->reference, blockMapItem4.reference, "add");
BlockMapItem blockMapItem6 = {
.reference = blockMapItem4.reference, .bundleId = blockMapItem4.bundleId, .offset = blockMapItem4.offset + 10,
.size = 11, .checksum = {255, 2, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255}};
BlockMapItem blockMapItem6 =
{
.reference = blockMapItem4.reference,
.bundleId = blockMapItem4.bundleId,
.offset = blockMapItem4.offset + 10,
.size = 11,
.checksum = {255, 2, 14, 13, 12, 11, 10, 9, 8, 7, 7, 8, 9, 10, 11, 12, 13, 14, 255, 255},
};
TEST_RESULT_UINT(blockMapAdd(blockMap, &blockMapItem6)->reference, blockMapItem6.reference, "add");
// -------------------------------------------------------------------------------------------------------------------------
@ -1752,13 +1786,13 @@ testRun(void)
backupInit(infoBackupNew(PG_VERSION_11, HRN_PG_SYSTEMID_11, hrnPgCatalogVersion(PG_VERSION_11), NULL)),
BackupMismatchError,
"PostgreSQL version 10, system-id " HRN_PG_SYSTEMID_10_Z " do not match stanza version 11, system-id"
" " HRN_PG_SYSTEMID_11_Z "\n"
" " HRN_PG_SYSTEMID_11_Z "\n"
"HINT: is this the correct stanza?");
TEST_ERROR(
backupInit(infoBackupNew(PG_VERSION_10, HRN_PG_SYSTEMID_11, hrnPgCatalogVersion(PG_VERSION_10), NULL)),
BackupMismatchError,
"PostgreSQL version 10, system-id " HRN_PG_SYSTEMID_10_Z " do not match stanza version 10, system-id"
" " HRN_PG_SYSTEMID_11_Z "\n"
" " HRN_PG_SYSTEMID_11_Z "\n"
"HINT: is this the correct stanza?");
// -------------------------------------------------------------------------------------------------------------------------
@ -1962,7 +1996,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: backup '20191003-105320F' cannot be resumed: unable to read"
" <REPO:BACKUP>/20191003-105320F/backup.manifest.copy");
" <REPO:BACKUP>/20191003-105320F/backup.manifest.copy");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("cannot resume when pgBackRest version has changed");
@ -1993,7 +2027,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: backup '20191003-105320F' cannot be resumed:"
" new pgBackRest version 'BOGUS' does not match resumable pgBackRest version '" PROJECT_VERSION "'");
" new pgBackRest version 'BOGUS' does not match resumable pgBackRest version '" PROJECT_VERSION "'");
TEST_STORAGE_LIST_EMPTY(storageRepo(), STORAGE_REPO_BACKUP, .comment = "check backup path removed");
@ -2015,7 +2049,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: backup '20191003-105320F' cannot be resumed:"
" new prior backup label '<undef>' does not match resumable prior backup label '20191003-105320F'");
" new prior backup label '<undef>' does not match resumable prior backup label '20191003-105320F'");
TEST_STORAGE_LIST_EMPTY(storageRepo(), STORAGE_REPO_BACKUP, .comment = "check backup path removed");
@ -2037,7 +2071,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: backup '20191003-105320F' cannot be resumed:"
" new prior backup label '20191003-105320F' does not match resumable prior backup label '<undef>'");
" new prior backup label '20191003-105320F' does not match resumable prior backup label '<undef>'");
TEST_STORAGE_LIST_EMPTY(storageRepo(), STORAGE_REPO_BACKUP, .comment = "check backup path removed");
@ -2058,7 +2092,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: backup '20191003-105320F' cannot be resumed:"
" new compression 'none' does not match resumable compression 'gz'");
" new compression 'none' does not match resumable compression 'gz'");
TEST_STORAGE_LIST_EMPTY(storageRepo(), STORAGE_REPO_BACKUP, .comment = "check backup path removed");
@ -2167,7 +2201,7 @@ testRun(void)
TEST_ERROR(
hrnCmdBackup(), PgRunningError,
"--no-online passed but " PG_FILE_POSTMTRPID " exists - looks like " PG_NAME " is running. Shut down " PG_NAME " and"
" try again, or use --force.");
" try again, or use --force.");
TEST_RESULT_LOG("P00 WARN: no prior backup exists, incr backup has been changed to full");
@ -2191,10 +2225,10 @@ testRun(void)
TEST_RESULT_LOG_FMT(
"P00 WARN: no prior backup exists, incr backup has been changed to full\n"
"P00 WARN: --no-online passed and " PG_FILE_POSTMTRPID " exists but --force was passed so backup will continue though"
" it looks like " PG_NAME " is running and the backup will probably not be consistent\n"
" it looks like " PG_NAME " is running and the backup will probably not be consistent\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/global/pg_control (8KB, 99.86%%) checksum %s\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/postgresql.conf (11B, 100.00%%) checksum"
" e3db315c260e79211b7b52587123b7aa060f30ab\n"
" e3db315c260e79211b7b52587123b7aa060f30ab\n"
"P00 INFO: new backup label = [FULL-1]\n"
"P00 INFO: full backup size = 8KB, file total = 2",
TEST_64BIT() ?
@ -2457,7 +2491,7 @@ testRun(void)
"P00 INFO: execute exclusive backup start: backup begins after the next regular checkpoint completes\n"
"P00 INFO: backup start archive = 0000000105D944C000000000, lsn = 5d944c0/0\n"
"P00 WARN: resumable backup 20191002-070640F of same type exists -- invalid files will be removed then the backup"
" will resume\n"
" will resume\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/global/pg_control (8KB, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/postgresql.conf (11B, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: checksum resumed file " TEST_PATH "/pg1/PG_VERSION (3B, [PCT]) checksum [SHA1]\n"
@ -2481,10 +2515,10 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"06d06bb31b570b94d7b4325f511f853dbe771c21\",\"size\":3"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1570000000}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\",\"size\":11"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -2598,23 +2632,23 @@ testRun(void)
"P00 INFO: backup start archive = 0000000105D95D3000000000, lsn = 5d95d30/0\n"
"P00 INFO: check archive for prior segment 0000000105D95D2F000000FF\n"
"P00 WARN: resumable backup 20191003-105320F of same type exists -- invalid files will be removed then the backup"
" will resume\n"
" will resume\n"
"P00 DETAIL: remove path '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/bogus_path' from resumed"
" backup\n"
" backup\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/global/bogus' from resumed"
" backup (mismatched compression type)\n"
" backup (mismatched compression type)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/global/bogus.gz' from resumed"
" backup (missing in manifest)\n"
" backup (missing in manifest)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/global/pg_control.gz' from"
" resumed backup (no checksum in resumed manifest)\n"
" resumed backup (no checksum in resumed manifest)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/not-in-resume.gz' from resumed"
" backup (missing in resumed manifest)\n"
" backup (missing in resumed manifest)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/size-mismatch.gz' from resumed"
" backup (mismatched size)\n"
" backup (mismatched size)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/time-mismatch.gz' from resumed"
" backup (mismatched timestamp)\n"
" backup (mismatched timestamp)\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F/pg_data/zero-size.gz' from resumed"
" backup (zero size)\n"
" backup (zero size)\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/global/pg_control (8KB, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/postgresql.conf (11B, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/time-mismatch (4B, [PCT]) checksum [SHA1]\n"
@ -2649,17 +2683,17 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"06d06bb31b570b94d7b4325f511f853dbe771c21\",\"size\":3"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1570100000}\n"
"pg_data/not-in-resume={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570100000}\n"
",\"timestamp\":1570100000}\n"
"pg_data/pg_xlog/0000000105D95D3000000000={\"size\":16777216,\"timestamp\":1570100002}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\",\"size\":11"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"pg_data/size-mismatch={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570100000}\n"
",\"timestamp\":1570100000}\n"
"pg_data/time-mismatch={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570100000}\n"
",\"timestamp\":1570100000}\n"
"pg_data/zero-size={\"size\":0,\"timestamp\":1570100000}\n"
"\n"
"[target:path]\n"
@ -2778,25 +2812,25 @@ testRun(void)
"P00 INFO: backup start archive = 0000000105D9759000000000, lsn = 5d97590/0\n"
"P00 INFO: check archive for prior segment 0000000105D9758F000000FF\n"
"P00 WARN: file 'time-mismatch2' has timestamp (1570200100) in the future (relative to copy start 1570200000),"
" enabling delta checksum\n"
" enabling delta checksum\n"
"P00 WARN: resumable backup 20191003-105320F_20191004-144000D of same type exists -- invalid files will be"
" removed then the backup will resume\n"
" removed then the backup will resume\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F_20191004-144000D/pg_data/PG_VERSION.gz'"
" from resumed backup (reference in manifest)\n"
" from resumed backup (reference in manifest)\n"
"P00 WARN: remove special file '" TEST_PATH "/repo/backup/test1/20191003-105320F_20191004-144000D/pg_data/pipe'"
" from resumed backup\n"
" from resumed backup\n"
"P00 DETAIL: remove file '" TEST_PATH "/repo/backup/test1/20191003-105320F_20191004-144000D/pg_data/resume-ref.gz'"
" from resumed backup (reference in resumed manifest)\n"
" from resumed backup (reference in resumed manifest)\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/global/pg_control (8KB, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: match file from prior backup " TEST_PATH "/pg1/postgresql.conf (11B, [PCT]) checksum [SHA1]\n"
"P00 WARN: resumed backup file pg_data/time-mismatch2 does not have expected checksum"
" 984816fd329622876e14907634264e6f332e9fb3. The file will be recopied and backup will continue but this may be"
" an issue unless the resumed backup path in the repository is known to be corrupted.\n"
" 984816fd329622876e14907634264e6f332e9fb3. The file will be recopied and backup will continue but this may be"
" an issue unless the resumed backup path in the repository is known to be corrupted.\n"
" NOTE: this does not indicate a problem with the PostgreSQL page checksums.\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/time-mismatch2 (4B, [PCT]) checksum [SHA1]\n"
"P00 WARN: resumed backup file pg_data/repo-size-mismatch does not have expected checksum"
" 984816fd329622876e14907634264e6f332e9fb3. The file will be recopied and backup will continue but this may be"
" an issue unless the resumed backup path in the repository is known to be corrupted.\n"
" 984816fd329622876e14907634264e6f332e9fb3. The file will be recopied and backup will continue but this may be"
" an issue unless the resumed backup path in the repository is known to be corrupted.\n"
" NOTE: this does not indicate a problem with the PostgreSQL page checksums.\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/repo-size-mismatch (4B, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/content-mismatch (4B, [PCT]) checksum [SHA1]\n"
@ -2806,7 +2840,7 @@ testRun(void)
"P00 DETAIL: hardlink pg_data/postgresql.conf to 20191003-105320F\n"
"P00 INFO: execute exclusive backup stop and wait for all WAL segments to archive\n"
"P00 INFO: backup stop archive = 0000000105D9759000000000, lsn = 5d97590/800000\n"
"P00 INFO: check archive for segment(s) 0000000105D9759000000000:0000000105D9759000000000\n"
"P00 INFO: check archive for segment(s) 0000000105D9759000000000:0000000105D9759000000000\n"
"P00 INFO: new backup label = 20191003-105320F_20191004-144000D\n"
"P00 INFO: diff backup size = [SIZE], file total = 7");
@ -2830,17 +2864,17 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"06d06bb31b570b94d7b4325f511f853dbe771c21\",\"reference\":\"20191003-105320F\""
",\"size\":3,\"timestamp\":1570000000}\n"
",\"size\":3,\"timestamp\":1570000000}\n"
"pg_data/content-mismatch={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570200000}\n"
",\"timestamp\":1570200000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1570200000}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\""
",\"reference\":\"20191003-105320F\",\"size\":11,\"timestamp\":1570000000}\n"
",\"reference\":\"20191003-105320F\",\"size\":11,\"timestamp\":1570000000}\n"
"pg_data/repo-size-mismatch={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570200000}\n"
",\"timestamp\":1570200000}\n"
"pg_data/resume-ref={\"size\":0,\"timestamp\":1570200000}\n"
"pg_data/time-mismatch2={\"checksum\":\"984816fd329622876e14907634264e6f332e9fb3\",\"size\":4"
",\"timestamp\":1570200100}\n"
",\"timestamp\":1570200100}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -2989,16 +3023,16 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"f5b7e6d36dc0113f61b36c700817d42b96f7b037\",\"size\":3"
",\"timestamp\":1571200000}\n"
",\"timestamp\":1571200000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1571200002}\n"
",\"timestamp\":1571200002}\n"
"pg_data/base/1/1={\"size\":0,\"timestamp\":1571200000}\n"
"pg_data/base/1/2={\"checksum\":\"54ceb91256e8190e474aa752a6e0650a2df5ba37\",\"size\":2,\"timestamp\":1571200000}\n"
"pg_data/base/1/3={\"checksum\":\"3c01bdbb26f358bab27f267924aa2c9a03fcfdb8\",\"size\":3,\"timestamp\":1571200000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1571200000}\n"
"pg_data/pg_xlog/0000000105DA69C000000000={\"size\":16777216,\"timestamp\":1571200002}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\",\"size\":11"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -3141,7 +3175,7 @@ testRun(void)
"P00 WARN: invalid page checksum found in file " TEST_PATH "/pg1/base/1/4 at page 1\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/base/1/2 (8.5KB, [PCT]) checksum [SHA1]\n"
"P00 WARN: page misalignment in file " TEST_PATH "/pg1/base/1/2: file size 8704 is not divisible by page size"
" 8192\n"
" 8192\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/global/pg_control (8KB, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/base/1/1 (8KB, [PCT]) checksum [SHA1]\n"
"P01 DETAIL: backup file " TEST_PATH "/pg1/postgresql.conf (11B, [PCT]) checksum [SHA1]\n"
@ -3188,28 +3222,28 @@ testRun(void)
"[backup:target]\n"
"pg_data={\"path\":\"" TEST_PATH "/pg1\",\"type\":\"path\"}\n"
"pg_tblspc/32768={\"path\":\"../../pg1-tblspc/32768\",\"tablespace-id\":\"32768\""
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"size\":2"
",\"timestamp\":1572200000}\n"
",\"timestamp\":1572200000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1572200002}\n"
",\"timestamp\":1572200002}\n"
"pg_data/base/1/1={\"checksum\":\"0631457264ff7f8d5fb1edc2c0211992a67c73e6\",\"checksum-page\":true"
",\"size\":8192,\"timestamp\":1572200000}\n"
",\"size\":8192,\"timestamp\":1572200000}\n"
"pg_data/base/1/2={\"checksum\":\"%s\",\"checksum-page\":false,\"size\":8704,\"timestamp\":1572200000}\n"
"pg_data/base/1/3={\"checksum\":\"%s\",\"checksum-page\":false,\"checksum-page-error\":[0,[2,4]]"
",\"size\":40960,\"timestamp\":1572200000}\n"
",\"size\":40960,\"timestamp\":1572200000}\n"
"pg_data/base/1/4={\"checksum\":\"%s\",\"checksum-page\":false,\"checksum-page-error\":[1],\"size\":24576"
",\"timestamp\":1572200000}\n"
",\"timestamp\":1572200000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1572200000}\n"
"pg_data/pg_wal/0000000105DB5DE000000000={\"size\":1048576,\"timestamp\":1572200002}\n"
"pg_data/pg_wal/0000000105DB5DE000000001={\"size\":1048576,\"timestamp\":1572200002}\n"
"pg_data/pg_wal/0000000105DB5DE000000002={\"size\":1048576,\"timestamp\":1572200002}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\",\"size\":11"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1572200002}\n"
",\"timestamp\":1572200002}\n"
"pg_tblspc/32768/PG_11_201809051/1/5={\"checksum-page\":true,\"size\":0,\"timestamp\":1572200000}\n"
"\n"
"[target:link]\n"
@ -3343,22 +3377,22 @@ testRun(void)
"[backup:target]\n"
"pg_data={\"path\":\"" TEST_PATH "/pg1\",\"type\":\"path\"}\n"
"pg_tblspc/32768={\"path\":\"../../pg1-tblspc/32768\",\"tablespace-id\":\"32768\""
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"reference\":\"20191027-181320F\""
",\"size\":2,\"timestamp\":1572200000}\n"
",\"size\":2,\"timestamp\":1572200000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1572400002}\n"
",\"timestamp\":1572400002}\n"
"pg_data/base/1/1={\"checksum\":\"0631457264ff7f8d5fb1edc2c0211992a67c73e6\",\"checksum-page\":true"
",\"reference\":\"20191027-181320F\",\"size\":8192,\"timestamp\":1572200000}\n"
",\"reference\":\"20191027-181320F\",\"size\":8192,\"timestamp\":1572200000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1572400000}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\""
",\"reference\":\"20191027-181320F\",\"size\":11,\"timestamp\":1570000000}\n"
",\"reference\":\"20191027-181320F\",\"size\":11,\"timestamp\":1570000000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1572400002}\n"
",\"timestamp\":1572400002}\n"
"pg_tblspc/32768/PG_11_201809051/1/5={\"checksum-page\":true,\"reference\":\"20191027-181320F\",\"size\":0"
",\"timestamp\":1572200000}\n"
",\"timestamp\":1572200000}\n"
"\n"
"[target:link]\n"
"pg_data/pg_tblspc/32768={\"destination\":\"../../pg1-tblspc/32768\"}\n"
@ -3476,33 +3510,33 @@ testRun(void)
"[backup:target]\n"
"pg_data={\"path\":\"" TEST_PATH "/pg1\",\"type\":\"path\"}\n"
"pg_tblspc/32768={\"path\":\"../../pg1-tblspc/32768\",\"tablespace-id\":\"32768\""
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
",\"tablespace-name\":\"tblspc32768\",\"type\":\"link\"}\n"
"\n"
"[metadata]\n"
"annotation={\"extra key\":\"this is an annotation\",\"source\":\"this is another annotation\"}\n"
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"size\":2"
",\"timestamp\":1572200000}\n"
",\"timestamp\":1572200000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1572400002}\n"
",\"timestamp\":1572400002}\n"
"pg_data/base/1/1={\"checksum\":\"0631457264ff7f8d5fb1edc2c0211992a67c73e6\",\"checksum-page\":true,\"size\":8192"
",\"timestamp\":1572200000}\n"
",\"timestamp\":1572200000}\n"
"pg_data/base/1/2={\"checksum\":\"ebdd38b69cd5b9f2d00d273c981e16960fbbb4f7\",\"checksum-page\":true,\"size\":24576"
",\"timestamp\":1572400000}\n"
",\"timestamp\":1572400000}\n"
"pg_data/bigish.dat={\"checksum\":\"3e5175386be683d2f231f3fa3eab892a799082f7\",\"size\":8191"
",\"timestamp\":1500000001}\n"
",\"timestamp\":1500000001}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1572400000}\n"
"pg_data/pg_wal/0000000105DB8EB000000000={\"size\":1048576,\"timestamp\":1572400002}\n"
"pg_data/pg_wal/0000000105DB8EB000000001={\"size\":1048576,\"timestamp\":1572400002}\n"
"pg_data/postgresql.auto.conf={\"checksum\":\"e873a5cb5a67e48761e7b619c531311404facdce\",\"size\":12"
",\"timestamp\":1500000000}\n"
",\"timestamp\":1500000000}\n"
"pg_data/postgresql.conf={\"checksum\":\"e3db315c260e79211b7b52587123b7aa060f30ab\",\"size\":11"
",\"timestamp\":1570000000}\n"
",\"timestamp\":1570000000}\n"
"pg_data/stuff.conf={\"checksum\":\"55a9d0d18b77789c7722abe72aa905e2dc85bb5d\",\"size\":12"
",\"timestamp\":1500000000}\n"
",\"timestamp\":1500000000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1572400002}\n"
",\"timestamp\":1572400002}\n"
"pg_data/zero={\"size\":0,\"timestamp\":1572400000}\n"
"pg_tblspc/32768/PG_11_201809051/1/5={\"checksum-page\":true,\"size\":0,\"timestamp\":1572200000}\n"
"\n"
@ -3590,12 +3624,12 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"reference\":\"20191030-014640F\""
",\"size\":2,\"timestamp\":1572600000}\n"
",\"size\":2,\"timestamp\":1572600000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1572600002}\n"
",\"timestamp\":1572600002}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1572600000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1572600002}\n"
",\"timestamp\":1572600002}\n"
"pg_data/zero={\"size\":0,\"timestamp\":1572600000}\n"
"\n"
"[target:path]\n"
@ -3692,18 +3726,18 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"size\":2"
",\"timestamp\":1572800000}\n"
",\"timestamp\":1572800000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1572800002}\n"
",\"timestamp\":1572800002}\n"
"pg_data/block-incr-grow={\"bims\":70,\"bis\":16,\"checksum\":\"b0d82b7805e85aa6447b94de7c2aa07077734581\""
",\"size\":393216,\"timestamp\":1572800000}\n"
",\"size\":393216,\"timestamp\":1572800000}\n"
"pg_data/block-incr-shrink={\"bims\":53,\"bis\":16,\"checksum\":\"9c32e340aad633663fdc3a5b1151c46abbf927f0\""
",\"size\":131073,\"timestamp\":1572800000}\n"
",\"size\":131073,\"timestamp\":1572800000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1572800000}\n"
"pg_data/grow-to-block-incr={\"checksum\":\"f24ce50110bed53703bac93d9705d1e101c7cfeb\",\"size\":131071"
",\"timestamp\":1572800000}\n"
",\"timestamp\":1572800000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1572800002}\n"
",\"timestamp\":1572800002}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -3800,20 +3834,20 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"reference\":\"20191103-165320F\""
",\"size\":2,\"timestamp\":1572800000}\n"
",\"size\":2,\"timestamp\":1572800000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1573000002}\n"
",\"timestamp\":1573000002}\n"
"pg_data/block-incr-grow={\"bims\":347,\"bis\":16,\"checksum\":\"7d76d48d64d7ac5411d714a4bb83f37e3e5b8df6\""
",\"size\":2097152,\"timestamp\":1573000000}\n"
",\"size\":2097152,\"timestamp\":1573000000}\n"
"pg_data/block-incr-larger={\"bims\":240,\"bis\":24,\"checksum\":\"7d76d48d64d7ac5411d714a4bb83f37e3e5b8df6\""
",\"size\":2097152,\"timestamp\":1573000000}\n"
",\"size\":2097152,\"timestamp\":1573000000}\n"
"pg_data/block-incr-shrink={\"checksum\":\"a8a85be0079c68c5c5a6ee743c44d853d6be12bb\",\"size\":131071"
",\"timestamp\":1573000000}\n"
",\"timestamp\":1573000000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1573000000}\n"
"pg_data/grow-to-block-incr={\"bims\":51,\"bis\":16,\"checksum\":\"b616afbe703280742f9c6125b738d85c80fde971\""
",\"size\":131073,\"timestamp\":1573000000}\n"
",\"size\":131073,\"timestamp\":1573000000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1573000002}\n"
",\"timestamp\":1573000002}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -3909,14 +3943,14 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"size\":2"
",\"timestamp\":1572800000}\n"
",\"timestamp\":1572800000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1573200002}\n"
",\"timestamp\":1573200002}\n"
"pg_data/block-incr-grow={\"bims\":40,\"bis\":16,\"checksum\":\"67dfd19f3eb3649d6f3f6631e44d0bd36b8d8d19\""
",\"size\":131072,\"timestamp\":1573200000}\n"
",\"size\":131072,\"timestamp\":1573200000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1573200000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1573200002}\n"
",\"timestamp\":1573200002}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"
@ -3990,14 +4024,14 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"17ba0791499db908433b80f37c5fbc89b870084b\",\"reference\":\"20191108-080000F\""
",\"size\":2,\"timestamp\":1572800000}\n"
",\"size\":2,\"timestamp\":1572800000}\n"
"pg_data/backup_label={\"checksum\":\"8e6f41ac87a7514be96260d65bacbffb11be77dc\",\"size\":17"
",\"timestamp\":1573400002}\n"
"pg_data/block-incr-grow={\"bims\":72,\"bis\":16,\"checksum\":\"2e000fa7e85759c7f4c254d4d9c33ef481e459a7\","
"\"size\":262144,\"timestamp\":1573400000}\n"
",\"timestamp\":1573400002}\n"
"pg_data/block-incr-grow={\"bims\":72,\"bis\":16,\"checksum\":\"2e000fa7e85759c7f4c254d4d9c33ef481e459a7\""
",\"size\":262144,\"timestamp\":1573400000}\n"
"pg_data/global/pg_control={\"size\":8192,\"timestamp\":1573400000}\n"
"pg_data/tablespace_map={\"checksum\":\"87fe624d7976c2144e10afcb7a9a49b071f35e9c\",\"size\":19"
",\"timestamp\":1573400002}\n"
",\"timestamp\":1573400002}\n"
"\n"
"[target:path]\n"
"pg_data={}\n"

View File

@ -118,7 +118,7 @@ testRun(void)
TEST_ERROR(
cmdCheck(), DbMismatchError,
"version '9.6' and path '/pgdata' queried from cluster do not match version '9.6' and '" TEST_PATH "/pg' read from"
" '" TEST_PATH "/pg/global/pg_control'\n"
" '" TEST_PATH "/pg/global/pg_control'\n"
"HINT: the pg1-path and pg1-port settings likely reference different clusters.");
// -------------------------------------------------------------------------------------------------------------------------
@ -143,7 +143,7 @@ testRun(void)
TEST_ERROR(
cmdCheck(), FileMissingError,
"unable to load info file '" TEST_PATH "/repo/archive/test1/archive.info' or '" TEST_PATH
"/repo/archive/test1/archive.info.copy':\n"
"/repo/archive/test1/archive.info.copy':\n"
"FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info' for read\n"
"FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/test1/archive.info.copy' for read\n"
"HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
@ -183,8 +183,8 @@ testRun(void)
TEST_ERROR(
cmdCheck(), DbMismatchError,
"version '" PG_VERSION_15_STR "' and path '" TEST_PATH "' queried from cluster do not match version '" PG_VERSION_15_STR
"' and '" TEST_PATH "/pg' read from '" TEST_PATH "/pg/global/pg_control'\n"
"version '" PG_VERSION_15_STR "' and path '" TEST_PATH "' queried from cluster do not match version"
" '" PG_VERSION_15_STR "' and '" TEST_PATH "/pg' read from '" TEST_PATH "/pg/global/pg_control'\n"
"HINT: the pg1-path and pg1-port settings likely reference different clusters.");
// -------------------------------------------------------------------------------------------------------------------------
@ -213,8 +213,8 @@ testRun(void)
"db-version=\"15\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":202209061,\"db-control-version\":1300,\"db-system-id\":" HRN_PG_SYSTEMID_15_Z ","
"\"db-version\":\"15\"}\n");
"1={\"db-catalog-version\":202209061,\"db-control-version\":1300,\"db-system-id\":" HRN_PG_SYSTEMID_15_Z
",\"db-version\":\"15\"}\n");
// Single repo config - error when checking archive mode setting on database
harnessPqScriptSet((HarnessPq [])
@ -257,7 +257,7 @@ testRun(void)
TEST_ERROR_FMT(
cmdCheck(), FileMissingError,
"unable to load info file '" TEST_PATH "/repo2/archive/test1/archive.info' or"
" '" TEST_PATH "/repo2/archive/test1/archive.info.copy':\n"
" '" TEST_PATH "/repo2/archive/test1/archive.info.copy':\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"FileMissingError: " STORAGE_ERROR_READ_MISSING "\n"
"HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
@ -300,8 +300,8 @@ testRun(void)
"db-version=\"15\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":202209061,\"db-control-version\":1300,\"db-system-id\":" HRN_PG_SYSTEMID_15_Z ","
"\"db-version\":\"15\"}\n");
"1={\"db-catalog-version\":202209061,\"db-control-version\":1300,\"db-system-id\":" HRN_PG_SYSTEMID_15_Z
",\"db-version\":\"15\"}\n");
// Error when WAL segment not found
harnessPqScriptSet((HarnessPq [])
@ -354,11 +354,11 @@ testRun(void)
"P00 INFO: check repo1 configuration (primary)\n"
"P00 INFO: check repo2 configuration (primary)\n"
"P00 INFO: check repo1 archive for WAL (primary)\n"
"P00 INFO: WAL segment 000000010000000100000001 successfully archived to '" TEST_PATH "/repo/archive/test1/15-1/"
"0000000100000001/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' on repo1\n"
"P00 INFO: WAL segment 000000010000000100000001 successfully archived to '" TEST_PATH "/repo/archive/test1/15-1"
"/0000000100000001/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' on repo1\n"
"P00 INFO: check repo2 archive for WAL (primary)\n"
"P00 INFO: WAL segment 000000010000000100000001 successfully archived to '" TEST_PATH "/repo2/archive/test1/15-1/"
"0000000100000001/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' on repo2");
"P00 INFO: WAL segment 000000010000000100000001 successfully archived to '" TEST_PATH "/repo2/archive/test1/15-1"
"/0000000100000001/000000010000000100000001-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' on repo2");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("Primary == NULL (for test coverage)");
@ -431,7 +431,7 @@ testRun(void)
TEST_ERROR_FMT(
checkDbConfig(PG_VERSION_11, db.standbyIdx, db.standby, true), DbMismatchError,
"version '" PG_VERSION_11_STR "' and path '%s' queried from cluster do not match version '" PG_VERSION_11_STR "' and"
" '" TEST_PATH "/pg8' read from '" TEST_PATH "/pg8/global/pg_control'\n"
" '" TEST_PATH "/pg8' read from '" TEST_PATH "/pg8/global/pg_control'\n"
"HINT: the pg8-path and pg8-port settings likely reference different clusters.",
strZ(dbPgDataPath(db.standby)));

View File

@ -36,7 +36,7 @@ testRun(void)
TEST_RESULT_VOID(cmdBegin(), "command begin with command parameter");
TEST_RESULT_LOG(
"P00 INFO: archive-get command begin " PROJECT_VERSION ": [param1] --archive-async --archive-timeout=10"
" --exec-id=1-test --pg1-path=/pg1 --stanza=test");
" --exec-id=1-test --pg1-path=/pg1 --stanza=test");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("multiple parameters");
@ -52,7 +52,7 @@ testRun(void)
TEST_RESULT_VOID(cmdBegin(), "command begin with command parameters");
TEST_RESULT_LOG(
"P00 INFO: archive-get command begin " PROJECT_VERSION ": [param1, \"param 2\"] --archive-async --exec-id=1-test"
" --pg1-path=/pg1 --stanza=test");
" --pg1-path=/pg1 --stanza=test");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("reset, negate, list, hash options");

View File

@ -141,8 +141,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}", timeNow - (41 * SEC_PER_DAY), timeNow - (40 * SEC_PER_DAY), timeNow - (30 * SEC_PER_DAY),
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}", timeNow - (41 * SEC_PER_DAY), timeNow - (40 * SEC_PER_DAY), timeNow - (30 * SEC_PER_DAY),
timeNow - (30 * SEC_PER_DAY), timeNow - (25 * SEC_PER_DAY), timeNow - (25 * SEC_PER_DAY), timeNow - (20 * SEC_PER_DAY),
timeNow - (20 * SEC_PER_DAY), timeNow - (10 * SEC_PER_DAY), timeNow - (10 * SEC_PER_DAY), timeNow - (5 * SEC_PER_DAY),
timeNow - (5 * SEC_PER_DAY));
@ -207,9 +207,9 @@ testRun(void)
TEST_RESULT_UINT(expireFullBackup(infoBackup, 0), 0, "retention-full not set");
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.");
" maximum.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("retention-full set - full backup no dependencies expired");
@ -222,7 +222,7 @@ testRun(void)
TEST_RESULT_STRLST_Z(
infoBackupDataLabelList(infoBackup, NULL),
"20181119-152800F\n20181119-152800F_20181119-152152D\n20181119-152800F_20181119-152155I\n20181119-152900F\n"
"20181119-152900F_20181119-152600D\n",
"20181119-152900F_20181119-152600D\n",
"remaining backups correct");
TEST_RESULT_LOG("P00 INFO: repo1: expire full backup 20181119-152138F");
@ -239,8 +239,8 @@ testRun(void)
infoBackupDataLabelList(infoBackup, NULL), "20181119-152900F\n20181119-152900F_20181119-152600D\n",
"remaining backups correct");
TEST_RESULT_LOG(
"P00 INFO: repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I");
"P00 INFO: repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("retention-full set - no backups expired");
@ -301,8 +301,7 @@ testRun(void)
TEST_TITLE("retention-diff set - diff with no dependents expired");
// Create backup.info with two diff - oldest to be expired - no "set:"
const Buffer *backupInfoContent = harnessInfoChecksumZ
(
const Buffer *backupInfoContent = harnessInfoChecksumZ(
"[backup:current]\n"
"20181119-152800F={"
"\"backrest-format\":5,\"backrest-version\":\"2.08dev\","
@ -337,9 +336,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}"
);
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}");
TEST_ASSIGN(infoBackup, infoBackupNewLoad(ioBufferReadNew(backupInfoContent)), "get backup.info");
@ -388,8 +386,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}");
InfoBackup *infoBackup = NULL;
TEST_ASSIGN(
@ -424,8 +422,7 @@ testRun(void)
TEST_TITLE("remove expired backup from disk - no current backups");
// Create backup.info without current backups
const Buffer *backupInfoContent = harnessInfoChecksumZ
(
const Buffer *backupInfoContent = harnessInfoChecksumZ(
"[db]\n"
"db-catalog-version=201409291\n"
"db-control-version=942\n"
@ -434,9 +431,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}"
);
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}");
TEST_ASSIGN(infoBackup, infoBackupNewLoad(ioBufferReadNew(backupInfoContent)), "get backup.info");
@ -484,8 +480,7 @@ testRun(void)
HRN_CFG_LOAD(cfgCmdExpire, argList);
// Create backup.info without current backups
const Buffer *backupInfoContent = harnessInfoChecksumZ
(
const Buffer *backupInfoContent = harnessInfoChecksumZ(
"[db]\n"
"db-catalog-version=201409291\n"
"db-control-version=942\n"
@ -494,9 +489,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}"
);
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}");
InfoBackup *infoBackup = NULL;
TEST_ASSIGN(infoBackup, infoBackupNewLoad(ioBufferReadNew(backupInfoContent)), "get backup.info");
@ -504,9 +498,9 @@ testRun(void)
TEST_RESULT_VOID(removeExpiredArchive(infoBackup, false, 0), "archive retention not set");
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.\n"
" maximum.\n"
"P00 INFO: option 'repo1-retention-archive' is not set - archive logs will not be expired");
TEST_RESULT_VOID(removeExpiredArchive(infoBackup, true, 0), "archive retention not set - retention-full-type=time");
@ -523,9 +517,9 @@ testRun(void)
removeExpiredArchive(infoBackup, false, 0), "archive retention set, retention type default, no current backups");
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.");
" maximum.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("retention-archive set - no archive on disk");
@ -600,10 +594,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
TEST_ASSIGN(
infoBackup, infoBackupLoadFile(storageRepo(), INFO_BACKUP_PATH_FILE_STR, cipherTypeNone, NULL), "get backup.info");
@ -687,8 +681,9 @@ testRun(void)
.comment = "only 9.4-1/0000000100000000/000000010000000000000002 remains in major wal 1");
TEST_STORAGE_LIST(
storageRepo(), STORAGE_REPO_ARCHIVE "/9.4-1/0000000200000000", archiveExpectList(2, 10, "0000000200000000"),
.comment = "nothing removed from 9.4-1/0000000200000000 major wal 2 - each archiveId must have one backup to play"
" through PITR");
.comment =
"nothing removed from 9.4-1/0000000200000000 major wal 2 - each archiveId must have one backup to play"
" through PITR");
TEST_STORAGE_LIST(
storageRepo(), STORAGE_REPO_ARCHIVE "/10-2/0000000100000000", archiveExpectList(3, 10, "0000000100000000"),
.comment = "none removed from 10-2/0000000100000000");
@ -902,26 +897,26 @@ testRun(void)
TEST_ERROR(
cmdExpire(), CommandError, CFGCMD_EXPIRE " command encountered 2 error(s), check the log file for details");
TEST_RESULT_LOG(
"P00 ERROR: [055]: [DRY-RUN] repo1: unable to load info file '" TEST_PATH "/repo/backup/db/backup.info' or '"
TEST_PATH "/repo/backup/db/backup.info.copy':\n"
"P00 ERROR: [055]: [DRY-RUN] repo1: unable to load info file '" TEST_PATH "/repo/backup/db/backup.info' or"
" '" TEST_PATH "/repo/backup/db/backup.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/db/backup.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/db/backup.info.copy' for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 ERROR: [055]: [DRY-RUN] repo2: unable to load info file '" TEST_PATH "/repo2/archive/db/archive.info' or '"
TEST_PATH "/repo2/archive/db/archive.info.copy':\n"
"P00 ERROR: [055]: [DRY-RUN] repo2: unable to load info file '" TEST_PATH "/repo2/archive/db/archive.info' or"
" '" TEST_PATH "/repo2/archive/db/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo2/archive/db/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo2/archive/db/archive.info.copy'"
" for read\n"
" for read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use --no-archive-check to disable archive checks during backup if you have an alternate"
" archiving scheme.");
" archiving scheme.");
// Restore saved archive.info file
HRN_STORAGE_MOVE(storageTest, "repo2/archive/db/" INFO_ARCHIVE_FILE ".save", "repo2/archive/db/" INFO_ARCHIVE_FILE);
@ -932,22 +927,22 @@ testRun(void)
TEST_ERROR(
cmdExpire(), CommandError, CFGCMD_EXPIRE " command encountered 1 error(s), check the log file for details");
TEST_RESULT_LOG(
"P00 ERROR: [055]: [DRY-RUN] repo1: unable to load info file '" TEST_PATH "/repo/backup/db/backup.info' or '"
TEST_PATH "/repo/backup/db/backup.info.copy':\n"
"P00 ERROR: [055]: [DRY-RUN] repo1: unable to load info file '" TEST_PATH "/repo/backup/db/backup.info' or"
" '" TEST_PATH "/repo/backup/db/backup.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/db/backup.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/db/backup.info.copy' for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002,"
" stop = 000000020000000000000002\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D,"
" start = 000000020000000000000009\n"
"P00 INFO: [DRY-RUN] repo2: 9.4-1 remove archive, start = 000000020000000000000004,"
" stop = 000000020000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002"
", stop = 000000020000000000000002\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D"
", start = 000000020000000000000009\n"
"P00 INFO: [DRY-RUN] repo2: 9.4-1 remove archive, start = 000000020000000000000004"
", stop = 000000020000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo2: 10-2 archive retention on backup 20181119-152900F, start = 000000010000000000000003\n"
"P00 INFO: [DRY-RUN] repo2: 10-2 no archive to remove");
@ -1002,8 +997,8 @@ testRun(void)
"backup.info.copy\n", .noRecurse = true, .comment = "repo2: backups not removed");
TEST_RESULT_LOG(
"P00 INFO: [DRY-RUN] repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D, "
"20181119-152800F_20181119-152155I, 20181119-152800F_20181119-152252D\n"
"P00 INFO: [DRY-RUN] repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I, 20181119-152800F_20181119-152252D\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152800F_20181119-152252D\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152800F_20181119-152152D\n"
@ -1011,16 +1006,16 @@ testRun(void)
"P00 INFO: [DRY-RUN] repo1: remove archive path " TEST_PATH "/repo/archive/db/9.4-1\n"
"P00 DETAIL: [DRY-RUN] repo1: 10-2 archive retention on backup 20181119-152900F, start = 000000010000000000000003\n"
"P00 INFO: [DRY-RUN] repo1: 10-2 no archive to remove\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: expire diff backup set 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: [DRY-RUN] repo2: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002,"
" stop = 000000020000000000000002\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D,"
" start = 000000020000000000000009\n"
"P00 INFO: [DRY-RUN] repo2: 9.4-1 remove archive, start = 000000020000000000000004,"
" stop = 000000020000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002"
", stop = 000000020000000000000002\n"
"P00 DETAIL: [DRY-RUN] repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D"
", start = 000000020000000000000009\n"
"P00 INFO: [DRY-RUN] repo2: 9.4-1 remove archive, start = 000000020000000000000004"
", stop = 000000020000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo2: 10-2 archive retention on backup 20181119-152900F, start = 000000010000000000000003\n"
"P00 INFO: [DRY-RUN] repo2: 10-2 no archive to remove");
@ -1047,8 +1042,8 @@ testRun(void)
.comment = "repo2: 9.4-1 only archives not meeting retention for archive-retention-type=diff are removed");
TEST_RESULT_LOG(
"P00 INFO: repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D, "
"20181119-152800F_20181119-152155I, 20181119-152800F_20181119-152252D\n"
"P00 INFO: repo1: expire full backup set 20181119-152800F, 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I, 20181119-152800F_20181119-152252D\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152252D\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152152D\n"
@ -1056,14 +1051,14 @@ testRun(void)
"P00 INFO: repo1: remove archive path " TEST_PATH "/repo/archive/db/9.4-1\n"
"P00 DETAIL: repo1: 10-2 archive retention on backup 20181119-152900F, start = 000000010000000000000003\n"
"P00 INFO: repo1: 10-2 no archive to remove\n"
"P00 INFO: repo2: expire diff backup set 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo2: expire diff backup set 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo2: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo2: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 DETAIL: repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002,"
" stop = 000000020000000000000002\n"
"P00 DETAIL: repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D,"
" start = 000000020000000000000009\n"
"P00 DETAIL: repo2: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002"
", stop = 000000020000000000000002\n"
"P00 DETAIL: repo2: 9.4-1 archive retention on backup 20181119-152800F_20181119-152252D"
", start = 000000020000000000000009\n"
"P00 INFO: repo2: 9.4-1 remove archive, start = 000000020000000000000004, stop = 000000020000000000000007\n"
"P00 DETAIL: repo2: 10-2 archive retention on backup 20181119-152900F, start = 000000010000000000000003\n"
"P00 INFO: repo2: 10-2 no archive to remove");
@ -1123,9 +1118,9 @@ testRun(void)
TEST_RESULT_VOID(cmdExpire(), "label format OK and expired on specified repo");
TEST_RESULT_LOG(
"P00 WARN: [DRY-RUN] repo1: expiring latest backup 20181119-152900F_20181119-152500I - the ability to perform"
" point-in-time-recovery (PITR) may be affected\n"
" point-in-time-recovery (PITR) may be affected\n"
" HINT: non-default settings for 'repo1-retention-archive'/'repo1-retention-archive-type' (even in prior"
" expires) can cause gaps in the WAL.\n"
" expires) can cause gaps in the WAL.\n"
"P00 INFO: [DRY-RUN] repo1: expire adhoc backup 20181119-152900F_20181119-152500I\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152900F_20181119-152500I\n"
"P00 INFO: [DRY-RUN] repo1: 10-2 no archive to remove");
@ -1187,10 +1182,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
TEST_ASSIGN(
infoBackup, infoBackupLoadFile(storageRepo(), INFO_BACKUP_PATH_FILE_STR, cipherTypeNone, NULL), "get backup.info");
@ -1223,8 +1218,8 @@ testRun(void)
TEST_RESULT_VOID(
removeExpiredArchive(infoBackup, false, 0), "backup earlier than selected for retention does not have archive-start");
TEST_RESULT_LOG(
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152138F, start = 000000010000000000000002,"
" stop = 000000010000000000000002\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152138F, start = 000000010000000000000002"
", stop = 000000010000000000000002\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152900F, start = 000000010000000000000004\n"
"P00 INFO: repo1: 9.4-1 remove archive, start = 000000010000000000000001, stop = 000000010000000000000001\n"
"P00 INFO: repo1: 9.4-1 remove archive, start = 000000010000000000000003, stop = 000000010000000000000003");
@ -1269,10 +1264,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
TEST_ASSIGN(
infoBackup, infoBackupLoadFile(storageRepo(), INFO_BACKUP_PATH_FILE_STR, cipherTypeNone, NULL), "get backup.info");
@ -1529,10 +1524,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":1002,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"10\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":1002,\"db-system-id\":6625592122879095702"
",\"db-version\":\"10\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
// Write backup.manifest so infoBackup reconstruct produces same results as backup.info on disk and removeExpiredBackup
// will find backup directories to remove
@ -1648,10 +1643,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":1002,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"10\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":1002,\"db-system-id\":6625592122879095702"
",\"db-version\":\"10\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
// Write backup.manifest so infoBackup reconstruct produces same results as backup.info on disk and removeExpiredBackup
// will find backup directories to remove
@ -1769,10 +1764,10 @@ testRun(void)
"db-version=\"12\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"12\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089"
",\"db-version\":\"12\"}\n");
// Add backup directories with manifest file including a resumable backup dependent on last backup
HRN_STORAGE_PUT_EMPTY(storageRepoWrite(), STORAGE_REPO_BACKUP "/20181119-152138F/" BACKUP_MANIFEST_FILE);
@ -1896,12 +1891,12 @@ testRun(void)
"P00 INFO: repo1: expire adhoc backup set 20181119-152800F_20181119-152152D, 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152138F, start = 000000020000000000000001,"
" stop = 000000020000000000000001\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152138F, start = 000000020000000000000001"
", stop = 000000020000000000000001\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002\n"
"P00 INFO: repo1: 9.4-1 no archive to remove\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002,"
" stop = 000000010000000000000004\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002"
", stop = 000000010000000000000004\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152900F, start = 000000010000000000000006\n"
"P00 INFO: repo1: 12-2 remove archive, start = 000000010000000000000001, stop = 000000010000000000000001\n"
"P00 INFO: repo1: 12-2 remove archive, start = 000000010000000000000005, stop = 000000010000000000000005");
@ -1936,8 +1931,8 @@ testRun(void)
"P00 INFO: repo1: remove expired backup 20181119-152138F\n"
"P00 DETAIL: repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000020000000000000002\n"
"P00 INFO: repo1: 9.4-1 remove archive, start = 000000020000000000000001, stop = 000000020000000000000001\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002,"
" stop = 000000010000000000000004\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002"
", stop = 000000010000000000000004\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152900F, start = 000000010000000000000006\n"
"P00 INFO: repo1: 12-2 no archive to remove");
@ -1964,9 +1959,9 @@ testRun(void)
.comment = "latest backup and resumable removed");
TEST_RESULT_LOG(
"P00 WARN: repo1: expiring latest backup 20181119-152900F - the ability to perform point-in-time-recovery (PITR) may"
" be affected\n"
" be affected\n"
" HINT: non-default settings for 'repo1-retention-archive'/'repo1-retention-archive-type' (even in prior"
" expires) can cause gaps in the WAL.\n"
" expires) can cause gaps in the WAL.\n"
"P00 INFO: repo1: expire adhoc backup 20181119-152900F\n"
"P00 INFO: repo1: remove expired backup 20181119-152900F_20181119-153000I\n"
"P00 INFO: repo1: remove expired backup 20181119-152900F\n"
@ -1994,7 +1989,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 ERROR: [075]: repo1: full backup 20181119-152850F cannot be expired until another full backup has been created on"
" this repo");
" this repo");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("allow adhoc expire on last full backup in prior db-id");
@ -2032,7 +2027,7 @@ testRun(void)
cmdExpire(), CommandError, CFGCMD_EXPIRE " command encountered 1 error(s), check the log file for details");
TEST_RESULT_LOG(
"P00 ERROR: [075]: repo1: full backup 20181119-152850F cannot be expired until another full backup has been created on"
" this repo");
" this repo");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("adhoc dry-run");
@ -2066,10 +2061,10 @@ testRun(void)
"db-version=\"12\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"12\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089"
",\"db-version\":\"12\"}\n");
// Load the backup info. Do not store a manifest file for the adhoc backup for code coverage
TEST_ASSIGN(
@ -2090,9 +2085,9 @@ testRun(void)
removeExpiredBackup(infoBackup, adhocBackupLabel, 0), "code coverage: removeExpireBackup with no manifests");
TEST_RESULT_LOG(
"P00 WARN: [DRY-RUN] repo1: expiring latest backup 20181119-152850F_20181119-152252D - the ability to perform"
" point-in-time-recovery (PITR) may be affected\n"
" point-in-time-recovery (PITR) may be affected\n"
" HINT: non-default settings for 'repo1-retention-archive'/'repo1-retention-archive-type' (even in prior"
" expires) can cause gaps in the WAL.\n"
" expires) can cause gaps in the WAL.\n"
"P00 INFO: [DRY-RUN] repo1: expire adhoc backup 20181119-152850F_20181119-152252D\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152850F_20181119-152252D");
@ -2137,10 +2132,10 @@ testRun(void)
"db-version=\"12\"\n" \
"\n" \
"[db:history]\n" \
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702," \
"\"db-version\":\"9.4\"}\n" \
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089," \
"\"db-version\":\"12\"}\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702" \
",\"db-version\":\"9.4\"}\n" \
"2={\"db-catalog-version\":201909212,\"db-control-version\":1201,\"db-system-id\":6626363367545678089" \
",\"db-version\":\"12\"}\n"
HRN_INFO_PUT(
storageRepoWrite(), INFO_BACKUP_PATH_FILE,
@ -2255,17 +2250,17 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: repo1: expiring latest backup 20181119-152850F_20181119-152252D - the ability to perform"
" point-in-time-recovery (PITR) may be affected\n"
" point-in-time-recovery (PITR) may be affected\n"
" HINT: non-default settings for 'repo1-retention-archive'/'repo1-retention-archive-type' (even in prior"
" expires) can cause gaps in the WAL.\n"
" expires) can cause gaps in the WAL.\n"
"P00 INFO: repo1: expire adhoc backup 20181119-152850F_20181119-152252D\n"
"P00 INFO: repo1: remove expired backup 20181119-152850F_20181119-152252D\n"
"P00 DETAIL: repo1: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002\n"
"P00 INFO: repo1: 12-2 no archive to remove\n"
"P00 WARN: repo2: expiring latest backup 20181119-152850F_20181119-152252D - the ability to perform"
" point-in-time-recovery (PITR) may be affected\n"
" point-in-time-recovery (PITR) may be affected\n"
" HINT: non-default settings for 'repo2-retention-archive'/'repo2-retention-archive-type' (even in prior"
" expires) can cause gaps in the WAL.\n"
" expires) can cause gaps in the WAL.\n"
"P00 INFO: repo2: expire adhoc backup 20181119-152850F_20181119-152252D\n"
"P00 INFO: repo2: remove expired backup 20181119-152850F_20181119-152252D\n"
"P00 DETAIL: repo2: 12-2 archive retention on backup 20181119-152850F, start = 000000010000000000000002\n"
@ -2344,9 +2339,9 @@ testRun(void)
TEST_RESULT_VOID(cmdExpire(), "repo-retention-full not set for time-based");
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=time', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.\n"
" maximum.\n"
"P00 INFO: repo1: time-based archive retention not met - archive logs will not be expired");
// Stop time equals retention time
@ -2404,12 +2399,12 @@ testRun(void)
TEST_RESULT_LOG(
"P00 INFO: [DRY-RUN] repo1: expire time-based backup 20181119-152138F\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152138F\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000010000000000000004,"
" stop = 000000010000000000000004\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152152D,"
" start = 000000010000000000000006, stop = 000000010000000000000006\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152155I,"
" start = 000000010000000000000007, stop = 000000010000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000010000000000000004"
", stop = 000000010000000000000004\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152152D"
", start = 000000010000000000000006, stop = 000000010000000000000006\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152155I"
", start = 000000010000000000000007, stop = 000000010000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152900F, start = 000000010000000000000009\n"
"P00 INFO: [DRY-RUN] repo1: 9.4-1 remove archive, start = 000000010000000000000001, stop = 000000010000000000000003\n"
"P00 INFO: [DRY-RUN] repo1: 9.4-1 remove archive, start = 000000010000000000000005, stop = 000000010000000000000005\n"
@ -2429,19 +2424,19 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: [DRY-RUN] option 'repo1-retention-diff' is not set for 'repo1-retention-archive-type=diff'\n"
" HINT: to retain differential backups indefinitely (without warning), set option 'repo1-retention-diff'"
" to the maximum.\n"
" to the maximum.\n"
"P00 INFO: [DRY-RUN] repo1: expire time-based backup 20181119-152138F\n"
"P00 INFO: [DRY-RUN] repo1: remove expired backup 20181119-152138F\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000010000000000000004,"
" stop = 000000010000000000000004\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152152D,"
" start = 000000010000000000000006, stop = 000000010000000000000006\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152155I,"
" start = 000000010000000000000007, stop = 000000010000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152900F, start = 000000010000000000000009,"
" stop = 000000010000000000000009\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152900F_20181119-152600D,"
" start = 000000010000000000000011\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F, start = 000000010000000000000004"
", stop = 000000010000000000000004\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152152D"
", start = 000000010000000000000006, stop = 000000010000000000000006\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152800F_20181119-152155I"
", start = 000000010000000000000007, stop = 000000010000000000000007\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152900F, start = 000000010000000000000009"
", stop = 000000010000000000000009\n"
"P00 DETAIL: [DRY-RUN] repo1: 9.4-1 archive retention on backup 20181119-152900F_20181119-152600D"
", start = 000000010000000000000011\n"
"P00 INFO: [DRY-RUN] repo1: 9.4-1 remove archive, start = 000000010000000000000001, stop = 000000010000000000000003\n"
"P00 INFO: [DRY-RUN] repo1: 9.4-1 remove archive, start = 000000010000000000000005, stop = 000000010000000000000005\n"
"P00 INFO: [DRY-RUN] repo1: 9.4-1 remove archive, start = 000000010000000000000008, stop = 000000010000000000000008\n"
@ -2475,8 +2470,8 @@ testRun(void)
TEST_RESULT_VOID(cmdExpire(), "expire all but newest");
TEST_RESULT_LOG(
"P00 INFO: repo1: expire time-based backup set 20181119-152800F, 20181119-152800F_20181119-152152D,"
" 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: expire time-based backup set 20181119-152800F, 20181119-152800F_20181119-152152D"
", 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152155I\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F_20181119-152152D\n"
"P00 INFO: repo1: remove expired backup 20181119-152800F\n"

View File

@ -141,10 +141,10 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201306121,\"db-control-version\":937,\"db-system-id\":6569239123849665666,"
"\"db-version\":\"9.3\"}\n"
"2={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.4\"}\n");
"1={\"db-catalog-version\":201306121,\"db-control-version\":937,\"db-system-id\":6569239123849665666"
",\"db-version\":\"9.3\"}\n"
"2={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.4\"}\n");
TEST_RESULT_STR_Z(
infoRender(),
@ -190,11 +190,11 @@ testRun(void)
infoRender(),
"stanza: stanza1\n"
" status: error (other)\n"
" [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/stanza1/archive.info' or '"
TEST_PATH "/repo/archive/stanza1/archive.info.copy':\n"
" [FileMissingError] unable to load info file '" TEST_PATH "/repo/archive/stanza1/archive.info' or"
" '" TEST_PATH "/repo/archive/stanza1/archive.info.copy':\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/stanza1/archive.info' for read\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/archive/stanza1/archive.info.copy'"
" for read\n"
" for read\n"
" HINT: archive.info cannot be opened but is required to push/get WAL segments.\n"
" HINT: is archive_command configured correctly in postgresql.conf?\n"
" HINT: has a stanza-create been performed?\n"
@ -409,12 +409,12 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201306121,\"db-control-version\":937,\"db-system-id\":6569239123849665666,"
"\"db-version\":\"9.3\"}\n"
"3={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.4\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201306121,\"db-control-version\":937,\"db-system-id\":6569239123849665666"
",\"db-version\":\"9.3\"}\n"
"3={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.4\"}\n");
// Execute while a backup lock is held
HRN_FORK_BEGIN()
@ -690,10 +690,10 @@ testRun(void)
"db-version=\"9.5\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.5\"}\n",
.comment = "put backup info to file - stanza1, repo1");
// Manifest with all features
@ -833,8 +833,8 @@ testRun(void)
"db-version=\"9.4\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625633699176220261,"
"\"db-version\":\"9.4\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625633699176220261"
",\"db-version\":\"9.4\"}\n",
.comment = "put backup info to file - stanza2, repo1");
// Write encrypted info files to encrypted repo2
@ -875,8 +875,8 @@ testRun(void)
"cipher-pass=\"somepass\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.5\"}\n",
.cipherType = cipherTypeAes256Cbc, .cipherPass = TEST_CIPHER_PASS,
.comment = "write encrypted backup.info, stanza1, repo2");
@ -988,8 +988,8 @@ testRun(void)
"cipher-pass=\"somepass\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.4\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.4\"}\n",
.cipherType = cipherTypeAes256Cbc, .cipherPass = TEST_CIPHER_PASS,
.comment = "write encrypted backup.info, repo2, stanza3");
@ -2450,8 +2450,8 @@ testRun(void)
"cipher-pass=\"somepass\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.5\"}\n",
.cipherType = cipherTypeAes256Cbc, .cipherPass = TEST_CIPHER_PASS,
.comment = "backup.info without current, repo2, stanza1");
@ -2611,10 +2611,10 @@ testRun(void)
"db-version=\"9.5\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6625592122879095702"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.5\"}\n",
.comment = "put backup info to file - stanza1, repo1");
TEST_ERROR(infoRender(), AssertError, "assertion 'value != NULL' failed");
@ -2678,10 +2678,10 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678888,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678089"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6626363367545678888"
",\"db-version\":\"9.5\"}\n",
.comment = "put backup info to file - stanza3, repo1 stanza upgraded");
// Create stanza3 db1 WAL, repo1
@ -2924,12 +2924,12 @@ testRun(void)
"stanza: stanza1\n"
" status: mixed\n"
" repo1: error (other)\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza1/backup.info' or '"
TEST_PATH "/repo/backup/stanza1/backup.info.copy':\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza1/backup.info' or"
" '" TEST_PATH "/repo/backup/stanza1/backup.info.copy':\n"
" CryptoError: cipher header invalid\n"
" HINT: is or was the repo encrypted?\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/stanza1/backup.info.copy'"
" for read\n"
" for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use option --stanza if encryption settings are different for the stanza than the global"
@ -2943,12 +2943,12 @@ testRun(void)
"stanza: stanza2\n"
" status: mixed\n"
" repo1: error (other)\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza2/backup.info' or '"
TEST_PATH "/repo/backup/stanza2/backup.info.copy':\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza2/backup.info' or"
" '" TEST_PATH "/repo/backup/stanza2/backup.info.copy':\n"
" CryptoError: cipher header invalid\n"
" HINT: is or was the repo encrypted?\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/stanza2/backup.info.copy'"
" for read\n"
" for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use option --stanza if encryption settings are different for the stanza than the global"
@ -2959,12 +2959,12 @@ testRun(void)
"stanza: stanza3\n"
" status: mixed\n"
" repo1: error (other)\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza3/backup.info' or '"
TEST_PATH "/repo/backup/stanza3/backup.info.copy':\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza3/backup.info' or"
" '" TEST_PATH "/repo/backup/stanza3/backup.info.copy':\n"
" CryptoError: cipher header invalid\n"
" HINT: is or was the repo encrypted?\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/stanza3/backup.info.copy'"
" for read\n"
" for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use option --stanza if encryption settings are different for the stanza than the global"
@ -2995,12 +2995,12 @@ testRun(void)
"stanza: stanza3\n"
" status: mixed\n"
" repo1: error (other)\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza3/backup.info' or '"
TEST_PATH "/repo/backup/stanza3/backup.info.copy':\n"
" [CryptoError] unable to load info file '" TEST_PATH "/repo/backup/stanza3/backup.info' or"
" '" TEST_PATH "/repo/backup/stanza3/backup.info.copy':\n"
" CryptoError: cipher header invalid\n"
" HINT: is or was the repo encrypted?\n"
" FileMissingError: unable to open missing file '" TEST_PATH "/repo/backup/stanza3/backup.info.copy'"
" for read\n"
" for read\n"
" HINT: backup.info cannot be opened and is required to perform a backup.\n"
" HINT: has a stanza-create been performed?\n"
" HINT: use option --stanza if encryption settings are different for the stanza than the global"
@ -3072,8 +3072,8 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.4\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.4\"}\n",
.comment = "put backup info to file, repo1");
HRN_INFO_PUT(
@ -3115,8 +3115,8 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.5\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.5\"}\n",
.comment = "put backup info to file, repo2, same system-id, different version");
HRN_INFO_PUT(
@ -3189,8 +3189,8 @@ testRun(void)
"\"option-online\":true}\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665888,"
"\"db-version\":\"9.4\"}\n",
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665888"
",\"db-version\":\"9.4\"}\n",
.comment = "put backup info to file, repo2, different system-id, same version");
HRN_INFO_PUT(
@ -3338,7 +3338,7 @@ testRun(void)
" status: mixed\n"
" repo1: error (other)\n"
" [PathOpenError] unable to list file info for path '" TEST_PATH "/repo2/backup':"
" [13] Permission denied\n"
" [13] Permission denied\n"
" repo2: error (missing stanza path)\n"
" cipher: none\n",
"text - stanza repo structure exists");

View File

@ -146,7 +146,11 @@ testRun(void)
// Install local command handler shim
static const ProtocolServerHandler testLocalHandlerList[] =
{PROTOCOL_SERVER_HANDLER_BACKUP_LIST PROTOCOL_SERVER_HANDLER_RESTORE_LIST};
{
PROTOCOL_SERVER_HANDLER_BACKUP_LIST
PROTOCOL_SERVER_HANDLER_RESTORE_LIST
};
hrnProtocolLocalShimInstall(testLocalHandlerList, LENGTH_OF(testLocalHandlerList));
// Create default storage object for testing
@ -245,7 +249,7 @@ testRun(void)
0, false, false, STRDEF("badpass"), NULL, fileList),
ChecksumError,
"error restoring 'normal': actual checksum 'd1cd8a7d11daa26814b93eb604e1d49ab4b43770' does not match expected checksum"
" 'ffffffffffffffffffffffffffffffffffffffff'");
" 'ffffffffffffffffffffffffffffffffffffffff'");
}
// *****************************************************************************************************************************
@ -287,8 +291,8 @@ testRun(void)
TEST_RESULT_BOOL(cfgOptionBool(cfgOptDelta), false, "--delta set to false");
TEST_RESULT_LOG(
"P00 WARN: --delta or --force specified but unable to find 'PG_VERSION' or 'backup.manifest' in '" TEST_PATH "/pg' to"
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.");
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.");
HRN_CFG_LOAD(cfgCmdRestore, argList);
HRN_STORAGE_PUT_EMPTY(storagePgWrite(), "backup.manifest");
@ -309,8 +313,8 @@ testRun(void)
TEST_RESULT_BOOL(cfgOptionBool(cfgOptForce), false, "--force set to false");
TEST_RESULT_LOG(
"P00 WARN: --delta or --force specified but unable to find 'PG_VERSION' or 'backup.manifest' in '" TEST_PATH "/pg' to"
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.");
" confirm that this is a valid $PGDATA directory. --delta and --force have been disabled and if any files"
" exist in the destination directories the restore will be aborted.");
HRN_CFG_LOAD(cfgCmdRestore, argList);
HRN_STORAGE_PUT_EMPTY(storagePgWrite(), PG_FILE_PGVERSION);
@ -351,7 +355,7 @@ testRun(void)
getEpoch(STRDEF("Tue, 15 Nov 1994 12:45:26")), FormatError,
"automatic backup set selection cannot be performed with provided time 'Tue, 15 Nov 1994 12:45:26'\n"
"HINT: time format must be YYYY-MM-DD HH:MM:SS with optional msec and optional timezone (+/- HH or HHMM or HH:MM) - if"
" timezone is omitted, local time is assumed (for UTC use +00)");
" timezone is omitted, local time is assumed (for UTC use +00)");
setenv("TZ", "UTC", true);
}
@ -408,10 +412,10 @@ testRun(void)
"db-version=\"10\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679,"
"\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089,"
"\"db-version\":\"10\"}\n");
"1={\"db-catalog-version\":201409291,\"db-control-version\":942,\"db-system-id\":6569239123849665679"
",\"db-version\":\"9.4\"}\n"
"2={\"db-catalog-version\":201707211,\"db-control-version\":1002,\"db-system-id\":6626363367545678089"
",\"db-version\":\"10\"}\n");
argList = strLstNew();
hrnCfgArgRawZ(argList, cfgOptStanza, "test1");
@ -536,7 +540,7 @@ testRun(void)
restoreBackupSet(), FormatError,
"automatic backup set selection cannot be performed with provided time 'Tue, 15 Nov 1994 12:45:26'\n"
"HINT: time format must be YYYY-MM-DD HH:MM:SS with optional msec and optional timezone (+/- HH or HHMM or HH:MM) - if"
" timezone is omitted, local time is assumed (for UTC use +00)");
" timezone is omitted, local time is assumed (for UTC use +00)");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("target time, multi repo, no candidates found");
@ -619,7 +623,7 @@ testRun(void)
restoreBackupSet(), BackupSetInvalidError, "unable to find backup set with lsn less than or equal to '0/1C000101'");
TEST_RESULT_LOG(
"P00 WARN: repo1 reached backup from prior version missing required LSN info before finding a match -- backup"
" auto-select has been disabled for this repo\n"
" auto-select has been disabled for this repo\n"
" HINT: you may specify a backup to restore using the --set option.\n"
"P00 WARN: repo2: [BackupSetInvalidError] no backup sets to restore");
@ -643,7 +647,7 @@ testRun(void)
TEST_RESULT_UINT(backupData.repoIdx, 1, "backup set found, repo2");
TEST_RESULT_LOG(
"P00 WARN: repo1 reached backup from prior version missing required LSN info before finding a match -- backup"
" auto-select has been disabled for this repo\n"
" auto-select has been disabled for this repo\n"
" HINT: you may specify a backup to restore using the --set option.");
// No backups to search for qualifying backup set
@ -1188,7 +1192,7 @@ testRun(void)
TEST_ERROR(
restoreCleanBuild(manifest, NULL, NULL), PathNotEmptyError,
"unable to restore to path '" TEST_PATH "/pg' because it contains files\n"
"HINT: try using --delta if this is what you intended.");
"HINT: try using --delta if this is what you intended.");
TEST_RESULT_LOG("P00 DETAIL: check '" TEST_PATH "/pg' exists");
@ -1501,7 +1505,7 @@ testRun(void)
TEST_RESULT_STR_Z(
restoreSelectiveExpression(manifest),
"(^pg_data/base/32768/)|(^pg_tblspc/16387/PG_9.4_201409291/32768/)|(^pg_data/base/65536/)"
"|(^pg_tblspc/16387/PG_9.4_201409291/65536/)",
"|(^pg_tblspc/16387/PG_9.4_201409291/65536/)",
"check expression");
TEST_RESULT_LOG(
@ -1627,7 +1631,7 @@ testRun(void)
"a_setting = 'a'\n"
"b_setting = 'b'\n"
"restore_command = '" TEST_PROJECT_EXE " --lock-path=" HRN_PATH "/lock --log-path=" HRN_PATH " --pg1-path=/pg"
" --repo1-path=/repo --stanza=test1 archive-get %f \"%p\"'\n",
" --repo1-path=/repo --stanza=test1 archive-get %f \"%p\"'\n",
"check recovery options");
// -------------------------------------------------------------------------------------------------------------------------
@ -1641,7 +1645,7 @@ testRun(void)
restoreRecoveryConf(PG_VERSION_94, restoreLabel),
RECOVERY_SETTING_HEADER
"restore_command = '/usr/local/bin/pg_wrapper.sh --lock-path=" HRN_PATH "/lock --log-path=" HRN_PATH " --pg1-path=/pg"
" --repo1-path=/repo --stanza=test1 archive-get %f \"%p\"'\n",
" --repo1-path=/repo --stanza=test1 archive-get %f \"%p\"'\n",
"restore_command invokes /usr/local/bin/pg_wrapper.sh per --cmd option");
// -------------------------------------------------------------------------------------------------------------------------
@ -1826,7 +1830,7 @@ testRun(void)
TEST_ERROR(
restoreRecoveryConf(PG_VERSION_94, restoreLabel), OptionInvalidError,
"option 'archive-mode' is not supported on PostgreSQL < 12\n"
"HINT: 'archive_mode' should be manually set to 'off' in postgresql.conf.");
"HINT: 'archive_mode' should be manually set to 'off' in postgresql.conf.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("recovery type = standby with recovery GUCs and archive-mode=off");
@ -2143,8 +2147,8 @@ testRun(void)
"P00 DETAIL: check '" TEST_PATH "/pg' exists\n"
"P00 DETAIL: create path '" TEST_PATH "/pg/global'\n"
"P00 DETAIL: create path '" TEST_PATH "/pg/pg_tblspc'\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION (4B, 100.00%%) checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1"
"\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION (4B, 100.00%%) checksum"
" 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P00 INFO: write " TEST_PATH "/pg/recovery.conf\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg'\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg/pg_tblspc'\n"
@ -2287,16 +2291,16 @@ testRun(void)
"P00 DETAIL: create symlink '" TEST_PATH "/pg/pg_tblspc/1' to '" TEST_PATH "/ts/1'\n"
"P00 DETAIL: create path '" TEST_PATH "/pg/pg_tblspc/1/16384'\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.auto.conf (15B, 44.12%)"
" checksum 37a0c84d42c3ec3d08c311cec2cef2a7ab55a7c3\n"
" checksum 37a0c84d42c3ec3d08c311cec2cef2a7ab55a7c3\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.conf (10B, 73.53%) checksum"
" 1a49a3c2240449fee1422e4afcf44d5b96378511\n"
" 1a49a3c2240449fee1422e4afcf44d5b96378511\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION - exists and matches size 4 and modification time 1482182860"
" (4B, 85.29%) checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" (4B, 85.29%) checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/size-mismatch (1B, 88.24%) checksum"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/tablespace_map (0B, 88.24%)\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/pg_tblspc/1/16384/PG_VERSION (4B, 100.00%)"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P00 WARN: recovery type is preserve but recovery file does not exist at '" TEST_PATH "/pg/recovery.conf'\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg'\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg/pg_tblspc'\n"
@ -2360,15 +2364,15 @@ testRun(void)
"P00 INFO: remove invalid files/links/paths from '" TEST_PATH "/pg'\n"
"P00 INFO: remove invalid files/links/paths from '" TEST_PATH "/ts/1/PG_9.4_201409291'\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.auto.conf (15B, [PCT]) checksum"
" 37a0c84d42c3ec3d08c311cec2cef2a7ab55a7c3\n"
" 37a0c84d42c3ec3d08c311cec2cef2a7ab55a7c3\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.conf (10B, [PCT]) checksum"
" 1a49a3c2240449fee1422e4afcf44d5b96378511\n"
" 1a49a3c2240449fee1422e4afcf44d5b96378511\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION (4B, [PCT]) checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/size-mismatch (1B, [PCT]) checksum"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/tablespace_map (0B, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/pg_tblspc/1/16384/PG_VERSION (4B, [PCT])"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P00 WARN: recovery type is preserve but recovery file does not exist at '" TEST_PATH "/pg/recovery.conf'\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg'\n"
"P00 DETAIL: sync path '" TEST_PATH "/pg/pg_tblspc'\n"
@ -2768,44 +2772,44 @@ testRun(void)
"P00 DETAIL: create symlink '" TEST_PATH "/pg/pg_hba.conf' to '../config/pg_hba.conf'\n"
"P00 DETAIL: create symlink '" TEST_PATH "/pg/postgresql.conf' to '../config/postgresql.conf'\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/bi-unused-ref (48KB, [PCT]) checksum"
" febd680181d4cd315dce942348862c25fbd731f3\n"
" febd680181d4cd315dce942348862c25fbd731f3\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/32768/32769 (32KB, [PCT]) checksum"
" a40f0986acb1531ce0cc75a23dcf8aa406ae9081\n"
" a40f0986acb1531ce0cc75a23dcf8aa406ae9081\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/bi-no-ref (24KB, [PCT]) checksum"
" 953cdcc904c5d4135d96fc0833f121bf3033c74c\n"
" 953cdcc904c5d4135d96fc0833f121bf3033c74c\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/16384/16385 (16KB, [PCT]) checksum"
" d74e5f7ebe52a3ed468ba08c5b6aefaccd1ca88f\n"
" d74e5f7ebe52a3ed468ba08c5b6aefaccd1ca88f\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/pg_control.pgbackrest.tmp (8KB, [PCT])"
" checksum 5e2b96c19c4f5c63a5afa2de504d29fe64a4c908\n"
" checksum 5e2b96c19c4f5c63a5afa2de504d29fe64a4c908\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/2 (8KB, [PCT]) checksum 4d7b2a36c5387decf799352a3751883b7ceb96aa\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.conf (15B, [PCT]) checksum"
" 98b8abb2e681e2a5a7d8ab082c0a79727887558d\n"
" 98b8abb2e681e2a5a7d8ab082c0a79727887558d\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/pg_hba.conf (11B, [PCT]) checksum"
" 401215e092779574988a854d8c7caed7f91dba4b\n"
" 401215e092779574988a854d8c7caed7f91dba4b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/32768/PG_VERSION (4B, [PCT])"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/16384/PG_VERSION (4B, [PCT])"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/10 (bundle 20161219-212741F/1/1, 8KB, [PCT])"
" checksum 28757c756c03c37aca13692cb719c18d1510c190\n"
" checksum 28757c756c03c37aca13692cb719c18d1510c190\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION (bundle 1/0, 4B, [PCT]) checksum"
" 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
" 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/PG_VERSION (bundle 1/4, 4B, [PCT]) checksum"
" 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
" 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/yyy - exists and matches backup (bundle 1/8, 3B, [PCT]) checksum"
" 186154712b2d5f6791d85b9a0987b98fa231779c\n"
" 186154712b2d5f6791d85b9a0987b98fa231779c\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/xxxxx (bundle 1/11, 5B, [PCT]) checksum"
" 9addbf544119efa4a64223b649750a510f0d463f\n"
" 9addbf544119efa4a64223b649750a510f0d463f\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/zz (bundle 1/17, 2B, [PCT]) checksum"
" d7dacae2c968388960bf8970080a980ed5c5dcb7\n"
" d7dacae2c968388960bf8970080a980ed5c5dcb7\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/20 (bundle 20161219-212741F_20161219-212800D/2/1, 1B, [PCT]) checksum"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/21 (bundle 20161219-212741F_20161219-212800D/2/2, 1B, [PCT]) checksum"
" e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
" e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/30 (bundle 20161219-212741F_20161219-212900I/2/1, 1B, [PCT]) checksum"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/31 (bundle 20161219-212741F_20161219-212900I/2/2, 1B, [PCT]) checksum"
" e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
" e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/999 (0B, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/888 (0B, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/zero-length (bundle 1/16, 0B, [PCT])\n"
@ -2962,43 +2966,43 @@ testRun(void)
"P00 DETAIL: create path '" TEST_PATH "/pg/pg_xact'\n"
"P00 DETAIL: create symlink '" TEST_PATH "/pg/pg_hba.conf' to '../config/pg_hba.conf'\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/bi-unused-ref - exists and matches backup (48KB, [PCT]) checksum"
" febd680181d4cd315dce942348862c25fbd731f3\n"
" febd680181d4cd315dce942348862c25fbd731f3\n"
"P01 DETAIL: restore zeroed file " TEST_PATH "/pg/base/32768/32769 (32KB, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/bi-no-ref - exists and matches backup (24KB, [PCT]) checksum"
" 953cdcc904c5d4135d96fc0833f121bf3033c74c\n"
" 953cdcc904c5d4135d96fc0833f121bf3033c74c\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/16384/16385 - exists and matches backup (16KB, [PCT])"
" checksum d74e5f7ebe52a3ed468ba08c5b6aefaccd1ca88f\n"
" checksum d74e5f7ebe52a3ed468ba08c5b6aefaccd1ca88f\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/pg_control.pgbackrest.tmp (8KB, [PCT])"
" checksum 5e2b96c19c4f5c63a5afa2de504d29fe64a4c908\n"
" checksum 5e2b96c19c4f5c63a5afa2de504d29fe64a4c908\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/2 (8KB, [PCT]) checksum 4d7b2a36c5387decf799352a3751883b7ceb96aa\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/postgresql.conf - exists and matches backup (15B, [PCT])"
" checksum 98b8abb2e681e2a5a7d8ab082c0a79727887558d\n"
" checksum 98b8abb2e681e2a5a7d8ab082c0a79727887558d\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/pg_hba.conf - exists and matches backup (11B, [PCT])"
" checksum 401215e092779574988a854d8c7caed7f91dba4b\n"
" checksum 401215e092779574988a854d8c7caed7f91dba4b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/32768/PG_VERSION - exists and matches backup (4B, [PCT])"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/16384/PG_VERSION - exists and matches backup (4B, [PCT])"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
" checksum 8dbabb96e032b8d9f1993c0e4b9141e71ade01a1\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/10 - exists and matches backup (bundle 20161219-212741F/1/1, 8KB,"
" [PCT]) checksum 28757c756c03c37aca13692cb719c18d1510c190\n"
" [PCT]) checksum 28757c756c03c37aca13692cb719c18d1510c190\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/PG_VERSION - exists and matches backup (bundle 1/0, 4B, [PCT])"
" checksum 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
" checksum 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/PG_VERSION - exists and matches backup (bundle 1/4, 4B, [PCT])"
" checksum 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
" checksum 2fafe15172578a19dbc196723bca6a4a8ad70da8\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/yyy - exists and matches backup (bundle 1/8, 3B, [PCT]) checksum"
" 186154712b2d5f6791d85b9a0987b98fa231779c\n"
" 186154712b2d5f6791d85b9a0987b98fa231779c\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/xxxxx - exists and matches backup (bundle 1/11, 5B, [PCT]) checksum"
" 9addbf544119efa4a64223b649750a510f0d463f\n"
" 9addbf544119efa4a64223b649750a510f0d463f\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/zz - exists and matches backup (bundle 1/17, 2B, [PCT]) checksum"
" d7dacae2c968388960bf8970080a980ed5c5dcb7\n"
" d7dacae2c968388960bf8970080a980ed5c5dcb7\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/20 - exists and matches backup (bundle"
" 20161219-212741F_20161219-212800D/2/1, 1B, [PCT]) checksum c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" 20161219-212741F_20161219-212800D/2/1, 1B, [PCT]) checksum c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/21 - exists and matches backup (bundle"
" 20161219-212741F_20161219-212800D/2/2, 1B, [PCT]) checksum e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
" 20161219-212741F_20161219-212800D/2/2, 1B, [PCT]) checksum e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/30 - exists and matches backup (bundle"
" 20161219-212741F_20161219-212900I/2/1, 1B, [PCT]) checksum c032adc1ff629c9b66f22749ad667e6beadf144b\n"
" 20161219-212741F_20161219-212900I/2/1, 1B, [PCT]) checksum c032adc1ff629c9b66f22749ad667e6beadf144b\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/base/1/31 - exists and matches backup (bundle"
" 20161219-212741F_20161219-212900I/2/2, 1B, [PCT]) checksum e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
" 20161219-212741F_20161219-212900I/2/2, 1B, [PCT]) checksum e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/999 - exists and is zero size (0B, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/global/888 - exists and is zero size (0B, [PCT])\n"
"P01 DETAIL: restore file " TEST_PATH "/pg/zero-length - exists and is zero size (bundle 1/16, 0B, [PCT])\n"
@ -3035,7 +3039,7 @@ testRun(void)
TEST_ERROR(
cmdRestore(), FileMissingError,
"raised from local-1 shim protocol: unable to open missing file"
" '" TEST_PATH "/repo/backup/test1/20161219-212741F_20161219-212918I/pg_data/global/pg_control' for read\n"
" '" TEST_PATH "/repo/backup/test1/20161219-212741F_20161219-212918I/pg_data/global/pg_control' for read\n"
"[FileMissingError] on retry after 0ms");
// Free local processes that were not freed because of the error

View File

@ -96,8 +96,8 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n",
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n",
.comment = "put backup info to test file");
TEST_RESULT_BOOL(
@ -400,8 +400,8 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n",
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n",
.comment = "put backup info to file - bad db-id");
TEST_ERROR(
@ -431,8 +431,8 @@ testRun(void)
"db-version=\"9.5\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.5\"}\n");
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.5\"}\n");
HRN_INFO_PUT(
storageRepoIdxWrite(0), INFO_ARCHIVE_PATH_FILE,
@ -474,8 +474,8 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999,"
"\"db-version\":\"9.6\"}\n");
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999"
",\"db-version\":\"9.6\"}\n");
TEST_ERROR(
cmdStanzaCreate(), FileInvalidError,
@ -594,9 +594,10 @@ testRun(void)
TEST_ERROR(
pgValidate(), DbMismatchError,
"version '" PG_VERSION_15_STR "' and path '" TEST_PATH "/pg2' queried from cluster do not match version '"
PG_VERSION_15_STR "' and '" TEST_PATH "/pg' read from '" TEST_PATH "/pg/" PG_PATH_GLOBAL "/" PG_FILE_PGCONTROL
"'\nHINT: the pg1-path and pg1-port settings likely reference different clusters.");
"version '" PG_VERSION_15_STR "' and path '" TEST_PATH "/pg2' queried from cluster do not match version"
" '" PG_VERSION_15_STR "' and '" TEST_PATH "/pg' read from '" TEST_PATH "/pg/" PG_PATH_GLOBAL
"/" PG_FILE_PGCONTROL "'\n"
"HINT: the pg1-path and pg1-port settings likely reference different clusters.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("primary at pg2");
@ -670,8 +671,8 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n");
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n");
// backup info up to date but archive info db-id mismatch
HRN_INFO_PUT(
@ -706,10 +707,10 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6569239123849665999,"
"\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n");
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6569239123849665999"
",\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n");
HRN_INFO_PUT(
storageRepoIdxWrite(0), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
@ -758,8 +759,8 @@ testRun(void)
"db-version=\"9.5\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.5\"}\n");
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.5\"}\n");
TEST_RESULT_VOID(cmdStanzaUpgrade(), "stanza upgrade - backup.info file upgraded - version");
TEST_RESULT_LOG("P00 INFO: stanza-upgrade for stanza 'db' on repo1");
@ -774,10 +775,10 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n",
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n",
.comment = "put backup info to test file");
TEST_RESULT_BOOL(
@ -803,10 +804,10 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6569239123849665999,"
"\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n");
"1={\"db-catalog-version\":201510051,\"db-control-version\":942,\"db-system-id\":6569239123849665999"
",\"db-version\":\"9.5\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n");
HRN_INFO_PUT(
storageRepoIdxWrite(0), INFO_ARCHIVE_PATH_FILE,
"[db]\n"
@ -855,8 +856,8 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999,"
"\"db-version\":\"9.6\"}\n");
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999"
",\"db-version\":\"9.6\"}\n");
TEST_RESULT_VOID(cmdStanzaUpgrade(), "stanza upgrade - backup.info file upgraded - system-id");
TEST_RESULT_LOG("P00 INFO: stanza-upgrade for stanza 'db' on repo1");
@ -871,10 +872,10 @@ testRun(void)
"db-version=\"9.6\"\n"
"\n"
"[db:history]\n"
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999,"
"\"db-version\":\"9.6\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z ","
"\"db-version\":\"9.6\"}\n",
"1={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":6569239123849665999"
",\"db-version\":\"9.6\"}\n"
"2={\"db-catalog-version\":201608131,\"db-control-version\":960,\"db-system-id\":" HRN_PG_SYSTEMID_96_Z
",\"db-version\":\"9.6\"}\n",
.comment = "put backup info to test file");
TEST_RESULT_BOOL(
bufEq(

View File

@ -255,9 +255,9 @@ testRun(void)
TEST_RESULT_UINT(backupResult.status, backupInvalid, "manifest unusable - backup invalid");
TEST_RESULT_LOG(
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/backup/db/20181119-152138F/backup.manifest.copy'"
" for read\n"
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 1, version 9.3, system-id "
HRN_PG_SYSTEMID_94_Z ") is not in the backup.info history, skipping");
" for read\n"
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 1, version 9.3, system-id"
" " HRN_PG_SYSTEMID_94_Z ") is not in the backup.info history, skipping");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("rerun test with db-system-id invalid and no main");
@ -291,8 +291,8 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/backup/db/20181119-152138F/backup.manifest' for read\n"
"P00 DETAIL: 20181119-152138F/backup.manifest is missing or unusable, using copy\n"
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 1, version 9.4, system-id 0) is not in the "
"backup.info history, skipping");
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 1, version 9.4, system-id 0) is not in the"
" backup.info history, skipping");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("rerun copy test with db-id invalid");
@ -325,8 +325,8 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/backup/db/20181119-152138F/backup.manifest' for read\n"
"P00 DETAIL: 20181119-152138F/backup.manifest is missing or unusable, using copy\n"
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 0, version 9.4, system-id "
HRN_PG_SYSTEMID_94_Z ") is not in the backup.info history, skipping");
"P00 INFO: '20181119-152138F' may not be recoverable - PG data (id 0, version 9.4, system-id"
" " HRN_PG_SYSTEMID_94_Z ") is not in the backup.info history, skipping");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("missing main manifest, errored copy");
@ -341,8 +341,8 @@ testRun(void)
TEST_RESULT_UINT(backupResult.status, backupInvalid, "manifest unusable - backup invalid");
TEST_RESULT_LOG(
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/backup/db/20181119-152138F/backup.manifest' for read\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS' "
"<REPO:BACKUP>/20181119-152138F/backup.manifest.copy");
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:BACKUP>/20181119-152138F/backup.manifest.copy");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("current backup true");
@ -355,10 +355,10 @@ testRun(void)
TEST_RESULT_PTR(manifest, NULL, "manifest not set");
TEST_RESULT_UINT(backupResult.status, backupInvalid, "manifest unusable - backup invalid");
TEST_RESULT_LOG(
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS' "
"<REPO:BACKUP>/20181119-152138F/backup.manifest\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS' "
"<REPO:BACKUP>/20181119-152138F/backup.manifest.copy");
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:BACKUP>/20181119-152138F/backup.manifest\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:BACKUP>/20181119-152138F/backup.manifest.copy");
// Write a valid manifest with a manifest copy that is invalid
HRN_INFO_PUT(
@ -476,14 +476,18 @@ testRun(void)
// Create archive.info - history mismatch
InfoArchive *archiveInfo = NULL;
TEST_ASSIGN(
archiveInfo, infoArchiveNewLoad(ioBufferReadNew(harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))), "archive.info missing history");
archiveInfo,
infoArchiveNewLoad(
ioBufferReadNew(
harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))),
"archive.info missing history");
TEST_ERROR(
verifyPgHistory(infoArchivePg(archiveInfo), infoBackupPg(backupInfo)), FormatError,
@ -493,15 +497,19 @@ testRun(void)
TEST_TITLE("history mismatch - system id");
TEST_ASSIGN(
archiveInfo, infoArchiveNewLoad(ioBufferReadNew(harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"1={\"db-id\":6625592122879095777,\"db-version\":\"9.4\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))), "archive.info history system id mismatch");
archiveInfo,
infoArchiveNewLoad(
ioBufferReadNew(
harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"1={\"db-id\":6625592122879095777,\"db-version\":\"9.4\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))),
"archive.info history system id mismatch");
TEST_ERROR(
verifyPgHistory(infoArchivePg(archiveInfo), infoBackupPg(backupInfo)), FormatError,
@ -511,15 +519,19 @@ testRun(void)
TEST_TITLE("history mismatch - version");
TEST_ASSIGN(
archiveInfo, infoArchiveNewLoad(ioBufferReadNew(harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"1={\"db-id\":" HRN_PG_SYSTEMID_94_Z ",\"db-version\":\"9.5\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))), "archive.info history version mismatch");
archiveInfo,
infoArchiveNewLoad(
ioBufferReadNew(
harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"1={\"db-id\":" HRN_PG_SYSTEMID_94_Z ",\"db-version\":\"9.5\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))),
"archive.info history version mismatch");
TEST_ERROR(
verifyPgHistory(infoArchivePg(archiveInfo), infoBackupPg(backupInfo)), FormatError,
@ -529,15 +541,19 @@ testRun(void)
TEST_TITLE("history mismatch - id");
TEST_ASSIGN(
archiveInfo, infoArchiveNewLoad(ioBufferReadNew(harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"3={\"db-id\":" HRN_PG_SYSTEMID_94_Z ",\"db-version\":\"9.4\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))), "archive.info history id mismatch");
archiveInfo,
infoArchiveNewLoad(
ioBufferReadNew(
harnessInfoChecksumZ(
"[db]\n"
"db-id=2\n"
"db-system-id=" HRN_PG_SYSTEMID_11_Z "\n"
"db-version=\"11\"\n"
"\n"
"[db:history]\n"
"3={\"db-id\":" HRN_PG_SYSTEMID_94_Z ",\"db-version\":\"9.4\"}\n"
"2={\"db-id\":" HRN_PG_SYSTEMID_11_Z ",\"db-version\":\"11\"}"))),
"archive.info history id mismatch");
TEST_ERROR(
verifyPgHistory(infoArchivePg(archiveInfo), infoBackupPg(backupInfo)), FormatError,
@ -683,8 +699,8 @@ testRun(void)
// Check output of verify command stored in file
TEST_STORAGE_GET(storageTest, strZ(stdoutFile), "", .remove = true);
TEST_RESULT_LOG(
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS' "
"<REPO:BACKUP>/backup.info\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:BACKUP>/backup.info\n"
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/backup/db/backup.info.copy' for read\n"
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/archive/db/archive.info' for read\n"
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/archive/db/archive.info.copy' for read\n"
@ -727,10 +743,10 @@ testRun(void)
TEST_STORAGE_GET(storageTest, strZ(stdoutFile), "", .remove = true);
TEST_RESULT_LOG(
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:BACKUP>/backup.info\n"
" <REPO:BACKUP>/backup.info\n"
"P00 DETAIL: unable to open missing file '" TEST_PATH "/repo/archive/db/archive.info' for read\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:ARCHIVE>/archive.info.copy\n"
" <REPO:ARCHIVE>/archive.info.copy\n"
"P00 INFO: stanza: db\n"
" status: error\n"
" No usable archive.info file");
@ -762,7 +778,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DETAIL: backup.info.copy does not match backup.info\n"
"P00 DETAIL: invalid checksum, actual 'e056f784a995841fd4e2802b809299b8db6803a2' but expected 'BOGUS'"
" <REPO:ARCHIVE>/archive.info\n"
" <REPO:ARCHIVE>/archive.info\n"
"P00 INFO: stanza: db\n"
" status: error\n"
" backup info file and archive info file do not match\n"
@ -988,10 +1004,10 @@ testRun(void)
"P00 DETAIL: no backups exist in the repo\n"
"P00 DETAIL: archive path '9.4-1' is empty\n"
"P00 DETAIL: path '11-2/0000000100000000' does not contain any valid WAL to be processed\n"
"P01 INFO: invalid checksum "
"'11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size "
"'11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'\n"
"P01 INFO: invalid checksum"
" '11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size"
" '11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'\n"
"P00 DETAIL: archiveId: 11-2, wal start: 000000020000000700000FFD, wal stop: 000000020000000800000000");
harnessLogLevelReset();
@ -1013,10 +1029,10 @@ testRun(void)
" backup: none found",
"verbose, with failures");
TEST_RESULT_LOG(
"P01 INFO: invalid checksum "
"'11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size "
"'11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'");
"P01 INFO: invalid checksum"
" '11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size"
" '11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("text output, verbose, with verify failures");
@ -1034,10 +1050,10 @@ testRun(void)
" missing: 0, checksum invalid: 1, size invalid: 1, other: 0\n"
" backup: none found", "verify text output, verbose, with verify failures");
TEST_RESULT_LOG(
"P01 INFO: invalid checksum "
"'11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size "
"'11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'");
"P01 INFO: invalid checksum"
" '11-2/0000000200000007/000000020000000700000FFD-a6e1a64f0813352bc2e97f116a1800377e17d2e4.gz'\n"
"P01 INFO: invalid size"
" '11-2/0000000200000007/000000020000000700000FFF-ee161f898c9012dd0c28b3fd1e7140b9cf411306'");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("valid info files, start next timeline");
@ -1454,7 +1470,7 @@ testRun(void)
" backup: 20181119-152900F, status: invalid, total files checked: 1, total valid files: 0\n"
" checksum invalid: 1\n"
" backup: 20181119-152900F_20181119-152909D, status: invalid, total files checked: 1,"
" total valid files: 0\n"
" total valid files: 0\n"
" checksum invalid: 1");
// -------------------------------------------------------------------------------------------------------------------------
@ -1501,7 +1517,7 @@ testRun(void)
"\n"
"[target:file]\n"
"pg_data/PG_VERSION={\"checksum\":\"184473f470864e067ee3a22e64b47b0a1c356f29\",\"reference\":\"20181119-152900F\""
",\"size\":4,\"timestamp\":1565282114}\n"
",\"size\":4,\"timestamp\":1565282114}\n"
TEST_MANIFEST_FILE_DEFAULT
TEST_MANIFEST_LINK
TEST_MANIFEST_LINK_DEFAULT
@ -1582,7 +1598,7 @@ testRun(void)
" backup: 20181119-152900F, status: invalid, total files checked: 3, total valid files: 0\n"
" missing: 1, checksum invalid: 1, size invalid: 1\n"
" backup: 20181119-152900F_20181119-152909D, status: invalid, total files checked: 1,"
" total valid files: 0\n"
" total valid files: 0\n"
" checksum invalid: 1");
}
@ -1724,7 +1740,7 @@ testRun(void)
" backup: 20181119-152900F, status: invalid, total files checked: 1, total valid files: 0\n"
" missing: 0, checksum invalid: 1, size invalid: 0, other: 0\n"
" backup: 20181119-152900F_20181119-152909D, status: invalid, total files checked: 1,"
" total valid files: 0\n"
" total valid files: 0\n"
" missing: 0, checksum invalid: 1, size invalid: 0, other: 0");
}

View File

@ -189,7 +189,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: (paramInt: 99, paramBool: false,"
" paramBoolP: null, paramBoolPP: null, paramVoidP: null, paramDouble: 1.17, paramMode: 0755)\n"
" paramBoolP: null, paramBoolPP: null, paramVoidP: null, paramDouble: 1.17, paramMode: 0755)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: (void)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: => void\n"
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: => 1");
@ -204,7 +204,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: (paramInt: 99, paramBool: false,"
" paramBoolP: *true, paramBoolPP: **true, paramVoidP: null, paramDouble: 1.17, paramMode: 0755)\n"
" paramBoolP: *true, paramBoolPP: **true, paramVoidP: null, paramDouble: 1.17, paramMode: 0755)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: (void)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: => void\n"
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: => 1");
@ -216,7 +216,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: (paramInt: 99, paramBool: false,"
" paramBoolP: null, paramBoolPP: *null, paramVoidP: *void, paramDouble: 1.17, paramMode: 0755)\n"
" paramBoolP: null, paramBoolPP: *null, paramVoidP: *void, paramDouble: 1.17, paramMode: 0755)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: (void)\n"
"P00 TRACE: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction2: => void\n"
"P00 DEBUG: " TEST_PGB_PATH "/test/src/module/common/debugOnTest::testFunction1: => 1");

View File

@ -126,8 +126,7 @@ testRun(void)
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("store and retrieve values");
const Buffer *iniBuf = BUFSTRDEF
(
const Buffer *iniBuf = BUFSTRDEF(
"# Comment\n"
"[global] \n"
"compress=y \n"
@ -135,8 +134,7 @@ testRun(void)
"repeat=2\n"
"\n"
" [db]\n"
"pg1-path = /path/to/pg"
);
"pg1-path = /path/to/pg");
Ini *ini = NULL;
TEST_ASSIGN(ini, iniNewP(ioBufferReadNew(iniBuf), .store = true), "new ini");

View File

@ -709,7 +709,7 @@ testRun(void)
hrnServerScriptExpectZ(
http,
"GET /path/file%201.txt HTTP/1.1\r\n" TEST_USER_AGENT "content-length:30\r\n\r\n"
"012345678901234567890123456789");
"012345678901234567890123456789");
hrnServerScriptReplyZ(
http,
"HTTP/1.1 200 OK\r\nConnection:ClosE\r\ncontent-type:application/xml\r\n\r\n01234567890123456789012345678901");

View File

@ -545,7 +545,7 @@ testRun(void)
// Add test hosts
HRN_SYSTEM(
"echo \"127.0.0.1 test.pgbackrest.org host.test2.pgbackrest.org test3.pgbackrest.org\" | sudo tee -a /etc/hosts >"
" /dev/null");
" /dev/null");
HRN_FORK_BEGIN()
{
@ -835,7 +835,7 @@ testRun(void)
buffer,
zNewFmt(
"{type: tls, driver: {ioClient: {type: socket, driver: {host: %s, port: %u, timeoutConnect: 5000"
", timeoutSession: 5000}}, timeoutConnect: 0, timeoutSession: 0, verifyPeer: %s}}",
", timeoutSession: 5000}}, timeoutConnect: 0, timeoutSession: 0, verifyPeer: %s}}",
strZ(hrnServerHost()), hrnServerPort(0), cvtBoolToConstZ(TEST_IN_CONTAINER)),
"check log");
@ -844,7 +844,7 @@ testRun(void)
buffer,
zNewFmt(
"{type: tls, role: client, driver: {ioSession: {type: socket, role: client, driver: {host: %s, port: %u"
", fd: %d, timeout: 5000}}, timeout: 0, shutdownOnClose: true}}",
", fd: %d, timeout: 5000}}, timeout: 0, shutdownOnClose: true}}",
strZ(hrnServerHost()), hrnServerPort(0),
((SocketSession *)((TlsSession *)session->pub.driver)->ioSession->pub.driver)->fd),
"check log");

View File

@ -259,9 +259,9 @@ testRun(void)
TEST_ERROR(
memFree(NULL), AssertError,
"assertion 'alloc != NULL && "
"(uintptr_t)alloc != (uintptr_t)-sizeof(MemContextAlloc) && "
"alloc->allocIdx < memContextAllocMany(memContextStack[memContextCurrentStackIdx].memContext)->listSize && "
"memContextAllocMany(memContextStack[memContextCurrentStackIdx].memContext)->list[alloc->allocIdx]' failed");
"(uintptr_t)alloc != (uintptr_t)-sizeof(MemContextAlloc) &&"
" alloc->allocIdx < memContextAllocMany(memContextStack[memContextCurrentStackIdx].memContext)->listSize &&"
" memContextAllocMany(memContextStack[memContextCurrentStackIdx].memContext)->list[alloc->allocIdx]' failed");
memFree(buffer);
memContextSwitch(memContextTop());

View File

@ -99,9 +99,9 @@ testRun(void)
strNewBuf(xmlDocumentBuf(xmlDocument)),
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
"<CompleteMultipartUpload>"
"<Part><PartNumber>1</PartNumber><ETag>E1</ETag></Part>"
"<Part><PartNumber>2</PartNumber><ETag>E2</ETag></Part>"
"</CompleteMultipartUpload>\n",
"<Part><PartNumber>1</PartNumber><ETag>E1</ETag></Part>"
"<Part><PartNumber>2</PartNumber><ETag>E2</ETag></Part>"
"</CompleteMultipartUpload>\n",
"get xml");
}

View File

@ -37,14 +37,14 @@ testRun(void)
TEST_RESULT_STRLST_Z(
cfgExecParam(cfgCmdArchiveGet, cfgCmdRoleAsync, NULL, false, true),
"--archive-async\n--buffer-size=64KiB\n--no-config\n--exec-id=1-test\n--log-subprocess\n--reset-neutral-umask\n"
"--pg1-path=\"" TEST_PATH "/db path\"\n--pg2-path=/db2\n--repo1-path=" TEST_PATH "/repo\n--stanza=test1\n"
"archive-get:async\n",
"--pg1-path=\"" TEST_PATH "/db path\"\n--pg2-path=/db2\n--repo1-path=" TEST_PATH "/repo\n--stanza=test1\n"
"archive-get:async\n",
"exec archive-get -> archive-get:async");
TEST_RESULT_STRLST_Z(
cfgExecParam(cfgCmdBackup, cfgCmdRoleMain, NULL, false, false),
"--archive-timeout=5\n--buffer-size=64KiB\n--no-config\n--exec-id=1-test\n--log-subprocess\n--reset-neutral-umask\n"
"--pg1-path=" TEST_PATH "/db path\n--pg2-path=/db2\n--repo1-path=" TEST_PATH "/repo\n--stanza=test1\nbackup\n",
"--pg1-path=" TEST_PATH "/db path\n--pg2-path=/db2\n--repo1-path=" TEST_PATH "/repo\n--stanza=test1\nbackup\n",
"exec archive-get -> backup");
// -------------------------------------------------------------------------------------------------------------------------
@ -70,7 +70,7 @@ testRun(void)
TEST_RESULT_STRLST_Z(
cfgExecParam(cfgCmdRestore, cfgCmdRoleMain, optionReplace, true, false),
"--db-include=1\n--db-include=2\n--exec-id=1-test\n--log-path=/log\n--pg1-path=" TEST_PATH "/db path\n"
"--recovery-option=a=b\n--recovery-option=c=d\n--repo1-path=/replace/path\nrestore\n",
"--recovery-option=a=b\n--recovery-option=c=d\n--repo1-path=/replace/path\nrestore\n",
"exec restore -> restore");
}

View File

@ -218,7 +218,7 @@ testRun(void)
TEST_ERROR(
hrnCfgLoadP(cfgCmdCheck, argList), OptionInvalidValueError,
"'50.5' is not valid for 'protocol-timeout' option\n"
"HINT 'protocol-timeout' option (50.5) should be greater than 'db-timeout' option (100000).");
"HINT 'protocol-timeout' option (50.5) should be greater than 'db-timeout' option (100000).");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("very small protocol-timeout triggers db-timeout special handling");
@ -280,9 +280,9 @@ testRun(void)
HRN_CFG_LOAD(cfgCmdExpire, argList, .comment = "load config for retention warning");
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.");
" maximum.");
TEST_RESULT_BOOL(cfgOptionTest(cfgOptRepoRetentionArchive), false, "repo1-retention-archive not set");
hrnCfgArgRawZ(argList, cfgOptRepoRetentionFull, "1");
@ -296,9 +296,9 @@ testRun(void)
HRN_CFG_LOAD(cfgCmdExpire, argList, .comment = "multi-repo, load config for retention warning");
TEST_RESULT_LOG(
"P00 WARN: option 'repo2-retention-full' is not set for 'repo2-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo2-retention-full' to the"
" maximum.");
" maximum.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("retention-full warning, retention-archive-type incr - expire command");
@ -311,11 +311,11 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.\n"
" maximum.\n"
"P00 WARN: WAL segments will not be expired: option 'repo1-retention-archive-type=incr' but option"
" 'repo1-retention-archive' is not set");
" 'repo1-retention-archive' is not set");
TEST_RESULT_BOOL(cfgOptionTest(cfgOptRepoRetentionArchive), false, "repo1-retention-archive not set");
// -------------------------------------------------------------------------------------------------------------------------
@ -329,11 +329,11 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.\n"
" maximum.\n"
"P00 WARN: WAL segments will not be expired: option 'repo1-retention-archive-type=diff' but neither option"
" 'repo1-retention-archive' nor option 'repo1-retention-diff' is set");
" 'repo1-retention-archive' nor option 'repo1-retention-diff' is set");
TEST_RESULT_BOOL(cfgOptionTest(cfgOptRepoRetentionArchive), false, "repo1-retention-archive not set");
hrnCfgArgRawZ(argList, cfgOptRepoRetentionDiff, "2");
@ -341,9 +341,9 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-full' is not set for 'repo1-retention-full-type=count', the repository may run out"
" of space\n"
" of space\n"
" HINT: to retain full backups indefinitely (without warning), set option 'repo1-retention-full' to the"
" maximum.");
" maximum.");
TEST_RESULT_INT(cfgOptionInt(cfgOptRepoRetentionArchive), 2, "repo1-retention-archive set to retention-diff");
// -------------------------------------------------------------------------------------------------------------------------
@ -360,7 +360,7 @@ testRun(void)
TEST_RESULT_LOG(
"P00 WARN: option 'repo1-retention-diff' is not set for 'repo1-retention-archive-type=diff'\n"
" HINT: to retain differential backups indefinitely (without warning), set option 'repo1-retention-diff'"
" to the maximum.");
" to the maximum.");
// -------------------------------------------------------------------------------------------------------------------------
TEST_TITLE("no warning - expire command");
@ -445,10 +445,10 @@ testRun(void)
TEST_ERROR(
hrnCfgLoadP(cfgCmdArchiveGet, argList), OptionInvalidValueError,
"'bogus.bucket' is not valid for option 'repo111-s3-bucket'"
"\nHINT: RFC-2818 forbids dots in wildcard matches."
"\nHINT: TLS/SSL verification cannot proceed with this bucket name."
"\nHINT: remove dots from the bucket name.");
"'bogus.bucket' is not valid for option 'repo111-s3-bucket'\n"
"HINT: RFC-2818 forbids dots in wildcard matches.\n"
"HINT: TLS/SSL verification cannot proceed with this bucket name.\n"
"HINT: remove dots from the bucket name.");
hrnCfgEnvKeyRemoveRaw(cfgOptRepoS3Key, 111);
hrnCfgEnvKeyRemoveRaw(cfgOptRepoS3KeySecret, 111);

Some files were not shown because too many files have changed in this diff Show More