Return binary as result from CryptoHash filter.
The prior result was hex-encoded, a legacy of the Perl interface and then the JSON protocol. The new binary protocol natively supports binary, so it makes sense to return the raw digest and convert to hex only where needed. A number of these hex conversions can now be removed, but that will be handled in another commit.
commit 46a0af3531 (parent 5602f179a1)
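The gist of the change, as a hedged sketch: the CryptoHash filter now packs the raw digest (pckWriteBinP) instead of a hex string, so callers read it with pckReadBinP and apply bufHex themselves when they still need hex. The helper below is illustrative only — its name and the header paths are assumptions, not part of the commit:

/* Illustrative sketch -- readSha1Hex() is a hypothetical helper; header paths are approximate. */
#include "common/crypto/hash.h"
#include "common/io/filter/group.h"
#include "common/io/io.h"
#include "common/type/buffer.h"
#include "common/type/string.h"

static const String *
readSha1Hex(IoRead *const read)
{
    // Attach the SHA1 filter and drain the input so the filter group result is populated
    ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(hashTypeSha1));
    ioReadDrain(read);

    // Before this commit the filter result was already a hex string:
    //     return pckReadStrP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE));

    // After this commit the result is the raw digest, so convert only where hex is still needed
    const Buffer *const digest = pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE));
    return bufHex(digest);
}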
@@ -160,8 +160,8 @@ archivePushFile(
         ioFilterGroupAdd(ioReadFilterGroup(read), cryptoHashNew(hashTypeSha1));
         ioReadDrain(read);

-        const String *walSegmentChecksum = pckReadStrP(
-            ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE));
+        const String *const walSegmentChecksum = bufHex(
+            pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE)));

         // Check each repo for the WAL segment
         for (unsigned int repoListIdx = 0; repoListIdx < lstSize(repoList); repoListIdx++)
@@ -984,7 +984,7 @@ backupFilePut(BackupData *backupData, Manifest *manifest, const String *name, ti
         };

         memcpy(
-            file.checksumSha1, strZ(pckReadStrP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE))),
+            file.checksumSha1, strZ(bufHex(pckReadBinP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE)))),
             HASH_TYPE_SHA1_SIZE_HEX + 1);

         manifestFileAdd(manifest, &file);
@@ -94,8 +94,8 @@ backupFile(
             // If the pg file exists check the checksum/size
             if (ioReadDrain(read))
             {
-                const String *pgTestChecksum = pckReadStrP(
-                    ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE));
+                const String *const pgTestChecksum = bufHex(
+                    pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE)));
                 uint64_t pgTestSize = pckReadU64P(ioFilterGroupResultP(ioReadFilterGroup(read), SIZE_FILTER_TYPE));

                 // Does the pg file match?
@@ -160,8 +160,8 @@ backupFile(
                     ioReadDrain(read);

                     // Test checksum/size
-                    const String *pgTestChecksum = pckReadStrP(
-                        ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE));
+                    const String *const pgTestChecksum = bufHex(
+                        pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE)));
                     uint64_t pgTestSize = pckReadU64P(ioFilterGroupResultP(ioReadFilterGroup(read), SIZE_FILTER_TYPE));

                     // No need to recopy if checksum/size match
@@ -275,8 +275,8 @@ backupFile(
                 fileResult->copySize = pckReadU64P(
                     ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), SIZE_FILTER_TYPE, .idx = 0));
                 fileResult->bundleOffset = bundleOffset;
-                fileResult->copyChecksum = strDup(
-                    pckReadStrP(ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), CRYPTO_HASH_FILTER_TYPE)));
+                fileResult->copyChecksum = bufHex(
+                    pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), CRYPTO_HASH_FILTER_TYPE)));
                 fileResult->repoSize = pckReadU64P(
                     ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), SIZE_FILTER_TYPE, .idx = 1));

@@ -127,7 +127,8 @@ List *restoreFile(
                     if (file->size == 0 ||
                         strEq(
                             file->checksum,
-                            pckReadStrP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE))))
+                            bufHex(
+                                pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(read), CRYPTO_HASH_FILTER_TYPE)))))
                     {
                         // If the hash/size are now the same but the time is not, then set the time back to the backup
                         // time. This helps with unit testing, but also presents a pristine version of the database
@@ -273,12 +274,13 @@ List *restoreFile(
                 storageReadFree(repoFileRead);

                 // Validate checksum
-                if (!strEq(file->checksum, pckReadStrP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE))))
+                if (!strEq(file->checksum, bufHex(pckReadBinP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE)))))
                 {
                     THROW_FMT(
                         ChecksumError,
                         "error restoring '%s': actual checksum '%s' does not match expected checksum '%s'", strZ(file->name),
-                        strZ(pckReadStrP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE))), strZ(file->checksum));
+                        strZ(bufHex(pckReadBinP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE)))),
+                        strZ(file->checksum));
                 }
             }
         }
@@ -65,7 +65,7 @@ verifyFile(
     if (ioReadDrain(read))
     {
         // Validate checksum
-        if (!strEq(fileChecksum, pckReadStrP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE))))
+        if (!strEq(fileChecksum, bufHex(pckReadBinP(ioFilterGroupResultP(filterGroup, CRYPTO_HASH_FILTER_TYPE)))))
         {
             result = verifyChecksumMismatch;
         }
@@ -224,11 +224,12 @@ verifyInfoFile(const String *pathFileName, bool keepFile, const String *cipherPa
         else
             ioReadDrain(infoRead);

-        PackRead *const filterResult = ioFilterGroupResultP(ioReadFilterGroup(infoRead), CRYPTO_HASH_FILTER_TYPE);
+        const Buffer *const filterResult = pckReadBinP(
+            ioFilterGroupResultP(ioReadFilterGroup(infoRead), CRYPTO_HASH_FILTER_TYPE));

         MEM_CONTEXT_PRIOR_BEGIN()
         {
-            result.checksum = pckReadStrP(filterResult);
+            result.checksum = bufHex(filterResult);
         }
         MEM_CONTEXT_PRIOR_END();
     }
@@ -152,7 +152,7 @@ cryptoHashResult(THIS_VOID)
     {
         PackWrite *const packWrite = pckWriteNewP();

-        pckWriteStrP(packWrite, bufHex(cryptoHash(this)));
+        pckWriteBinP(packWrite, cryptoHash(this));
         pckWriteEndP(packWrite);

         result = pckMove(pckWriteResult(packWrite), memContextPrior());
@@ -287,7 +287,7 @@ infoNewLoad(IoRead *read, InfoLoadNewCallback *callbackFunction, void *callbackD
         INFO_CHECKSUM_END(data.checksumActual);

         // Verify the checksum
-        const String *checksumActual = pckReadStrP(pckReadNew(ioFilterResult(data.checksumActual)));
+        const String *const checksumActual = bufHex(pckReadBinP(pckReadNew(ioFilterResult(data.checksumActual))));

         if (data.checksumExpected == NULL)
             THROW_FMT(ChecksumError, "invalid checksum, actual '%s' but no checksum found", strZ(checksumActual));
@@ -426,7 +426,7 @@ infoSave(Info *this, IoWrite *write, InfoSaveCallback *callbackFunction, void *c
     INFO_CHECKSUM_END(data.checksum);

     ioWrite(data.write, BUFSTRDEF("\n[" INFO_SECTION_BACKREST "]\n" INFO_KEY_CHECKSUM "="));
-    ioWriteLine(data.write, BUFSTR(jsonFromVar(VARSTR(pckReadStrP(pckReadNew(ioFilterResult(data.checksum)))))));
+    ioWriteLine(data.write, BUFSTR(jsonFromVar(VARSTR(bufHex(pckReadBinP(pckReadNew(ioFilterResult(data.checksum))))))));

     // Close the file
     ioWriteClose(data.write);
@@ -89,14 +89,14 @@ storageWriteGcsVerify(StorageWriteGcs *const this, HttpResponse *const response)
         const String *const md5base64 = varStr(kvGet(content, GCS_JSON_MD5_HASH_VAR));
         CHECK(FormatError, md5base64 != NULL, "MD5 missing");

-        const String *const md5actual = bufHex(bufNewDecode(encodeBase64, md5base64));
-        const String *const md5expected = pckReadStrP(pckReadNew(ioFilterResult(this->md5hash)));
+        const Buffer *const md5actual = bufNewDecode(encodeBase64, md5base64);
+        const Buffer *const md5expected = pckReadBinP(pckReadNew(ioFilterResult(this->md5hash)));

-        if (!strEq(md5actual, md5expected))
+        if (!bufEq(md5actual, md5expected))
         {
             THROW_FMT(
-                FormatError, "expected md5 '%s' for '%s' but actual is '%s'", strZ(md5expected), strZ(this->interface.name),
-                strZ(md5actual));
+                FormatError, "expected md5 '%s' for '%s' but actual is '%s'", strZ(bufHex(md5expected)), strZ(this->interface.name),
+                strZ(bufHex(md5actual)));
         }

         // Check the size when available
@@ -96,7 +96,7 @@ harnessInfoChecksum(const String *info)

         // Append checksum to buffer
         bufCat(result, BUFSTRDEF("\n[backrest]\nbackrest-checksum="));
-        bufCat(result, BUFSTR(jsonFromVar(VARSTR(pckReadStrP(pckReadNew(ioFilterResult(data.checksum)))))));
+        bufCat(result, BUFSTR(jsonFromVar(VARSTR(bufHex(pckReadBinP(pckReadNew(ioFilterResult(data.checksum))))))));
         bufCat(result, BUFSTRDEF("\n"));

         bufMove(result, memContextPrior());
@@ -120,8 +120,8 @@ testBackupValidateList(
                 ioFilterGroupAdd(ioReadFilterGroup(storageReadIo(read)), cryptoHashNew(hashTypeSha1));

                 uint64_t size = bufUsed(storageGetP(read));
-                const String *checksum = pckReadStrP(
-                    ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), CRYPTO_HASH_FILTER_TYPE));
+                const String *checksum = bufHex(
+                    pckReadBinP(ioFilterGroupResultP(ioReadFilterGroup(storageReadIo(read)), CRYPTO_HASH_FILTER_TYPE)));

                 strCatFmt(result, ", s=%" PRIu64, size);

@@ -326,14 +326,15 @@ testRun(void)
         TEST_RESULT_VOID(ioFilterProcessIn(hash, BUFSTRDEF("5")), " add 5");

         TEST_RESULT_STR_Z(
-            pckReadStrP(pckReadNew(ioFilterResult(hash))), "8cb2237d0679ca88db6464eac60da96345513964", " check small hash");
+            bufHex(pckReadBinP(pckReadNew(ioFilterResult(hash)))), "8cb2237d0679ca88db6464eac60da96345513964",
+            " check small hash");
         TEST_RESULT_VOID(ioFilterFree(hash), " free hash");

         // -------------------------------------------------------------------------------------------------------------------------
         TEST_TITLE("md5 hash - zero bytes");

         TEST_ASSIGN(hash, cryptoHashNew(hashTypeMd5), "create md5 hash");
-        TEST_RESULT_STR_Z(pckReadStrP(pckReadNew(ioFilterResult(hash))), HASH_TYPE_MD5_ZERO, "check empty hash");
+        TEST_RESULT_STR_Z(bufHex(pckReadBinP(pckReadNew(ioFilterResult(hash)))), HASH_TYPE_MD5_ZERO, "check empty hash");

         // Exercise most of the conditions in the local MD5 code
         // -------------------------------------------------------------------------------------------------------------------------
@@ -352,7 +353,7 @@ testRun(void)
         TEST_RESULT_VOID(
             ioFilterProcessIn(hash, BUFSTRZ("12345678901234567890123456789001234567890012345678901234")), "add 58 bytes");

-        TEST_RESULT_STR_Z(pckReadStrP(pckReadNew(ioFilterResult(hash))), "3318600bc9c1d379e91e4bae90721243", "check hash");
+        TEST_RESULT_STR_Z(bufHex(pckReadBinP(pckReadNew(ioFilterResult(hash)))), "3318600bc9c1d379e91e4bae90721243", "check hash");

         // Full coverage of local MD5 requires processing > 511MB of data but that makes the test run too long. Instead we'll cheat
         // a bit and initialize the context at 511MB to start. This does not produce a valid MD5 hash but does provide coverage of
@@ -364,11 +365,11 @@ testRun(void)
         ((CryptoHash *)ioFilterDriver(hash))->md5Context->lo = 0x1fffffff;

         TEST_RESULT_VOID(ioFilterProcessIn(hash, BUFSTRZ("1")), "add 1");
-        TEST_RESULT_STR_Z(pckReadStrP(pckReadNew(ioFilterResult(hash))), "5c99876f9cafa7f485eac9c7a8a2764c", "check hash");
+        TEST_RESULT_STR_Z(bufHex(pckReadBinP(pckReadNew(ioFilterResult(hash)))), "5c99876f9cafa7f485eac9c7a8a2764c", "check hash");

         // -------------------------------------------------------------------------------------------------------------------------
         TEST_ASSIGN(hash, cryptoHashNew(hashTypeSha256), "create sha256 hash");
-        TEST_RESULT_STR_Z(pckReadStrP(pckReadNew(ioFilterResult(hash))), HASH_TYPE_SHA256_ZERO, " check empty hash");
+        TEST_RESULT_STR_Z(bufHex(pckReadBinP(pckReadNew(ioFilterResult(hash)))), HASH_TYPE_SHA256_ZERO, " check empty hash");

         // -------------------------------------------------------------------------------------------------------------------------
         TEST_RESULT_STR_Z(
@@ -312,7 +312,7 @@ testRun(void)

         TEST_RESULT_STR_Z(
             hrnPackToStr(ioFilterGroupResultAll(filterGroup)),
-            "1:strid:size, 2:pack:<1:u64:8>, 3:strid:hash, 4:pack:<1:str:bbbcf2c59433f68f22376cd2439d6cd309378df6>,"
+            "1:strid:size, 2:pack:<1:u64:8>, 3:strid:hash, 4:pack:<1:bin:bbbcf2c59433f68f22376cd2439d6cd309378df6>,"
             " 5:strid:pg-chksum, 6:pack:<2:bool:false, 3:bool:false>, 7:strid:cipher-blk, 9:strid:cipher-blk, 11:strid:gz-cmp,"
             " 13:strid:gz-dcmp, 15:strid:buffer",
             "filter results");
@@ -335,7 +335,7 @@ testRun(void)

         TEST_RESULT_STR_Z(
             hrnPackToStr(ioFilterGroupResultAll(filterGroup)),
-            "1:strid:size, 2:pack:<1:u64:8>, 3:strid:hash, 4:pack:<1:str:bbbcf2c59433f68f22376cd2439d6cd309378df6>, 5:strid:sink,"
+            "1:strid:size, 2:pack:<1:u64:8>, 3:strid:hash, 4:pack:<1:bin:bbbcf2c59433f68f22376cd2439d6cd309378df6>, 5:strid:sink,"
             " 7:strid:buffer",
             "filter results");
