Mirror of https://github.com/pgbackrest/pgbackrest.git

Allow suppression of times in testing for reproducibility.

Timestamps, timings, etc. cause a lot of log churn when included in documentation.
Author: David Steele
Date:   2020-03-22 10:12:29 -04:00
Commit: 3ec630f5b2
Parent: a180251011

6 changed files with 64 additions and 26 deletions
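
To make the effect concrete: with timing enabled, the C harness prefixes every test line with the elapsed time and the test line number; with --no-log-timestamp only the indentation and line number remain. Below is a minimal, standalone sketch of that prefix format, based on the printf calls added to harnessTest.c in this commit. The names logPrefixSketch and the fixed elapsed value are illustrative only, not harness code.

#include <stdbool.h>
#include <stdio.h>

// Stand-in for the harness's testTiming flag, which hrnInit() now sets
static bool testTiming = true;

// Mirrors the format written by hrnTestLogPrefix(): indent, optional timing, line number
static void
logPrefixSketch(int lineNo, bool padding, unsigned int elapsedMs)
{
    printf("    ");                                             // leading indent

    if (testTiming)                                             // timing only when enabled
        printf("%03u.%03us ", elapsedMs / 1000, elapsedMs % 1000);

    printf("l%04d - %s", lineNo, padding ? "    " : "");
}

int
main(void)
{
    logPrefixSketch(42, false, 1234);                           // prints "    001.234s l0042 - "
    printf("test title\n");

    testTiming = false;                                         // what --no-log-timestamp selects
    logPrefixSketch(42, false, 1234);                           // prints "    l0042 - "
    printf("test title\n");

    return 0;
}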


@@ -66,6 +66,7 @@ sub new
$self->{strLogLevel},
$self->{strLogLevelTest},
$self->{strLogLevelTestFile},
$self->{bLogTimestamp},
$self->{bLogForce},
$self->{bShowOutputAsync},
$self->{bNoCleanup},
@@ -99,6 +100,7 @@ sub new
{name => 'strLogLevel'},
{name => 'strLogLevelTest'},
{name => 'strLogLevelTestFile'},
{name => 'bLogTimestamp'},
{name => 'bLogForce'},
{name => 'bShowOutputAsync'},
{name => 'bNoCleanup'},
@@ -278,6 +280,7 @@ sub run
(defined($self->{oTest}->{&TEST_DB}) ? ' --pg-version=' . $self->{oTest}->{&TEST_DB} : '') .
($self->{strLogLevel} ne lc(INFO) ? " --log-level=$self->{strLogLevel}" : '') .
($self->{strLogLevelTestFile} ne lc(TRACE) ? " --log-level-test-file=$self->{strLogLevelTestFile}" : '') .
($self->{bLogTimestamp} ? '' : ' --no-log-timestamp') .
' --pgsql-bin=' . $self->{oTest}->{&TEST_PGSQL_BIN} .
($self->{strTimeZone} ? " --tz='$self->{strTimeZone}'" : '') .
($self->{bLogForce} ? ' --log-force' : '') .
@@ -395,6 +398,9 @@ sub run
$strTestC =~ s/\{\[C\_TEST\_REPO_PATH\]\}/$self->{strBackRestBase}/g;
$strTestC =~ s/\{\[C\_TEST\_SCALE\]\}/$self->{iScale}/g;
my $strLogTimestampC = $self->{bLogTimestamp} ? 'true' : 'false';
$strTestC =~ s/\{\[C\_TEST\_TIMING\]\}/$strLogTimestampC/g;
# Set timezone
if (defined($self->{strTimeZone}))
{
@@ -624,7 +630,7 @@ sub end
# Output error
if ($iExitStatus != 0)
{
&log(ERROR, "${strTestDone} (err${iExitStatus}-${fTestElapsedTime}s)" .
&log(ERROR, "${strTestDone} (err${iExitStatus}" . ($self->{bLogTimestamp} ? "-${fTestElapsedTime}s)" : '') .
(defined($oExecDone->{strOutLog}) && !$self->{bShowOutputAsync} ?
":\n\n" . trim($oExecDone->{strOutLog}) . "\n" : ''), undef, undef, 4);
$bFail = true;
@@ -632,7 +638,7 @@ sub end
# Output success
else
{
&log(INFO, "${strTestDone} (${fTestElapsedTime}s)".
&log(INFO, "${strTestDone}" . ($self->{bLogTimestamp} ? "(${fTestElapsedTime}s)" : '').
($self->{bVmOut} && !$self->{bShowOutputAsync} ?
":\n\n" . trim($oExecDone->{strOutLog}) . "\n" : ''), undef, undef, 4);
}


@@ -33,6 +33,7 @@ static const char *testExeData = NULL;
static const char *testProjectExeData = NULL;
static bool testContainerData = false;
static unsigned int testIdxData = 0;
static bool testTiming = true;
static uint64_t testScaleData = 1;
static const char *testPathData = NULL;
static const char *testDataPathData = NULL;
@@ -56,7 +57,7 @@ Initialize harness
***********************************************************************************************************************************/
void
hrnInit(
const char *testExe, const char *testProjectExe, bool testContainer, unsigned int testIdx, uint64_t testScale,
const char *testExe, const char *testProjectExe, bool testContainer, unsigned int testIdx, bool timing, uint64_t testScale,
const char *testPath, const char *testDataPath, const char *testRepoPath)
{
FUNCTION_HARNESS_VOID();
@@ -66,6 +67,7 @@ hrnInit(
testProjectExeData = testProjectExe;
testContainerData = testContainer;
testIdxData = testIdx;
testTiming = timing;
testScaleData = testScale;
testPathData = testPath;
testDataPathData = testDataPath;
@@ -383,6 +385,32 @@ hrnDiff(const char *expected, const char *actual)
FUNCTION_HARNESS_RESULT(STRINGZ, harnessDiffBuffer);
}
/**********************************************************************************************************************************/
void
hrnTestLogPrefix(int lineNo, bool padding)
{
FUNCTION_HARNESS_BEGIN();
FUNCTION_HARNESS_PARAM(INT, lineNo);
FUNCTION_HARNESS_PARAM(BOOL, padding);
FUNCTION_HARNESS_END();
// Always indent at the beginning
printf(" ");
// Add timing if requested
if (testTiming)
{
printf(
"%03u.%03us ", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000),
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000));
}
// Add number and padding
printf("l%04d - %s", lineNo, padding ? " " : "");
FUNCTION_HARNESS_RESULT_VOID();
}
/***********************************************************************************************************************************
Getters
***********************************************************************************************************************************/


@@ -6,6 +6,8 @@ C Test Harness
#include <inttypes.h>
#include "common/harnessTest.intern.h"
#include "common/debug.h"
#include "common/error.h"
@@ -95,10 +97,8 @@ Test that an expected error is actually thrown and error when it isn't
{ \
bool TEST_ERROR_catch = false; \
\
printf( \
" %03u.%03us l%04d - expect %s: %s\n", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000), \
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000), __LINE__, errorTypeName(&errorTypeExpected), \
errorMessageExpected); \
hrnTestLogPrefix(__LINE__, true); \
printf("expect %s: %s\n", errorTypeName(&errorTypeExpected), errorMessageExpected); \
fflush(stdout); \
\
TRY_BEGIN() \
@@ -179,9 +179,7 @@ Compare types
Output information about the test
***********************************************************************************************************************************/
#define TEST_RESULT_INFO(...) \
printf( \
" %03u.%03us l%04d - ", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000), \
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000), __LINE__); \
hrnTestLogPrefix(__LINE__, true); \
printf(__VA_ARGS__); \
printf("\n"); \
fflush(stdout);
@@ -423,18 +421,16 @@ Logging macros
#define TEST_LOG(message) \
do \
{ \
printf( \
" %03u.%03us l%04d - %s\n", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000), \
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000), __LINE__, message); \
hrnTestLogPrefix(__LINE__, true); \
printf("%s\n", message); \
fflush(stdout); \
} while(0)
#define TEST_LOG_FMT(format, ...) \
do \
{ \
printf( \
" %03u.%03us l%04d - " format "\n", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000), \
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000), __LINE__, __VA_ARGS__); \
hrnTestLogPrefix(__LINE__, true); \
printf(format "\n", __VA_ARGS__); \
fflush(stdout); \
} while(0)
@@ -444,9 +440,8 @@ Test title macro
#define TEST_TITLE(message) \
do \
{ \
printf( \
" %03u.%03us l%04d - %s\n", (unsigned int)((testTimeMSec() - testTimeMSecBegin()) / 1000), \
(unsigned int)((testTimeMSec() - testTimeMSecBegin()) % 1000), __LINE__, message); \
hrnTestLogPrefix(__LINE__, false); \
printf("%s\n", message); \
fflush(stdout); \
} while(0)
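
All of these macros change the same way: the duplicated timing printf is dropped and the shared prefix helper is called first. A self-contained sketch of that shape follows, with a stub standing in for hrnTestLogPrefix() so it compiles on its own; TEST_LOG_SKETCH and the stub are illustrative names, not harness code.

#include <stdbool.h>
#include <stdio.h>

// Stand-in for hrnTestLogPrefix() so this sketch is self-contained
static void
hrnTestLogPrefixStub(int lineNo, bool padding)
{
    printf("    l%04d - %s", lineNo, padding ? "    " : "");
}

// Same shape as the refactored TEST_LOG(): prefix first, then the message
#define TEST_LOG_SKETCH(message) \
    do \
    { \
        hrnTestLogPrefixStub(__LINE__, true); \
        printf("%s\n", message); \
        fflush(stdout); \
    } while (0)

int
main(void)
{
    TEST_LOG_SKETCH("timing now comes from the shared prefix helper");
    return 0;
}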


@@ -4,15 +4,20 @@ C Test Harness Internal
#ifndef TEST_COMMON_HARNESS_INTERN_H
#define TEST_COMMON_HARNESS_INTERN_H
#include <stdbool.h>
#include "common/harnessTest.h"
/***********************************************************************************************************************************
Functions
***********************************************************************************************************************************/
void hrnInit(
const char *testExe, const char *testProjectExe, bool testContainer, unsigned int testIdx, uint64_t testScale,
const char *testExe, const char *testProjectExe, bool testContainer, unsigned int testIdx, bool timing, uint64_t testScale,
const char *testPath, const char *testDataPath, const char *testRepoPath);
void hrnAdd(int run, bool selected);
void hrnComplete(void);
// Output test log prefix with timing, line number, and optional padding
void hrnTestLogPrefix(int lineNo, bool padding);
#endif


@@ -85,6 +85,7 @@ main(int argListSize, const char *argList[])
"{[C_TEST_PROJECT_EXE]}", // Project exe
{[C_TEST_CONTAINER]}, // Is this test running in a container?
{[C_TEST_IDX]}, // The 0-based index of this test
{[C_TEST_TIMING]}, // Is timing enabled (may be disabled for reproducible documentation)
{[C_TEST_SCALE]}, // Scaling factor for performance tests
"{[C_TEST_PATH]}", // Path where tests write data
"{[C_TEST_DATA_PATH]}", // Path where the harness stores temp files (expect, diff, etc.)


@@ -108,6 +108,7 @@ test.pl [options]
--log-level log level to use for test harness (and Perl tests) (defaults to INFO)
--log-level-test log level to use for C tests (defaults to OFF)
--log-level-test-file log level to use for file logging in integration tests (defaults to TRACE)
--no-log-timestamp suppress timestamps, timings, etc. Used to generate documentation.
--quiet, -q equivalent to --log-level=off
VM Options:
@@ -128,6 +129,7 @@ test.pl [options]
my $strLogLevel = lc(INFO);
my $strLogLevelTest = lc(OFF);
my $strLogLevelTestFile = lc(TRACE);
my $bNoLogTimestamp = false;
my $bVmOut = false;
my @stryModule;
my @stryModuleTest;
@@ -182,6 +184,7 @@ GetOptions ('q|quiet' => \$bQuiet,
'log-level=s' => \$strLogLevel,
'log-level-test=s' => \$strLogLevelTest,
'log-level-test-file=s' => \$strLogLevelTestFile,
'no-log-timestamp' => \$bNoLogTimestamp,
'vm=s' => \$strVm,
'vm-host=s' => \$strVmHost,
'vm-out' => \$bVmOut,
@@ -312,7 +315,7 @@ eval
$strLogLevel = 'off';
}
logLevelSet(uc($strLogLevel), uc($strLogLevel), OFF);
logLevelSet(uc($strLogLevel), uc($strLogLevel), OFF, !$bNoLogTimestamp);
&log(INFO, "test begin - log level ${strLogLevel}");
if (@stryModuleTest != 0 && @stryModule != 1)
@@ -1095,10 +1098,10 @@ eval
{
my $oJob = new pgBackRestTest::Common::JobTest(
$oStorageTest, $strBackRestBase, $strTestPath, $$oyTestRun[$iTestIdx], $bDryRun, $strVmHost, $bVmOut,
$iVmIdx, $iVmMax, $iTestIdx, $iTestMax, $strLogLevel, $strLogLevelTest, $strLogLevelTestFile, $bLogForce,
$bShowOutputAsync, $bNoCleanup, $iRetry, !$bNoValgrind, !$bNoCoverage, $bCoverageSummary, !$bNoOptimize,
$bBackTrace, $bProfile, $iScale, $strTimeZone, !$bNoDebug, $bDebugTestTrace,
$iBuildMax / $iVmMax < 1 ? 1 : int($iBuildMax / $iVmMax));
$iVmIdx, $iVmMax, $iTestIdx, $iTestMax, $strLogLevel, $strLogLevelTest, $strLogLevelTestFile,
!$bNoLogTimestamp, $bLogForce, $bShowOutputAsync, $bNoCleanup, $iRetry, !$bNoValgrind, !$bNoCoverage,
$bCoverageSummary, !$bNoOptimize, $bBackTrace, $bProfile, $iScale, $strTimeZone, !$bNoDebug,
$bDebugTestTrace, $iBuildMax / $iVmMax < 1 ? 1 : int($iBuildMax / $iVmMax));
$iTestIdx++;
if ($oJob->run())
@@ -1129,7 +1132,7 @@ eval
($bDryRun ? 'DRY RUN COMPLETED' : 'TESTS COMPLETED') . ($iTestFail == 0 ? ' SUCCESSFULLY' .
($iUncoveredCodeModuleTotal == 0 ? '' : " WITH ${iUncoveredCodeModuleTotal} MODULE(S) MISSING COVERAGE") :
" WITH ${iTestFail} FAILURE(S)") . ($iTestRetry == 0 ? '' : ", ${iTestRetry} RETRY(IES)") .
' (' . (time() - $lStartTime) . 's)');
($bNoLogTimestamp ? '' : ' (' . (time() - $lStartTime) . 's)'));
exit 1 if ($iTestFail > 0 || ($iUncoveredCodeModuleTotal > 0 && !$bCoverageSummary));