1
0
mirror of https://github.com/postgres/postgres.git synced 2025-10-21 02:52:47 +03:00

Add more TAP test coverage for pg_dump.

Add a test case to cover pg_dump with --compress=none.  This
brings the coverage of compress_none.c up from about 64% to 90%,
in particular covering the new code added in a previous patch.

Include compression of toc.dat in manually-compressed test cases.
We would have found the bug fixed in commit a239c4a0c much sooner
if we'd done this.  As far as I can tell, this doesn't reduce test
coverage at all, since there are other tests of directory format
that still use an uncompressed toc.dat.

Widen the wide row used to verify correct (de)compression.
Commit 1a05c1d25 advises us (not without reason) to ensure that
this test case fully fills DEFAULT_IO_BUFFER_SIZE, so that loops
within the compression logic will iterate completely.  To follow
that advice with the proposed DEFAULT_IO_BUFFER_SIZE of 128K,
we need something close to this.  This does indeed increase the
reported code coverage by a few lines.

While here, fix a glitch that I noticed in testing: the
$glob_patterns tests were incapable of failing, because glob()
will return 'foo' as 'foo' whether there is a matching file or
not.  (Indeed, the stanza just above that one relies on that.)

In my testing, this patch adds approximately as much runtime
as was saved by the previous patch, so that it's about a wash
compared to the old code.  However, we get better test coverage.

Author: Tom Lane <tgl@sss.pgh.pa.us>
Discussion: https://postgr.es/m/3515357.1760128017@sss.pgh.pa.us
This commit is contained in:
Tom Lane
2025-10-16 12:52:10 -04:00
parent 9dcf7f1172
commit 20ec995892
2 changed files with 52 additions and 22 deletions

View File

@@ -5104,8 +5104,12 @@ foreach my $run (sort keys %pgdump_runs)
foreach my $glob_pattern (@{$glob_patterns}) foreach my $glob_pattern (@{$glob_patterns})
{ {
my @glob_output = glob($glob_pattern); my @glob_output = glob($glob_pattern);
is(scalar(@glob_output) > 0, my $ok = 0;
1, "$run: glob check for $glob_pattern"); # certainly found some files if glob() returned multiple matches
$ok = 1 if (scalar(@glob_output) > 1);
# if just one match, we need to check if it's real
$ok = 1 if (scalar(@glob_output) == 1 && -f $glob_output[0]);
is($ok, 1, "$run: glob check for $glob_pattern");
} }
} }

View File

@@ -39,6 +39,24 @@ my $supports_lz4 = check_pg_config("#define USE_LZ4 1");
my $supports_zstd = check_pg_config("#define USE_ZSTD 1"); my $supports_zstd = check_pg_config("#define USE_ZSTD 1");
my %pgdump_runs = ( my %pgdump_runs = (
compression_none_custom => {
test_key => 'compression',
dump_cmd => [
'pg_dump', '--no-sync',
'--format' => 'custom',
'--compress' => 'none',
'--file' => "$tempdir/compression_none_custom.dump",
'--statistics',
'postgres',
],
restore_cmd => [
'pg_restore',
'--file' => "$tempdir/compression_none_custom.sql",
'--statistics',
"$tempdir/compression_none_custom.dump",
],
},
compression_gzip_custom => { compression_gzip_custom => {
test_key => 'compression', test_key => 'compression',
compile_option => 'gzip', compile_option => 'gzip',
@@ -78,15 +96,18 @@ my %pgdump_runs = (
'--statistics', '--statistics',
'postgres', 'postgres',
], ],
# Give coverage for manually compressed blobs.toc files during # Give coverage for manually-compressed TOC files during restore.
# restore.
compress_cmd => { compress_cmd => {
program => $ENV{'GZIP_PROGRAM'}, program => $ENV{'GZIP_PROGRAM'},
args => [ '-f', "$tempdir/compression_gzip_dir/blobs_*.toc", ], args => [
}, '-f',
# Verify that only data files were compressed
glob_patterns => [
"$tempdir/compression_gzip_dir/toc.dat", "$tempdir/compression_gzip_dir/toc.dat",
"$tempdir/compression_gzip_dir/blobs_*.toc",
],
},
# Verify that TOC and data files were compressed
glob_patterns => [
"$tempdir/compression_gzip_dir/toc.dat.gz",
"$tempdir/compression_gzip_dir/*.dat.gz", "$tempdir/compression_gzip_dir/*.dat.gz",
], ],
restore_cmd => [ restore_cmd => [
@@ -155,18 +176,18 @@ my %pgdump_runs = (
'--statistics', '--statistics',
'postgres', 'postgres',
], ],
# Give coverage for manually compressed blobs.toc files during # Give coverage for manually-compressed TOC files during restore.
# restore.
compress_cmd => { compress_cmd => {
program => $ENV{'LZ4'}, program => $ENV{'LZ4'},
args => [ args => [
'-z', '-f', '-m', '--rm', '-z', '-f', '-m', '--rm',
"$tempdir/compression_lz4_dir/toc.dat",
"$tempdir/compression_lz4_dir/blobs_*.toc", "$tempdir/compression_lz4_dir/blobs_*.toc",
], ],
}, },
# Verify that data files were compressed # Verify that TOC and data files were compressed
glob_patterns => [ glob_patterns => [
"$tempdir/compression_lz4_dir/toc.dat", "$tempdir/compression_lz4_dir/toc.dat.lz4",
"$tempdir/compression_lz4_dir/*.dat.lz4", "$tempdir/compression_lz4_dir/*.dat.lz4",
], ],
restore_cmd => [ restore_cmd => [
@@ -239,18 +260,18 @@ my %pgdump_runs = (
'--statistics', '--statistics',
'postgres', 'postgres',
], ],
# Give coverage for manually compressed blobs.toc files during # Give coverage for manually-compressed TOC files during restore.
# restore.
compress_cmd => { compress_cmd => {
program => $ENV{'ZSTD'}, program => $ENV{'ZSTD'},
args => [ args => [
'-z', '-f', '-z', '-f', '--rm',
'--rm', "$tempdir/compression_zstd_dir/blobs_*.toc", "$tempdir/compression_zstd_dir/toc.dat",
"$tempdir/compression_zstd_dir/blobs_*.toc",
], ],
}, },
# Verify that data files were compressed # Verify that TOC and data files were compressed
glob_patterns => [ glob_patterns => [
"$tempdir/compression_zstd_dir/toc.dat", "$tempdir/compression_zstd_dir/toc.dat.zst",
"$tempdir/compression_zstd_dir/*.dat.zst", "$tempdir/compression_zstd_dir/*.dat.zst",
], ],
restore_cmd => [ restore_cmd => [
@@ -333,14 +354,15 @@ my %tests = (
}, },
# Insert enough data to surpass DEFAULT_IO_BUFFER_SIZE during # Insert enough data to surpass DEFAULT_IO_BUFFER_SIZE during
# (de)compression operations # (de)compression operations. The weird regex is because Perl
# restricts us to repeat counts of less than 32K.
'COPY test_compression_method' => { 'COPY test_compression_method' => {
create_order => 111, create_order => 111,
create_sql => 'INSERT INTO test_compression_method (col1) ' create_sql => 'INSERT INTO test_compression_method (col1) '
. 'SELECT string_agg(a::text, \'\') FROM generate_series(1,4096) a;', . 'SELECT string_agg(a::text, \'\') FROM generate_series(1,65536) a;',
regexp => qr/^ regexp => qr/^
\QCOPY public.test_compression_method (col1) FROM stdin;\E \QCOPY public.test_compression_method (col1) FROM stdin;\E
\n(?:\d{15277}\n){1}\\\.\n \n(?:(?:\d\d\d\d\d\d\d\d\d\d){31657}\d\d\d\d\n){1}\\\.\n
/xm, /xm,
like => { %full_runs, }, like => { %full_runs, },
}, },
@@ -502,8 +524,12 @@ foreach my $run (sort keys %pgdump_runs)
foreach my $glob_pattern (@{$glob_patterns}) foreach my $glob_pattern (@{$glob_patterns})
{ {
my @glob_output = glob($glob_pattern); my @glob_output = glob($glob_pattern);
is(scalar(@glob_output) > 0, my $ok = 0;
1, "$run: glob check for $glob_pattern"); # certainly found some files if glob() returned multiple matches
$ok = 1 if (scalar(@glob_output) > 1);
# if just one match, we need to check if it's real
$ok = 1 if (scalar(@glob_output) == 1 && -f $glob_output[0]);
is($ok, 1, "$run: glob check for $glob_pattern");
} }
} }