author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:17:33 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:17:33 +0000
commit     5e45211a64149b3c659b90ff2de6fa982a5a93ed (patch)
tree       739caf8c461053357daa9f162bef34516c7bf452 /src/bin/pg_dump/t
parent     Initial commit. (diff)
download   postgresql-15-5e45211a64149b3c659b90ff2de6fa982a5a93ed.tar.xz
           postgresql-15-5e45211a64149b3c659b90ff2de6fa982a5a93ed.zip
Adding upstream version 15.5. (upstream/15.5)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/bin/pg_dump/t')
-rw-r--r--  src/bin/pg_dump/t/001_basic.pl                 207
-rw-r--r--  src/bin/pg_dump/t/002_pg_dump.pl              4280
-rw-r--r--  src/bin/pg_dump/t/003_pg_dump_with_server.pl    40
-rw-r--r--  src/bin/pg_dump/t/004_pg_dump_parallel.pl       81
-rw-r--r--  src/bin/pg_dump/t/010_dump_connstr.pl          233
5 files changed, 4841 insertions, 0 deletions
diff --git a/src/bin/pg_dump/t/001_basic.pl b/src/bin/pg_dump/t/001_basic.pl
new file mode 100644
index 0000000..a583c8a
--- /dev/null
+++ b/src/bin/pg_dump/t/001_basic.pl
@@ -0,0 +1,207 @@
+
+# Copyright (c) 2021-2022, PostgreSQL Global Development Group
+
+use strict;
+use warnings;
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+my $tempdir = PostgreSQL::Test::Utils::tempdir;
+
+#########################################
+# Basic checks
+
+program_help_ok('pg_dump');
+program_version_ok('pg_dump');
+program_options_handling_ok('pg_dump');
+
+program_help_ok('pg_restore');
+program_version_ok('pg_restore');
+program_options_handling_ok('pg_restore');
+
+program_help_ok('pg_dumpall');
+program_version_ok('pg_dumpall');
+program_options_handling_ok('pg_dumpall');
+
+#########################################
+# Test various invalid options and disallowed combinations
+# Doesn't require a PG instance to be set up, so do this first.
+
+command_fails_like(
+ [ 'pg_dump', 'qqq', 'abc' ],
+ qr/\Qpg_dump: error: too many command-line arguments (first is "abc")\E/,
+ 'pg_dump: too many command-line arguments');
+
+command_fails_like(
+ [ 'pg_restore', 'qqq', 'abc' ],
+ qr/\Qpg_restore: error: too many command-line arguments (first is "abc")\E/,
+ 'pg_restore: too many command-line arguments');
+
+command_fails_like(
+ [ 'pg_dumpall', 'qqq', 'abc' ],
+ qr/\Qpg_dumpall: error: too many command-line arguments (first is "qqq")\E/,
+ 'pg_dumpall: too many command-line arguments');
+
+command_fails_like(
+ [ 'pg_dump', '-s', '-a' ],
+ qr/\Qpg_dump: error: options -s\/--schema-only and -a\/--data-only cannot be used together\E/,
+ 'pg_dump: options -s/--schema-only and -a/--data-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dump', '-s', '--include-foreign-data=xxx' ],
+ qr/\Qpg_dump: error: options -s\/--schema-only and --include-foreign-data cannot be used together\E/,
+ 'pg_dump: options -s/--schema-only and --include-foreign-data cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dump', '-j2', '--include-foreign-data=xxx' ],
+ qr/\Qpg_dump: error: option --include-foreign-data is not supported with parallel backup\E/,
+ 'pg_dump: option --include-foreign-data is not supported with parallel backup'
+);
+
+command_fails_like(
+ ['pg_restore'],
+ qr{\Qpg_restore: error: one of -d/--dbname and -f/--file must be specified\E},
+ 'pg_restore: error: one of -d/--dbname and -f/--file must be specified');
+
+command_fails_like(
+ [ 'pg_restore', '-s', '-a', '-f -' ],
+ qr/\Qpg_restore: error: options -s\/--schema-only and -a\/--data-only cannot be used together\E/,
+ 'pg_restore: options -s/--schema-only and -a/--data-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_restore', '-d', 'xxx', '-f', 'xxx' ],
+ qr/\Qpg_restore: error: options -d\/--dbname and -f\/--file cannot be used together\E/,
+ 'pg_restore: options -d/--dbname and -f/--file cannot be used together');
+
+command_fails_like(
+ [ 'pg_dump', '-c', '-a' ],
+ qr/\Qpg_dump: error: options -c\/--clean and -a\/--data-only cannot be used together\E/,
+ 'pg_dump: options -c/--clean and -a/--data-only cannot be used together');
+
+command_fails_like(
+ [ 'pg_restore', '-c', '-a', '-f -' ],
+ qr/\Qpg_restore: error: options -c\/--clean and -a\/--data-only cannot be used together\E/,
+ 'pg_restore: options -c/--clean and -a/--data-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dump', '--if-exists' ],
+ qr/\Qpg_dump: error: option --if-exists requires option -c\/--clean\E/,
+ 'pg_dump: option --if-exists requires option -c/--clean');
+
+command_fails_like(
+ [ 'pg_dump', '-j3' ],
+ qr/\Qpg_dump: error: parallel backup only supported by the directory format\E/,
+ 'pg_dump: parallel backup only supported by the directory format');
+
+# Note the trailing whitespace in the value of --jobs; that is valid.
+command_fails_like(
+ [ 'pg_dump', '-j', '-1 ' ],
+ qr/\Qpg_dump: error: -j\/--jobs must be in range\E/,
+ 'pg_dump: -j/--jobs must be in range');
+
+command_fails_like(
+ [ 'pg_dump', '-F', 'garbage' ],
+ qr/\Qpg_dump: error: invalid output format\E/,
+ 'pg_dump: invalid output format');
+
+command_fails_like(
+ [ 'pg_restore', '-j', '-1', '-f -' ],
+ qr/\Qpg_restore: error: -j\/--jobs must be in range\E/,
+ 'pg_restore: -j/--jobs must be in range');
+
+command_fails_like(
+ [ 'pg_restore', '--single-transaction', '-j3', '-f -' ],
+ qr/\Qpg_restore: error: cannot specify both --single-transaction and multiple jobs\E/,
+ 'pg_restore: cannot specify both --single-transaction and multiple jobs');
+
+command_fails_like(
+ [ 'pg_dump', '-Z', '-1' ],
+ qr/\Qpg_dump: error: -Z\/--compress must be in range 0..9\E/,
+ 'pg_dump: -Z/--compress must be in range');
+
+if (check_pg_config("#define HAVE_LIBZ 1"))
+{
+ command_fails_like(
+ [ 'pg_dump', '--compress', '1', '--format', 'tar' ],
+ qr/\Qpg_dump: error: compression is not supported by tar archive format\E/,
+ 'pg_dump: compression is not supported by tar archive format');
+}
+else
+{
+ # --jobs > 1 forces an error with tar format.
+ command_fails_like(
+ [ 'pg_dump', '--compress', '1', '--format', 'tar', '-j3' ],
+ qr/\Qpg_dump: warning: requested compression not available in this installation -- archive will be uncompressed\E/,
+ 'pg_dump: warning: compression not available in this installation');
+}
+
+command_fails_like(
+ [ 'pg_dump', '--extra-float-digits', '-16' ],
+ qr/\Qpg_dump: error: --extra-float-digits must be in range\E/,
+ 'pg_dump: --extra-float-digits must be in range');
+
+command_fails_like(
+ [ 'pg_dump', '--rows-per-insert', '0' ],
+ qr/\Qpg_dump: error: --rows-per-insert must be in range\E/,
+ 'pg_dump: --rows-per-insert must be in range');
+
+command_fails_like(
+ [ 'pg_restore', '--if-exists', '-f -' ],
+ qr/\Qpg_restore: error: option --if-exists requires option -c\/--clean\E/,
+ 'pg_restore: option --if-exists requires option -c/--clean');
+
+command_fails_like(
+ [ 'pg_restore', '-f -', '-F', 'garbage' ],
+ qr/\Qpg_restore: error: unrecognized archive format "garbage";\E/,
+ 'pg_restore: unrecognized archive format');
+
+command_fails_like(
+ [ 'pg_dump', '--on-conflict-do-nothing' ],
+ qr/pg_dump: error: option --on-conflict-do-nothing requires option --inserts, --rows-per-insert, or --column-inserts/,
+ 'pg_dump: --on-conflict-do-nothing requires --inserts, --rows-per-insert, --column-inserts'
+);
+
+# pg_dumpall command-line argument checks
+command_fails_like(
+ [ 'pg_dumpall', '-g', '-r' ],
+ qr/\Qpg_dumpall: error: options -g\/--globals-only and -r\/--roles-only cannot be used together\E/,
+ 'pg_dumpall: options -g/--globals-only and -r/--roles-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dumpall', '-g', '-t' ],
+ qr/\Qpg_dumpall: error: options -g\/--globals-only and -t\/--tablespaces-only cannot be used together\E/,
+ 'pg_dumpall: options -g/--globals-only and -t/--tablespaces-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dumpall', '-r', '-t' ],
+ qr/\Qpg_dumpall: error: options -r\/--roles-only and -t\/--tablespaces-only cannot be used together\E/,
+ 'pg_dumpall: options -r/--roles-only and -t/--tablespaces-only cannot be used together'
+);
+
+command_fails_like(
+ [ 'pg_dumpall', '--if-exists' ],
+ qr/\Qpg_dumpall: error: option --if-exists requires option -c\/--clean\E/,
+ 'pg_dumpall: option --if-exists requires option -c/--clean');
+
+command_fails_like(
+ [ 'pg_restore', '-C', '-1', '-f -' ],
+ qr/\Qpg_restore: error: options -C\/--create and -1\/--single-transaction cannot be used together\E/,
+ 'pg_restore: options -C/--create and -1/--single-transaction cannot be used together'
+);
+
+# also fails for -r and -t, but it seems pointless to add more tests for those.
+command_fails_like(
+ [ 'pg_dumpall', '--exclude-database=foo', '--globals-only' ],
+ qr/\Qpg_dumpall: error: option --exclude-database cannot be used together with -g\/--globals-only\E/,
+ 'pg_dumpall: option --exclude-database cannot be used together with -g/--globals-only'
+);
+
+done_testing();
diff --git a/src/bin/pg_dump/t/002_pg_dump.pl b/src/bin/pg_dump/t/002_pg_dump.pl
new file mode 100644
index 0000000..6006276
--- /dev/null
+++ b/src/bin/pg_dump/t/002_pg_dump.pl
@@ -0,0 +1,4280 @@
+
+# Copyright (c) 2021-2022, PostgreSQL Global Development Group
+
+use strict;
+use warnings;
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+my $tempdir = PostgreSQL::Test::Utils::tempdir;
+
+###############################################################
+# Definition of the pg_dump runs to make.
+#
+# Each of these runs is named, and those names are used below
+# to define how each test should (or shouldn't) treat a result
+# from a given run.
+#
+# test_key indicates that a given run should simply use the same
+# set of like/unlike tests as another run, and which run that is.
+#
+# compile_option indicates whether the commands being run depend on a
+# particular compilation option.  This can be used to skip tests when
+# a build dependency is not satisfied.
+#
+# dump_cmd is the pg_dump command to run, which is an array of
+# the full command and arguments to run. Note that this is run
+# using $node->command_ok(), so the port does not need to be
+# specified and is pulled from $PGPORT, which is set by the
+# PostgreSQL::Test::Cluster system.
+#
+# compress_cmd is the utility command for (de)compression, if any.
+# Note that this should generally be used on pg_dump's output
+# either to generate a text file to run through the tests, or
+# to test pg_restore's ability to parse manually compressed files
+# that otherwise pg_dump does not compress on its own (e.g. *.toc).
+#
+# restore_cmd is the pg_restore command to run, if any.  This is
+# generally used when pg_dump writes to a non-text file; the restore
+# step then regenerates a text file from it that can be run through
+# the tests.
+#
+# TODO: Have pg_restore actually restore to an independent
+# database and then pg_dump *that* database (or something along
+# those lines) to validate that part of the process.
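+#
+# As a minimal sketch, a run entry generally has this shape (the run
+# name and output file used here are only illustrative, not a real run):
+#
+#   example_run => {
+#       dump_cmd => [
+#           'pg_dump', '--no-sync',
+#           "--file=$tempdir/example_run.sql", 'postgres',
+#       ],
+#   },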
+
+my %pgdump_runs = (
+ binary_upgrade => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ '--format=custom',
+ "--file=$tempdir/binary_upgrade.dump",
+ '-w',
+ '--schema-only',
+ '--binary-upgrade',
+ '-d', 'postgres', # alternative way to specify database
+ ],
+ restore_cmd => [
+ 'pg_restore', '-Fc', '--verbose',
+ "--file=$tempdir/binary_upgrade.sql",
+ "$tempdir/binary_upgrade.dump",
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ compression_gzip_custom => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--format=custom',
+ '--compress=1', "--file=$tempdir/compression_gzip_custom.dump",
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ "--file=$tempdir/compression_gzip_custom.sql",
+ "$tempdir/compression_gzip_custom.dump",
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ compression_gzip_dir => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--jobs=2',
+ '--format=directory', '--compress=1',
+ "--file=$tempdir/compression_gzip_dir", 'postgres',
+ ],
+ # Give coverage for manually compressed blobs.toc files during
+ # restore.
+ compress_cmd => {
+ program => $ENV{'GZIP_PROGRAM'},
+ args => [ '-f', "$tempdir/compression_gzip_dir/blobs.toc", ],
+ },
+ restore_cmd => [
+ 'pg_restore', '--jobs=2',
+ "--file=$tempdir/compression_gzip_dir.sql",
+ "$tempdir/compression_gzip_dir",
+ ],
+ },
+
+ compression_gzip_plain => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--format=plain', '-Z1',
+ "--file=$tempdir/compression_gzip_plain.sql.gz", 'postgres',
+ ],
+ # Decompress the generated file to run through the tests.
+ compress_cmd => {
+ program => $ENV{'GZIP_PROGRAM'},
+ args => [ '-d', "$tempdir/compression_gzip_plain.sql.gz", ],
+ },
+ },
+ clean => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/clean.sql",
+ '-c',
+ '-d', 'postgres', # alternative way to specify database
+ ],
+ },
+ clean_if_exists => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/clean_if_exists.sql",
+ '-c',
+ '--if-exists',
+ '--encoding=UTF8', # no-op, just tests that option is accepted
+ 'postgres',
+ ],
+ },
+ column_inserts => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/column_inserts.sql", '-a',
+ '--column-inserts', 'postgres',
+ ],
+ },
+ createdb => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/createdb.sql",
+ '-C',
+ '-R', # no-op, just for testing
+ '-v',
+ 'postgres',
+ ],
+ },
+ data_only => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/data_only.sql",
+ '-a',
+ '--superuser=test_superuser',
+ '--disable-triggers',
+ '-v', # no-op, just make sure it works
+ 'postgres',
+ ],
+ },
+ defaults => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '-f', "$tempdir/defaults.sql",
+ 'postgres',
+ ],
+ },
+ defaults_no_public => {
+ database => 'regress_pg_dump_test',
+ dump_cmd => [
+ 'pg_dump', '--no-sync', '-f', "$tempdir/defaults_no_public.sql",
+ 'regress_pg_dump_test',
+ ],
+ },
+ defaults_no_public_clean => {
+ database => 'regress_pg_dump_test',
+ dump_cmd => [
+ 'pg_dump', '--no-sync', '-c', '-f',
+ "$tempdir/defaults_no_public_clean.sql",
+ 'regress_pg_dump_test',
+ ],
+ },
+ defaults_public_owner => {
+ database => 'regress_public_owner',
+ dump_cmd => [
+ 'pg_dump', '--no-sync', '-f',
+ "$tempdir/defaults_public_owner.sql",
+ 'regress_public_owner',
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ defaults_custom_format => {
+ test_key => 'defaults',
+ dump_cmd => [
+ 'pg_dump', '-Fc', '-Z6',
+ "--file=$tempdir/defaults_custom_format.dump", 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore', '-Fc',
+ "--file=$tempdir/defaults_custom_format.sql",
+ "$tempdir/defaults_custom_format.dump",
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ defaults_dir_format => {
+ test_key => 'defaults',
+ dump_cmd => [
+ 'pg_dump', '-Fd',
+ "--file=$tempdir/defaults_dir_format", 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore', '-Fd',
+ "--file=$tempdir/defaults_dir_format.sql",
+ "$tempdir/defaults_dir_format",
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ defaults_parallel => {
+ test_key => 'defaults',
+ dump_cmd => [
+ 'pg_dump', '-Fd', '-j2', "--file=$tempdir/defaults_parallel",
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ "--file=$tempdir/defaults_parallel.sql",
+ "$tempdir/defaults_parallel",
+ ],
+ },
+
+ # Do not use --no-sync to give test coverage for data sync.
+ defaults_tar_format => {
+ test_key => 'defaults',
+ dump_cmd => [
+ 'pg_dump', '-Ft',
+ "--file=$tempdir/defaults_tar_format.tar", 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--format=tar',
+ "--file=$tempdir/defaults_tar_format.sql",
+ "$tempdir/defaults_tar_format.tar",
+ ],
+ },
+ exclude_dump_test_schema => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/exclude_dump_test_schema.sql",
+ '--exclude-schema=dump_test', 'postgres',
+ ],
+ },
+ exclude_test_table => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/exclude_test_table.sql",
+ '--exclude-table=dump_test.test_table', 'postgres',
+ ],
+ },
+ exclude_test_table_data => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/exclude_test_table_data.sql",
+ '--exclude-table-data=dump_test.test_table',
+ '--no-unlogged-table-data',
+ 'postgres',
+ ],
+ },
+ inserts => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/inserts.sql", '-a',
+ '--inserts', 'postgres',
+ ],
+ },
+ pg_dumpall_globals => {
+ dump_cmd => [
+ 'pg_dumpall', '-v', "--file=$tempdir/pg_dumpall_globals.sql",
+ '-g', '--no-sync',
+ ],
+ },
+ pg_dumpall_globals_clean => {
+ dump_cmd => [
+ 'pg_dumpall', "--file=$tempdir/pg_dumpall_globals_clean.sql",
+ '-g', '-c', '--no-sync',
+ ],
+ },
+ pg_dumpall_dbprivs => {
+ dump_cmd => [
+ 'pg_dumpall', '--no-sync',
+ "--file=$tempdir/pg_dumpall_dbprivs.sql",
+ ],
+ },
+ pg_dumpall_exclude => {
+ dump_cmd => [
+ 'pg_dumpall', '-v', "--file=$tempdir/pg_dumpall_exclude.sql",
+ '--exclude-database', '*dump_test*', '--no-sync',
+ ],
+ },
+ no_toast_compression => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/no_toast_compression.sql",
+ '--no-toast-compression', 'postgres',
+ ],
+ },
+ no_blobs => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/no_blobs.sql", '-B',
+ 'postgres',
+ ],
+ },
+ no_privs => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/no_privs.sql", '-x',
+ 'postgres',
+ ],
+ },
+ no_owner => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/no_owner.sql", '-O',
+ 'postgres',
+ ],
+ },
+ no_table_access_method => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/no_table_access_method.sql",
+ '--no-table-access-method', 'postgres',
+ ],
+ },
+ only_dump_test_schema => {
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ "--file=$tempdir/only_dump_test_schema.sql",
+ '--schema=dump_test', 'postgres',
+ ],
+ },
+ only_dump_test_table => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/only_dump_test_table.sql",
+ '--table=dump_test.test_table',
+ '--lock-wait-timeout='
+ . (1000 * $PostgreSQL::Test::Utils::timeout_default),
+ 'postgres',
+ ],
+ },
+ role => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/role.sql",
+ '--role=regress_dump_test_role',
+ '--schema=dump_test_second_schema',
+ 'postgres',
+ ],
+ },
+ role_parallel => {
+ test_key => 'role',
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ '--format=directory',
+ '--jobs=2',
+ "--file=$tempdir/role_parallel",
+ '--role=regress_dump_test_role',
+ '--schema=dump_test_second_schema',
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore', "--file=$tempdir/role_parallel.sql",
+ "$tempdir/role_parallel",
+ ],
+ },
+ rows_per_insert => {
+ dump_cmd => [
+ 'pg_dump',
+ '--no-sync',
+ "--file=$tempdir/rows_per_insert.sql",
+ '-a',
+ '--rows-per-insert=4',
+ '--table=dump_test.test_table',
+ '--table=dump_test.test_fourth_table',
+ 'postgres',
+ ],
+ },
+ schema_only => {
+ dump_cmd => [
+ 'pg_dump', '--format=plain',
+ "--file=$tempdir/schema_only.sql", '--no-sync',
+ '-s', 'postgres',
+ ],
+ },
+ section_pre_data => {
+ dump_cmd => [
+ 'pg_dump', "--file=$tempdir/section_pre_data.sql",
+ '--section=pre-data', '--no-sync',
+ 'postgres',
+ ],
+ },
+ section_data => {
+ dump_cmd => [
+ 'pg_dump', "--file=$tempdir/section_data.sql",
+ '--section=data', '--no-sync',
+ 'postgres',
+ ],
+ },
+ section_post_data => {
+ dump_cmd => [
+ 'pg_dump', "--file=$tempdir/section_post_data.sql",
+ '--section=post-data', '--no-sync', 'postgres',
+ ],
+ },
+ test_schema_plus_blobs => {
+ dump_cmd => [
+ 'pg_dump', "--file=$tempdir/test_schema_plus_blobs.sql",
+
+ '--schema=dump_test', '-b', '-B', '--no-sync', 'postgres',
+ ],
+ },);
+
+###############################################################
+# Definition of the tests to run.
+#
+# Each test is defined using the log message that will be used.
+#
+# A regexp should be defined for each test which provides the
+# basis for the test. That regexp will be run against the output
+# file of each of the runs which the test is to be run against
+# and the success of the result will depend on if the regexp
+# result matches the expected 'like' or 'unlike' case.
+#
+# The runs listed as 'like' will be checked if they match the
+# regexp and, if so, the test passes. All runs which are not
+# listed as 'like' will be checked to ensure they don't match
+# the regexp; if they do, the test will fail.
+#
+# The below hashes provide convenience sets of runs. Individual
+# runs can be excluded from a general hash by placing that run
+# into the 'unlike' section.
+#
+# For example, there is an 'exclude_test_table' run which runs a
+# full pg_dump but with an exclude flag to not include the test
+# table. The CREATE TABLE test which creates the test table is
+# defined with %full_runs but then has 'exclude_test_table' in
+# its 'unlike' list, excluding that test.
+#
+# There can then be a 'create_sql' and 'create_order' for a
+# given test. The 'create_sql' commands are collected up in
+# 'create_order' and then run against the database prior to any
+# of the pg_dump runs happening. This is what "seeds" the
+# system with objects to be dumped out.
+#
+# There can be a flag called 'lz4', which can be set if the test
+# case depends on LZ4. Tests marked with this flag are skipped if
+# the build used does not support LZ4.
+#
+# Building this hash takes a bit of time because all of the regexps
+# included in it are compiled up front.  That precompilation greatly
+# improves performance, since each regexp is reused for every run the
+# test applies to.
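+#
+# As a minimal sketch, a test entry generally has this shape (the test
+# name, SQL and regexp here are only illustrative):
+#
+#   'CREATE TABLE example' => {
+#       create_order => 99,
+#       create_sql   => 'CREATE TABLE dump_test.example (id int);',
+#       regexp       => qr/^\QCREATE TABLE dump_test.example (\E/m,
+#       like         => { %full_runs, %dump_test_schema_runs,
+#                         section_pre_data => 1, },
+#       unlike       => { exclude_dump_test_schema => 1, },
+#   },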
+
+# Tests which target the 'dump_test' schema, specifically.
+my %dump_test_schema_runs = (
+ only_dump_test_schema => 1,
+ test_schema_plus_blobs => 1,);
+
+# Tests which are considered 'full' dumps by pg_dump, but there
+# are flags used to exclude specific items (ACLs, blobs, etc).
+my %full_runs = (
+ binary_upgrade => 1,
+ clean => 1,
+ clean_if_exists => 1,
+ compression => 1,
+ createdb => 1,
+ defaults => 1,
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ exclude_test_table_data => 1,
+ no_toast_compression => 1,
+ no_blobs => 1,
+ no_owner => 1,
+ no_privs => 1,
+ no_table_access_method => 1,
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ schema_only => 1,);
+
+# This is where the actual tests are defined.
+my %tests = (
+ 'ALTER DEFAULT PRIVILEGES FOR ROLE regress_dump_test_role GRANT' => {
+ create_order => 14,
+ create_sql => 'ALTER DEFAULT PRIVILEGES
+ FOR ROLE regress_dump_test_role IN SCHEMA dump_test
+ GRANT SELECT ON TABLES TO regress_dump_test_role;',
+ regexp => qr/^
+ \QALTER DEFAULT PRIVILEGES \E
+ \QFOR ROLE regress_dump_test_role IN SCHEMA dump_test \E
+ \QGRANT SELECT ON TABLES TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'ALTER DEFAULT PRIVILEGES FOR ROLE regress_dump_test_role GRANT EXECUTE ON FUNCTIONS'
+ => {
+ create_order => 15,
+ create_sql => 'ALTER DEFAULT PRIVILEGES
+ FOR ROLE regress_dump_test_role IN SCHEMA dump_test
+ GRANT EXECUTE ON FUNCTIONS TO regress_dump_test_role;',
+ regexp => qr/^
+ \QALTER DEFAULT PRIVILEGES \E
+ \QFOR ROLE regress_dump_test_role IN SCHEMA dump_test \E
+ \QGRANT ALL ON FUNCTIONS TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'ALTER DEFAULT PRIVILEGES FOR ROLE regress_dump_test_role REVOKE' => {
+ create_order => 55,
+ create_sql => 'ALTER DEFAULT PRIVILEGES
+ FOR ROLE regress_dump_test_role
+ REVOKE EXECUTE ON FUNCTIONS FROM PUBLIC;',
+ regexp => qr/^
+ \QALTER DEFAULT PRIVILEGES \E
+ \QFOR ROLE regress_dump_test_role \E
+ \QREVOKE ALL ON FUNCTIONS FROM PUBLIC;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'ALTER DEFAULT PRIVILEGES FOR ROLE regress_dump_test_role REVOKE SELECT'
+ => {
+ create_order => 56,
+ create_sql => 'ALTER DEFAULT PRIVILEGES
+ FOR ROLE regress_dump_test_role
+ REVOKE SELECT ON TABLES FROM regress_dump_test_role;',
+ regexp => qr/^
+ \QALTER DEFAULT PRIVILEGES \E
+ \QFOR ROLE regress_dump_test_role \E
+ \QREVOKE ALL ON TABLES FROM regress_dump_test_role;\E\n
+ \QALTER DEFAULT PRIVILEGES \E
+ \QFOR ROLE regress_dump_test_role \E
+ \QGRANT INSERT,REFERENCES,DELETE,TRIGGER,TRUNCATE,UPDATE ON TABLES TO regress_dump_test_role;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'ALTER ROLE regress_dump_test_role' => {
+ regexp => qr/^
+ \QALTER ROLE regress_dump_test_role WITH \E
+ \QNOSUPERUSER INHERIT NOCREATEROLE NOCREATEDB NOLOGIN \E
+ \QNOREPLICATION NOBYPASSRLS;\E
+ /xm,
+ like => {
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_globals => 1,
+ pg_dumpall_globals_clean => 1,
+ pg_dumpall_exclude => 1,
+ },
+ },
+
+ 'ALTER COLLATION test0 OWNER TO' => {
+ regexp => qr/^\QALTER COLLATION public.test0 OWNER TO \E.+;/m,
+ collation => 1,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { %dump_test_schema_runs, no_owner => 1, },
+ },
+
+ 'ALTER FOREIGN DATA WRAPPER dummy OWNER TO' => {
+ regexp => qr/^ALTER FOREIGN DATA WRAPPER dummy OWNER TO .+;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER SERVER s1 OWNER TO' => {
+ regexp => qr/^ALTER SERVER s1 OWNER TO .+;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER FUNCTION dump_test.pltestlang_call_handler() OWNER TO' => {
+ regexp => qr/^
+ \QALTER FUNCTION dump_test.pltestlang_call_handler() \E
+ \QOWNER TO \E
+ .+;/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER OPERATOR FAMILY dump_test.op_family OWNER TO' => {
+ regexp => qr/^
+ \QALTER OPERATOR FAMILY dump_test.op_family USING btree \E
+ \QOWNER TO \E
+ .+;/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER OPERATOR FAMILY dump_test.op_family USING btree' => {
+ create_order => 75,
+ create_sql =>
+ 'ALTER OPERATOR FAMILY dump_test.op_family USING btree ADD
+ OPERATOR 1 <(bigint,int4),
+ OPERATOR 2 <=(bigint,int4),
+ OPERATOR 3 =(bigint,int4),
+ OPERATOR 4 >=(bigint,int4),
+ OPERATOR 5 >(bigint,int4),
+ FUNCTION 1 (int4, int4) btint4cmp(int4,int4),
+ FUNCTION 2 (int4, int4) btint4sortsupport(internal),
+ FUNCTION 4 (int4, int4) btequalimage(oid);',
+ # note: it's correct that btint8sortsupport and bigint btequalimage
+ # are included here:
+ regexp => qr/^
+ \QALTER OPERATOR FAMILY dump_test.op_family USING btree ADD\E\n\s+
+ \QOPERATOR 1 <(bigint,integer) ,\E\n\s+
+ \QOPERATOR 2 <=(bigint,integer) ,\E\n\s+
+ \QOPERATOR 3 =(bigint,integer) ,\E\n\s+
+ \QOPERATOR 4 >=(bigint,integer) ,\E\n\s+
+ \QOPERATOR 5 >(bigint,integer) ,\E\n\s+
+ \QFUNCTION 1 (integer, integer) btint4cmp(integer,integer) ,\E\n\s+
+ \QFUNCTION 2 (bigint, bigint) btint8sortsupport(internal) ,\E\n\s+
+ \QFUNCTION 2 (integer, integer) btint4sortsupport(internal) ,\E\n\s+
+ \QFUNCTION 4 (bigint, bigint) btequalimage(oid) ,\E\n\s+
+ \QFUNCTION 4 (integer, integer) btequalimage(oid);\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER OPERATOR CLASS dump_test.op_class OWNER TO' => {
+ regexp => qr/^
+ \QALTER OPERATOR CLASS dump_test.op_class USING btree \E
+ \QOWNER TO \E
+ .+;/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER PUBLICATION pub1 OWNER TO' => {
+ regexp => qr/^ALTER PUBLICATION pub1 OWNER TO .+;/m,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER LARGE OBJECT ... OWNER TO' => {
+ regexp => qr/^ALTER LARGE OBJECT \d+ OWNER TO .+;/m,
+ like => {
+ %full_runs,
+ column_inserts => 1,
+ data_only => 1,
+ inserts => 1,
+ section_pre_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ unlike => {
+ no_blobs => 1,
+ no_owner => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'ALTER PROCEDURAL LANGUAGE pltestlang OWNER TO' => {
+ regexp => qr/^ALTER PROCEDURAL LANGUAGE pltestlang OWNER TO .+;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER SCHEMA dump_test OWNER TO' => {
+ regexp => qr/^ALTER SCHEMA dump_test OWNER TO .+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER SCHEMA dump_test_second_schema OWNER TO' => {
+ regexp => qr/^ALTER SCHEMA dump_test_second_schema OWNER TO .+;/m,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER SCHEMA public OWNER TO' => {
+ create_order => 15,
+ create_sql =>
+ 'ALTER SCHEMA public OWNER TO "regress_quoted \"" role";',
+ regexp => qr/^ALTER SCHEMA public OWNER TO .+;/m,
+ like => {
+ %full_runs, section_pre_data => 1,
+ },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER SCHEMA public OWNER TO (w/o ACL changes)' => {
+ database => 'regress_public_owner',
+ create_order => 100,
+ create_sql =>
+ 'ALTER SCHEMA public OWNER TO "regress_quoted \"" role";',
+ regexp => qr/^(GRANT|REVOKE)/m,
+ unlike => { defaults_public_owner => 1 },
+ },
+
+ 'ALTER SEQUENCE test_table_col1_seq' => {
+ regexp => qr/^
+ \QALTER SEQUENCE dump_test.test_table_col1_seq OWNED BY dump_test.test_table.col1;\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY test_table ADD CONSTRAINT ... PRIMARY KEY' => {
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_table\E \n^\s+
+ \QADD CONSTRAINT test_table_pkey PRIMARY KEY (col1);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE (partitioned) ADD CONSTRAINT ... FOREIGN KEY' => {
+ create_order => 4,
+ create_sql => 'CREATE TABLE dump_test.test_table_fk (
+ col1 int references dump_test.test_table)
+ PARTITION BY RANGE (col1);
+ CREATE TABLE dump_test.test_table_fk_1
+ PARTITION OF dump_test.test_table_fk
+ FOR VALUES FROM (0) TO (10);',
+ regexp => qr/
+ \QADD CONSTRAINT test_table_fk_col1_fkey FOREIGN KEY (col1) REFERENCES dump_test.test_table\E
+ /xm,
+ like => {
+ %full_runs, %dump_test_schema_runs, section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY test_table ALTER COLUMN col1 SET STATISTICS 90' => {
+ create_order => 93,
+ create_sql =>
+ 'ALTER TABLE dump_test.test_table ALTER COLUMN col1 SET STATISTICS 90;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_table ALTER COLUMN col1 SET STATISTICS 90;\E\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY test_table ALTER COLUMN col2 SET STORAGE' => {
+ create_order => 94,
+ create_sql =>
+ 'ALTER TABLE dump_test.test_table ALTER COLUMN col2 SET STORAGE EXTERNAL;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_table ALTER COLUMN col2 SET STORAGE EXTERNAL;\E\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY test_table ALTER COLUMN col3 SET STORAGE' => {
+ create_order => 95,
+ create_sql =>
+ 'ALTER TABLE dump_test.test_table ALTER COLUMN col3 SET STORAGE MAIN;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_table ALTER COLUMN col3 SET STORAGE MAIN;\E\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY test_table ALTER COLUMN col4 SET n_distinct' => {
+ create_order => 95,
+ create_sql =>
+ 'ALTER TABLE dump_test.test_table ALTER COLUMN col4 SET (n_distinct = 10);',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_table ALTER COLUMN col4 SET (n_distinct=10);\E\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE ONLY dump_test.measurement ATTACH PARTITION measurement_y2006m2'
+ => {
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.measurement ATTACH PARTITION dump_test_second_schema.measurement_y2006m2 \E
+ \QFOR VALUES FROM ('2006-02-01') TO ('2006-03-01');\E\n
+ /xm,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ binary_upgrade => 1,
+ },
+ },
+
+ 'ALTER TABLE test_table CLUSTER ON test_table_pkey' => {
+ create_order => 96,
+ create_sql =>
+ 'ALTER TABLE dump_test.test_table CLUSTER ON test_table_pkey',
+ regexp => qr/^
+ \QALTER TABLE dump_test.test_table CLUSTER ON test_table_pkey;\E\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE test_table DISABLE TRIGGER ALL' => {
+ regexp => qr/^
+ \QSET SESSION AUTHORIZATION 'test_superuser';\E\n\n
+ \QALTER TABLE dump_test.test_table DISABLE TRIGGER ALL;\E\n\n
+ \QCOPY dump_test.test_table (col1, col2, col3, col4) FROM stdin;\E
+ \n(?:\d\t\\N\t\\N\t\\N\n){9}\\\.\n\n\n
+ \QALTER TABLE dump_test.test_table ENABLE TRIGGER ALL;\E/xm,
+ like => { data_only => 1, },
+ },
+
+ 'ALTER FOREIGN TABLE foreign_table ALTER COLUMN c1 OPTIONS' => {
+ regexp => qr/^
+ \QALTER FOREIGN TABLE dump_test.foreign_table ALTER COLUMN c1 OPTIONS (\E\n
+ \s+\Qcolumn_name 'col1'\E\n
+ \Q);\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER TABLE test_table OWNER TO' => {
+ regexp => qr/^\QALTER TABLE dump_test.test_table OWNER TO \E.+;/m,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER TABLE test_table ENABLE ROW LEVEL SECURITY' => {
+ create_order => 23,
+ create_sql => 'ALTER TABLE dump_test.test_table
+ ENABLE ROW LEVEL SECURITY;',
+ regexp =>
+ qr/^\QALTER TABLE dump_test.test_table ENABLE ROW LEVEL SECURITY;\E/m,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER TABLE test_second_table OWNER TO' => {
+ regexp =>
+ qr/^\QALTER TABLE dump_test.test_second_table OWNER TO \E.+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER TABLE measurement OWNER TO' => {
+ regexp => qr/^\QALTER TABLE dump_test.measurement OWNER TO \E.+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER TABLE measurement_y2006m2 OWNER TO' => {
+ regexp =>
+ qr/^\QALTER TABLE dump_test_second_schema.measurement_y2006m2 OWNER TO \E.+;/m,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ },
+ unlike => { no_owner => 1, },
+ },
+
+ 'ALTER FOREIGN TABLE foreign_table OWNER TO' => {
+ regexp =>
+ qr/^\QALTER FOREIGN TABLE dump_test.foreign_table OWNER TO \E.+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER TEXT SEARCH CONFIGURATION alt_ts_conf1 OWNER TO' => {
+ regexp =>
+ qr/^\QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1 OWNER TO \E.+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_owner => 1,
+ },
+ },
+
+ 'ALTER TEXT SEARCH DICTIONARY alt_ts_dict1 OWNER TO' => {
+ regexp =>
+ qr/^\QALTER TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1 OWNER TO \E.+;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ only_dump_test_table => 1,
+ no_owner => 1,
+ role => 1,
+ },
+ },
+
+ 'BLOB create (using lo_from_bytea)' => {
+ create_order => 50,
+ create_sql =>
+ 'SELECT pg_catalog.lo_from_bytea(0, \'\\x310a320a330a340a350a360a370a380a390a\');',
+ regexp => qr/^SELECT pg_catalog\.lo_create\('\d+'\);/m,
+ like => {
+ %full_runs,
+ column_inserts => 1,
+ data_only => 1,
+ inserts => 1,
+ section_pre_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ unlike => {
+ schema_only => 1,
+ no_blobs => 1,
+ },
+ },
+
+ 'BLOB load (using lo_from_bytea)' => {
+ regexp => qr/^
+ \QSELECT pg_catalog.lo_open\E \('\d+',\ \d+\);\n
+ \QSELECT pg_catalog.lowrite(0, \E
+ \Q'\x310a320a330a340a350a360a370a380a390a');\E\n
+ \QSELECT pg_catalog.lo_close(0);\E
+ /xm,
+ like => {
+ %full_runs,
+ column_inserts => 1,
+ data_only => 1,
+ inserts => 1,
+ section_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ no_blobs => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COMMENT ON DATABASE postgres' => {
+ regexp => qr/^COMMENT ON DATABASE postgres IS .+;/m,
+
+ # Should appear in the same tests as "CREATE DATABASE postgres"
+ like => { createdb => 1, },
+ },
+
+ 'COMMENT ON EXTENSION plpgsql' => {
+ regexp => qr/^COMMENT ON EXTENSION plpgsql IS .+;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'COMMENT ON SCHEMA public' => {
+ regexp => qr/^COMMENT ON SCHEMA public IS .+;/m,
+ # regress_public_owner emits this, due to create_sql of next test
+ like => {
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ },
+ },
+
+ 'COMMENT ON SCHEMA public IS NULL' => {
+ database => 'regress_public_owner',
+ create_order => 100,
+ create_sql => 'COMMENT ON SCHEMA public IS NULL;',
+ regexp => qr/^COMMENT ON SCHEMA public IS '';/m,
+ like => { defaults_public_owner => 1 },
+ },
+
+ 'COMMENT ON TABLE dump_test.test_table' => {
+ create_order => 36,
+ create_sql => 'COMMENT ON TABLE dump_test.test_table
+ IS \'comment on table\';',
+ regexp =>
+ qr/^\QCOMMENT ON TABLE dump_test.test_table IS 'comment on table';\E/m,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'COMMENT ON COLUMN dump_test.test_table.col1' => {
+ create_order => 36,
+ create_sql => 'COMMENT ON COLUMN dump_test.test_table.col1
+ IS \'comment on column\';',
+ regexp => qr/^
+ \QCOMMENT ON COLUMN dump_test.test_table.col1 IS 'comment on column';\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'COMMENT ON COLUMN dump_test.composite.f1' => {
+ create_order => 44,
+ create_sql => 'COMMENT ON COLUMN dump_test.composite.f1
+ IS \'comment on column of type\';',
+ regexp => qr/^
+ \QCOMMENT ON COLUMN dump_test.composite.f1 IS 'comment on column of type';\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON COLUMN dump_test.test_second_table.col1' => {
+ create_order => 63,
+ create_sql => 'COMMENT ON COLUMN dump_test.test_second_table.col1
+ IS \'comment on column col1\';',
+ regexp => qr/^
+ \QCOMMENT ON COLUMN dump_test.test_second_table.col1 IS 'comment on column col1';\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON COLUMN dump_test.test_second_table.col2' => {
+ create_order => 64,
+ create_sql => 'COMMENT ON COLUMN dump_test.test_second_table.col2
+ IS \'comment on column col2\';',
+ regexp => qr/^
+ \QCOMMENT ON COLUMN dump_test.test_second_table.col2 IS 'comment on column col2';\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON CONVERSION dump_test.test_conversion' => {
+ create_order => 79,
+ create_sql => 'COMMENT ON CONVERSION dump_test.test_conversion
+ IS \'comment on test conversion\';',
+ regexp =>
+ qr/^\QCOMMENT ON CONVERSION dump_test.test_conversion IS 'comment on test conversion';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON COLLATION test0' => {
+ create_order => 77,
+ create_sql => 'COMMENT ON COLLATION test0
+ IS \'comment on test0 collation\';',
+ regexp =>
+ qr/^\QCOMMENT ON COLLATION public.test0 IS 'comment on test0 collation';\E/m,
+ collation => 1,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'COMMENT ON LARGE OBJECT ...' => {
+ create_order => 65,
+ create_sql => 'DO $$
+ DECLARE myoid oid;
+ BEGIN
+ SELECT loid FROM pg_largeobject INTO myoid;
+ EXECUTE \'COMMENT ON LARGE OBJECT \' || myoid || \' IS \'\'comment on large object\'\';\';
+ END;
+ $$;',
+ regexp => qr/^
+ \QCOMMENT ON LARGE OBJECT \E[0-9]+\Q IS 'comment on large object';\E
+ /xm,
+ like => {
+ %full_runs,
+ column_inserts => 1,
+ data_only => 1,
+ inserts => 1,
+ section_pre_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ unlike => {
+ no_blobs => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COMMENT ON PUBLICATION pub1' => {
+ create_order => 55,
+ create_sql => 'COMMENT ON PUBLICATION pub1
+ IS \'comment on publication\';',
+ regexp =>
+ qr/^COMMENT ON PUBLICATION pub1 IS 'comment on publication';/m,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'COMMENT ON SUBSCRIPTION sub1' => {
+ create_order => 55,
+ create_sql => 'COMMENT ON SUBSCRIPTION sub1
+ IS \'comment on subscription\';',
+ regexp =>
+ qr/^COMMENT ON SUBSCRIPTION sub1 IS 'comment on subscription';/m,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'COMMENT ON TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1' => {
+ create_order => 84,
+ create_sql =>
+ 'COMMENT ON TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1
+ IS \'comment on text search configuration\';',
+ regexp =>
+ qr/^\QCOMMENT ON TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1 IS 'comment on text search configuration';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1' => {
+ create_order => 84,
+ create_sql =>
+ 'COMMENT ON TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1
+ IS \'comment on text search dictionary\';',
+ regexp =>
+ qr/^\QCOMMENT ON TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1 IS 'comment on text search dictionary';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TEXT SEARCH PARSER dump_test.alt_ts_prs1' => {
+ create_order => 84,
+ create_sql => 'COMMENT ON TEXT SEARCH PARSER dump_test.alt_ts_prs1
+ IS \'comment on text search parser\';',
+ regexp =>
+ qr/^\QCOMMENT ON TEXT SEARCH PARSER dump_test.alt_ts_prs1 IS 'comment on text search parser';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1' => {
+ create_order => 84,
+ create_sql => 'COMMENT ON TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1
+ IS \'comment on text search template\';',
+ regexp =>
+ qr/^\QCOMMENT ON TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1 IS 'comment on text search template';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TYPE dump_test.planets - ENUM' => {
+ create_order => 68,
+ create_sql => 'COMMENT ON TYPE dump_test.planets
+ IS \'comment on enum type\';',
+ regexp =>
+ qr/^\QCOMMENT ON TYPE dump_test.planets IS 'comment on enum type';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TYPE dump_test.textrange - RANGE' => {
+ create_order => 69,
+ create_sql => 'COMMENT ON TYPE dump_test.textrange
+ IS \'comment on range type\';',
+ regexp =>
+ qr/^\QCOMMENT ON TYPE dump_test.textrange IS 'comment on range type';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TYPE dump_test.int42 - Regular' => {
+ create_order => 70,
+ create_sql => 'COMMENT ON TYPE dump_test.int42
+ IS \'comment on regular type\';',
+ regexp =>
+ qr/^\QCOMMENT ON TYPE dump_test.int42 IS 'comment on regular type';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COMMENT ON TYPE dump_test.undefined - Undefined' => {
+ create_order => 71,
+ create_sql => 'COMMENT ON TYPE dump_test.undefined
+ IS \'comment on undefined type\';',
+ regexp =>
+ qr/^\QCOMMENT ON TYPE dump_test.undefined IS 'comment on undefined type';\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'COPY test_table' => {
+ create_order => 4,
+ create_sql => 'INSERT INTO dump_test.test_table (col1) '
+ . 'SELECT generate_series FROM generate_series(1,9);',
+ regexp => qr/^
+ \QCOPY dump_test.test_table (col1, col2, col3, col4) FROM stdin;\E
+ \n(?:\d\t\\N\t\\N\t\\N\n){9}\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ only_dump_test_table => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ exclude_test_table_data => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COPY fk_reference_test_table' => {
+ create_order => 22,
+ create_sql => 'INSERT INTO dump_test.fk_reference_test_table (col1) '
+ . 'SELECT generate_series FROM generate_series(1,5);',
+ regexp => qr/^
+ \QCOPY dump_test.fk_reference_test_table (col1) FROM stdin;\E
+ \n(?:\d\n){5}\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ exclude_test_table => 1,
+ exclude_test_table_data => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ # In a data-only dump, we try to actually order according to FKs,
+ # so this check is just making sure that the referring table comes after
+ # the referred-to table.
+ 'COPY fk_reference_test_table second' => {
+ regexp => qr/^
+ \QCOPY dump_test.test_table (col1, col2, col3, col4) FROM stdin;\E
+ \n(?:\d\t\\N\t\\N\t\\N\n){9}\\\.\n.*
+ \QCOPY dump_test.fk_reference_test_table (col1) FROM stdin;\E
+ \n(?:\d\n){5}\\\.\n
+ /xms,
+ like => { data_only => 1, },
+ },
+
+ 'COPY test_second_table' => {
+ create_order => 7,
+ create_sql => 'INSERT INTO dump_test.test_second_table (col1, col2) '
+ . 'SELECT generate_series, generate_series::text '
+ . 'FROM generate_series(1,9);',
+ regexp => qr/^
+ \QCOPY dump_test.test_second_table (col1, col2) FROM stdin;\E
+ \n(?:\d\t\d\n){9}\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COPY test_third_table' => {
+ create_order => 7,
+ create_sql =>
+ 'INSERT INTO dump_test.test_third_table VALUES (123, DEFAULT, 456);',
+ regexp => qr/^
+ \QCOPY dump_test.test_third_table (f1, "F3") FROM stdin;\E
+ \n123\t456\n\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COPY test_fourth_table' => {
+ create_order => 7,
+ create_sql =>
+ 'INSERT INTO dump_test.test_fourth_table DEFAULT VALUES;'
+ . 'INSERT INTO dump_test.test_fourth_table DEFAULT VALUES;',
+ regexp => qr/^
+ \QCOPY dump_test.test_fourth_table FROM stdin;\E
+ \n\n\n\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COPY test_fifth_table' => {
+ create_order => 54,
+ create_sql =>
+ 'INSERT INTO dump_test.test_fifth_table VALUES (NULL, true, false, \'11001\'::bit(5), \'NaN\');',
+ regexp => qr/^
+ \QCOPY dump_test.test_fifth_table (col1, col2, col3, col4, col5) FROM stdin;\E
+ \n\\N\tt\tf\t11001\tNaN\n\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'COPY test_table_identity' => {
+ create_order => 54,
+ create_sql =>
+ 'INSERT INTO dump_test.test_table_identity (col2) VALUES (\'test\');',
+ regexp => qr/^
+ \QCOPY dump_test.test_table_identity (col1, col2) FROM stdin;\E
+ \n1\ttest\n\\\.\n
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ data_only => 1,
+ section_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'INSERT INTO test_table' => {
+ regexp => qr/^
+ (?:INSERT\ INTO\ dump_test\.test_table\ \(col1,\ col2,\ col3,\ col4\)\ VALUES\ \(\d,\ NULL,\ NULL,\ NULL\);\n){9}
+ /xm,
+ like => { column_inserts => 1, },
+ },
+
+ 'test_table with 4-row INSERTs' => {
+ regexp => qr/^
+ (?:
+ INSERT\ INTO\ dump_test\.test_table\ VALUES\n
+ (?:\t\(\d,\ NULL,\ NULL,\ NULL\),\n){3}
+ \t\(\d,\ NULL,\ NULL,\ NULL\);\n
+ ){2}
+ INSERT\ INTO\ dump_test\.test_table\ VALUES\n
+ \t\(\d,\ NULL,\ NULL,\ NULL\);
+ /xm,
+ like => { rows_per_insert => 1, },
+ },
+
+ 'INSERT INTO test_second_table' => {
+ regexp => qr/^
+ (?:INSERT\ INTO\ dump_test\.test_second_table\ \(col1,\ col2\)
+ \ VALUES\ \(\d,\ '\d'\);\n){9}/xm,
+ like => { column_inserts => 1, },
+ },
+
+ 'INSERT INTO test_third_table (colnames)' => {
+ regexp =>
+ qr/^INSERT INTO dump_test\.test_third_table \(f1, "F3"\) VALUES \(123, 456\);\n/m,
+ like => { column_inserts => 1, },
+ },
+
+ 'INSERT INTO test_third_table' => {
+ regexp =>
+ qr/^INSERT INTO dump_test\.test_third_table VALUES \(123, DEFAULT, 456, DEFAULT\);\n/m,
+ like => { inserts => 1, },
+ },
+
+ 'INSERT INTO test_fourth_table' => {
+ regexp =>
+ qr/^(?:INSERT INTO dump_test\.test_fourth_table DEFAULT VALUES;\n){2}/m,
+ like => { column_inserts => 1, inserts => 1, rows_per_insert => 1, },
+ },
+
+ 'INSERT INTO test_fifth_table' => {
+ regexp =>
+ qr/^\QINSERT INTO dump_test.test_fifth_table (col1, col2, col3, col4, col5) VALUES (NULL, true, false, B'11001', 'NaN');\E/m,
+ like => { column_inserts => 1, },
+ },
+
+ 'INSERT INTO test_table_identity' => {
+ regexp =>
+ qr/^\QINSERT INTO dump_test.test_table_identity (col1, col2) OVERRIDING SYSTEM VALUE VALUES (1, 'test');\E/m,
+ like => { column_inserts => 1, },
+ },
+
+ 'CREATE ROLE regress_dump_test_role' => {
+ create_order => 1,
+ create_sql => 'CREATE ROLE regress_dump_test_role;',
+ regexp => qr/^CREATE ROLE regress_dump_test_role;/m,
+ like => {
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ pg_dumpall_globals => 1,
+ pg_dumpall_globals_clean => 1,
+ },
+ },
+
+ 'CREATE ROLE regress_quoted...' => {
+ create_order => 1,
+ create_sql => 'CREATE ROLE "regress_quoted \"" role";',
+ regexp => qr/^CREATE ROLE "regress_quoted \\"" role";/m,
+ like => {
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ pg_dumpall_globals => 1,
+ pg_dumpall_globals_clean => 1,
+ },
+ },
+
+ 'CREATE DATABASE regression_invalid...' => {
+ create_order => 1,
+ create_sql => q(
+ CREATE DATABASE regression_invalid;
+ UPDATE pg_database SET datconnlimit = -2 WHERE datname = 'regression_invalid'),
+ regexp => qr/^CREATE DATABASE regression_invalid/m,
+
+ # invalid databases should never be dumped
+ like => {},
+ },
+
+ 'CREATE ACCESS METHOD gist2' => {
+ create_order => 52,
+ create_sql =>
+ 'CREATE ACCESS METHOD gist2 TYPE INDEX HANDLER gisthandler;',
+ regexp =>
+ qr/CREATE ACCESS METHOD gist2 TYPE INDEX HANDLER gisthandler;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE COLLATION test0 FROM "C"' => {
+ create_order => 76,
+ create_sql => 'CREATE COLLATION test0 FROM "C";',
+ regexp =>
+ qr/CREATE COLLATION public.test0 \(provider = libc, locale = 'C'(, version = '[^']*')?\);/m,
+ collation => 1,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE COLLATION icu_collation' => {
+ create_order => 76,
+ create_sql => "CREATE COLLATION icu_collation (PROVIDER = icu, LOCALE = 'C');",
+ regexp =>
+ qr/CREATE COLLATION public.icu_collation \(provider = icu, locale = 'C'(, version = '[^']*')?\);/m,
+ icu => 1,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE CAST FOR timestamptz' => {
+ create_order => 51,
+ create_sql =>
+ 'CREATE CAST (timestamptz AS interval) WITH FUNCTION age(timestamptz) AS ASSIGNMENT;',
+ regexp =>
+ qr/CREATE CAST \(timestamp with time zone AS interval\) WITH FUNCTION pg_catalog\.age\(timestamp with time zone\) AS ASSIGNMENT;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE DATABASE postgres' => {
+ regexp => qr/^
+ \QCREATE DATABASE postgres WITH TEMPLATE = template0 \E
+ .+;/xm,
+ like => { createdb => 1, },
+ },
+
+ 'CREATE DATABASE dump_test' => {
+ create_order => 47,
+ create_sql => 'CREATE DATABASE dump_test;',
+ regexp => qr/^
+ \QCREATE DATABASE dump_test WITH TEMPLATE = template0 \E
+ .+;/xm,
+ like => { pg_dumpall_dbprivs => 1, },
+ },
+
+ "CREATE DATABASE dump_test2 LOCALE = 'C'" => {
+ create_order => 47,
+ create_sql =>
+ "CREATE DATABASE dump_test2 LOCALE = 'C' TEMPLATE = template0;",
+ regexp => qr/^
+ \QCREATE DATABASE dump_test2 \E.*\QLOCALE = 'C';\E
+ /xm,
+ like => { pg_dumpall_dbprivs => 1, },
+ },
+
+ 'CREATE EXTENSION ... plpgsql' => {
+ regexp => qr/^
+ \QCREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;\E
+ /xm,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'CREATE AGGREGATE dump_test.newavg' => {
+ create_order => 25,
+ create_sql => 'CREATE AGGREGATE dump_test.newavg (
+ sfunc = int4_avg_accum,
+ basetype = int4,
+ stype = _int8,
+ finalfunc = int8_avg,
+ finalfunc_modify = shareable,
+ initcond1 = \'{0,0}\'
+ );',
+ regexp => qr/^
+ \QCREATE AGGREGATE dump_test.newavg(integer) (\E
+ \n\s+\QSFUNC = int4_avg_accum,\E
+ \n\s+\QSTYPE = bigint[],\E
+ \n\s+\QINITCOND = '{0,0}',\E
+ \n\s+\QFINALFUNC = int8_avg,\E
+ \n\s+\QFINALFUNC_MODIFY = SHAREABLE\E
+ \n\);/xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ exclude_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE CONVERSION dump_test.test_conversion' => {
+ create_order => 78,
+ create_sql =>
+ 'CREATE DEFAULT CONVERSION dump_test.test_conversion FOR \'LATIN1\' TO \'UTF8\' FROM iso8859_1_to_utf8;',
+ regexp =>
+ qr/^\QCREATE DEFAULT CONVERSION dump_test.test_conversion FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;\E/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE DOMAIN dump_test.us_postal_code' => {
+ create_order => 29,
+ create_sql => 'CREATE DOMAIN dump_test.us_postal_code AS TEXT
+ COLLATE "C"
+ DEFAULT \'10014\'
+ CHECK(VALUE ~ \'^\d{5}$\' OR
+ VALUE ~ \'^\d{5}-\d{4}$\');
+ COMMENT ON CONSTRAINT us_postal_code_check
+ ON DOMAIN dump_test.us_postal_code IS \'check it\';',
+ regexp => qr/^
+ \QCREATE DOMAIN dump_test.us_postal_code AS text COLLATE pg_catalog."C" DEFAULT '10014'::text\E\n\s+
+ \QCONSTRAINT us_postal_code_check CHECK \E
+ \Q(((VALUE ~ '^\d{5}\E
+ \$\Q'::text) OR (VALUE ~ '^\d{5}-\d{4}\E\$
+ \Q'::text)));\E(.|\n)*
+ \QCOMMENT ON CONSTRAINT us_postal_code_check ON DOMAIN dump_test.us_postal_code IS 'check it';\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION dump_test.pltestlang_call_handler' => {
+ create_order => 17,
+ create_sql => 'CREATE FUNCTION dump_test.pltestlang_call_handler()
+ RETURNS LANGUAGE_HANDLER AS \'$libdir/plpgsql\',
+ \'plpgsql_call_handler\' LANGUAGE C;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.pltestlang_call_handler() \E
+ \QRETURNS language_handler\E
+ \n\s+\QLANGUAGE c\E
+ \n\s+AS\ \'\$
+ \Qlibdir\/plpgsql', 'plpgsql_call_handler';\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION dump_test.trigger_func' => {
+ create_order => 30,
+ create_sql => 'CREATE FUNCTION dump_test.trigger_func()
+ RETURNS trigger LANGUAGE plpgsql
+ AS $$ BEGIN RETURN NULL; END;$$;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.trigger_func() RETURNS trigger\E
+ \n\s+\QLANGUAGE plpgsql\E
+ \n\s+AS\ \$\$
+ \Q BEGIN RETURN NULL; END;\E
+ \$\$;/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION dump_test.event_trigger_func' => {
+ create_order => 32,
+ create_sql => 'CREATE FUNCTION dump_test.event_trigger_func()
+ RETURNS event_trigger LANGUAGE plpgsql
+ AS $$ BEGIN RETURN; END;$$;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.event_trigger_func() RETURNS event_trigger\E
+ \n\s+\QLANGUAGE plpgsql\E
+ \n\s+AS\ \$\$
+ \Q BEGIN RETURN; END;\E
+ \$\$;/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE OPERATOR FAMILY dump_test.op_family' => {
+ create_order => 73,
+ create_sql =>
+ 'CREATE OPERATOR FAMILY dump_test.op_family USING btree;',
+ regexp => qr/^
+ \QCREATE OPERATOR FAMILY dump_test.op_family USING btree;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE OPERATOR CLASS dump_test.op_class' => {
+ create_order => 74,
+ create_sql => 'CREATE OPERATOR CLASS dump_test.op_class
+ FOR TYPE bigint USING btree FAMILY dump_test.op_family
+ AS STORAGE bigint,
+ OPERATOR 1 <(bigint,bigint),
+ OPERATOR 2 <=(bigint,bigint),
+ OPERATOR 3 =(bigint,bigint),
+ OPERATOR 4 >=(bigint,bigint),
+ OPERATOR 5 >(bigint,bigint),
+ FUNCTION 1 btint8cmp(bigint,bigint),
+ FUNCTION 2 btint8sortsupport(internal),
+ FUNCTION 4 btequalimage(oid);',
+ # note: it's correct that btint8sortsupport and btequalimage
+ # are NOT included here (they're optional support functions):
+ regexp => qr/^
+ \QCREATE OPERATOR CLASS dump_test.op_class\E\n\s+
+ \QFOR TYPE bigint USING btree FAMILY dump_test.op_family AS\E\n\s+
+ \QOPERATOR 1 <(bigint,bigint) ,\E\n\s+
+ \QOPERATOR 2 <=(bigint,bigint) ,\E\n\s+
+ \QOPERATOR 3 =(bigint,bigint) ,\E\n\s+
+ \QOPERATOR 4 >=(bigint,bigint) ,\E\n\s+
+ \QOPERATOR 5 >(bigint,bigint) ,\E\n\s+
+ \QFUNCTION 1 (bigint, bigint) btint8cmp(bigint,bigint);\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+	# verify that a custom operator/opclass/range type is dumped in the right order
+ 'CREATE OPERATOR CLASS dump_test.op_class_custom' => {
+ create_order => 74,
+ create_sql => 'CREATE OPERATOR dump_test.~~ (
+ PROCEDURE = int4eq,
+ LEFTARG = int,
+ RIGHTARG = int);
+ CREATE OPERATOR CLASS dump_test.op_class_custom
+ FOR TYPE int USING btree AS
+ OPERATOR 3 dump_test.~~;
+ CREATE TYPE dump_test.range_type_custom AS RANGE (
+ subtype = int,
+ subtype_opclass = dump_test.op_class_custom);',
+ regexp => qr/^
+ \QCREATE OPERATOR dump_test.~~ (\E\n.+
+ \QCREATE OPERATOR FAMILY dump_test.op_class_custom USING btree;\E\n.+
+ \QCREATE OPERATOR CLASS dump_test.op_class_custom\E\n\s+
+ \QFOR TYPE integer USING btree FAMILY dump_test.op_class_custom AS\E\n\s+
+ \QOPERATOR 3 dump_test.~~(integer,integer);\E\n.+
+ \QCREATE TYPE dump_test.range_type_custom AS RANGE (\E\n\s+
+ \Qsubtype = integer,\E\n\s+
+ \Qmultirange_type_name = dump_test.multirange_type_custom,\E\n\s+
+ \Qsubtype_opclass = dump_test.op_class_custom\E\n
+ \Q);\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE OPERATOR CLASS dump_test.op_class_empty' => {
+ create_order => 89,
+ create_sql => 'CREATE OPERATOR CLASS dump_test.op_class_empty
+ FOR TYPE bigint USING btree FAMILY dump_test.op_family
+ AS STORAGE bigint;',
+ regexp => qr/^
+ \QCREATE OPERATOR CLASS dump_test.op_class_empty\E\n\s+
+ \QFOR TYPE bigint USING btree FAMILY dump_test.op_family AS\E\n\s+
+ \QSTORAGE bigint;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE EVENT TRIGGER test_event_trigger' => {
+ create_order => 33,
+ create_sql => 'CREATE EVENT TRIGGER test_event_trigger
+ ON ddl_command_start
+ EXECUTE FUNCTION dump_test.event_trigger_func();',
+ regexp => qr/^
+ \QCREATE EVENT TRIGGER test_event_trigger \E
+ \QON ddl_command_start\E
+ \n\s+\QEXECUTE FUNCTION dump_test.event_trigger_func();\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'CREATE TRIGGER test_trigger' => {
+ create_order => 31,
+ create_sql => 'CREATE TRIGGER test_trigger
+ BEFORE INSERT ON dump_test.test_table
+ FOR EACH ROW WHEN (NEW.col1 > 10)
+ EXECUTE FUNCTION dump_test.trigger_func();',
+ regexp => qr/^
+ \QCREATE TRIGGER test_trigger BEFORE INSERT ON dump_test.test_table \E
+ \QFOR EACH ROW WHEN ((new.col1 > 10)) \E
+ \QEXECUTE FUNCTION dump_test.trigger_func();\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_test_table => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'CREATE TYPE dump_test.planets AS ENUM' => {
+ create_order => 37,
+ create_sql => 'CREATE TYPE dump_test.planets
+ AS ENUM ( \'venus\', \'earth\', \'mars\' );',
+ regexp => qr/^
+ \QCREATE TYPE dump_test.planets AS ENUM (\E
+ \n\s+'venus',
+ \n\s+'earth',
+ \n\s+'mars'
+ \n\);/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
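+	# In binary_upgrade mode the enum is dumped empty and each label is added
+	# with ALTER TYPE ... ADD VALUE, so pg_upgrade can preserve the label OIDs.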
+ 'CREATE TYPE dump_test.planets AS ENUM pg_upgrade' => {
+ regexp => qr/^
+ \QCREATE TYPE dump_test.planets AS ENUM (\E
+ \n\);.*^
+ \QALTER TYPE dump_test.planets ADD VALUE 'venus';\E
+ \n.*^
+ \QALTER TYPE dump_test.planets ADD VALUE 'earth';\E
+ \n.*^
+ \QALTER TYPE dump_test.planets ADD VALUE 'mars';\E
+ \n/xms,
+ like => { binary_upgrade => 1, },
+ },
+
+ 'CREATE TYPE dump_test.textrange AS RANGE' => {
+ create_order => 38,
+ create_sql => 'CREATE TYPE dump_test.textrange
+ AS RANGE (subtype=text, collation="C");',
+ regexp => qr/^
+ \QCREATE TYPE dump_test.textrange AS RANGE (\E
+ \n\s+\Qsubtype = text,\E
+ \n\s+\Qmultirange_type_name = dump_test.textmultirange,\E
+ \n\s+\Qcollation = pg_catalog."C"\E
+ \n\);/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TYPE dump_test.int42' => {
+ create_order => 39,
+ create_sql => 'CREATE TYPE dump_test.int42;',
+ regexp => qr/^\QCREATE TYPE dump_test.int42;\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1' => {
+ create_order => 80,
+ create_sql =>
+ 'CREATE TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1 (copy=english);',
+ regexp => qr/^
+ \QCREATE TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1 (\E\n
+ \s+\QPARSER = pg_catalog."default" );\E/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1 ...' => {
+ regexp => qr/^
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR asciiword WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR word WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR numword WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR email WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR url WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR host WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR sfloat WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR version WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR hword_numpart WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR hword_part WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR hword_asciipart WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR numhword WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR asciihword WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR hword WITH english_stem;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR url_path WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR file WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR "float" WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR "int" WITH simple;\E\n
+ \n
+ \QALTER TEXT SEARCH CONFIGURATION dump_test.alt_ts_conf1\E\n
+ \s+\QADD MAPPING FOR uint WITH simple;\E\n
+ \n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1' => {
+ create_order => 81,
+ create_sql =>
+ 'CREATE TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1 (lexize=dsimple_lexize);',
+ regexp => qr/^
+ \QCREATE TEXT SEARCH TEMPLATE dump_test.alt_ts_temp1 (\E\n
+ \s+\QLEXIZE = dsimple_lexize );\E/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TEXT SEARCH PARSER dump_test.alt_ts_prs1' => {
+ create_order => 82,
+ create_sql => 'CREATE TEXT SEARCH PARSER dump_test.alt_ts_prs1
+ (start = prsd_start, gettoken = prsd_nexttoken, end = prsd_end, lextypes = prsd_lextype);',
+ regexp => qr/^
+ \QCREATE TEXT SEARCH PARSER dump_test.alt_ts_prs1 (\E\n
+ \s+\QSTART = prsd_start,\E\n
+ \s+\QGETTOKEN = prsd_nexttoken,\E\n
+ \s+\QEND = prsd_end,\E\n
+ \s+\QLEXTYPES = prsd_lextype );\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1' => {
+ create_order => 83,
+ create_sql =>
+ 'CREATE TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1 (template=simple);',
+ regexp => qr/^
+ \QCREATE TEXT SEARCH DICTIONARY dump_test.alt_ts_dict1 (\E\n
+ \s+\QTEMPLATE = pg_catalog.simple );\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION dump_test.int42_in' => {
+ create_order => 40,
+ create_sql => 'CREATE FUNCTION dump_test.int42_in(cstring)
+ RETURNS dump_test.int42 AS \'int4in\'
+ LANGUAGE internal STRICT IMMUTABLE;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.int42_in(cstring) RETURNS dump_test.int42\E
+ \n\s+\QLANGUAGE internal IMMUTABLE STRICT\E
+ \n\s+AS\ \$\$int4in\$\$;
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION dump_test.int42_out' => {
+ create_order => 41,
+ create_sql => 'CREATE FUNCTION dump_test.int42_out(dump_test.int42)
+ RETURNS cstring AS \'int4out\'
+ LANGUAGE internal STRICT IMMUTABLE;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.int42_out(dump_test.int42) RETURNS cstring\E
+ \n\s+\QLANGUAGE internal IMMUTABLE STRICT\E
+ \n\s+AS\ \$\$int4out\$\$;
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FUNCTION ... SUPPORT' => {
+ create_order => 41,
+ create_sql =>
+ 'CREATE FUNCTION dump_test.func_with_support() RETURNS int LANGUAGE sql AS $$ SELECT 1 $$ SUPPORT varchar_support;',
+ regexp => qr/^
+ \QCREATE FUNCTION dump_test.func_with_support() RETURNS integer\E
+ \n\s+\QLANGUAGE sql SUPPORT varchar_support\E
+ \n\s+AS\ \$\$\Q SELECT 1 \E\$\$;
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'Check ordering of a function that depends on a primary key' => {
+ create_order => 41,
+ create_sql => '
+ CREATE TABLE dump_test.ordering_table (id int primary key, data int);
+ CREATE FUNCTION dump_test.ordering_func ()
+ RETURNS SETOF dump_test.ordering_table
+ LANGUAGE sql BEGIN ATOMIC
+ SELECT * FROM dump_test.ordering_table GROUP BY id; END;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.ordering_table\E
+ \n\s+\QADD CONSTRAINT ordering_table_pkey PRIMARY KEY (id);\E
+ .*^
+ \QCREATE FUNCTION dump_test.ordering_func\E/xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ only_dump_measurement => 1,
+ },
+ },
+
+ 'CREATE PROCEDURE dump_test.ptest1' => {
+ create_order => 41,
+ create_sql => 'CREATE PROCEDURE dump_test.ptest1(a int)
+ LANGUAGE SQL AS $$ INSERT INTO dump_test.test_table (col1) VALUES (a) $$;',
+ regexp => qr/^
+ \QCREATE PROCEDURE dump_test.ptest1(IN a integer)\E
+ \n\s+\QLANGUAGE sql\E
+ \n\s+AS\ \$\$\Q INSERT INTO dump_test.test_table (col1) VALUES (a) \E\$\$;
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TYPE dump_test.int42 populated' => {
+ create_order => 42,
+ create_sql => 'CREATE TYPE dump_test.int42 (
+ internallength = 4,
+ input = dump_test.int42_in,
+ output = dump_test.int42_out,
+ alignment = int4,
+ default = 42,
+ passedbyvalue);',
+ regexp => qr/^
+ \QCREATE TYPE dump_test.int42 (\E
+ \n\s+\QINTERNALLENGTH = 4,\E
+ \n\s+\QINPUT = dump_test.int42_in,\E
+ \n\s+\QOUTPUT = dump_test.int42_out,\E
+ \n\s+\QDEFAULT = '42',\E
+ \n\s+\QALIGNMENT = int4,\E
+ \n\s+\QSTORAGE = plain,\E
+ \n\s+PASSEDBYVALUE\n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TYPE dump_test.composite' => {
+ create_order => 43,
+ create_sql => 'CREATE TYPE dump_test.composite AS (
+ f1 int,
+ f2 dump_test.int42
+ );',
+ regexp => qr/^
+ \QCREATE TYPE dump_test.composite AS (\E
+ \n\s+\Qf1 integer,\E
+ \n\s+\Qf2 dump_test.int42\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TYPE dump_test.undefined' => {
+ create_order => 39,
+ create_sql => 'CREATE TYPE dump_test.undefined;',
+ regexp => qr/^\QCREATE TYPE dump_test.undefined;\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE FOREIGN DATA WRAPPER dummy' => {
+ create_order => 35,
+ create_sql => 'CREATE FOREIGN DATA WRAPPER dummy;',
+ regexp => qr/CREATE FOREIGN DATA WRAPPER dummy;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE SERVER s1 FOREIGN DATA WRAPPER dummy' => {
+ create_order => 36,
+ create_sql => 'CREATE SERVER s1 FOREIGN DATA WRAPPER dummy;',
+ regexp => qr/CREATE SERVER s1 FOREIGN DATA WRAPPER dummy;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE FOREIGN TABLE dump_test.foreign_table SERVER s1' => {
+ create_order => 88,
+ create_sql =>
+ 'CREATE FOREIGN TABLE dump_test.foreign_table (c1 int options (column_name \'col1\'))
+ SERVER s1 OPTIONS (schema_name \'x1\');',
+ regexp => qr/
+ \QCREATE FOREIGN TABLE dump_test.foreign_table (\E\n
+ \s+\Qc1 integer\E\n
+ \Q)\E\n
+ \QSERVER s1\E\n
+ \QOPTIONS (\E\n
+ \s+\Qschema_name 'x1'\E\n
+ \Q);\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE USER MAPPING FOR regress_dump_test_role SERVER s1' => {
+ create_order => 86,
+ create_sql =>
+ 'CREATE USER MAPPING FOR regress_dump_test_role SERVER s1;',
+ regexp =>
+ qr/CREATE USER MAPPING FOR regress_dump_test_role SERVER s1;/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE TRANSFORM FOR int' => {
+ create_order => 34,
+ create_sql =>
+ 'CREATE TRANSFORM FOR int LANGUAGE SQL (FROM SQL WITH FUNCTION prsd_lextype(internal), TO SQL WITH FUNCTION int4recv(internal));',
+ regexp =>
+ qr/CREATE TRANSFORM FOR integer LANGUAGE sql \(FROM SQL WITH FUNCTION pg_catalog\.prsd_lextype\(internal\), TO SQL WITH FUNCTION pg_catalog\.int4recv\(internal\)\);/m,
+ like => { %full_runs, section_pre_data => 1, },
+ },
+
+ 'CREATE LANGUAGE pltestlang' => {
+ create_order => 18,
+ create_sql => 'CREATE LANGUAGE pltestlang
+ HANDLER dump_test.pltestlang_call_handler;',
+ regexp => qr/^
+ \QCREATE PROCEDURAL LANGUAGE pltestlang \E
+ \QHANDLER dump_test.pltestlang_call_handler;\E
+ /xm,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE MATERIALIZED VIEW matview' => {
+ create_order => 20,
+ create_sql => 'CREATE MATERIALIZED VIEW dump_test.matview (col1) AS
+ SELECT col1 FROM dump_test.test_table;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW dump_test.matview AS\E
+ \n\s+\QSELECT test_table.col1\E
+ \n\s+\QFROM dump_test.test_table\E
+ \n\s+\QWITH NO DATA;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE MATERIALIZED VIEW matview_second' => {
+ create_order => 21,
+ create_sql => 'CREATE MATERIALIZED VIEW
+ dump_test.matview_second (col1) AS
+ SELECT * FROM dump_test.matview;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW dump_test.matview_second AS\E
+ \n\s+\QSELECT matview.col1\E
+ \n\s+\QFROM dump_test.matview\E
+ \n\s+\QWITH NO DATA;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE MATERIALIZED VIEW matview_third' => {
+ create_order => 58,
+ create_sql => 'CREATE MATERIALIZED VIEW
+ dump_test.matview_third (col1) AS
+ SELECT * FROM dump_test.matview_second WITH NO DATA;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW dump_test.matview_third AS\E
+ \n\s+\QSELECT matview_second.col1\E
+ \n\s+\QFROM dump_test.matview_second\E
+ \n\s+\QWITH NO DATA;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE MATERIALIZED VIEW matview_fourth' => {
+ create_order => 59,
+ create_sql => 'CREATE MATERIALIZED VIEW
+ dump_test.matview_fourth (col1) AS
+ SELECT * FROM dump_test.matview_third WITH NO DATA;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW dump_test.matview_fourth AS\E
+ \n\s+\QSELECT matview_third.col1\E
+ \n\s+\QFROM dump_test.matview_third\E
+ \n\s+\QWITH NO DATA;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE MATERIALIZED VIEW matview_compression' => {
+ create_order => 20,
+ create_sql => 'CREATE MATERIALIZED VIEW
+ dump_test.matview_compression (col2) AS
+ SELECT col2 FROM dump_test.test_table;
+ ALTER MATERIALIZED VIEW dump_test.matview_compression
+ ALTER COLUMN col2 SET COMPRESSION lz4;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW dump_test.matview_compression AS\E
+ \n\s+\QSELECT test_table.col2\E
+ \n\s+\QFROM dump_test.test_table\E
+ \n\s+\QWITH NO DATA;\E
+ .*
+ \QALTER TABLE ONLY dump_test.matview_compression ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
+ /xms,
+ lz4 => 1,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike =>
+ { exclude_dump_test_schema => 1, no_toast_compression => 1, },
+ },
+
+ 'Check ordering of a matview that depends on a primary key' => {
+ create_order => 42,
+ create_sql => '
+ CREATE MATERIALIZED VIEW dump_test.ordering_view AS
+ SELECT * FROM dump_test.ordering_table GROUP BY id;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.ordering_table\E
+ \n\s+\QADD CONSTRAINT ordering_table_pkey PRIMARY KEY (id);\E
+ .*^
+ \QCREATE MATERIALIZED VIEW dump_test.ordering_view AS\E
+ \n\s+\QSELECT ordering_table.id,\E/xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ only_dump_measurement => 1,
+ },
+ },
+
+ 'CREATE POLICY p1 ON test_table' => {
+ create_order => 22,
+ create_sql => 'CREATE POLICY p1 ON dump_test.test_table
+ USING (true)
+ WITH CHECK (true);',
+ regexp => qr/^
+ \QCREATE POLICY p1 ON dump_test.test_table \E
+ \QUSING (true) WITH CHECK (true);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE POLICY p2 ON test_table FOR SELECT' => {
+ create_order => 24,
+ create_sql => 'CREATE POLICY p2 ON dump_test.test_table
+ FOR SELECT TO regress_dump_test_role USING (true);',
+ regexp => qr/^
+ \QCREATE POLICY p2 ON dump_test.test_table FOR SELECT TO regress_dump_test_role \E
+ \QUSING (true);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE POLICY p3 ON test_table FOR INSERT' => {
+ create_order => 25,
+ create_sql => 'CREATE POLICY p3 ON dump_test.test_table
+ FOR INSERT TO regress_dump_test_role WITH CHECK (true);',
+ regexp => qr/^
+ \QCREATE POLICY p3 ON dump_test.test_table FOR INSERT \E
+ \QTO regress_dump_test_role WITH CHECK (true);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE POLICY p4 ON test_table FOR UPDATE' => {
+ create_order => 26,
+ create_sql => 'CREATE POLICY p4 ON dump_test.test_table FOR UPDATE
+ TO regress_dump_test_role USING (true) WITH CHECK (true);',
+ regexp => qr/^
+ \QCREATE POLICY p4 ON dump_test.test_table FOR UPDATE TO regress_dump_test_role \E
+ \QUSING (true) WITH CHECK (true);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE POLICY p5 ON test_table FOR DELETE' => {
+ create_order => 27,
+ create_sql => 'CREATE POLICY p5 ON dump_test.test_table
+ FOR DELETE TO regress_dump_test_role USING (true);',
+ regexp => qr/^
+ \QCREATE POLICY p5 ON dump_test.test_table FOR DELETE \E
+ \QTO regress_dump_test_role USING (true);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE POLICY p6 ON test_table AS RESTRICTIVE' => {
+ create_order => 27,
+ create_sql => 'CREATE POLICY p6 ON dump_test.test_table AS RESTRICTIVE
+ USING (false);',
+ regexp => qr/^
+ \QCREATE POLICY p6 ON dump_test.test_table AS RESTRICTIVE \E
+ \QUSING (false);\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE PUBLICATION pub1' => {
+ create_order => 50,
+ create_sql => 'CREATE PUBLICATION pub1;',
+ regexp => qr/^
+ \QCREATE PUBLICATION pub1 WITH (publish = 'insert, update, delete, truncate');\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'CREATE PUBLICATION pub2' => {
+ create_order => 50,
+ create_sql => 'CREATE PUBLICATION pub2
+ FOR ALL TABLES
+ WITH (publish = \'\');',
+ regexp => qr/^
+ \QCREATE PUBLICATION pub2 FOR ALL TABLES WITH (publish = '');\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'CREATE PUBLICATION pub3' => {
+ create_order => 50,
+ create_sql => 'CREATE PUBLICATION pub3;',
+ regexp => qr/^
+ \QCREATE PUBLICATION pub3 WITH (publish = 'insert, update, delete, truncate');\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'CREATE PUBLICATION pub4' => {
+ create_order => 50,
+ create_sql => 'CREATE PUBLICATION pub4;',
+ regexp => qr/^
+ \QCREATE PUBLICATION pub4 WITH (publish = 'insert, update, delete, truncate');\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
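+	# connect = false keeps CREATE SUBSCRIPTION from contacting the
+	# intentionally bogus connection string; the dump is expected to spell
+	# out the default slot_name.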
+ 'CREATE SUBSCRIPTION sub1' => {
+ create_order => 50,
+ create_sql => 'CREATE SUBSCRIPTION sub1
+ CONNECTION \'dbname=doesnotexist\' PUBLICATION pub1
+ WITH (connect = false);',
+ regexp => qr/^
+ \QCREATE SUBSCRIPTION sub1 CONNECTION 'dbname=doesnotexist' PUBLICATION pub1 WITH (connect = false, slot_name = 'sub1');\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'ALTER PUBLICATION pub1 ADD TABLE test_table' => {
+ create_order => 51,
+ create_sql =>
+ 'ALTER PUBLICATION pub1 ADD TABLE dump_test.test_table;',
+ regexp => qr/^
+ \QALTER PUBLICATION pub1 ADD TABLE ONLY dump_test.test_table;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER PUBLICATION pub1 ADD TABLE test_second_table' => {
+ create_order => 52,
+ create_sql =>
+ 'ALTER PUBLICATION pub1 ADD TABLE dump_test.test_second_table;',
+ regexp => qr/^
+ \QALTER PUBLICATION pub1 ADD TABLE ONLY dump_test.test_second_table;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER PUBLICATION pub1 ADD TABLE test_sixth_table (col3, col2)' => {
+ create_order => 52,
+ create_sql =>
+ 'ALTER PUBLICATION pub1 ADD TABLE dump_test.test_sixth_table (col3, col2);',
+ regexp => qr/^
+ \QALTER PUBLICATION pub1 ADD TABLE ONLY dump_test.test_sixth_table (col2, col3);\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER PUBLICATION pub1 ADD TABLE test_seventh_table (col3, col2) WHERE (col1 = 1)'
+ => {
+ create_order => 52,
+ create_sql =>
+ 'ALTER PUBLICATION pub1 ADD TABLE dump_test.test_seventh_table (col3, col2) WHERE (col1 = 1);',
+ regexp => qr/^
+ \QALTER PUBLICATION pub1 ADD TABLE ONLY dump_test.test_seventh_table (col2, col3) WHERE ((col1 = 1));\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER PUBLICATION pub3 ADD TABLES IN SCHEMA dump_test' => {
+ create_order => 51,
+ create_sql =>
+ 'ALTER PUBLICATION pub3 ADD TABLES IN SCHEMA dump_test;',
+ regexp => qr/^
+ \QALTER PUBLICATION pub3 ADD TABLES IN SCHEMA dump_test;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER PUBLICATION pub3 ADD TABLES IN SCHEMA public' => {
+ create_order => 52,
+ create_sql => 'ALTER PUBLICATION pub3 ADD TABLES IN SCHEMA public;',
+ regexp => qr/^
+ \QALTER PUBLICATION pub3 ADD TABLES IN SCHEMA public;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ },
+
+ 'ALTER PUBLICATION pub3 ADD TABLE test_table' => {
+ create_order => 51,
+ create_sql =>
+ 'ALTER PUBLICATION pub3 ADD TABLE dump_test.test_table;',
+ regexp => qr/^
+ \QALTER PUBLICATION pub3 ADD TABLE ONLY dump_test.test_table;\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER PUBLICATION pub4 ADD TABLE test_table WHERE (col1 > 0);' => {
+ create_order => 51,
+ create_sql =>
+ 'ALTER PUBLICATION pub4 ADD TABLE dump_test.test_table WHERE (col1 > 0);',
+ regexp => qr/^
+ \QALTER PUBLICATION pub4 ADD TABLE ONLY dump_test.test_table WHERE ((col1 > 0));\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'ALTER PUBLICATION pub4 ADD TABLE test_second_table WHERE (col2 = \'test\');'
+ => {
+ create_order => 52,
+ create_sql =>
+ 'ALTER PUBLICATION pub4 ADD TABLE dump_test.test_second_table WHERE (col2 = \'test\');',
+ regexp => qr/^
+ \QALTER PUBLICATION pub4 ADD TABLE ONLY dump_test.test_second_table WHERE ((col2 = 'test'::text));\E
+ /xm,
+ like => { %full_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE SCHEMA public' => {
+ regexp => qr/^CREATE SCHEMA public;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'CREATE SCHEMA dump_test' => {
+ create_order => 2,
+ create_sql => 'CREATE SCHEMA dump_test;',
+ regexp => qr/^CREATE SCHEMA dump_test;/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE SCHEMA dump_test_second_schema' => {
+ create_order => 9,
+ create_sql => 'CREATE SCHEMA dump_test_second_schema;',
+ regexp => qr/^CREATE SCHEMA dump_test_second_schema;/m,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ },
+ },
+
+ 'CREATE TABLE test_table' => {
+ create_order => 3,
+ create_sql => 'CREATE TABLE dump_test.test_table (
+ col1 serial primary key,
+ col2 text COMPRESSION pglz,
+ col3 text,
+ col4 text,
+ CHECK (col1 <= 1000)
+ ) WITH (autovacuum_enabled = false, fillfactor=80);
+ COMMENT ON CONSTRAINT test_table_col1_check
+ ON dump_test.test_table IS \'bounds check\';',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_table (\E\n
+ \s+\Qcol1 integer NOT NULL,\E\n
+ \s+\Qcol2 text,\E\n
+ \s+\Qcol3 text,\E\n
+ \s+\Qcol4 text,\E\n
+ \s+\QCONSTRAINT test_table_col1_check CHECK ((col1 <= 1000))\E\n
+ \Q)\E\n
+ \QWITH (autovacuum_enabled='false', fillfactor='80');\E\n(.|\n)*
+ \QCOMMENT ON CONSTRAINT test_table_col1_check ON dump_test.test_table IS 'bounds check';\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ },
+ },
+
+ 'CREATE TABLE fk_reference_test_table' => {
+ create_order => 21,
+ create_sql => 'CREATE TABLE dump_test.fk_reference_test_table (
+ col1 int primary key references dump_test.test_table
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.fk_reference_test_table (\E
+ \n\s+\Qcol1 integer NOT NULL\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_second_table' => {
+ create_order => 6,
+ create_sql => 'CREATE TABLE dump_test.test_second_table (
+ col1 int,
+ col2 text
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_second_table (\E
+ \n\s+\Qcol1 integer,\E
+ \n\s+\Qcol2 text\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_compression' => {
+ create_order => 3,
+ create_sql => 'CREATE TABLE dump_test.test_compression (
+ col1 int,
+ col2 text COMPRESSION lz4
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_compression (\E\n
+ \s+\Qcol1 integer,\E\n
+ \s+\Qcol2 text\E\n
+ \);\n
+ .*
+ \QALTER TABLE ONLY dump_test.test_compression ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
+ /xms,
+ lz4 => 1,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike =>
+ { exclude_dump_test_schema => 1, no_toast_compression => 1, },
+ },
+
+ 'CREATE TABLE measurement PARTITIONED BY' => {
+ create_order => 90,
+ create_sql => 'CREATE TABLE dump_test.measurement (
+ city_id serial not null,
+ logdate date not null,
+ peaktemp int CHECK (peaktemp >= -460),
+ unitsales int
+ ) PARTITION BY RANGE (logdate);',
+ regexp => qr/^
+ \Q-- Name: measurement;\E.*\n
+ \Q--\E\n\n
+ \QCREATE TABLE dump_test.measurement (\E\n
+ \s+\Qcity_id integer NOT NULL,\E\n
+ \s+\Qlogdate date NOT NULL,\E\n
+ \s+\Qpeaktemp integer,\E\n
+ \s+\Qunitsales integer,\E\n
+ \s+\QCONSTRAINT measurement_peaktemp_check CHECK ((peaktemp >= '-460'::integer))\E\n
+ \)\n
+ \QPARTITION BY RANGE (logdate);\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
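+	# The partition is created in dump_test_second_schema and must pick up
+	# the parent's column default, NOT NULL markings and CHECK constraint.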
+ 'Partition measurement_y2006m2 creation' => {
+ create_order => 91,
+ create_sql =>
+ 'CREATE TABLE dump_test_second_schema.measurement_y2006m2
+ PARTITION OF dump_test.measurement (
+ unitsales DEFAULT 0 CHECK (unitsales >= 0)
+ )
+ FOR VALUES FROM (\'2006-02-01\') TO (\'2006-03-01\');',
+ regexp => qr/^
+ \QCREATE TABLE dump_test_second_schema.measurement_y2006m2 (\E\n
+ \s+\Qcity_id integer DEFAULT nextval('dump_test.measurement_city_id_seq'::regclass) NOT NULL,\E\n
+ \s+\Qlogdate date NOT NULL,\E\n
+ \s+\Qpeaktemp integer,\E\n
+ \s+\Qunitsales integer DEFAULT 0,\E\n
+ \s+\QCONSTRAINT measurement_peaktemp_check CHECK ((peaktemp >= '-460'::integer)),\E\n
+ \s+\QCONSTRAINT measurement_y2006m2_unitsales_check CHECK ((unitsales >= 0))\E\n
+ \);\n
+ /xm,
+ like => {
+ %full_runs,
+ section_pre_data => 1,
+ role => 1,
+ binary_upgrade => 1,
+ },
+ },
+
+ 'Creation of row-level trigger in partitioned table' => {
+ create_order => 92,
+ create_sql => 'CREATE TRIGGER test_trigger
+ AFTER INSERT ON dump_test.measurement
+ FOR EACH ROW EXECUTE PROCEDURE dump_test.trigger_func()',
+ regexp => qr/^
+ \QCREATE TRIGGER test_trigger AFTER INSERT ON dump_test.measurement \E
+ \QFOR EACH ROW \E
+ \QEXECUTE FUNCTION dump_test.trigger_func();\E
+ /xm,
+ like => {
+ %full_runs, %dump_test_schema_runs, section_post_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'Disabled trigger on partition is altered' => {
+ create_order => 93,
+ create_sql =>
+ 'CREATE TABLE dump_test_second_schema.measurement_y2006m3
+ PARTITION OF dump_test.measurement
+ FOR VALUES FROM (\'2006-03-01\') TO (\'2006-04-01\');
+ ALTER TABLE dump_test_second_schema.measurement_y2006m3 DISABLE TRIGGER test_trigger;
+ CREATE TABLE dump_test_second_schema.measurement_y2006m4
+ PARTITION OF dump_test.measurement
+ FOR VALUES FROM (\'2006-04-01\') TO (\'2006-05-01\');
+ ALTER TABLE dump_test_second_schema.measurement_y2006m4 ENABLE REPLICA TRIGGER test_trigger;
+ CREATE TABLE dump_test_second_schema.measurement_y2006m5
+ PARTITION OF dump_test.measurement
+ FOR VALUES FROM (\'2006-05-01\') TO (\'2006-06-01\');
+ ALTER TABLE dump_test_second_schema.measurement_y2006m5 ENABLE ALWAYS TRIGGER test_trigger;
+ ',
+ regexp => qr/^
+ \QALTER TABLE dump_test_second_schema.measurement_y2006m3 DISABLE TRIGGER test_trigger;\E
+ /xm,
+ like => {
+ %full_runs,
+ section_post_data => 1,
+ role => 1,
+ binary_upgrade => 1,
+ },
+ },
+
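+	# The next two tests rely on the ENABLE REPLICA / ENABLE ALWAYS trigger
+	# states set up by the create_sql of the previous test.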
+ 'Replica trigger on partition is altered' => {
+ regexp => qr/^
+ \QALTER TABLE dump_test_second_schema.measurement_y2006m4 ENABLE REPLICA TRIGGER test_trigger;\E
+ /xm,
+ like => {
+ %full_runs,
+ section_post_data => 1,
+ role => 1,
+ binary_upgrade => 1,
+ },
+ },
+
+ 'Always trigger on partition is altered' => {
+ regexp => qr/^
+ \QALTER TABLE dump_test_second_schema.measurement_y2006m5 ENABLE ALWAYS TRIGGER test_trigger;\E
+ /xm,
+ like => {
+ %full_runs,
+ section_post_data => 1,
+ role => 1,
+ binary_upgrade => 1,
+ },
+ },
+
+ # We should never see the creation of a trigger on a partition
+ 'Disabled trigger on partition is not created' => {
+ regexp => qr/CREATE TRIGGER test_trigger.*ON dump_test_second_schema/,
+ like => {},
+ unlike => { %full_runs, %dump_test_schema_runs },
+ },
+
+ # Triggers on partitions should not be dropped individually
+ 'Triggers on partitions are not dropped' => {
+ regexp => qr/DROP TRIGGER test_trigger.*ON dump_test_second_schema/,
+ like => {}
+ },
+
+ 'CREATE TABLE test_third_table_generated_cols' => {
+ create_order => 6,
+ create_sql => 'CREATE TABLE dump_test.test_third_table (
+ f1 int, junk int,
+ g1 int generated always as (f1 * 2) stored,
+ "F3" int,
+ g2 int generated always as ("F3" * 3) stored
+ );
+ ALTER TABLE dump_test.test_third_table DROP COLUMN junk;',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_third_table (\E\n
+ \s+\Qf1 integer,\E\n
+ \s+\Qg1 integer GENERATED ALWAYS AS ((f1 * 2)) STORED,\E\n
+ \s+\Q"F3" integer,\E\n
+ \s+\Qg2 integer GENERATED ALWAYS AS (("F3" * 3)) STORED\E\n
+ \);\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { binary_upgrade => 1, exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_fourth_table_zero_col' => {
+ create_order => 6,
+ create_sql => 'CREATE TABLE dump_test.test_fourth_table (
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_fourth_table (\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_fifth_table' => {
+ create_order => 53,
+ create_sql => 'CREATE TABLE dump_test.test_fifth_table (
+ col1 integer,
+ col2 boolean,
+ col3 boolean,
+ col4 bit(5),
+ col5 float8
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_fifth_table (\E
+ \n\s+\Qcol1 integer,\E
+ \n\s+\Qcol2 boolean,\E
+ \n\s+\Qcol3 boolean,\E
+ \n\s+\Qcol4 bit(5),\E
+ \n\s+\Qcol5 double precision\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_sixth_table' => {
+ create_order => 6,
+ create_sql => 'CREATE TABLE dump_test.test_sixth_table (
+ col1 int,
+ col2 text,
+ col3 bytea
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_sixth_table (\E
+ \n\s+\Qcol1 integer,\E
+ \n\s+\Qcol2 text,\E
+ \n\s+\Qcol3 bytea\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_seventh_table' => {
+ create_order => 6,
+ create_sql => 'CREATE TABLE dump_test.test_seventh_table (
+ col1 int,
+ col2 text,
+ col3 bytea
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_seventh_table (\E
+ \n\s+\Qcol1 integer,\E
+ \n\s+\Qcol2 text,\E
+ \n\s+\Qcol3 bytea\E
+ \n\);
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_table_identity' => {
+ create_order => 3,
+ create_sql => 'CREATE TABLE dump_test.test_table_identity (
+ col1 int generated always as identity primary key,
+ col2 text
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_table_identity (\E\n
+ \s+\Qcol1 integer NOT NULL,\E\n
+ \s+\Qcol2 text\E\n
+ \);
+ .*
+ \QALTER TABLE dump_test.test_table_identity ALTER COLUMN col1 ADD GENERATED ALWAYS AS IDENTITY (\E\n
+ \s+\QSEQUENCE NAME dump_test.test_table_identity_col1_seq\E\n
+ \s+\QSTART WITH 1\E\n
+ \s+\QINCREMENT BY 1\E\n
+ \s+\QNO MINVALUE\E\n
+ \s+\QNO MAXVALUE\E\n
+ \s+\QCACHE 1\E\n
+ \);
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_table_generated' => {
+ create_order => 3,
+ create_sql => 'CREATE TABLE dump_test.test_table_generated (
+ col1 int primary key,
+ col2 int generated always as (col1 * 2) stored
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_table_generated (\E\n
+ \s+\Qcol1 integer NOT NULL,\E\n
+ \s+\Qcol2 integer GENERATED ALWAYS AS ((col1 * 2)) STORED\E\n
+ \);
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_table_generated_child1 (without local columns)' => {
+ create_order => 4,
+ create_sql => 'CREATE TABLE dump_test.test_table_generated_child1 ()
+ INHERITS (dump_test.test_table_generated);',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_table_generated_child1 (\E\n
+ \)\n
+ \QINHERITS (dump_test.test_table_generated);\E\n
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'ALTER TABLE test_table_generated_child1' => {
+ regexp =>
+ qr/^\QALTER TABLE ONLY dump_test.test_table_generated_child1 ALTER COLUMN col2 \E/m,
+
+ # should not get emitted
+ like => {},
+ },
+
+ 'CREATE TABLE test_table_generated_child2 (with local columns)' => {
+ create_order => 4,
+ create_sql => 'CREATE TABLE dump_test.test_table_generated_child2 (
+ col1 int,
+ col2 int
+ ) INHERITS (dump_test.test_table_generated);',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_table_generated_child2 (\E\n
+ \s+\Qcol1 integer,\E\n
+ \s+\Qcol2 integer\E\n
+ \)\n
+ \QINHERITS (dump_test.test_table_generated);\E\n
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'CREATE TABLE table_with_stats' => {
+ create_order => 98,
+ create_sql => 'CREATE TABLE dump_test.table_index_stats (
+ col1 int,
+ col2 int,
+ col3 int);
+ CREATE INDEX index_with_stats
+ ON dump_test.table_index_stats
+ ((col1 + 1), col1, (col2 + 1), (col3 + 1));
+ ALTER INDEX dump_test.index_with_stats
+ ALTER COLUMN 1 SET STATISTICS 400;
+ ALTER INDEX dump_test.index_with_stats
+ ALTER COLUMN 3 SET STATISTICS 500;',
+ regexp => qr/^
+ \QALTER INDEX dump_test.index_with_stats ALTER COLUMN 1 SET STATISTICS 400;\E\n
+ \QALTER INDEX dump_test.index_with_stats ALTER COLUMN 3 SET STATISTICS 500;\E\n
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_inheritance_parent' => {
+ create_order => 90,
+ create_sql => 'CREATE TABLE dump_test.test_inheritance_parent (
+ col1 int NOT NULL,
+ col2 int CHECK (col2 >= 42)
+ );',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_inheritance_parent (\E\n
+ \s+\Qcol1 integer NOT NULL,\E\n
+ \s+\Qcol2 integer,\E\n
+ \s+\QCONSTRAINT test_inheritance_parent_col2_check CHECK ((col2 >= 42))\E\n
+ \Q);\E\n
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE TABLE test_inheritance_child' => {
+ create_order => 91,
+ create_sql => 'CREATE TABLE dump_test.test_inheritance_child (
+ col1 int NOT NULL,
+ CONSTRAINT test_inheritance_child CHECK (col2 >= 142857)
+ ) INHERITS (dump_test.test_inheritance_parent);',
+ regexp => qr/^
+ \QCREATE TABLE dump_test.test_inheritance_child (\E\n
+ \s+\Qcol1 integer,\E\n
+ \s+\QCONSTRAINT test_inheritance_child CHECK ((col2 >= 142857))\E\n
+ \)\n
+ \QINHERITS (dump_test.test_inheritance_parent);\E\n
+ /xm,
+ like => {
+ %full_runs, %dump_test_schema_runs, section_pre_data => 1,
+ },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ },
+ },
+
+ 'CREATE STATISTICS extended_stats_no_options' => {
+ create_order => 97,
+ create_sql => 'CREATE STATISTICS dump_test.test_ext_stats_no_options
+ ON col1, col2 FROM dump_test.test_fifth_table',
+ regexp => qr/^
+ \QCREATE STATISTICS dump_test.test_ext_stats_no_options ON col1, col2 FROM dump_test.test_fifth_table;\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE STATISTICS extended_stats_options' => {
+ create_order => 97,
+ create_sql => 'CREATE STATISTICS dump_test.test_ext_stats_opts
+ (ndistinct) ON col1, col2 FROM dump_test.test_fifth_table',
+ regexp => qr/^
+ \QCREATE STATISTICS dump_test.test_ext_stats_opts (ndistinct) ON col1, col2 FROM dump_test.test_fifth_table;\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER STATISTICS extended_stats_options' => {
+ create_order => 98,
+ create_sql =>
+ 'ALTER STATISTICS dump_test.test_ext_stats_opts SET STATISTICS 1000',
+ regexp => qr/^
+ \QALTER STATISTICS dump_test.test_ext_stats_opts SET STATISTICS 1000;\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE STATISTICS extended_stats_expression' => {
+ create_order => 99,
+ create_sql => 'CREATE STATISTICS dump_test.test_ext_stats_expr
+ ON (2 * col1) FROM dump_test.test_fifth_table',
+ regexp => qr/^
+ \QCREATE STATISTICS dump_test.test_ext_stats_expr ON (2 * col1) FROM dump_test.test_fifth_table;\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE SEQUENCE test_table_col1_seq' => {
+ regexp => qr/^
+ \QCREATE SEQUENCE dump_test.test_table_col1_seq\E
+ \n\s+\QAS integer\E
+ \n\s+\QSTART WITH 1\E
+ \n\s+\QINCREMENT BY 1\E
+ \n\s+\QNO MINVALUE\E
+ \n\s+\QNO MAXVALUE\E
+ \n\s+\QCACHE 1;\E
+ /xm,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'CREATE INDEX ON ONLY measurement' => {
+ create_order => 92,
+ create_sql =>
+ 'CREATE INDEX ON dump_test.measurement (city_id, logdate);',
+ regexp => qr/^
+ \QCREATE INDEX measurement_city_id_logdate_idx ON ONLY dump_test.measurement USING\E
+ /xm,
+ like => {
+ binary_upgrade => 1,
+ clean => 1,
+ clean_if_exists => 1,
+ compression => 1,
+ createdb => 1,
+ defaults => 1,
+ exclude_test_table => 1,
+ exclude_test_table_data => 1,
+ no_toast_compression => 1,
+ no_blobs => 1,
+ no_privs => 1,
+ no_owner => 1,
+ no_table_access_method => 1,
+ only_dump_test_schema => 1,
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ schema_only => 1,
+ section_post_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ only_dump_test_table => 1,
+ pg_dumpall_globals => 1,
+ pg_dumpall_globals_clean => 1,
+ role => 1,
+ section_pre_data => 1,
+ },
+ },
+
+ 'ALTER TABLE measurement PRIMARY KEY' => {
+ all_runs => 1,
+ catch_all => 'CREATE ... commands',
+ create_order => 93,
+ create_sql =>
+ 'ALTER TABLE dump_test.measurement ADD PRIMARY KEY (city_id, logdate);',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.measurement\E \n^\s+
+ \QADD CONSTRAINT measurement_pkey PRIMARY KEY (city_id, logdate);\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+	'CREATE INDEX ... ON measurement_y2006m2' => {
+ regexp => qr/^
+ \QCREATE INDEX measurement_y2006m2_city_id_logdate_idx ON dump_test_second_schema.measurement_y2006m2 \E
+ /xm,
+ like => {
+ %full_runs,
+ role => 1,
+ section_post_data => 1,
+ },
+ },
+
+ 'ALTER INDEX ... ATTACH PARTITION' => {
+ regexp => qr/^
+ \QALTER INDEX dump_test.measurement_city_id_logdate_idx ATTACH PARTITION dump_test_second_schema.measurement_y2006m2_city_id_logdate_idx\E
+ /xm,
+ like => {
+ %full_runs,
+ role => 1,
+ section_post_data => 1,
+ },
+ },
+
+ 'ALTER INDEX ... ATTACH PARTITION (primary key)' => {
+ all_runs => 1,
+ catch_all => 'CREATE ... commands',
+ regexp => qr/^
+ \QALTER INDEX dump_test.measurement_pkey ATTACH PARTITION dump_test_second_schema.measurement_y2006m2_pkey\E
+ /xm,
+ like => {
+ binary_upgrade => 1,
+ clean => 1,
+ clean_if_exists => 1,
+ compression => 1,
+ createdb => 1,
+ defaults => 1,
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ exclude_test_table_data => 1,
+ no_toast_compression => 1,
+ no_blobs => 1,
+ no_privs => 1,
+ no_owner => 1,
+ no_table_access_method => 1,
+ pg_dumpall_dbprivs => 1,
+ pg_dumpall_exclude => 1,
+ role => 1,
+ schema_only => 1,
+ section_post_data => 1,
+ },
+ unlike => {
+ only_dump_test_schema => 1,
+ only_dump_test_table => 1,
+ pg_dumpall_globals => 1,
+ pg_dumpall_globals_clean => 1,
+ section_pre_data => 1,
+ test_schema_plus_blobs => 1,
+ },
+ },
+
+ 'CREATE VIEW test_view' => {
+ create_order => 61,
+ create_sql => 'CREATE VIEW dump_test.test_view
+ WITH (check_option = \'local\', security_barrier = true) AS
+ SELECT col1 FROM dump_test.test_table;',
+ regexp => qr/^
+ \QCREATE VIEW dump_test.test_view WITH (security_barrier='true') AS\E
+ \n\s+\QSELECT test_table.col1\E
+ \n\s+\QFROM dump_test.test_table\E
+ \n\s+\QWITH LOCAL CHECK OPTION;\E/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ 'ALTER VIEW test_view SET DEFAULT' => {
+ create_order => 62,
+ create_sql =>
+ 'ALTER VIEW dump_test.test_view ALTER COLUMN col1 SET DEFAULT 1;',
+ regexp => qr/^
+ \QALTER TABLE ONLY dump_test.test_view ALTER COLUMN col1 SET DEFAULT 1;\E/xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => { exclude_dump_test_schema => 1, },
+ },
+
+ # FIXME
+ 'DROP SCHEMA public (for testing without public schema)' => {
+ database => 'regress_pg_dump_test',
+ create_order => 100,
+ create_sql => 'DROP SCHEMA public;',
+ regexp => qr/^DROP SCHEMA public;/m,
+ like => {},
+ },
+
+ 'DROP SCHEMA public' => {
+ regexp => qr/^DROP SCHEMA public;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'DROP SCHEMA IF EXISTS public' => {
+ regexp => qr/^DROP SCHEMA IF EXISTS public;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'DROP EXTENSION plpgsql' => {
+ regexp => qr/^DROP EXTENSION plpgsql;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'DROP FUNCTION dump_test.pltestlang_call_handler()' => {
+ regexp => qr/^DROP FUNCTION dump_test\.pltestlang_call_handler\(\);/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP LANGUAGE pltestlang' => {
+ regexp => qr/^DROP PROCEDURAL LANGUAGE pltestlang;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP SCHEMA dump_test' => {
+ regexp => qr/^DROP SCHEMA dump_test;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP SCHEMA dump_test_second_schema' => {
+ regexp => qr/^DROP SCHEMA dump_test_second_schema;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP TABLE test_table' => {
+ regexp => qr/^DROP TABLE dump_test\.test_table;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP TABLE fk_reference_test_table' => {
+ regexp => qr/^DROP TABLE dump_test\.fk_reference_test_table;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP TABLE test_second_table' => {
+ regexp => qr/^DROP TABLE dump_test\.test_second_table;/m,
+ like => { clean => 1, },
+ },
+
+ 'DROP EXTENSION IF EXISTS plpgsql' => {
+ regexp => qr/^DROP EXTENSION IF EXISTS plpgsql;/m,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'DROP FUNCTION IF EXISTS dump_test.pltestlang_call_handler()' => {
+ regexp => qr/^
+ \QDROP FUNCTION IF EXISTS dump_test.pltestlang_call_handler();\E
+ /xm,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP LANGUAGE IF EXISTS pltestlang' => {
+ regexp => qr/^DROP PROCEDURAL LANGUAGE IF EXISTS pltestlang;/m,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP SCHEMA IF EXISTS dump_test' => {
+ regexp => qr/^DROP SCHEMA IF EXISTS dump_test;/m,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP SCHEMA IF EXISTS dump_test_second_schema' => {
+ regexp => qr/^DROP SCHEMA IF EXISTS dump_test_second_schema;/m,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP TABLE IF EXISTS test_table' => {
+ regexp => qr/^DROP TABLE IF EXISTS dump_test\.test_table;/m,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP TABLE IF EXISTS test_second_table' => {
+ regexp => qr/^DROP TABLE IF EXISTS dump_test\.test_second_table;/m,
+ like => { clean_if_exists => 1, },
+ },
+
+ 'DROP ROLE regress_dump_test_role' => {
+ regexp => qr/^
+ \QDROP ROLE regress_dump_test_role;\E
+ /xm,
+ like => { pg_dumpall_globals_clean => 1, },
+ },
+
+ 'DROP ROLE pg_' => {
+ regexp => qr/^
+ \QDROP ROLE pg_\E.+;
+ /xm,
+
+ # this shouldn't ever get emitted anywhere
+ like => {},
+ },
+
+ 'GRANT USAGE ON SCHEMA dump_test_second_schema' => {
+ create_order => 10,
+ create_sql => 'GRANT USAGE ON SCHEMA dump_test_second_schema
+ TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT USAGE ON SCHEMA dump_test_second_schema TO regress_dump_test_role;\E
+ /xm,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT USAGE ON FOREIGN DATA WRAPPER dummy' => {
+ create_order => 85,
+ create_sql => 'GRANT USAGE ON FOREIGN DATA WRAPPER dummy
+ TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON FOREIGN DATA WRAPPER dummy TO regress_dump_test_role;\E
+ /xm,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT USAGE ON FOREIGN SERVER s1' => {
+ create_order => 85,
+ create_sql => 'GRANT USAGE ON FOREIGN SERVER s1
+ TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON FOREIGN SERVER s1 TO regress_dump_test_role;\E
+ /xm,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT USAGE ON DOMAIN dump_test.us_postal_code' => {
+ create_order => 72,
+ create_sql =>
+ 'GRANT USAGE ON DOMAIN dump_test.us_postal_code TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON TYPE dump_test.us_postal_code TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT USAGE ON TYPE dump_test.int42' => {
+ create_order => 87,
+ create_sql =>
+ 'GRANT USAGE ON TYPE dump_test.int42 TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON TYPE dump_test.int42 TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT USAGE ON TYPE dump_test.planets - ENUM' => {
+ create_order => 66,
+ create_sql =>
+ 'GRANT USAGE ON TYPE dump_test.planets TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON TYPE dump_test.planets TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT USAGE ON TYPE dump_test.textrange - RANGE' => {
+ create_order => 67,
+ create_sql =>
+ 'GRANT USAGE ON TYPE dump_test.textrange TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON TYPE dump_test.textrange TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT CREATE ON DATABASE dump_test' => {
+ create_order => 48,
+ create_sql =>
+ 'GRANT CREATE ON DATABASE dump_test TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT CREATE ON DATABASE dump_test TO regress_dump_test_role;\E
+ /xm,
+ like => { pg_dumpall_dbprivs => 1, },
+ },
+
+ 'GRANT SELECT ON TABLE test_table' => {
+ create_order => 5,
+ create_sql => 'GRANT SELECT ON TABLE dump_test.test_table
+ TO regress_dump_test_role;',
+ regexp =>
+ qr/^\QGRANT SELECT ON TABLE dump_test.test_table TO regress_dump_test_role;\E/m,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ section_pre_data => 1,
+ },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ exclude_test_table => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT SELECT ON TABLE measurement' => {
+ create_order => 91,
+ create_sql => 'GRANT SELECT ON TABLE dump_test.measurement
+ TO regress_dump_test_role;
+ GRANT SELECT(city_id) ON TABLE dump_test.measurement
+ TO "regress_quoted \"" role";',
+ regexp =>
+ qr/^\QGRANT SELECT ON TABLE dump_test.measurement TO regress_dump_test_role;\E\n.*
+ ^\QGRANT SELECT(city_id) ON TABLE dump_test.measurement TO "regress_quoted \"" role";\E/xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT SELECT ON TABLE measurement_y2006m2' => {
+ create_order => 94,
+ create_sql => 'GRANT SELECT ON TABLE
+ dump_test_second_schema.measurement_y2006m2,
+ dump_test_second_schema.measurement_y2006m3,
+ dump_test_second_schema.measurement_y2006m4,
+ dump_test_second_schema.measurement_y2006m5
+ TO regress_dump_test_role;',
+ regexp =>
+ qr/^\QGRANT SELECT ON TABLE dump_test_second_schema.measurement_y2006m2 TO regress_dump_test_role;\E/m,
+ like => {
+ %full_runs,
+ role => 1,
+ section_pre_data => 1,
+ },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT ALL ON LARGE OBJECT ...' => {
+ create_order => 60,
+ create_sql => 'DO $$
+ DECLARE myoid oid;
+ BEGIN
+ SELECT loid FROM pg_largeobject INTO myoid;
+ EXECUTE \'GRANT ALL ON LARGE OBJECT \' || myoid || \' TO regress_dump_test_role;\';
+ END;
+ $$;',
+ regexp => qr/^
+ \QGRANT ALL ON LARGE OBJECT \E[0-9]+\Q TO regress_dump_test_role;\E
+ /xm,
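+ # pg_dump treats large objects as data, so data-only runs are expected
+ # to contain this GRANT while schema-only and no_blobs runs are not.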
+ like => {
+ %full_runs,
+ column_inserts => 1,
+ data_only => 1,
+ inserts => 1,
+ section_pre_data => 1,
+ test_schema_plus_blobs => 1,
+ binary_upgrade => 1,
+ },
+ unlike => {
+ no_blobs => 1,
+ no_privs => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'GRANT INSERT(col1) ON TABLE test_second_table' => {
+ create_order => 8,
+ create_sql =>
+ 'GRANT INSERT (col1) ON TABLE dump_test.test_second_table
+ TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT INSERT(col1) ON TABLE dump_test.test_second_table TO regress_dump_test_role;\E
+ /xm,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
+ unlike => {
+ exclude_dump_test_schema => 1,
+ no_privs => 1,
+ },
+ },
+
+ 'GRANT EXECUTE ON FUNCTION pg_sleep() TO regress_dump_test_role' => {
+ create_order => 16,
+ create_sql => 'GRANT EXECUTE ON FUNCTION pg_sleep(float8)
+ TO regress_dump_test_role;',
+ regexp => qr/^
+ \QGRANT ALL ON FUNCTION pg_catalog.pg_sleep(double precision) TO regress_dump_test_role;\E
+ /xm,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT SELECT (proname ...) ON TABLE pg_proc TO public' => {
+ create_order => 46,
+ create_sql => 'GRANT SELECT (
+ tableoid,
+ oid,
+ proname,
+ pronamespace,
+ proowner,
+ prolang,
+ procost,
+ prorows,
+ provariadic,
+ prosupport,
+ prokind,
+ prosecdef,
+ proleakproof,
+ proisstrict,
+ proretset,
+ provolatile,
+ proparallel,
+ pronargs,
+ pronargdefaults,
+ prorettype,
+ proargtypes,
+ proallargtypes,
+ proargmodes,
+ proargnames,
+ proargdefaults,
+ protrftypes,
+ prosrc,
+ probin,
+ proconfig,
+ proacl
+ ) ON TABLE pg_proc TO public;',
+ regexp => qr/
+ \QGRANT SELECT(tableoid) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(oid) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proname) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(pronamespace) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proowner) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prolang) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(procost) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prorows) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(provariadic) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prosupport) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prokind) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prosecdef) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proleakproof) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proisstrict) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proretset) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(provolatile) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proparallel) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(pronargs) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(pronargdefaults) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prorettype) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proargtypes) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proallargtypes) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proargmodes) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proargnames) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proargdefaults) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(protrftypes) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(prosrc) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(probin) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proconfig) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E\n.*
+ \QGRANT SELECT(proacl) ON TABLE pg_catalog.pg_proc TO PUBLIC;\E/xms,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'GRANT USAGE ON SCHEMA public TO public' => {
+ regexp => qr/^
+ \Q--\E\n\n
+ \QGRANT USAGE ON SCHEMA public TO PUBLIC;\E
+ /xm,
+
+ # this shouldn't ever get emitted anymore
+ like => {},
+ },
+
+ 'REFRESH MATERIALIZED VIEW matview' => {
+ regexp => qr/^\QREFRESH MATERIALIZED VIEW dump_test.matview;\E/m,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ 'REFRESH MATERIALIZED VIEW matview_second' => {
+ regexp => qr/^
+ \QREFRESH MATERIALIZED VIEW dump_test.matview;\E
+ \n.*
+ \QREFRESH MATERIALIZED VIEW dump_test.matview_second;\E
+ /xms,
+ like =>
+ { %full_runs, %dump_test_schema_runs, section_post_data => 1, },
+ unlike => {
+ binary_upgrade => 1,
+ exclude_dump_test_schema => 1,
+ schema_only => 1,
+ },
+ },
+
+ # FIXME
+ 'REFRESH MATERIALIZED VIEW matview_third' => {
+ regexp => qr/^
+ \QREFRESH MATERIALIZED VIEW dump_test.matview_third;\E
+ /xms,
+ like => {},
+ },
+
+ # FIXME
+ 'REFRESH MATERIALIZED VIEW matview_fourth' => {
+ regexp => qr/^
+ \QREFRESH MATERIALIZED VIEW dump_test.matview_fourth;\E
+ /xms,
+ like => {},
+ },
+
+ 'REVOKE CONNECT ON DATABASE dump_test FROM public' => {
+ create_order => 49,
+ create_sql => 'REVOKE CONNECT ON DATABASE dump_test FROM public;',
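+ # The dump rebuilds the database ACL relative to the built-in default,
+ # so a plain REVOKE CONNECT is expected to appear as REVOKE
+ # CONNECT,TEMPORARY followed by a GRANT TEMPORARY back to PUBLIC.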
+ regexp => qr/^
+ \QREVOKE CONNECT,TEMPORARY ON DATABASE dump_test FROM PUBLIC;\E\n
+ \QGRANT TEMPORARY ON DATABASE dump_test TO PUBLIC;\E\n
+ \QGRANT CREATE ON DATABASE dump_test TO regress_dump_test_role;\E
+ /xm,
+ like => { pg_dumpall_dbprivs => 1, },
+ },
+
+ 'REVOKE EXECUTE ON FUNCTION pg_sleep() FROM public' => {
+ create_order => 15,
+ create_sql => 'REVOKE EXECUTE ON FUNCTION pg_sleep(float8)
+ FROM public;',
+ regexp => qr/^
+ \QREVOKE ALL ON FUNCTION pg_catalog.pg_sleep(double precision) FROM PUBLIC;\E
+ /xm,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ # With the exception of the public schema, we don't dump ownership changes
+ # for objects originating at initdb. Hence, any GRANT or REVOKE affecting
+ # owner privileges for those objects should reference the bootstrap
+ # superuser, not the dump-time owner.
+ 'REVOKE EXECUTE ON FUNCTION pg_stat_reset FROM regress_dump_test_role' =>
+ {
+ create_order => 15,
+ create_sql => '
+ ALTER FUNCTION pg_stat_reset OWNER TO regress_dump_test_role;
+ REVOKE EXECUTE ON FUNCTION pg_stat_reset
+ FROM regress_dump_test_role;',
+ regexp => qr/^[^-].*pg_stat_reset.* regress_dump_test_role/m,
+
+ # this shouldn't ever get emitted
+ like => {},
+ },
+
+ 'REVOKE SELECT ON TABLE pg_proc FROM public' => {
+ create_order => 45,
+ create_sql => 'REVOKE SELECT ON TABLE pg_proc FROM public;',
+ regexp =>
+ qr/^\QREVOKE SELECT ON TABLE pg_catalog.pg_proc FROM PUBLIC;\E/m,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'REVOKE ALL ON SCHEMA public' => {
+ create_order => 16,
+ create_sql =>
+ 'REVOKE ALL ON SCHEMA public FROM "regress_quoted \"" role";',
+ regexp =>
+ qr/^REVOKE ALL ON SCHEMA public FROM "regress_quoted \\"" role";/m,
+ like => { %full_runs, section_pre_data => 1, },
+ unlike => { no_privs => 1, },
+ },
+
+ 'REVOKE USAGE ON LANGUAGE plpgsql FROM public' => {
+ create_order => 16,
+ create_sql => 'REVOKE USAGE ON LANGUAGE plpgsql FROM public;',
+ regexp => qr/^REVOKE ALL ON LANGUAGE plpgsql FROM PUBLIC;/m,
+ like => {
+ %full_runs,
+ %dump_test_schema_runs,
+ only_dump_test_table => 1,
+ role => 1,
+ section_pre_data => 1,
+ },
+ unlike => { no_privs => 1, },
+ },
+
+
+ 'CREATE ACCESS METHOD regress_test_table_am' => {
+ create_order => 11,
+ create_sql =>
+ 'CREATE ACCESS METHOD regress_table_am TYPE TABLE HANDLER heap_tableam_handler;',
+ regexp => qr/^
+ \QCREATE ACCESS METHOD regress_table_am TYPE TABLE HANDLER heap_tableam_handler;\E
+ \n/xm,
+ like => {
+ %full_runs, section_pre_data => 1,
+ },
+ },
+
+ # It's a bit tricky to ensure that the proper SET of the default
+ # table AM occurs.  To achieve that we create three tables: one with
+ # the standard AM, one with the test AM, and one with the standard
+ # AM again, which guarantees that a SET has to appear between them.
+ # Then use a regex that matches the expected SET but allows no
+ # further SET ...; statements before the expected CREATE TABLE.  Not
+ # pretty, but it seems hard to do better in this framework.
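+ # A matching dump fragment looks roughly like this (illustrative only,
+ # whitespace simplified):
+ #   SET default_table_access_method = regress_table_am;
+ #   ...
+ #   CREATE TABLE dump_test.regress_pg_dump_table_am_1 (
+ #       col1 integer
+ #   );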
+ 'CREATE TABLE regress_pg_dump_table_am' => {
+ create_order => 12,
+ create_sql => '
+ CREATE TABLE dump_test.regress_pg_dump_table_am_0() USING heap;
+ CREATE TABLE dump_test.regress_pg_dump_table_am_1 (col1 int) USING regress_table_am;
+ CREATE TABLE dump_test.regress_pg_dump_table_am_2() USING heap;',
+ regexp => qr/^
+ \QSET default_table_access_method = regress_table_am;\E
+ (\n(?!SET[^;]+;)[^\n]*)*
+ \n\QCREATE TABLE dump_test.regress_pg_dump_table_am_1 (\E
+ \n\s+\Qcol1 integer\E
+ \n\);/xm,
+ like => {
+ %full_runs, %dump_test_schema_runs, section_pre_data => 1,
+ },
+ unlike =>
+ { exclude_dump_test_schema => 1, no_table_access_method => 1 },
+ },
+
+ 'CREATE MATERIALIZED VIEW regress_pg_dump_matview_am' => {
+ create_order => 13,
+ create_sql => '
+ CREATE MATERIALIZED VIEW dump_test.regress_pg_dump_matview_am_0 USING heap AS SELECT 1;
+ CREATE MATERIALIZED VIEW dump_test.regress_pg_dump_matview_am_1
+ USING regress_table_am AS SELECT count(*) FROM pg_class;
+ CREATE MATERIALIZED VIEW dump_test.regress_pg_dump_matview_am_2 USING heap AS SELECT 1;',
+ regexp => qr/^
+ \QSET default_table_access_method = regress_table_am;\E
+ (\n(?!SET[^;]+;)[^\n]*)*
+ \QCREATE MATERIALIZED VIEW dump_test.regress_pg_dump_matview_am_1 AS\E
+ \n\s+\QSELECT count(*) AS count\E
+ \n\s+\QFROM pg_class\E
+ \n\s+\QWITH NO DATA;\E\n/xm,
+ like => {
+ %full_runs, %dump_test_schema_runs, section_pre_data => 1,
+ },
+ unlike =>
+ { exclude_dump_test_schema => 1, no_table_access_method => 1 },
+ });
+
+#########################################
+# Create a PG instance to test actually dumping from
+
+my $node = PostgreSQL::Test::Cluster->new('main');
+$node->init;
+$node->start;
+
+my $port = $node->port;
+
+# We need to see if this system supports CREATE COLLATION or not.
+# If it doesn't, we will skip all the COLLATION-related tests.
+my $collation_support = 0;
+my $collation_check_stderr;
+$node->psql(
+ 'postgres',
+ "CREATE COLLATION testing FROM \"C\"; DROP COLLATION testing;",
+ on_error_stop => 0,
+ stderr => \$collation_check_stderr);
+
+if ($collation_check_stderr !~ /ERROR: /)
+{
+ $collation_support = 1;
+}
+
+my $supports_icu = ($ENV{with_icu} eq 'yes');
+my $supports_lz4 = check_pg_config("#define USE_LZ4 1");
+my $supports_gzip = check_pg_config("#define HAVE_LIBZ 1");
+
+# ICU doesn't work with some encodings
+my $encoding = $node->safe_psql('postgres', 'show server_encoding');
+$supports_icu = 0 if $encoding eq 'SQL_ASCII';
+
+# Create additional databases for mutations of schema public
+$node->psql('postgres', 'create database regress_pg_dump_test;');
+$node->psql('postgres', 'create database regress_public_owner;');
+
+#########################################
+# Set up schemas, tables, etc, to be dumped.
+
+# Build up the create statements
+my %create_sql = ();
+
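+# Iterate over the tests in create_order (tests that define one sort ahead
+# of those that do not) and accumulate each test's create_sql, grouped by
+# the database it targets.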
+foreach my $test (
+ sort {
+ if ($tests{$a}->{create_order} and $tests{$b}->{create_order})
+ {
+ $tests{$a}->{create_order} <=> $tests{$b}->{create_order};
+ }
+ elsif ($tests{$a}->{create_order})
+ {
+ -1;
+ }
+ elsif ($tests{$b}->{create_order})
+ {
+ 1;
+ }
+ else
+ {
+ 0;
+ }
+ } keys %tests)
+{
+ my $test_db = 'postgres';
+
+ if (defined($tests{$test}->{database}))
+ {
+ $test_db = $tests{$test}->{database};
+ }
+
+ if (defined($tests{$test}->{icu}))
+ {
+ $tests{$test}->{collation} = 1;
+ }
+
+ if ($tests{$test}->{create_sql})
+ {
+
+ # Skip any collation-related commands if there is no collation support
+ if (!$collation_support && defined($tests{$test}->{collation}))
+ {
+ next;
+ }
+
+ # Skip any icu-related collation commands if build was without icu
+ if (!$supports_icu && defined($tests{$test}->{icu}))
+ {
+ next;
+ }
+
+ # Skip tests specific to LZ4 if this build does not support
+ # this option.
+ if (!$supports_lz4 && defined($tests{$test}->{lz4}))
+ {
+ next;
+ }
+
+ # Add terminating semicolon
+ $create_sql{$test_db} .= $tests{$test}->{create_sql} . ";";
+ }
+}
+
+# Send the combined set of commands to psql
+foreach my $db (sort keys %create_sql)
+{
+ $node->safe_psql($db, $create_sql{$db});
+}
+
+#########################################
+# Test connecting to a non-existent database
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", 'qqq' ],
+ qr/pg_dump: error: connection to server .* failed: FATAL: database "qqq" does not exist/,
+ 'connecting to a non-existent database');
+
+#########################################
+# Test connecting to an invalid database
+
+$node->command_fails_like(
+ [ 'pg_dump', '-d', 'regression_invalid' ],
+ qr/pg_dump: error: connection to server .* failed: FATAL: cannot connect to invalid database "regression_invalid"/,
+ 'connecting to an invalid database');
+
+#########################################
+# Test connecting with an unprivileged user
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", '--role=regress_dump_test_role' ],
+ qr/\Qpg_dump: error: query failed: ERROR: permission denied for\E/,
+ 'connecting with an unprivileged user');
+
+#########################################
+# Test dumping a non-existent schema, table, and patterns with --strict-names
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", '-n', 'nonexistent' ],
+ qr/\Qpg_dump: error: no matching schemas were found\E/,
+ 'dumping a non-existent schema');
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", '-t', 'nonexistent' ],
+ qr/\Qpg_dump: error: no matching tables were found\E/,
+ 'dumping a non-existent table');
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", '--strict-names', '-n', 'nonexistent*' ],
+ qr/\Qpg_dump: error: no matching schemas were found for pattern\E/,
+ 'no matching schemas');
+
+command_fails_like(
+ [ 'pg_dump', '-p', "$port", '--strict-names', '-t', 'nonexistent*' ],
+ qr/\Qpg_dump: error: no matching tables were found for pattern\E/,
+ 'no matching tables');
+
+#########################################
+# Test invalid multipart database names
+
+$node->command_fails_like(
+ [ 'pg_dumpall', '--exclude-database', '.' ],
+ qr/pg_dumpall: error: improper qualified name \(too many dotted names\): \./,
+ 'pg_dumpall: option --exclude-database rejects multipart pattern "."');
+
+$node->command_fails_like(
+ [ 'pg_dumpall', '--exclude-database', 'myhost.mydb' ],
+ qr/pg_dumpall: error: improper qualified name \(too many dotted names\): myhost\.mydb/,
+ 'pg_dumpall: option --exclude-database rejects multipart database names');
+
+##############################################################
+# Test dumping pg_catalog (for research -- cannot be reloaded)
+
+$node->command_ok(
+ [ 'pg_dump', '-p', "$port", '-n', 'pg_catalog' ],
+ 'pg_dump: option -n pg_catalog'
+);
+
+#########################################
+# Test valid database exclusion patterns
+
+$node->command_ok(
+ [ 'pg_dumpall', '-p', "$port", '--exclude-database', '"myhost.mydb"' ],
+ 'pg_dumpall: option --exclude-database handles database names with embedded dots'
+);
+
+#########################################
+# Test invalid multipart schema names
+
+$node->command_fails_like(
+ [ 'pg_dump', '--schema', 'myhost.mydb.myschema' ],
+ qr/pg_dump: error: improper qualified name \(too many dotted names\): myhost\.mydb\.myschema/,
+ 'pg_dump: option --schema rejects three-part schema names');
+
+$node->command_fails_like(
+ [ 'pg_dump', '--schema', 'otherdb.myschema' ],
+ qr/pg_dump: error: cross-database references are not implemented: otherdb\.myschema/,
+ 'pg_dump: option --schema rejects cross-database multipart schema names');
+
+$node->command_fails_like(
+ [ 'pg_dump', '--schema', '.' ],
+ qr/pg_dump: error: cross-database references are not implemented: \./,
+ 'pg_dump: option --schema rejects degenerate two-part schema name: "."');
+
+$node->command_fails_like(
+ [ 'pg_dump', '--schema', '"some.other.db".myschema' ],
+ qr/pg_dump: error: cross-database references are not implemented: "some\.other\.db"\.myschema/,
+ 'pg_dump: option --schema rejects cross-database multipart schema names with embedded dots'
+);
+
+$node->command_fails_like(
+ [ 'pg_dump', '--schema', '..' ],
+ qr/pg_dump: error: improper qualified name \(too many dotted names\): \.\./,
+ 'pg_dump: option --schema rejects degenerate three-part schema name: ".."'
+);
+
+#########################################
+# Test invalid multipart relation names
+
+$node->command_fails_like(
+ [ 'pg_dump', '--table', 'myhost.mydb.myschema.mytable' ],
+ qr/pg_dump: error: improper relation name \(too many dotted names\): myhost\.mydb\.myschema\.mytable/,
+ 'pg_dump: option --table rejects four-part table names');
+
+$node->command_fails_like(
+ [ 'pg_dump', '--table', 'otherdb.pg_catalog.pg_class' ],
+ qr/pg_dump: error: cross-database references are not implemented: otherdb\.pg_catalog\.pg_class/,
+ 'pg_dump: option --table rejects cross-database three part table names');
+
+command_fails_like(
+ [
+ 'pg_dump', '-p', "$port", '--table',
+ '"some.other.db".pg_catalog.pg_class'
+ ],
+ qr/pg_dump: error: cross-database references are not implemented: "some\.other\.db"\.pg_catalog\.pg_class/,
+ 'pg_dump: option --table rejects cross-database three part table names with embedded dots'
+);
+
+#########################################
+# Run all runs
+
+foreach my $run (sort keys %pgdump_runs)
+{
+ my $test_key = $run;
+ my $run_db = 'postgres';
+
+ # Skip command-level tests for gzip if there is no support for it.
+ if ( defined($pgdump_runs{$run}->{compile_option})
+ && $pgdump_runs{$run}->{compile_option} eq 'gzip'
+ && !$supports_gzip)
+ {
+ note "$run: skipped due to no gzip support";
+ next;
+ }
+
+ $node->command_ok(\@{ $pgdump_runs{$run}->{dump_cmd} },
+ "$run: pg_dump runs");
+
+ if ($pgdump_runs{$run}->{compress_cmd})
+ {
+ my ($compress_cmd) = $pgdump_runs{$run}->{compress_cmd};
+ my $compress_program = $compress_cmd->{program};
+
+ # Skip the rest of the test if the compression program is
+ # not defined.
+ next if (!defined($compress_program) || $compress_program eq '');
+
+ my @full_compress_cmd =
+ ($compress_cmd->{program}, @{ $compress_cmd->{args} });
+ command_ok(\@full_compress_cmd, "$run: compression commands");
+ }
+
+ if ($pgdump_runs{$run}->{restore_cmd})
+ {
+ $node->command_ok(\@{ $pgdump_runs{$run}->{restore_cmd} },
+ "$run: pg_restore runs");
+ }
+
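+ # Some runs (typically restores of another run's dump) set test_key so
+ # that their output is checked against the like/unlike expectations of
+ # the run they are meant to reproduce.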
+ if ($pgdump_runs{$run}->{test_key})
+ {
+ $test_key = $pgdump_runs{$run}->{test_key};
+ }
+
+ my $output_file = slurp_file("$tempdir/${run}.sql");
+
+ #########################################
+ # Run all tests where this run is included
+ # as either a 'like' or 'unlike' test.
+
+ foreach my $test (sort keys %tests)
+ {
+ my $test_db = 'postgres';
+
+ if (defined($pgdump_runs{$run}->{database}))
+ {
+ $run_db = $pgdump_runs{$run}->{database};
+ }
+
+ if (defined($tests{$test}->{database}))
+ {
+ $test_db = $tests{$test}->{database};
+ }
+
+ # Skip any collation-related commands if there is no collation support
+ if (!$collation_support && defined($tests{$test}->{collation}))
+ {
+ next;
+ }
+
+ # Skip any icu-related collation commands if build was without icu
+ if (!$supports_icu && defined($tests{$test}->{icu}))
+ {
+ next;
+ }
+
+ # Skip tests specific to LZ4 if this build does not support
+ # this option.
+ if (!$supports_lz4 && defined($tests{$test}->{lz4}))
+ {
+ next;
+ }
+
+ if ($run_db ne $test_db)
+ {
+ next;
+ }
+
+ # Run the test listed as a like, unless it is specifically noted
+ # as an unlike (generally due to an explicit exclusion or similar).
+ if ($tests{$test}->{like}->{$test_key}
+ && !defined($tests{$test}->{unlike}->{$test_key}))
+ {
+ if (!ok($output_file =~ $tests{$test}->{regexp},
+ "$run: should dump $test"))
+ {
+ diag("Review $run results in $tempdir");
+ }
+ }
+ else
+ {
+ if (!ok($output_file !~ $tests{$test}->{regexp},
+ "$run: should not dump $test"))
+ {
+ diag("Review $run results in $tempdir");
+ }
+ }
+ }
+}
+
+#########################################
+# Stop the database instance, which will be removed at the end of the tests.
+
+$node->stop('fast');
+
+done_testing();
diff --git a/src/bin/pg_dump/t/003_pg_dump_with_server.pl b/src/bin/pg_dump/t/003_pg_dump_with_server.pl
new file mode 100644
index 0000000..8cc9da0
--- /dev/null
+++ b/src/bin/pg_dump/t/003_pg_dump_with_server.pl
@@ -0,0 +1,40 @@
+
+# Copyright (c) 2021-2022, PostgreSQL Global Development Group
+
+use strict;
+use warnings;
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+my $tempdir = PostgreSQL::Test::Utils::tempdir;
+
+my $node = PostgreSQL::Test::Cluster->new('main');
+my $port = $node->port;
+
+$node->init;
+$node->start;
+
+#########################################
+# Verify that dumping foreign data includes only foreign tables of
+# matching servers
+
+$node->safe_psql('postgres', "CREATE FOREIGN DATA WRAPPER dummy");
+$node->safe_psql('postgres', "CREATE SERVER s0 FOREIGN DATA WRAPPER dummy");
+$node->safe_psql('postgres', "CREATE SERVER s1 FOREIGN DATA WRAPPER dummy");
+$node->safe_psql('postgres', "CREATE SERVER s2 FOREIGN DATA WRAPPER dummy");
+$node->safe_psql('postgres', "CREATE FOREIGN TABLE t0 (a int) SERVER s0");
+$node->safe_psql('postgres', "CREATE FOREIGN TABLE t1 (a int) SERVER s1");
+my ($cmd, $stdout, $stderr, $result);
+
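+# At this point servers s0 and s1 each have one foreign table while s2 has
+# none, so --include-foreign-data=s0 tries (and fails) to fetch data through
+# the handler-less dummy FDW, whereas --include-foreign-data=s2 has nothing
+# to fetch and should succeed.
+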
+command_fails_like(
+ [ "pg_dump", '-p', $port, '--include-foreign-data=s0', 'postgres' ],
+ qr/foreign-data wrapper \"dummy\" has no handler\r?\npg_dump: detail: Query was: .*t0/,
+ "correctly fails to dump a foreign table from a dummy FDW");
+
+command_ok(
+ [ "pg_dump", '-p', $port, '-a', '--include-foreign-data=s2', 'postgres' ],
+ "dump foreign server with no tables");
+
+done_testing();
diff --git a/src/bin/pg_dump/t/004_pg_dump_parallel.pl b/src/bin/pg_dump/t/004_pg_dump_parallel.pl
new file mode 100644
index 0000000..f41c2fa
--- /dev/null
+++ b/src/bin/pg_dump/t/004_pg_dump_parallel.pl
@@ -0,0 +1,81 @@
+
+# Copyright (c) 2021-2023, PostgreSQL Global Development Group
+
+use strict;
+use warnings;
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+my $dbname1 = 'regression_src';
+my $dbname2 = 'regression_dest1';
+my $dbname3 = 'regression_dest2';
+
+my $node = PostgreSQL::Test::Cluster->new('main');
+$node->init;
+$node->start;
+
+my $backupdir = $node->backup_dir;
+
+$node->run_log([ 'createdb', $dbname1 ]);
+$node->run_log([ 'createdb', $dbname2 ]);
+$node->run_log([ 'createdb', $dbname3 ]);
+
+$node->safe_psql(
+ $dbname1,
+ qq{
+create type digit as enum ('0', '1', '2', '3', '4', '5', '6', '7', '8', '9');
+
+-- plain table with index
+create table tplain (en digit, data int unique);
+insert into tplain select (x%10)::text::digit, x from generate_series(1,1000) x;
+
+-- non-troublesome hashed partitioning
+create table ths (mod int, data int, unique(mod, data)) partition by hash(mod);
+create table ths_p1 partition of ths for values with (modulus 3, remainder 0);
+create table ths_p2 partition of ths for values with (modulus 3, remainder 1);
+create table ths_p3 partition of ths for values with (modulus 3, remainder 2);
+insert into ths select (x%10), x from generate_series(1,1000) x;
+
+-- dangerous hashed partitioning
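+-- (hash values of an enum column depend on the enum members' OIDs, which
+-- can change across a dump/reload, so rows could map to different
+-- partitions in the destination)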
+create table tht (en digit, data int, unique(en, data)) partition by hash(en);
+create table tht_p1 partition of tht for values with (modulus 3, remainder 0);
+create table tht_p2 partition of tht for values with (modulus 3, remainder 1);
+create table tht_p3 partition of tht for values with (modulus 3, remainder 2);
+insert into tht select (x%10)::text::digit, x from generate_series(1,1000) x;
+ });
+
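+# Dump the source database with two parallel workers, then restore it in
+# parallel into the destination databases: once from a COPY-based dump and
+# once from an --inserts dump.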
+$node->command_ok(
+ [
+ 'pg_dump', '-Fd', '--no-sync', '-j2', '-f', "$backupdir/dump1",
+ $node->connstr($dbname1)
+ ],
+ 'parallel dump');
+
+$node->command_ok(
+ [
+ 'pg_restore', '-v',
+ '-d', $node->connstr($dbname2),
+ '-j3', "$backupdir/dump1"
+ ],
+ 'parallel restore');
+
+$node->command_ok(
+ [
+ 'pg_dump', '-Fd',
+ '--no-sync', '-j2',
+ '-f', "$backupdir/dump2",
+ '--inserts', $node->connstr($dbname1)
+ ],
+ 'parallel dump as inserts');
+
+$node->command_ok(
+ [
+ 'pg_restore', '-v',
+ '-d', $node->connstr($dbname3),
+ '-j3', "$backupdir/dump2"
+ ],
+ 'parallel restore as inserts');
+
+done_testing();
diff --git a/src/bin/pg_dump/t/010_dump_connstr.pl b/src/bin/pg_dump/t/010_dump_connstr.pl
new file mode 100644
index 0000000..f0e3aaf
--- /dev/null
+++ b/src/bin/pg_dump/t/010_dump_connstr.pl
@@ -0,0 +1,233 @@
+
+# Copyright (c) 2021-2022, PostgreSQL Global Development Group
+
+use strict;
+use warnings;
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+if ($PostgreSQL::Test::Utils::is_msys2)
+{
+ plan skip_all => 'High bit name tests fail on Msys2';
+}
+
+# We're going to use byte sequences that aren't valid UTF-8 strings. Use
+# LATIN1, which accepts any byte and has a conversion from each byte to UTF-8.
+$ENV{LC_ALL} = 'C';
+$ENV{PGCLIENTENCODING} = 'LATIN1';
+
+# Create database and user names covering the range of LATIN1
+# characters, for use in a connection string by pg_dumpall. Skip ','
+# because of pg_regress --create-role, skip [\n\r] because pg_dumpall
+# does not allow them. We also skip many ASCII letters, to keep the
+# total number of tested characters to what will fit in four names.
+# The odds of finding something interesting by testing all ASCII letters
+# seem too small to justify the cycles of testing a fifth name.
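+# (generate_ascii_string(from, to), from PostgreSQL::Test::Utils, returns
+# the characters with ordinals from..to concatenated into one string.)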
+my $dbname1 =
+ 'regression'
+ . generate_ascii_string(1, 9)
+ . generate_ascii_string(11, 12)
+ . generate_ascii_string(14, 33)
+ . (
+ $PostgreSQL::Test::Utils::windows_os
+ ? ''
+ : '"x"') # IPC::Run mishandles '"' on Windows
+ . generate_ascii_string(35, 43) # skip ','
+ . generate_ascii_string(45, 54);
+my $dbname2 = 'regression' . generate_ascii_string(55, 65) # skip 'B'-'W'
+ . generate_ascii_string(88, 99) # skip 'd'-'w'
+ . generate_ascii_string(120, 149);
+my $dbname3 = 'regression' . generate_ascii_string(150, 202);
+my $dbname4 = 'regression' . generate_ascii_string(203, 255);
+
+(my $username1 = $dbname1) =~ s/^regression/regress_/;
+(my $username2 = $dbname2) =~ s/^regression/regress_/;
+(my $username3 = $dbname3) =~ s/^regression/regress_/;
+(my $username4 = $dbname4) =~ s/^regression/regress_/;
+
+my $src_bootstrap_super = 'regress_postgres';
+my $dst_bootstrap_super = 'boot';
+
+my $node = PostgreSQL::Test::Cluster->new('main');
+$node->init(extra =>
+ [ '-U', $src_bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
+
+# prep pg_hba.conf and pg_ident.conf
+$node->run_log(
+ [
+ $ENV{PG_REGRESS}, '--config-auth',
+ $node->data_dir, '--user',
+ $src_bootstrap_super, '--create-role',
+ "$username1,$username2,$username3,$username4"
+ ]);
+$node->start;
+
+my $backupdir = $node->backup_dir;
+my $discard = "$backupdir/discard.sql";
+my $plain = "$backupdir/plain.sql";
+my $dirfmt = "$backupdir/dirfmt";
+
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname1 ]);
+$node->run_log(
+ [ 'createuser', '-U', $src_bootstrap_super, '-s', $username1 ]);
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname2 ]);
+$node->run_log(
+ [ 'createuser', '-U', $src_bootstrap_super, '-s', $username2 ]);
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname3 ]);
+$node->run_log(
+ [ 'createuser', '-U', $src_bootstrap_super, '-s', $username3 ]);
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname4 ]);
+$node->run_log(
+ [ 'createuser', '-U', $src_bootstrap_super, '-s', $username4 ]);
+
+
+# For these tests, pg_dumpall -r is used because it produces a short
+# dump.
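+# (-r is --roles-only, so only role definitions are dumped.)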
+$node->command_ok(
+ [
+ 'pg_dumpall', '-r', '-f', $discard, '--dbname',
+ $node->connstr($dbname1),
+ '-U', $username4
+ ],
+ 'pg_dumpall with long ASCII name 1');
+$node->command_ok(
+ [
+ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
+ $node->connstr($dbname2),
+ '-U', $username3
+ ],
+ 'pg_dumpall with long ASCII name 2');
+$node->command_ok(
+ [
+ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
+ $node->connstr($dbname3),
+ '-U', $username2
+ ],
+ 'pg_dumpall with long ASCII name 3');
+$node->command_ok(
+ [
+ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
+ $node->connstr($dbname4),
+ '-U', $username1
+ ],
+ 'pg_dumpall with long ASCII name 4');
+$node->command_ok(
+ [
+ 'pg_dumpall', '-U',
+ $src_bootstrap_super, '--no-sync',
+ '-r', '-l',
+ 'dbname=template1'
+ ],
+ 'pg_dumpall -l accepts connection string');
+
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, "foo\n\rbar" ]);
+
+# Using -r is not sufficient here: with --roles-only the databases are
+# never scanned, so the \n\r database name would not be rejected.
+$node->command_fails(
+ [ 'pg_dumpall', '-U', $src_bootstrap_super, '--no-sync', '-f', $discard ],
+ 'pg_dumpall with \n\r in database name');
+$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, "foo\n\rbar" ]);
+
+
+# make a table, so the parallel worker has something to dump
+$node->safe_psql(
+ $dbname1,
+ 'CREATE TABLE t0()',
+ extra_params => [ '-U', $src_bootstrap_super ]);
+
+# XXX no printed message when this fails, just SIGPIPE termination
+$node->command_ok(
+ [
+ 'pg_dump', '-Fd', '--no-sync', '-j2', '-f', $dirfmt, '-U', $username1,
+ $node->connstr($dbname1)
+ ],
+ 'parallel dump');
+
+# recreate $dbname1 for restore test
+$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, $dbname1 ]);
+$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname1 ]);
+
+$node->command_ok(
+ [
+ 'pg_restore', '-v', '-d', 'template1',
+ '-j2', '-U', $username1, $dirfmt
+ ],
+ 'parallel restore');
+
+$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, $dbname1 ]);
+
+$node->command_ok(
+ [
+ 'pg_restore', '-C', '-v', '-d',
+ 'template1', '-j2', '-U', $username1,
+ $dirfmt
+ ],
+ 'parallel restore with create');
+
+
+$node->command_ok(
+ [ 'pg_dumpall', '--no-sync', '-f', $plain, '-U', $username1 ],
+ 'take full dump');
+system_log('cat', $plain);
+my ($stderr, $result);
+my $restore_super = qq{regress_a'b\\c=d\\ne"f};
+$restore_super =~ s/"//g
+ if
+ $PostgreSQL::Test::Utils::windows_os; # IPC::Run mishandles '"' on Windows
+
+
+# Restore full dump through psql using environment variables for
+# dbname/user connection parameters
+
+my $envar_node = PostgreSQL::Test::Cluster->new('destination_envar');
+$envar_node->init(
+ extra =>
+ [ '-U', $dst_bootstrap_super, '--locale=C', '--encoding=LATIN1' ],
+ auth_extra =>
+ [ '--user', $dst_bootstrap_super, '--create-role', $restore_super ]);
+$envar_node->start;
+
+# make superuser for restore
+$envar_node->run_log(
+ [ 'createuser', '-U', $dst_bootstrap_super, '-s', $restore_super ]);
+
+{
+ local $ENV{PGPORT} = $envar_node->port;
+ local $ENV{PGUSER} = $restore_super;
+ $result = run_log([ 'psql', '-X', '-f', $plain ], '2>', \$stderr);
+}
+ok($result,
+ 'restore full dump using environment variables for connection parameters'
+);
+is($stderr, '', 'no dump errors');
+
+
+# Restore full dump through psql using command-line options for
+# dbname/user connection parameters. "\connect dbname=" forgets
+# user/port from command line.
+
+my $cmdline_node = PostgreSQL::Test::Cluster->new('destination_cmdline');
+$cmdline_node->init(
+ extra =>
+ [ '-U', $dst_bootstrap_super, '--locale=C', '--encoding=LATIN1' ],
+ auth_extra =>
+ [ '--user', $dst_bootstrap_super, '--create-role', $restore_super ]);
+$cmdline_node->start;
+$cmdline_node->run_log(
+ [ 'createuser', '-U', $dst_bootstrap_super, '-s', $restore_super ]);
+{
+ $result = run_log(
+ [
+ 'psql', '-p', $cmdline_node->port, '-U',
+ $restore_super, '-X', '-f', $plain
+ ],
+ '2>',
+ \$stderr);
+}
+ok($result,
+ 'restore full dump with command-line options for connection parameters');
+is($stderr, '', 'no dump errors');
+
+done_testing();