author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 19:09:22 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 19:09:22 +0000
commit     2faa747e2303ee774a4b4aace961188e950e185a (patch)
tree       604e79c7481956ce48f458e3546eaf1090b3ffff /test
parent     Initial commit. (diff)
download   apache2-2faa747e2303ee774a4b4aace961188e950e185a.tar.xz
           apache2-2faa747e2303ee774a4b4aace961188e950e185a.zip

Adding upstream version 2.4.58. (upstream/2.4.58)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test')
-rw-r--r--test/.indent.pro54
-rw-r--r--test/Makefile.in26
-rw-r--r--test/README3
-rw-r--r--test/README.ci163
-rw-r--r--test/README.pytest139
-rw-r--r--test/check_chunked58
-rw-r--r--test/clients/.gitignore1
-rw-r--r--test/clients/Makefile.in20
-rw-r--r--test/clients/h2ws.c1110
-rw-r--r--test/cls.c182
-rw-r--r--test/conftest.py31
-rw-r--r--test/make_sni.sh396
-rw-r--r--test/modules/core/__init__.py1
-rw-r--r--test/modules/core/conftest.py44
-rw-r--r--test/modules/core/test_001_encoding.py93
-rw-r--r--test/modules/http2/.gitignore3
-rw-r--r--test/modules/http2/Makefile.in20
-rw-r--r--test/modules/http2/__init__.py0
-rw-r--r--test/modules/http2/conftest.py40
-rw-r--r--test/modules/http2/env.py169
-rw-r--r--test/modules/http2/htdocs/cgi/alive.json4
-rw-r--r--test/modules/http2/htdocs/cgi/echo.py16
-rw-r--r--test/modules/http2/htdocs/cgi/echohd.py22
-rw-r--r--test/modules/http2/htdocs/cgi/env.py41
-rw-r--r--test/modules/http2/htdocs/cgi/files/empty.txt0
-rw-r--r--test/modules/http2/htdocs/cgi/hecho.py48
-rw-r--r--test/modules/http2/htdocs/cgi/hello.py25
-rw-r--r--test/modules/http2/htdocs/cgi/mnot164.py16
-rw-r--r--test/modules/http2/htdocs/cgi/necho.py56
-rw-r--r--test/modules/http2/htdocs/cgi/requestparser.py57
-rw-r--r--test/modules/http2/htdocs/cgi/ssi/include.inc1
-rw-r--r--test/modules/http2/htdocs/cgi/ssi/test.html9
-rw-r--r--test/modules/http2/htdocs/cgi/upload.py55
-rw-r--r--test/modules/http2/htdocs/cgi/xxx/test.json1
-rw-r--r--test/modules/http2/htdocs/noh2/alive.json5
-rw-r--r--test/modules/http2/htdocs/noh2/index.html9
-rw-r--r--test/modules/http2/mod_h2test/mod_h2test.c588
-rw-r--r--test/modules/http2/mod_h2test/mod_h2test.h21
-rw-r--r--test/modules/http2/test_001_httpd_alive.py22
-rw-r--r--test/modules/http2/test_002_curl_basics.py71
-rw-r--r--test/modules/http2/test_003_get.py265
-rw-r--r--test/modules/http2/test_004_post.py201
-rw-r--r--test/modules/http2/test_005_files.py48
-rw-r--r--test/modules/http2/test_006_assets.py75
-rw-r--r--test/modules/http2/test_007_ssi.py43
-rw-r--r--test/modules/http2/test_008_ranges.py189
-rw-r--r--test/modules/http2/test_009_timing.py74
-rw-r--r--test/modules/http2/test_100_conn_reuse.py57
-rw-r--r--test/modules/http2/test_101_ssl_reneg.py138
-rw-r--r--test/modules/http2/test_102_require.py41
-rw-r--r--test/modules/http2/test_103_upgrade.py118
-rw-r--r--test/modules/http2/test_104_padding.py104
-rw-r--r--test/modules/http2/test_105_timeout.py152
-rw-r--r--test/modules/http2/test_106_shutdown.py75
-rw-r--r--test/modules/http2/test_107_frame_lengths.py51
-rw-r--r--test/modules/http2/test_200_header_invalid.py207
-rw-r--r--test/modules/http2/test_201_header_conditional.py70
-rw-r--r--test/modules/http2/test_202_trailer.py92
-rw-r--r--test/modules/http2/test_203_rfc9113.py56
-rw-r--r--test/modules/http2/test_300_interim.py40
-rw-r--r--test/modules/http2/test_400_push.py200
-rw-r--r--test/modules/http2/test_401_early_hints.py83
-rw-r--r--test/modules/http2/test_500_proxy.py157
-rw-r--r--test/modules/http2/test_501_proxy_serverheader.py36
-rw-r--r--test/modules/http2/test_502_proxy_port.py41
-rw-r--r--test/modules/http2/test_503_proxy_fwd.py79
-rw-r--r--test/modules/http2/test_600_h2proxy.py201
-rw-r--r--test/modules/http2/test_601_h2proxy_twisted.py99
-rw-r--r--test/modules/http2/test_700_load_get.py63
-rw-r--r--test/modules/http2/test_710_load_post_static.py65
-rw-r--r--test/modules/http2/test_711_load_post_cgi.py73
-rw-r--r--test/modules/http2/test_712_buffering.py48
-rw-r--r--test/modules/http2/test_800_websockets.py363
-rw-r--r--test/modules/http2/ws_server.py104
-rw-r--r--test/modules/md/__init__.py0
-rwxr-xr-xtest/modules/md/conftest.py92
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.json6
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.pem54
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/archive/7007-1502285564.org.1/md.json18
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/cert.pem32
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/chain.pem27
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/md.json23
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/pkey.pem52
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/httpd.json6
-rw-r--r--test/modules/md/data/store_migrate/1.0/sample1/md_store.json7
-rw-r--r--test/modules/md/data/test_920/002.pubcert58
-rw-r--r--test/modules/md/data/test_conf_validate/test_014.conf8
-rw-r--r--test/modules/md/data/test_drive/test1.example.org.conf6
-rw-r--r--test/modules/md/data/test_roundtrip/temp.conf27
-rwxr-xr-xtest/modules/md/dns01.py62
-rwxr-xr-xtest/modules/md/http_challenge_foobar.py27
-rwxr-xr-xtest/modules/md/md_acme.py125
-rwxr-xr-xtest/modules/md/md_cert_util.py239
-rwxr-xr-xtest/modules/md/md_certs.py444
-rwxr-xr-xtest/modules/md/md_conf.py83
-rwxr-xr-xtest/modules/md/md_env.py613
-rwxr-xr-xtest/modules/md/message.py26
-rwxr-xr-xtest/modules/md/msg_fail_on.py28
-rwxr-xr-xtest/modules/md/notifail.py16
-rwxr-xr-xtest/modules/md/notify.py18
-rw-r--r--test/modules/md/pebble/pebble-eab.json.template16
-rw-r--r--test/modules/md/pebble/pebble.json.template12
-rw-r--r--test/modules/md/test_001_store.py213
-rw-r--r--test/modules/md/test_010_store_migrate.py43
-rw-r--r--test/modules/md/test_100_reg_add.py152
-rw-r--r--test/modules/md/test_110_reg_update.py273
-rw-r--r--test/modules/md/test_120_reg_list.py87
-rw-r--r--test/modules/md/test_202_acmev2_regs.py132
-rw-r--r--test/modules/md/test_300_conf_validate.py390
-rw-r--r--test/modules/md/test_310_conf_store.py850
-rw-r--r--test/modules/md/test_502_acmev2_drive.py549
-rw-r--r--test/modules/md/test_602_roundtrip.py143
-rw-r--r--test/modules/md/test_702_auto.py756
-rw-r--r--test/modules/md/test_720_wildcard.py254
-rw-r--r--test/modules/md/test_730_static.py117
-rw-r--r--test/modules/md/test_740_acme_errors.py72
-rw-r--r--test/modules/md/test_741_setup_errors.py48
-rw-r--r--test/modules/md/test_750_eab.py337
-rw-r--r--test/modules/md/test_751_sectigo.py181
-rw-r--r--test/modules/md/test_752_zerossl.py202
-rw-r--r--test/modules/md/test_780_tailscale.py186
-rw-r--r--test/modules/md/test_790_failover.py87
-rw-r--r--test/modules/md/test_800_must_staple.py84
-rw-r--r--test/modules/md/test_801_stapling.py391
-rw-r--r--test/modules/md/test_810_ec.py153
-rw-r--r--test/modules/md/test_820_locks.py73
-rw-r--r--test/modules/md/test_900_notify.py122
-rw-r--r--test/modules/md/test_901_message.py297
-rw-r--r--test/modules/md/test_910_cleanups.py54
-rw-r--r--test/modules/md/test_920_status.py245
-rw-r--r--test/modules/proxy/__init__.py0
-rw-r--r--test/modules/proxy/conftest.py51
-rw-r--r--test/modules/proxy/env.py54
-rw-r--r--test/modules/proxy/test_01_http.py96
-rw-r--r--test/modules/proxy/test_02_unix.py187
-rw-r--r--test/modules/tls/__init__.py0
-rw-r--r--test/modules/tls/conf.py61
-rw-r--r--test/modules/tls/conftest.py39
-rw-r--r--test/modules/tls/env.py190
-rw-r--r--test/modules/tls/htdocs/a.mod-tls.test/index.json3
-rwxr-xr-xtest/modules/tls/htdocs/a.mod-tls.test/vars.py56
-rwxr-xr-xtest/modules/tls/htdocs/b.mod-tls.test/dir1/vars.py23
-rw-r--r--test/modules/tls/htdocs/b.mod-tls.test/index.json3
-rwxr-xr-xtest/modules/tls/htdocs/b.mod-tls.test/resp-jitter.py23
-rwxr-xr-xtest/modules/tls/htdocs/b.mod-tls.test/vars.py56
-rw-r--r--test/modules/tls/htdocs/index.html9
-rw-r--r--test/modules/tls/htdocs/index.json3
-rw-r--r--test/modules/tls/test_01_apache.py14
-rw-r--r--test/modules/tls/test_02_conf.py138
-rw-r--r--test/modules/tls/test_03_sni.py71
-rw-r--r--test/modules/tls/test_04_get.py67
-rw-r--r--test/modules/tls/test_05_proto.py64
-rw-r--r--test/modules/tls/test_06_ciphers.py209
-rw-r--r--test/modules/tls/test_07_alpn.py43
-rw-r--r--test/modules/tls/test_08_vars.py60
-rw-r--r--test/modules/tls/test_09_timeout.py43
-rw-r--r--test/modules/tls/test_10_session_id.py50
-rw-r--r--test/modules/tls/test_11_md.py37
-rw-r--r--test/modules/tls/test_12_cauth.py235
-rw-r--r--test/modules/tls/test_13_proxy.py40
-rw-r--r--test/modules/tls/test_14_proxy_ssl.py78
-rw-r--r--test/modules/tls/test_15_proxy_tls.py86
-rw-r--r--test/modules/tls/test_16_proxy_mixed.py47
-rw-r--r--test/modules/tls/test_17_proxy_machine_cert.py69
-rw-r--r--test/pyhttpd/__init__.py0
-rw-r--r--test/pyhttpd/certs.py476
-rw-r--r--test/pyhttpd/conf.py188
-rw-r--r--test/pyhttpd/conf/httpd.conf.template60
-rw-r--r--test/pyhttpd/conf/mime.types1588
-rw-r--r--test/pyhttpd/conf/stop.conf.template46
-rw-r--r--test/pyhttpd/conf/test.conf1
-rw-r--r--test/pyhttpd/config.ini.in32
-rw-r--r--test/pyhttpd/curl.py138
-rw-r--r--test/pyhttpd/env.py893
-rw-r--r--test/pyhttpd/htdocs/alive.json4
-rw-r--r--test/pyhttpd/htdocs/forbidden.html11
-rw-r--r--test/pyhttpd/htdocs/index.html9
-rw-r--r--test/pyhttpd/htdocs/test1/001.html10
-rw-r--r--test/pyhttpd/htdocs/test1/002.jpgbin0 -> 90364 bytes
-rw-r--r--test/pyhttpd/htdocs/test1/003.html11
-rw-r--r--test/pyhttpd/htdocs/test1/003/003_img.jpgbin0 -> 90364 bytes
-rw-r--r--test/pyhttpd/htdocs/test1/004.html23
-rw-r--r--test/pyhttpd/htdocs/test1/004/gophertiles.jpgbin0 -> 742 bytes
-rw-r--r--test/pyhttpd/htdocs/test1/006.html23
-rw-r--r--test/pyhttpd/htdocs/test1/006/006.css21
-rw-r--r--test/pyhttpd/htdocs/test1/006/006.js31
-rw-r--r--test/pyhttpd/htdocs/test1/006/header.html1
-rw-r--r--test/pyhttpd/htdocs/test1/007.html21
-rw-r--r--test/pyhttpd/htdocs/test1/007/007.py29
-rw-r--r--test/pyhttpd/htdocs/test1/009.py21
-rw-r--r--test/pyhttpd/htdocs/test1/alive.json5
-rw-r--r--test/pyhttpd/htdocs/test1/index.html46
-rwxr-xr-xtest/pyhttpd/htdocs/test2/006/006.css21
-rw-r--r--test/pyhttpd/htdocs/test2/10%abnormal.txt0
-rw-r--r--test/pyhttpd/htdocs/test2/alive.json4
-rw-r--r--test/pyhttpd/htdocs/test2/x%2f.test0
-rw-r--r--test/pyhttpd/log.py163
-rw-r--r--test/pyhttpd/mod_aptest/mod_aptest.c66
-rw-r--r--test/pyhttpd/nghttp.py304
-rw-r--r--test/pyhttpd/result.py92
-rw-r--r--test/pyhttpd/ws_util.py137
-rw-r--r--test/tcpdumpscii.txt50
-rw-r--r--test/test-writev.c101
-rw-r--r--test/test_find.c78
-rw-r--r--test/test_limits.c200
-rw-r--r--test/test_parser.c75
-rw-r--r--test/test_select.c46
-rwxr-xr-xtest/test_travis_conditions.sh42
-rw-r--r--test/time-sem.c593
-rw-r--r--test/travis_Dockerfile_slapd9
-rw-r--r--test/travis_Dockerfile_slapd.centos75
-rwxr-xr-xtest/travis_before_linux.sh178
-rwxr-xr-xtest/travis_run_linux.sh268
213 files changed, 24269 insertions, 0 deletions
diff --git a/test/.indent.pro b/test/.indent.pro
new file mode 100644
index 0000000..a9fbe9f
--- /dev/null
+++ b/test/.indent.pro
@@ -0,0 +1,54 @@
+-i4 -npsl -di0 -br -nce -d0 -cli0 -npcs -nfc1
+-TBUFF
+-TFILE
+-TTRANS
+-TUINT4
+-T_trans
+-Tallow_options_t
+-Tapache_sfio
+-Tarray_header
+-Tbool_int
+-Tbuf_area
+-Tbuff_struct
+-Tbuffy
+-Tcmd_how
+-Tcmd_parms
+-Tcommand_rec
+-Tcommand_struct
+-Tconn_rec
+-Tcore_dir_config
+-Tcore_server_config
+-Tdir_maker_func
+-Tevent
+-Tglobals_s
+-Thandler_func
+-Thandler_rec
+-Tjoblist_s
+-Tlisten_rec
+-Tmerger_func
+-Tmode_t
+-Tmodule
+-Tmodule_struct
+-Tmutex
+-Tn_long
+-Tother_child_rec
+-Toverrides_t
+-Tparent_score
+-Tpid_t
+-Tpiped_log
+-Tpool
+-Trequest_rec
+-Trequire_line
+-Trlim_t
+-Tscoreboard
+-Tsemaphore
+-Tserver_addr_rec
+-Tserver_rec
+-Tserver_rec_chain
+-Tshort_score
+-Ttable
+-Ttable_entry
+-Tthread
+-Tu_wide_int
+-Tvtime_t
+-Twide_int
diff --git a/test/Makefile.in b/test/Makefile.in
new file mode 100644
index 0000000..d9ad04d
--- /dev/null
+++ b/test/Makefile.in
@@ -0,0 +1,26 @@
+
+# no targets: we don't want to build anything by default. if you want the
+# test programs, then "make test"
+TARGETS =
+
+bin_PROGRAMS =
+
+PROGRAM_LDADD = $(EXTRA_LDFLAGS) $(PROGRAM_DEPENDENCIES) $(EXTRA_LIBS)
+PROGRAM_DEPENDENCIES = \
+ $(top_srcdir)/srclib/apr-util/libaprutil.la \
+ $(top_srcdir)/srclib/apr/libapr.la
+
+include $(top_builddir)/build/rules.mk
+
+test: $(bin_PROGRAMS)
+
+# example for building a test proggie
+# dbu_OBJECTS = dbu.lo
+# dbu: $(dbu_OBJECTS)
+# $(LINK) $(dbu_OBJECTS) $(PROGRAM_LDADD)
+
+clean:
+ rm -rf gen
+
+distclean:
+	rm -f pyhttpd/config.ini
\ No newline at end of file
diff --git a/test/README b/test/README
new file mode 100644
index 0000000..9f8be50
--- /dev/null
+++ b/test/README
@@ -0,0 +1,3 @@
+This directory contains useful test code for testing various bits
+of Apache functionality. This stuff is for the developers only,
+so we might remove it on public releases.
diff --git a/test/README.ci b/test/README.ci
new file mode 100644
index 0000000..43600b0
--- /dev/null
+++ b/test/README.ci
@@ -0,0 +1,163 @@
+
+Variables
+---------
+
+The CI scripts use the following environment variables:
+
+* APR_VERSION - if set, APR of this version is built and installed in
+ $HOME/root/apr-$APR_VERSION - a value of "trunk" means trunk is
+ used, "*.x" means a branch, otherwise a tagged version is implied.
+
+* APR_CONFIG - arguments to pass when running APR's configure script
+ if APR_VERSION is set
+
+* APU_VERSION - if set, APR-util of this version is built and
+ installed in $HOME/root/apr-util-$APU_VERSION - a value of "*.x"
+ means a branch, otherwise a tagged version is implied. (Since there
+ is no "trunk" for apr-util, that value cannot be used here.)
+
+* APU_CONFIG - arguments to pass when running APR-util's configure
+ script if APU_VERSION is set
+
+* CONFIG - arguments to pass to httpd's configure script.
+
+* BUILDCONFIG - arguments to pass when running httpd's ./buildconf script
+
+* MFLAGS - arguments to pass when running "make" for httpd.
+
+* SKIP_TESTING - if set, the Perl test framework is not run for the
+ build.
+
+* TEST_UBSAN - set for job using UBSan ("Undefined Behaviour Sanitizer")
+
+* TEST_MALLOC - set for job using enhanced malloc debugging.
+
+* TEST_INSTALL - set for job testing "make install"
+
+* TEST_VPATH - set for job testing srcdir!=builddir
+
+* TEST_LDAP - set for job with slapd, running LDAP tests
+
+* TEST_SSL - set for job with SSL/TLS testing variants
+
+* TESTS - a list of Perl framework tests to run
+
+* TEST_ARGS - arguments to pass to ./t/TEST in the Perl test framework
+
+* CLEAR_CACHE - if set, the cached $HOME/root is removed before each build
+
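+For a rough idea of how these fit together: the scripts read these
+variables from the environment, so a local run can export them before
+invoking the CI scripts, e.g. (values are only illustrative):
+
+  $ export APR_VERSION=1.7.4 APU_VERSION=1.6.3
+  $ export CONFIG="--enable-mods-shared=reallyall"
+  $ ./test/travis_before_linux.sh && ./test/travis_run_linux.sh
+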
+Caching -- NOTE, BROKEN IN GITHUB ACTIONS --
+-------
+
+Perl modules installed in $HOME/perl5 are cached.
+
+Anything installed into the $HOME/root directory is cached - notably,
+versions of APR/APR-util are installed here and cached across httpd
+build jobs without needing to be rebuilt every time.
+
+The cached installs of APR/APR-util are refreshed if the
+last-changed-revision of the build is stale.
+
+If APR_VERSION and APU_VERSION are both set to 1.x versions, then
+CLEAR_CACHE should also be set to disable APR* caching. APR-util can
+only be rebuilt if an APR checkout is present, so a APR-util cannot be
+built from source alone. (i.e. the scripts do not handle the case of
+cached, fresh APR plus a cached but stale APR-util)
+
+Travis to Github Actions Migration TODO
+---------------------------------------
+
+* better path filtering so e.g. CHANGES changes don't trigger CI
+* support branch conditionals again (some tests are 2.4.x only, some trunk only)
+* make caching work properly for APR + CPAN modules
+ - this is using the wrong model at the moment
+ - the cache key needs to be based off (source code, job configuration)
+ - rather than done on the fly in test/travis_before_linux.sh
+ - pebble + Rustls builds should also be cached
+* turn on failure notifications?
+* test across different Ubuntu versions again
+ - and test against OpenSSL 1.x since we're now ONLY building against 3.x
+* update the docs below for testing from PRs/feature branches
+* introduce some job ordering rather than having a flat/concurrent
+  set: if the default "./configure && make && test" works, *then* start
+  jobs doing 200 different variations on ./configure --enable-XXX,
+  i.e. stop burning CPU time on a typo which breaks every job
+
+TODO list
+---------
+
+* non-x86 builds
+* MacOS build
+* Windows build
+* clang-on-Linux build
+* Use containers for non-Ubuntu-based Linux testing
+
+Known Failures
+--------------
+
+Some known failures:
+
+* prefork testing, and more rarely non-prefork testing, sometimes
+  catches child segfaults under pool-debug from assert()s in (e.g.)
+ __pthread_tpp_change_priority where one child is destroying threads
+ which another is waiting for, or iterating through via
+ apr_pool_walk_tree().
+
+ See dev@httpd threads:
+ msg <5f4abde1b5789_13fde2ecacb40795a1@travis-tasks-5b566d48fc-drkb9.mail>
+ msg <73060f7b-df7f-ad3c-a9fa-dd666a59b31e@kippdata.de> and
+ https://bz.apache.org/bugzilla/show_bug.cgi?id=63098
+ https://bz.apache.org/bugzilla/show_bug.cgi?id=46185
+
+ Not clear if there is a real bug here which can be reproduced
+ outside of pool-debug.
+
+Testing from a Feature Branch [*** N/A FOR GITHUB ACTIONS ***]
+-----------------------------
+
+An SVN branch off trunk should be mirrored to github, and will be
+tested in the same way that trunk is in CI, so this workflow is
+available for those familiar with using Subversion and the standard
+ASF/httpd repository layout.
+
+Tested branches are listed at: https://travis-ci.com/github/apache/httpd/branches
+
+Travis will also run the tests for a PR filed against the httpd Github
+repository at https://github.com/apache/httpd or from a fork of this
+repository if enabled for the Travis user.
+
+A workflow to enable testing would be as follows, substituting
+$USERNAME for your github username:
+
+ $ git clone https://github.com/apache/httpd
+ $ cd httpd
+ $ git remote add $USERNAME git@github.com:$USERNAME/httpd.git
+ $ git checkout -b my-feature origin/trunk
+ ... do some work ...
+ $ git commit ...
+ $ git push -u $USERNAME my-feature:my-feature
+
+To enable testing for a fork, visit the settings page at
+https://travis-ci.com/$USERNAME/httpd/settings - you may need to sync
+your account via https://travis-ci.com/account/repositories for a
+freshly created fork.
+
+To create a Pull Request, go to the URL produced in the "git push"
+command output when pushing to your fork, which is something like:
+https://github.com/apache/httpd/compare/trunk...$USERNAME:trunk
+
+Once a PR has been created, Travis will run the tests and link to the
+results from a PR comment. All tested PRs are listed here:
+https://travis-ci.com/github/apache/httpd/pull_requests
+
+To merge from github back to SVN trunk, create a patch from e.g.:
+
+ $ git diff origin/trunk..my-feature
+
+and then apply it in SVN. To rebase a feature once trunk has
+diverged, from a feature branch run:
+
+ $ git pull
+ $ git rebase -i origin/trunk
+
+and follow the standard rebase steps.
diff --git a/test/README.pytest b/test/README.pytest
new file mode 100644
index 0000000..474030b
--- /dev/null
+++ b/test/README.pytest
@@ -0,0 +1,139 @@
+Apache httpd pytest suite
+=========================
+This suite uses pytest (<https://docs.pytest.org/en/6.2.x/>) and Python >= 3.8
+for more flexible testing of Apache httpd.
+
+Install
+-------
+If not already installed, you will need to install 'pytest' and 'OpenSSL' for
+python:
+> apt install python3-pip
+> pip install -U pytest
+> pip install -U pyopenssl
+
+And for 'h2load':
+> apt install nghttp2-client
+
+
+Usage
+-----
+In your httpd source checkout, do:
+
+> make install
+> pytest
+
+and all defined tests run against the installed executables and modules.
+> pytest test/modules/core
+runs all the core tests. You can also select tests by name:
+> pytest -k test_core
+runs all test cases whose names start with 'test_core'. Similarly:
+> pytest -k test_core_001_04
+runs the test cases starting with that prefix.
+
+Test output gets more verbose by adding one or more '-v' flags. This
+also raises the error log level of the tested modules.
+> pytest -vvv -k test_h2_004_01
+runs that specific test with mod_http2 at log level TRACE2.
+
+By default, test cases will configure httpd with mpm_event. You
+can change that with the invocation:
+> MPM=worker pytest test/modules/http2
+
+Some tests rely on additional tools installed in your environment
+and will 'skip' if those are not present. In a non-verbose run,
+these appear as 's' in the output. If you run pytest more
+verbosely, the skipped test cases will mention the reason why
+they were disabled.
+
+For example, most tests in test/modules/md require an installation
+of 'pebble', an ACME test server, and look for it in $PATH.
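+
+To see which cases were skipped and why, you can add pytest's standard
+skip report to any run:
+> pytest -rs test/modules/md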
+
+
+Workings
+--------
+All tests start httpd on their own and try hard to shut it down
+afterwards. You can abort tests with ^C and they clean up.
+
+httpd is started/stopped repeatedly during testing as configurations
+for individual test cases are changed. This is a main difference from
+the Perl test framework, which starts the server once with all the
+configuration combinations that its tests need.
+
+In test/gen/apache a server root is created with config, logs and htdocs
+directories. test/gen/apache/logs/error_log will be the log.
+Configs start in test/gen/apache/conf/httpd.conf. modules.conf is
+dynamically created for the list of modules that a test suite needs.
+
+Test cases write their specific setup in test.conf and reload/restart
+the httpd process. This makes for a small configuration in a test case.
+
+
+Development
+-----------
+
+Adding a test in an existing file is done by adding a method. Its name
+must start with 'test_' and the common practice is to have the name
+of the test suite there as well. All http2 tests start with 'test_h2_'.
+
+Anything may follow this prefix. If you give the test cases for a
+certain feature a common prefix, it is easier to invoke just
+them using the '-k' selector on the command line.
+
+You can also add a new file to a test suite, if you write a new
+range of test cases that does not fit anywhere else. A very simple
+one is found in test/modules/http2/test_001_httpd_alive.py.
+
+There is a Python class defined with 2 methods. One is the test
+method itself and the other one
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ code
+
+is marked as a pytest 'fixture'. This is some pytest magic.
+'autouse=True' means that this fixture is run, even though
+no test case uses it directly. scope='class' means that it
+is run once for all test cases in this class.
+
+As you see, this fixture gets a parameter named 'env' and
+that is the name of another pytest fixture, defined in the
+file 'conftest.py' in the same directory.
+
+ @pytest.fixture(scope="package")
+ def env(pytestconfig) -> H2TestEnv:
+ code
+
+This one runs once per 'package', meaning once for all test
+cases in this directory. It gets 'pytestconfig' as a
+parameter, which is a standard pytest fixture.
+
+So, when you run 'pytest -k test_h2_004', pytest will look
+at _all_ test cases defined and collect those with that
+prefix. For each directory with matching test cases, it will
+process the 'conftest.py', bootstrapping the 'env' fixture,
+and then process the files with active test cases.
+
+As a result, if you invoke just a single test case, only
+the fixtures needed for that test case are created. This
+gives good turnaround times when debugging a test case.
+
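+For illustration, a minimal test file in this style could look as
+follows. The 'env' helpers used here (mkurl(), curl_get(),
+apache_restart()) come from test/pyhttpd/env.py; treat the exact calls
+as a sketch rather than the literal content of an existing test file:
+
+    import pytest
+
+
+    class TestExample:
+
+        @pytest.fixture(autouse=True, scope='class')
+        def _class_scope(self, env):
+            # class-wide setup: make sure httpd is (re)started once
+            # before any test case in this class runs
+            assert env.apache_restart() == 0
+
+        def test_example_001(self, env):
+            # request a file from one of the preconfigured test vhosts
+            # and check that the server answers
+            url = env.mkurl("https", "test1", "/index.html")
+            r = env.curl_get(url, 5)
+            assert r.response["status"] == 200
+
+Such a case can then be run selectively with '> pytest -k test_example_001'.
+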
+If you want to add a new test suite, create a new directory.
+Add the files '__init__.py', 'conftest.py' and a first
+'test_suitename_something.py'. test/modules/core is the
+simplest example. 'test/modules/http2' shows how you load
+other modules. 'test/modules/md' checks and starts external
+processes (an ACME test server).
+
+
+Infrastructure
+--------------
+The test cases rely on the classes provided in 'test/pyhttpd'
+for common code to manage an httpd test instance; these classes
+provide some base setups:
+- a small set of virtual hosts with some files
+- access to paths and definitions (env fixture)
+- start/stop httpd and inspect the error log
+- run clients like curl and nghttp
+- create certificates for your hosts and make curl use
+ the root certs (so no --insecure calls by curl).
+
diff --git a/test/check_chunked b/test/check_chunked
new file mode 100644
index 0000000..50c56eb
--- /dev/null
+++ b/test/check_chunked
@@ -0,0 +1,58 @@
+#!/usr/bin/perl -w
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# This is meant to be used on the raw output of an HTTP/1.1 connection
+# to check that the chunks are all correctly laid out. It's easiest
+# to use a tool like netcat to generate the output. This script
+# *insists* that \r exist in the output.
+#
+# You can find netcat at avian.org:/src/hacks/nc110.tgz.
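+#
+# For example (illustrative only; any client that writes the raw,
+# unmodified response bytes to stdout will do):
+#
+#   printf 'GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n' \
+#       | nc localhost 80 | ./check_chunked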
+
+use strict;
+
+my $is_chunked = 0;
+
+# must toss headers
+while(<>) {
+ if (/^Transfer-Encoding:\s+chunked/i) {
+ $is_chunked = 1;
+ }
+ last if ($_ eq "\r\n");
+}
+
+$is_chunked || die "wasn't chunked\n";
+
+for(;;) {
+ $_ = <> || die "unexpected end of file!\n";
+
+ m#^([0-9a-f]+) *\r$#i || die "bogus chunklen: $_";
+
+ my $chunklen = hex($1);
+
+ exit 0 if ($chunklen == 0);
+
+ chop; chop;
+ print "$_ ";
+
+ my $data = '';
+ read(ARGV, $data, $chunklen) == $chunklen || die "short read!\n";
+
+ $_ = <> || die "unexpected end of file!\n";
+
+ $_ eq "\r\n" || die "missing chunk trailer!\n";
+}
diff --git a/test/clients/.gitignore b/test/clients/.gitignore
new file mode 100644
index 0000000..18b1263
--- /dev/null
+++ b/test/clients/.gitignore
@@ -0,0 +1 @@
+h2ws
\ No newline at end of file
diff --git a/test/clients/Makefile.in b/test/clients/Makefile.in
new file mode 100644
index 0000000..a322a58
--- /dev/null
+++ b/test/clients/Makefile.in
@@ -0,0 +1,20 @@
+DISTCLEAN_TARGETS = h2ws
+
+CLEAN_TARGETS = h2ws
+
+bin_PROGRAMS = h2ws
+TARGETS = $(bin_PROGRAMS)
+
+PROGRAM_LDADD = $(UTIL_LDFLAGS) $(PROGRAM_DEPENDENCIES) $(EXTRA_LIBS) $(AP_LIBS)
+PROGRAM_DEPENDENCIES =
+
+include $(top_builddir)/build/rules.mk
+
+h2ws.lo: h2ws.c
+ $(LIBTOOL) --mode=compile $(CC) $(ab_CFLAGS) $(ALL_CFLAGS) $(ALL_CPPFLAGS) \
+ $(ALL_INCLUDES) $(PICFLAGS) $(LTCFLAGS) -c $< && touch $@
+h2ws_OBJECTS = h2ws.lo
+h2ws_LDADD = -lnghttp2
+h2ws: $(h2ws_OBJECTS)
+ $(LIBTOOL) --mode=link $(CC) $(ALL_CFLAGS) $(PILDFLAGS) \
+ $(LT_LDFLAGS) $(ALL_LDFLAGS) -o $@ $(h2ws_LTFLAGS) $(h2ws_OBJECTS) $(h2ws_LDADD)
diff --git a/test/clients/h2ws.c b/test/clients/h2ws.c
new file mode 100644
index 0000000..1de3876
--- /dev/null
+++ b/test/clients/h2ws.c
@@ -0,0 +1,1110 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <apr.h>
+
+#include <assert.h>
+#include <inttypes.h>
+#include <stdlib.h>
+#ifdef APR_HAVE_UNISTD_H
+# include <unistd.h>
+#endif /* HAVE_UNISTD_H */
+#ifdef APR_HAVE_FCNTL_H
+# include <fcntl.h>
+#endif /* HAVE_FCNTL_H */
+#include <sys/types.h>
+#include <sys/time.h>
+#ifdef APR_HAVE_SYS_SOCKET_H
+# include <sys/socket.h>
+#endif /* HAVE_SYS_SOCKET_H */
+#ifdef APR_HAVE_NETDB_H
+# include <netdb.h>
+#endif /* HAVE_NETDB_H */
+#ifdef APR_HAVE_NETINET_IN_H
+# include <netinet/in.h>
+#endif /* HAVE_NETINET_IN_H */
+#include <netinet/tcp.h>
+#include <poll.h>
+#include <signal.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <errno.h>
+
+#include <nghttp2/nghttp2.h>
+
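+/* Helpers to fill nghttp2_nv name/value pairs: MAKE_NV expects string
+ * literals for both name and value (lengths computed via sizeof at
+ * compile time), MAKE_NV_CS takes a runtime value string and uses
+ * strlen() for its length. */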
+#define MAKE_NV(NAME, VALUE) \
+ { \
+ (uint8_t *)NAME, (uint8_t *)VALUE, sizeof(NAME) - 1, sizeof(VALUE) - 1, \
+ NGHTTP2_NV_FLAG_NONE \
+ }
+
+#define MAKE_NV_CS(NAME, VALUE) \
+ { \
+ (uint8_t *)NAME, (uint8_t *)VALUE, sizeof(NAME) - 1, strlen(VALUE), \
+ NGHTTP2_NV_FLAG_NONE \
+ }
+
+
+static int verbose;
+static const char *cmd;
+
+static void log_out(const char *level, const char *where, const char *msg)
+{
+ struct timespec tp;
+ struct tm tm;
+ char timebuf[128];
+
+ clock_gettime(CLOCK_REALTIME, &tp);
+ localtime_r(&tp.tv_sec, &tm);
+ strftime(timebuf, sizeof(timebuf)-1, "%H:%M:%S", &tm);
+ fprintf(stderr, "[%s.%09lu][%s][%s] %s\n", timebuf, tp.tv_nsec, level, where, msg);
+}
+
+static void log_err(const char *where, const char *msg)
+{
+ log_out("ERROR", where, msg);
+}
+
+static void log_info(const char *where, const char *msg)
+{
+ if (verbose)
+ log_out("INFO", where, msg);
+}
+
+static void log_debug(const char *where, const char *msg)
+{
+ if (verbose > 1)
+ log_out("DEBUG", where, msg);
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_errf(const char *where, const char *msg, ...)
+{
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_err(where, buffer);
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_infof(const char *where, const char *msg, ...)
+{
+ if (verbose) {
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_info(where, buffer);
+ }
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_debugf(const char *where, const char *msg, ...)
+{
+ if (verbose > 1) {
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_debug(where, buffer);
+ }
+}
+
+static int parse_host_port(const char **phost, uint16_t *pport,
+ int *pipv6, size_t *pconsumed,
+ const char *s, size_t len, uint16_t def_port)
+{
+ size_t i, offset;
+ char *host = NULL;
+ int port = 0;
+ int rv = 1, ipv6 = 0;
+
+ if (!len)
+ goto leave;
+ offset = 0;
+ if (s[offset] == '[') {
+ ipv6 = 1;
+ for (i = offset++; i < len; ++i) {
+ if (s[i] == ']')
+ break;
+ }
+ if (i >= len || i == offset)
+ goto leave;
+ host = strndup(s + offset, i - offset);
+ offset = i + 1;
+ }
+ else {
+ for (i = offset; i < len; ++i) {
+ if (strchr(":/?#", s[i]))
+ break;
+ }
+ if (i == offset) {
+ log_debugf("parse_uri", "empty host name in '%.*s'", (int)len, s);
+ goto leave;
+ }
+ host = strndup(s + offset, i - offset);
+ offset = i;
+ }
+ if (offset < len && s[offset] == ':') {
+ port = 0;
+ ++offset;
+ for (i = offset; i < len; ++i) {
+ if (strchr("/?#", s[i]))
+ break;
+ if (s[i] < '0' || s[i] > '9') {
+ log_debugf("parse_uri", "invalid port char '%c'", s[i]);
+ goto leave;
+ }
+ port *= 10;
+ port += s[i] - '0';
+ if (port > 65535) {
+ log_debugf("parse_uri", "invalid port number '%d'", port);
+ goto leave;
+ }
+ }
+ offset = i;
+ }
+ rv = 0;
+
+leave:
+ *phost = rv? NULL : host;
+ *pport = rv? 0 : (port? (uint16_t)port : def_port);
+ if (pipv6)
+ *pipv6 = ipv6;
+ if (pconsumed)
+ *pconsumed = offset;
+ return rv;
+}
+
+struct uri {
+ const char *scheme;
+ const char *host;
+ const char *authority;
+ const char *path;
+ uint16_t port;
+ int ipv6;
+};
+
+static int parse_uri(struct uri *uri, const char *s, size_t len)
+{
+ char tmp[8192];
+ size_t n, offset = 0;
+ uint16_t def_port = 0;
+ int rv = 1;
+
+ /* NOT A REAL URI PARSER */
+ memset(uri, 0, sizeof(*uri));
+ if (len > 5 && !memcmp("ws://", s, 5)) {
+ uri->scheme = "ws";
+ def_port = 80;
+ offset = 5;
+ }
+ else if (len > 6 && !memcmp("wss://", s, 6)) {
+ uri->scheme = "wss";
+ def_port = 443;
+ offset = 6;
+ }
+ else {
+ /* not a scheme we process */
+ goto leave;
+ }
+
+ if (parse_host_port(&uri->host, &uri->port, &uri->ipv6, &n, s + offset,
+ len - offset, def_port))
+ goto leave;
+ offset += n;
+
+ if (uri->port == def_port)
+ uri->authority = uri->host;
+ else if (uri->ipv6) {
+ snprintf(tmp, sizeof(tmp), "[%s]:%u", uri->host, uri->port);
+ uri->authority = strdup(tmp);
+ }
+ else {
+ snprintf(tmp, sizeof(tmp), "%s:%u", uri->host, uri->port);
+ uri->authority = strdup(tmp);
+ }
+
+ if (offset < len) {
+ uri->path = strndup(s + offset, len - offset);
+ }
+ rv = 0;
+
+leave:
+ return rv;
+}
+
+static int sock_nonblock_nodelay(int fd) {
+ int flags, rv;
+ int val = 1;
+
+ while ((flags = fcntl(fd, F_GETFL, 0)) == -1 && errno == EINTR)
+ ;
+ if (flags == -1) {
+ log_errf("sock_nonblock_nodelay", "fcntl get error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ while ((rv = fcntl(fd, F_SETFL, flags | O_NONBLOCK)) == -1 && errno == EINTR)
+ ;
+ if (rv == -1) {
+ log_errf("sock_nonblock_nodelay", "fcntl set error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ rv = setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, &val, (socklen_t)sizeof(val));
+ if (rv == -1) {
+ log_errf("sock_nonblock_nodelay", "set nodelay error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ return 0;
+}
+
+static int open_connection(const char *host, uint16_t port)
+{
+ char service[NI_MAXSERV];
+ struct addrinfo hints;
+ struct addrinfo *res = NULL, *rp;
+ int rv, fd = -1;
+
+ memset(&hints, 0, sizeof(hints));
+ snprintf(service, sizeof(service), "%u", port);
+ hints.ai_family = AF_UNSPEC;
+ hints.ai_socktype = SOCK_STREAM;
+ rv = getaddrinfo(host, service, &hints, &res);
+ if (rv) {
+ log_err("getaddrinfo", gai_strerror(rv));
+ goto leave;
+ }
+
+ for (rp = res; rp; rp = rp->ai_next) {
+ fd = socket(rp->ai_family, rp->ai_socktype, rp->ai_protocol);
+ if (fd == -1) {
+ continue;
+ }
+ while ((rv = connect(fd, rp->ai_addr, rp->ai_addrlen)) == -1 &&
+ errno == EINTR)
+ ;
+ if (!rv) /* connected */
+ break;
+ close(fd);
+ fd = -1;
+ }
+
+leave:
+ if (res)
+ freeaddrinfo(res);
+ return fd;
+}
+
+struct h2_stream;
+
+#define IO_WANT_NONE 0
+#define IO_WANT_READ 1
+#define IO_WANT_WRITE 2
+
+struct h2_session {
+ const char *server_name;
+ const char *connect_host;
+ uint16_t connect_port;
+ int fd;
+ nghttp2_session *ngh2;
+ struct h2_stream *streams;
+ int aborted;
+ int want_io;
+};
+
+typedef void h2_stream_closed_cb(struct h2_stream *stream);
+typedef void h2_stream_recv_data(struct h2_stream *stream,
+ const uint8_t *data, size_t len);
+
+struct h2_stream {
+ struct h2_stream *next;
+ struct uri *uri;
+ int32_t id;
+ int fdin;
+ int http_status;
+ uint32_t error_code;
+ unsigned input_closed : 1;
+ unsigned closed : 1;
+ unsigned reset : 1;
+ h2_stream_closed_cb *on_close;
+ h2_stream_recv_data *on_recv_data;
+};
+
+static void h2_session_stream_add(struct h2_session *session,
+ struct h2_stream *stream)
+{
+ struct h2_stream *s;
+ for (s = session->streams; s; s = s->next) {
+ if (s == stream) /* already there? */
+ return;
+ }
+ stream->next = session->streams;
+ session->streams = stream;
+}
+
+static void h2_session_stream_remove(struct h2_session *session,
+ struct h2_stream *stream)
+{
+ struct h2_stream *s, **pnext;
+ pnext = &session->streams;
+ s = session->streams;
+ while (s) {
+ if (s == stream) {
+ *pnext = s->next;
+ s->next = NULL;
+ break;
+ }
+ pnext = &s->next;
+ s = s->next;
+ }
+}
+
+static struct h2_stream *h2_session_stream_get(struct h2_session *session,
+ int32_t id)
+{
+ struct h2_stream *s;
+ for (s = session->streams; s; s = s->next) {
+ if (s->id == id)
+ return s;
+ }
+ return NULL;
+}
+
+static ssize_t h2_session_send(nghttp2_session *ngh2, const uint8_t *data,
+ size_t length, int flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ ssize_t nwritten;
+ (void)ngh2;
+ (void)flags;
+
+ session->want_io = IO_WANT_NONE;
+ nwritten = send(session->fd, data, length, 0);
+ if (nwritten < 0) {
+ int err = errno;
+ if ((EWOULDBLOCK == err) || (EAGAIN == err) ||
+ (EINTR == err) || (EINPROGRESS == err)) {
+ return NGHTTP2_ERR_WOULDBLOCK;
+ }
+ log_errf("h2_session_send", "error sending %ld bytes: %d (%s)",
+ (long)length, err, strerror(err));
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ return nwritten;
+}
+
+static ssize_t h2_session_recv(nghttp2_session *ngh2, uint8_t *buf,
+ size_t length, int flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ ssize_t nread;
+ (void)ngh2;
+ (void)flags;
+
+ session->want_io = IO_WANT_NONE;
+ nread = recv(session->fd, buf, length, 0);
+ if (nread < 0) {
+ int err = errno;
+ if ((EWOULDBLOCK == err) || (EAGAIN == err) || (EINTR == err)) {
+ return NGHTTP2_ERR_WOULDBLOCK;
+ }
+ log_errf("h2_session_recv", "error reading %ld bytes: %d (%s)",
+ (long)length, err, strerror(err));
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ return nread;
+}
+
+static int h2_session_on_frame_send(nghttp2_session *session,
+ const nghttp2_frame *frame,
+ void *user_data)
+{
+ size_t i;
+ (void)user_data;
+
+ switch (frame->hd.type) {
+ case NGHTTP2_HEADERS:
+ if (nghttp2_session_get_stream_user_data(session, frame->hd.stream_id)) {
+ const nghttp2_nv *nva = frame->headers.nva;
+ log_infof("frame send", "FRAME[HEADERS, stream=%d",
+ frame->hd.stream_id);
+ for (i = 0; i < frame->headers.nvlen; ++i) {
+ log_infof("frame send", " %.*s: %.*s",
+ (int)nva[i].namelen, nva[i].name,
+ (int)nva[i].valuelen, nva[i].value);
+ }
+ log_infof("frame send", "]");
+ }
+ break;
+ case NGHTTP2_DATA:
+ log_infof("frame send", "FRAME[DATA, stream=%d, length=%d, flags=%d]",
+ frame->hd.stream_id, (int)frame->hd.length,
+ (int)frame->hd.flags);
+ break;
+ case NGHTTP2_RST_STREAM:
+ log_infof("frame send", "FRAME[RST, stream=%d]",
+ frame->hd.stream_id);
+ break;
+ case NGHTTP2_WINDOW_UPDATE:
+ log_infof("frame send", "FRAME[WINDOW_UPDATE, stream=%d]",
+ frame->hd.stream_id);
+ break;
+ case NGHTTP2_GOAWAY:
+ log_infof("frame send", "FRAME[GOAWAY]");
+ break;
+ }
+ return 0;
+}
+
+static int h2_session_on_frame_recv(nghttp2_session *ngh2,
+ const nghttp2_frame *frame,
+ void *user_data)
+{
+ (void)user_data;
+
+ switch (frame->hd.type) {
+ case NGHTTP2_HEADERS:
+ if (frame->headers.cat == NGHTTP2_HCAT_RESPONSE) {
+ log_infof("frame recv", "FRAME[HEADERS, stream=%d]",
+ frame->hd.stream_id);
+ }
+ break;
+ case NGHTTP2_DATA:
+ log_infof("frame recv", "FRAME[DATA, stream=%d, len=%lu, eof=%d]",
+ frame->hd.stream_id, frame->hd.length,
+ (frame->hd.flags & NGHTTP2_FLAG_END_STREAM) != 0);
+ break;
+ case NGHTTP2_RST_STREAM:
+ log_infof("frame recv", "FRAME[RST, stream=%d]",
+ frame->hd.stream_id);
+ fprintf(stdout, "[%d] RST\n", frame->hd.stream_id);
+ break;
+ case NGHTTP2_GOAWAY:
+ log_infof("frame recv", "FRAME[GOAWAY]");
+ break;
+ }
+ return 0;
+}
+
+static int h2_session_on_header(nghttp2_session *ngh2,
+ const nghttp2_frame *frame,
+ const uint8_t *name, size_t namelen,
+ const uint8_t *value, size_t valuelen,
+ uint8_t flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+ (void)flags;
+ (void)user_data;
+ log_infof("frame recv", "stream=%d, HEADER %.*s: %.*s",
+ frame->hd.stream_id, (int)namelen, name,
+ (int)valuelen, value);
+ stream = h2_session_stream_get(session, frame->hd.stream_id);
+ if (stream) {
+ if (namelen == 7 && !strncmp(":status", (const char *)name, namelen)) {
+ stream->http_status = 0;
+ if (valuelen < 10) {
+ char tmp[10], *endp;
+ memcpy(tmp, value, valuelen);
+ tmp[valuelen] = 0;
+ stream->http_status = (int)strtol(tmp, &endp, 10);
+ }
+ if (stream->http_status < 100 || stream->http_status >= 600) {
+ log_errf("on header recv", "stream=%d, invalid :status: %.*s",
+ frame->hd.stream_id, (int)valuelen, value);
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ else {
+ fprintf(stdout, "[%d] :status: %d\n", stream->id,
+ stream->http_status);
+ }
+ }
+ }
+ return 0;
+}
+
+static int h2_session_on_stream_close(nghttp2_session *ngh2, int32_t stream_id,
+ uint32_t error_code, void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+
+ stream = h2_session_stream_get(session, stream_id);
+ if (stream) {
+ /* closed known stream */
+ stream->error_code = error_code;
+ stream->closed = 1;
+ if (error_code)
+ stream->reset = 1;
+ if (error_code) {
+ log_errf("stream close", "stream %d closed with error %d",
+ stream_id, error_code);
+ }
+
+ h2_session_stream_remove(session, stream);
+ if (stream->on_close)
+ stream->on_close(stream);
+ /* last one? */
+ if (!session->streams) {
+ int rv;
+ rv = nghttp2_session_terminate_session(ngh2, NGHTTP2_NO_ERROR);
+ if (rv) {
+ log_errf("terminate session", "error %d (%s)",
+ rv, nghttp2_strerror(rv));
+ session->aborted = 1;
+ }
+ }
+ }
+ return 0;
+}
+
+static int h2_session_on_data_chunk_recv(nghttp2_session *ngh2, uint8_t flags,
+ int32_t stream_id, const uint8_t *data,
+ size_t len, void *user_data) {
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+
+ stream = h2_session_stream_get(session, stream_id);
+ if (stream && stream->on_recv_data) {
+ stream->on_recv_data(stream, data, len);
+ }
+ return 0;
+}
+
+static int h2_session_open(struct h2_session *session, const char *server_name,
+ const char *host, uint16_t port)
+{
+ nghttp2_session_callbacks *cbs = NULL;
+ nghttp2_settings_entry settings[2];
+ int rv = -1;
+
+ memset(session, 0, sizeof(*session));
+ session->server_name = server_name;
+ session->connect_host = host;
+ session->connect_port = port;
+ /* establish socket */
+ session->fd = open_connection(session->connect_host, session->connect_port);
+ if (session->fd < 0) {
+ log_errf(cmd, "could not connect to %s:%u",
+ session->connect_host, session->connect_port);
+ goto leave;
+ }
+ if (sock_nonblock_nodelay(session->fd))
+ goto leave;
+ session->want_io = IO_WANT_NONE;
+
+ log_infof(cmd, "connected to %s via %s:%u", session->server_name,
+ session->connect_host, session->connect_port);
+
+ rv = nghttp2_session_callbacks_new(&cbs);
+ if (rv) {
+ log_errf("setup callbacks", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ /* setup session callbacks */
+ nghttp2_session_callbacks_set_send_callback(cbs, h2_session_send);
+ nghttp2_session_callbacks_set_recv_callback(cbs, h2_session_recv);
+ nghttp2_session_callbacks_set_on_frame_send_callback(
+ cbs, h2_session_on_frame_send);
+ nghttp2_session_callbacks_set_on_frame_recv_callback(
+ cbs, h2_session_on_frame_recv);
+ nghttp2_session_callbacks_set_on_header_callback(
+ cbs, h2_session_on_header);
+ nghttp2_session_callbacks_set_on_stream_close_callback(
+ cbs, h2_session_on_stream_close);
+ nghttp2_session_callbacks_set_on_data_chunk_recv_callback(
+ cbs, h2_session_on_data_chunk_recv);
+ /* create the ngh2 session */
+ rv = nghttp2_session_client_new(&session->ngh2, cbs, session);
+ if (rv) {
+ log_errf("client new", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ /* submit initial settings */
+ settings[0].settings_id = NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS;
+ settings[0].value = 100;
+ settings[1].settings_id = NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE;
+ settings[1].value = 10 * 1024 * 1024;
+
+ rv = nghttp2_submit_settings(session->ngh2, NGHTTP2_FLAG_NONE, settings, 2);
+ if (rv) {
+ log_errf("submit settings", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ rv = nghttp2_session_set_local_window_size(session->ngh2, NGHTTP2_FLAG_NONE,
+ 0, 10 * 1024 * 1024);
+ if (rv) {
+ log_errf("set connection window size", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ rv = 0;
+
+leave:
+ if (cbs)
+ nghttp2_session_callbacks_del(cbs);
+ return rv;
+}
+
+static int h2_session_io(struct h2_session *session) {
+ int rv;
+ rv = nghttp2_session_recv(session->ngh2);
+ if (rv) {
+ log_errf("session recv", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ return 1;
+ }
+ rv = nghttp2_session_send(session->ngh2);
+ if (rv) {
+ log_errf("session send", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ }
+ return 0;
+}
+
+struct h2_poll_ctx;
+typedef int h2_poll_ev_cb(struct h2_poll_ctx *pctx, struct pollfd *pfd);
+
+struct h2_poll_ctx {
+ struct h2_session *session;
+ struct h2_stream *stream;
+ h2_poll_ev_cb *on_ev;
+};
+
+static int h2_session_ev(struct h2_poll_ctx *pctx, struct pollfd *pfd)
+{
+ if (pfd->revents & (POLLIN | POLLOUT)) {
+ h2_session_io(pctx->session);
+ }
+ else if (pfd->revents & POLLHUP) {
+ log_errf("session run", "connection closed");
+ return -1;
+ }
+ else if (pfd->revents & POLLERR) {
+ log_errf("session run", "connection error");
+ return -1;
+ }
+ return 0;
+}
+
+static int h2_stream_ev(struct h2_poll_ctx *pctx, struct pollfd *pfd)
+{
+ if (pfd->revents & (POLLIN | POLLHUP)) {
+ nghttp2_session_resume_data(pctx->session->ngh2, pctx->stream->id);
+ }
+ else if (pfd->revents & (POLLERR)) {
+ nghttp2_submit_rst_stream(pctx->session->ngh2, NGHTTP2_FLAG_NONE,
+ pctx->stream->id, NGHTTP2_STREAM_CLOSED);
+ }
+ return 0;
+}
+
+static nfds_t h2_session_set_poll(struct h2_session *session,
+ struct h2_poll_ctx *pollctxs,
+ struct pollfd *pfds)
+{
+ nfds_t n = 0;
+ int want_read, want_write;
+ struct h2_stream *stream;
+
+ want_read = (nghttp2_session_want_read(session->ngh2) ||
+ session->want_io == IO_WANT_READ);
+ want_write = (nghttp2_session_want_write(session->ngh2) ||
+ session->want_io == IO_WANT_WRITE);
+ if (want_read || want_write) {
+ pollctxs[n].session = session;
+ pollctxs[n].stream = NULL;
+ pollctxs[n].on_ev = h2_session_ev;
+ pfds[n].fd = session->fd;
+ pfds[n].events = pfds[n].revents = 0;
+ if (want_read)
+ pfds[n].events |= (POLLIN | POLLHUP);
+ if (want_write)
+ pfds[n].events |= (POLLOUT | POLLERR);
+ ++n;
+ }
+
+ for (stream = session->streams; stream; stream = stream->next) {
+ if (stream->fdin >= 0 && !stream->input_closed && !stream->closed) {
+ pollctxs[n].session = session;
+ pollctxs[n].stream = stream;
+ pollctxs[n].on_ev = h2_stream_ev;
+ pfds[n].fd = stream->fdin;
+ pfds[n].revents = 0;
+ pfds[n].events = (POLLIN | POLLHUP);
+ ++n;
+ }
+ }
+ return n;
+}
+
+static void h2_session_run(struct h2_session *session)
+{
+ struct h2_poll_ctx pollctxs[5];
+ struct pollfd pfds[5];
+ nfds_t npollfds, i;
+
+ npollfds = h2_session_set_poll(session, pollctxs, pfds);
+ while (npollfds) {
+ if (poll(pfds, npollfds, -1) == -1) {
+ log_errf("session run", "poll error %d (%s)", errno, strerror(errno));
+ break;
+ }
+ for (i = 0; i < npollfds; ++i) {
+ if (pfds[i].revents) {
+ if (pollctxs[i].on_ev(&pollctxs[i], &pfds[i])) {
+ break;
+ }
+ }
+ }
+ npollfds = h2_session_set_poll(session, pollctxs, pfds);
+ if (!session->streams)
+ break;
+ }
+}
+
+static void h2_session_close(struct h2_session *session)
+{
+ log_infof(cmd, "closed session to %s:%u",
+ session->connect_host, session->connect_port);
+}
+
+/* websocket stream */
+
+struct ws_stream {
+ struct h2_stream s;
+};
+
+static void ws_stream_on_close(struct h2_stream *stream)
+{
+ log_infof("ws stream", "stream %d closed", stream->id);
+ if (!stream->reset)
+ fprintf(stdout, "[%d] EOF\n", stream->id);
+}
+
+static void ws_stream_on_recv_data(struct h2_stream *stream,
+ const uint8_t *data, size_t len)
+{
+ size_t i;
+
+ log_infof("ws stream", "stream %d recv %lu data bytes",
+ stream->id, (unsigned long)len);
+ for (i = 0; i < len; ++i) {
+ fprintf(stdout, "%s%02x", (i&0xf)? " " : (i? "\n" : ""), data[i]);
+ }
+ fprintf(stdout, "\n");
+}
+
+static int ws_stream_create(struct ws_stream **pstream, struct uri *uri)
+{
+ struct ws_stream *stream;
+
+ stream = calloc(1, sizeof(*stream));
+ if (!stream) {
+ log_errf("ws stream create", "out of memory");
+ *pstream = NULL;
+ return -1;
+ }
+ stream->s.uri = uri;
+ stream->s.id = -1;
+ stream->s.on_close = ws_stream_on_close;
+ stream->s.on_recv_data = ws_stream_on_recv_data;
+ *pstream = stream;
+ return 0;
+}
+
+static ssize_t ws_stream_read_req_body(nghttp2_session *ngh2,
+ int32_t stream_id,
+ uint8_t *buf, size_t buflen,
+ uint32_t *pflags,
+ nghttp2_data_source *source,
+ void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct ws_stream *stream;
+ ssize_t nread = 0;
+ int eof = 0;
+
+ stream = (struct ws_stream *)h2_session_stream_get(session, stream_id);
+ if (!stream) {
+ log_errf("stream req body", "stream not known");
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+
+ (void)source;
+ assert(stream->s.fdin >= 0);
+ nread = read(stream->s.fdin, buf, buflen);
+ log_debugf("stream req body", "fread(len=%lu) -> %ld",
+ (unsigned long)buflen, (long)nread);
+
+ if (nread < 0) {
+ if (errno == EAGAIN) {
+ nread = 0;
+ }
+ else {
+ log_errf("stream req body", "error on input");
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ }
+ else if (nread == 0) {
+ eof = 1;
+ stream->s.input_closed = 1;
+ }
+
+ *pflags = stream->s.input_closed? NGHTTP2_DATA_FLAG_EOF : 0;
+ if (nread == 0 && !eof) {
+ return NGHTTP2_ERR_DEFERRED;
+ }
+ return nread;
+}
+
+static int ws_stream_submit(struct ws_stream *stream,
+ struct h2_session *session,
+ const nghttp2_nv *nva, size_t nvalen,
+ int fdin)
+{
+ nghttp2_data_provider provider, *req_body = NULL;
+
+ if (fdin >= 0) {
+ sock_nonblock_nodelay(fdin);
+ stream->s.fdin = fdin;
+ provider.read_callback = ws_stream_read_req_body;
+ provider.source.ptr = NULL;
+ req_body = &provider;
+ }
+ else {
+ stream->s.input_closed = 1;
+ }
+
+ stream->s.id = nghttp2_submit_request(session->ngh2, NULL, nva, nvalen,
+ req_body, stream);
+ if (stream->s.id < 0) {
+ log_errf("ws stream submit", "nghttp2_submit_request: error %d",
+ stream->s.id);
+ return -1;
+ }
+
+ h2_session_stream_add(session, &stream->s);
+ log_infof("ws stream submit", "stream %d opened for %s%s",
+ stream->s.id, stream->s.uri->authority, stream->s.uri->path);
+ return 0;
+}
+
+static void usage(const char *msg)
+{
+ if(msg)
+ fprintf(stderr, "%s\n", msg);
+ fprintf(stderr,
+ "usage: [options] ws-uri scenario\n"
+ " run a websocket scenario to the ws-uri, options:\n"
+ " -c host:port connect to host:port\n"
+ " -v increase verbosity\n"
+ "scenarios are:\n"
+ " * fail-proto: CONNECT using wrong :protocol\n"
+ " * miss-authority: CONNECT without :authority header\n"
+ " * miss-path: CONNECT without :path header\n"
+ " * miss-scheme: CONNECT without :scheme header\n"
+ " * miss-version: CONNECT without sec-webSocket-version header\n"
+ " * ws-empty: open valid websocket, do not send anything\n"
+ );
+}
+
+int main(int argc, char *argv[])
+{
+ const char *host = NULL, *scenario;
+ uint16_t port = 80;
+ struct uri uri;
+ struct h2_session session;
+ struct ws_stream *stream;
+ int ch;
+
+ cmd = argv[0];
+ while((ch = getopt(argc, argv, "c:vh")) != -1) {
+ switch(ch) {
+ case 'c':
+ if (parse_host_port(&host, &port, NULL, NULL,
+ optarg, strlen(optarg), 80)) {
+ log_errf(cmd, "could not parse connect '%s'", optarg);
+ return 1;
+ }
+ break;
+ case 'h':
+ usage(NULL);
+ return 2;
+ break;
+ case 'v':
+ ++verbose;
+ break;
+ default:
+ usage("invalid option");
+ return 1;
+ }
+ }
+ argc -= optind;
+ argv += optind;
+
+ if (argc < 1) {
+ usage("need URL");
+ return 1;
+ }
+ if (argc < 2) {
+ usage("need scenario");
+ return 1;
+ }
+ if (parse_uri(&uri, argv[0], strlen(argv[0]))) {
+ log_errf(cmd, "could not parse uri '%s'", argv[0]);
+ return 1;
+ }
+ log_debugf(cmd, "normalized uri: %s://%s:%u%s", uri.scheme, uri.host,
+ uri.port, uri.path? uri.path : "");
+ scenario = argv[1];
+
+ if (!host) {
+ host = uri.host;
+ port = uri.port;
+ }
+
+ if (h2_session_open(&session, uri.host, host, port))
+ return 1;
+
+ if (ws_stream_create(&stream, &uri))
+ return 1;
+
+ if (!strcmp(scenario, "ws-stdin")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), 0))
+ return 1;
+ }
+ else if (!strcmp(scenario, "fail-proto")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websockets"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-version")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-path")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-scheme")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-authority")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else {
+ log_errf(cmd, "unknown scenario: %s", scenario);
+ return 1;
+ }
+
+ h2_session_run(&session);
+ h2_session_close(&session);
+ return 0;
+}
diff --git a/test/cls.c b/test/cls.c
new file mode 100644
index 0000000..1ee6ac7
--- /dev/null
+++ b/test/cls.c
@@ -0,0 +1,182 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <ctype.h>
+#include <dirent.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+
+/*
+ * Compare a string to a mask
+ * Mask characters:
+ * @ - uppercase letter
+ * $ - lowercase letter
+ * & - hex digit
+ * # - digit
+ * * - swallow remaining characters
+ * <x> - exact match for any other character
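+ *
+ * Illustrative example (not part of the original notes): the mask
+ * "####&&&& @@@*" matches "1234abcd GET anything" -- four digits, four
+ * hex digits, a literal space, three uppercase letters, then '*'
+ * swallows the rest.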
+ */
+static int checkmask(const char *data, const char *mask)
+{
+ int i, ch, d;
+
+ for (i = 0; mask[i] != '\0' && mask[i] != '*'; i++) {
+ ch = mask[i];
+ d = data[i];
+ if (ch == '@') {
+ if (!isupper(d))
+ return 0;
+ }
+ else if (ch == '$') {
+ if (!islower(d))
+ return 0;
+ }
+ else if (ch == '#') {
+ if (!isdigit(d))
+ return 0;
+ }
+ else if (ch == '&') {
+ if (!isxdigit(d))
+ return 0;
+ }
+ else if (ch != d)
+ return 0;
+ }
+
+ if (mask[i] == '*')
+ return 1;
+ else
+ return (data[i] == '\0');
+}
+
+/*
+ * Converts 8 hex digits to a time integer
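+ * (illustrative: hex2sec("0000003c") returns 60; "ffffffff" maps to -1)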
+ */
+static int hex2sec(const char *x)
+{
+ int i, ch;
+ unsigned int j;
+
+ for (i = 0, j = 0; i < 8; i++) {
+ ch = x[i];
+ j <<= 4;
+ if (isdigit(ch))
+ j |= ch - '0';
+ else if (isupper(ch))
+ j |= ch - ('A' - 10);
+ else
+ j |= ch - ('a' - 10);
+ }
+ if (j == 0xffffffff)
+ return -1; /* so that it works with 8-byte ints */
+ else
+ return j;
+}
+
+int main(int argc, char **argv)
+{
+ int i, ver;
+ DIR *d;
+ struct dirent *e;
+ const char *s;
+ FILE *fp;
+ char path[FILENAME_MAX + 1];
+ char line[1035];
+ time_t date, lmod, expire;
+ unsigned int len;
+ struct tm ts;
+ char sdate[30], slmod[30], sexpire[30];
+ const char time_format[] = "%e %b %Y %R";
+
+ if (argc != 2) {
+ printf("Usage: cls directory\n");
+ exit(0);
+ }
+
+ d = opendir(argv[1]);
+ if (d == NULL) {
+ perror("opendir");
+ exit(1);
+ }
+
+ for (;;) {
+ e = readdir(d);
+ if (e == NULL)
+ break;
+ s = e->d_name;
+ if (s[0] == '.' || s[0] == '#')
+ continue;
+ sprintf(path, "%s/%s", argv[1], s);
+ fp = fopen(path, "r");
+ if (fp == NULL) {
+ perror("fopen");
+ continue;
+ }
+ if (fgets(line, 1034, fp) == NULL) {
+ perror("fgets");
+ fclose(fp);
+ continue;
+ }
+ if (!checkmask(line, "&&&&&&&& &&&&&&&& &&&&&&&& &&&&&&&& &&&&&&&&\n")) {
+ fprintf(stderr, "Bad cache file\n");
+ fclose(fp);
+ continue;
+ }
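+        /* header layout, 8 hex chars plus one space per field: date at 0,
+         * last-modified at 9, expiry at 18, version at 27, length at 36 */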
+ date = hex2sec(line);
+ lmod = hex2sec(line + 9);
+ expire = hex2sec(line + 18);
+ ver = hex2sec(line + 27);
+        len = hex2sec(line + 36);
+ if (fgets(line, 1034, fp) == NULL) {
+ perror("fgets");
+ fclose(fp);
+ continue;
+ }
+ fclose(fp);
+ i = strlen(line);
+ if (strncmp(line, "X-URL: ", 7) != 0 || line[i - 1] != '\n') {
+ fprintf(stderr, "Bad cache file\n");
+ continue;
+ }
+ line[i - 1] = '\0';
+ if (date != -1) {
+ ts = *gmtime(&date);
+ strftime(sdate, 30, time_format, &ts);
+ }
+ else
+ strcpy(sdate, "-");
+
+ if (lmod != -1) {
+ ts = *gmtime(&lmod);
+ strftime(slmod, 30, time_format, &ts);
+ }
+ else
+ strcpy(slmod, "-");
+
+ if (expire != -1) {
+ ts = *gmtime(&expire);
+ strftime(sexpire, 30, time_format, &ts);
+ }
+ else
+ strcpy(sexpire, "-");
+
+ printf("%s: %d; %s %s %s\n", line + 7, ver, sdate, slmod, sexpire);
+ }
+
+ closedir(d);
+ return 0;
+}
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 0000000..2ae35f3
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,31 @@
+import sys
+import os
+
+import pytest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
+
+from pyhttpd.env import HttpdTestEnv
+
+def pytest_report_header(config, startdir):
+ env = HttpdTestEnv()
+ return f"[apache httpd: {env.get_httpd_version()}, mpm: {env.mpm_module}, {env.prefix}]"
+
+def pytest_addoption(parser):
+ parser.addoption("--repeat", action="store", type=int, default=1,
+ help='Number of times to repeat each test')
+ parser.addoption("--all", action="store_true")
+
+
+def pytest_generate_tests(metafunc):
+ if "repeat" in metafunc.fixturenames:
+ count = int(metafunc.config.getoption("repeat"))
+ metafunc.fixturenames.append('tmp_ct')
+ metafunc.parametrize('repeat', range(count))
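+        # illustrative: `pytest --repeat=3` makes every test that uses the
+        # 'repeat' fixture run three times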
+
+@pytest.fixture(autouse=True, scope="function")
+def _function_scope(env, request):
+ env.set_current_test_name(request.node.name)
+ yield
+ env.set_current_test_name(None)
+
diff --git a/test/make_sni.sh b/test/make_sni.sh
new file mode 100644
index 0000000..1805c4b
--- /dev/null
+++ b/test/make_sni.sh
@@ -0,0 +1,396 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This script will populate a directory 'sni' with several test sites,
+# an httpd.conf and certificates, to facilitate testing of TLS server
+# name indication (SNI, RFC 4366) support.
+#
+#
+OPENSSL=${OPENSSL:-openssl}
+DOMAIN=${DOMAIN:-my-sni-test.org}
+DIR=${DIR:-$PWD/sni}
+
+# List of hostnames automatically created by default.
+NAMES=${NAMES:-ape nut pear apple banana}
+
+# IP address these hostnames are bound to.
+IP=${IP:-127.0.0.1}
+
+# A certificate password for the .p12 files of the client
+# authentication test. Normally not set. However some browsers
+# require a password of at least 4 characters.
+#
+PASSWD=${PASSWD:-}
+
+args=`getopt a:fd:D:p: $*`
+if [ $? != 0 ]; then
+ echo "Syntax: $0 [-f] [-a IPaddress] [-d outdir] [-D domain ] [two or more vhost names ]"
+ echo " -f Force overwriting of outdir (default is $DIR)"
+ echo " -d dir Directory to create the SNI test server in (default is $DIR)"
+ echo " -D domain Domain name to use for this test (default is $DOMAIN)"
+ echo " -a IP IP address to use for this virtual host (default is $IP)"
+ echo " -p str Password for the client certificate test (some browsers require a set password)"
+ echo " [names] List of optional vhost names (default is $NAMES)"
+ echo
+ echo "Example:"
+ echo " $0 -D SecureBlogsAreUs.com peter fred mary jane ardy"
+ echo
+ echo "Which will create peter.SecureBlogsAreUs.com, fred.SecureBlogsAreUs.com and"
+ echo "so on. Note that the _first_ FQDN is also the default for non SNI hosts. It"
+ echo "may make sense to give this host a generic name - and allow each of the real"
+ echo "SNI site as sub directories/URI's of this generic name; thus allowing the "
+ echo "few non-SNI browsers access."
+ exit 1
+fi
+set -- $args
+for i
+do
+ case "$i"
+ in
+ -f)
+ FORCE=1
+ shift;;
+ -a)
+ IP=$2; shift
+ shift;;
+ -d)
+ DIR=$2; shift
+ shift;;
+ -p)
+ PASSWD=$2; shift
+ shift;;
+ -D)
+ DOMAIN=$2; shift
+ shift;;
+ --)
+ shift; break;
+ esac
+done
+
+if [ $# = 1 ]; then
+ echo "Aborted - just specifying one vhost makes no sense for SNI testing. Go wild !"
+ exit 1
+fi
+
+if [ $# -gt 0 ]; then
+ NAMES=$*
+fi
+
+if ! openssl version | grep -q OpenSSL; then
+ echo Aborted - your openssl is very old or misconfigured.
+ exit 1
+fi
+
+set `openssl version`
+if test "0$2" \< "00.9"; then
+ echo Aborted - version of openssl too old, 0.9 or up required.
+ exit 1
+fi
+
+if test -d ${DIR} -a "x$FORCE" != "x1"; then
+ echo Aborted - already an ${DIR} directory. Use the -f flag to overwrite.
+ exit 1
+fi
+
+mkdir -p ${DIR} || exit 1
+mkdir -p ${DIR}/ssl ${DIR}/htdocs ${DIR}/logs || exit 1
+
+# Create a 'CA' - keep using different serial numbers
+# as the browsers get upset if they see an identical
+# serial with a different pub-key.
+#
+# Note that we're not relying on the 'v3_ca' section as
+# in the default openssl.conf file - so the certificate
+# will be without the basicConstraints = CA:true and
+# keyUsage = cRLSign, keyCertSign values. This is fine
+# for most browsers.
+#
+serial=$RANDOM$$
+
+openssl req -new -nodes -batch \
+ -x509 \
+ -days 10 -subj '/CN=Da Root/O=SNI testing/' -set_serial $serial \
+ -keyout ${DIR}/root.key -out ${DIR}/root.pem \
+ || exit 2
+
+CDIR=${DIR}/client-xs-control
+mkdir -p ${CDIR}
+# Create some certificate authorities for testing client controls
+#
+openssl req -new -nodes -batch \
+ -x509 \
+ -days 10 -subj '/CN=Da Second Root/O=SNI user access I/' -set_serial 2$serial$$\
+ -keyout ${CDIR}/xs-root-1.key -out ${CDIR}/xs-root-1.pem \
+ || exit 2
+
+openssl req -new -nodes -batch \
+ -x509 \
+ -days 10 -subj '/CN=Da Second Root/O=SNI user access II/' -set_serial 3$serial$$ \
+ -keyout ${CDIR}/xs-root-2.key -out ${CDIR}/xs-root-2.pem \
+ || exit 2
+
+# Create a chain of just the two access authorities:
+cat ${CDIR}/xs-root-2.pem ${CDIR}/xs-root-1.pem > ${CDIR}/xs-root-chain.pem
+
+# And likewise a directory with the same information (using the
+# required 'hash' naming format
+#
+mkdir -p ${CDIR}/xs-root-dir || exit 1
+rm -f ${CDIR}/*.0
+ln ${CDIR}/xs-root-1.pem ${CDIR}/xs-root-dir/`openssl x509 -noout -hash -in ${CDIR}/xs-root-1.pem`.0
+ln ${CDIR}/xs-root-2.pem ${CDIR}/xs-root-dir/`openssl x509 -noout -hash -in ${CDIR}/xs-root-2.pem`.0
+
+# Use the above two client certificate authorities to make a few users
+for i in 1 2
+do
+ # Create a certificate request for a test user.
+ #
+ openssl req -new -nodes -batch \
+ -days 9 -subj "/CN=User $i/O=SNI Test Crash Dummy Dept/" \
+ -keyout ${CDIR}/client-$i.key -out ${CDIR}/client-$i.req -batch \
+ || exit 3
+
+    # And get it signed by the matching client cert issuing root authority.
+ #
+ openssl x509 -text -req \
+ -CA ${CDIR}/xs-root-$i.pem -CAkey ${CDIR}/xs-root-$i.key \
+ -set_serial 3$serial$$ -in ${CDIR}/client-$i.req -out ${CDIR}/client-$i.pem \
+ || exit 4
+
+ # And create a pkcs#12 version for easy browser import.
+ #
+ openssl pkcs12 -export \
+ -inkey ${CDIR}/client-$i.key -in ${CDIR}/client-$i.pem -name "Client $i" \
+ -caname "Issuing client root $i" -certfile ${CDIR}/xs-root-$i.pem \
+        -out ${CDIR}/client-$i.p12 -passout pass:"$PASSWD" || exit 5
+
+ rm ${CDIR}/client-$i.req
+done
+
+# Create the header for the example '/etc/hosts' file.
+#
+echo '# To append to your hosts file' > ${DIR}/hosts
+
+# Create a header for the httpd.conf snipped.
+#
+cat > ${DIR}/httpd-sni.conf << EOM
+# To append to your httpd.conf file
+Listen ${IP}:443
+NameVirtualHost ${IP}:443
+
+LoadModule ssl_module modules/mod_ssl.so
+
+SSLRandomSeed startup builtin
+SSLRandomSeed connect builtin
+
+LogLevel debug
+TransferLog ${DIR}/logs/access_log
+ErrorLog ${DIR}/logs/error_log
+
+# You'll get a warning about this.
+#
+SSLSessionCache none
+
+# Note that this SSL configuration is far
+# from complete - you probably will want
+# to configure SSLSession Caches at the
+# very least.
+
+<Directory />
+ Options None
+ AllowOverride None
+ Require all denied
+</Directory>
+
+<Directory "${DIR}/htdocs">
+ allow from all
+ Require all granted
+</Directory>
+
+# This first entry is also the default for non SNI
+# supporting clients.
+#
+EOM
+
+# Create the header of a sample BIND zone file.
+#
+(
+ echo "; Configuration sample to be added to the $DOMAIN zone file of BIND."
+ echo "\$ORIGIN $DOMAIN."
+) > ${DIR}/zone-file
+
+ZADD="IN A $IP"
+INFO="and also the site you see when the browser does not support SNI."
+
+set -- ${NAMES}
+DEFAULT=$1
+
+for n in ${NAMES}
+do
+ FQDN=$n.$DOMAIN
+ serial=`expr $serial + 1`
+
+ # Create a certificate request for this host.
+ #
+ openssl req -new -nodes -batch \
+ -days 9 -subj "/CN=$FQDN/O=SNI Testing/" \
+ -keyout ${DIR}/$n.key -out ${DIR}/$n.req -batch \
+ || exit 3
+
+ # And get it signed by our root authority.
+ #
+ openssl x509 -text -req \
+ -CA ${DIR}/root.pem -CAkey ${DIR}/root.key \
+ -set_serial $serial -in ${DIR}/$n.req -out ${DIR}/$n.pem \
+ || exit 4
+
+ # Combine the key and certificate in one file.
+ #
+ cat ${DIR}/$n.pem ${DIR}/$n.key > ${DIR}/ssl/$n.crt
+ rm ${DIR}/$n.req ${DIR}/$n.key ${DIR}/$n.pem
+
+ LST="$LST
+ https://$FQDN/index.html"
+
+ # Create a /etc/host and bind-zone file example
+ #
+ echo "${IP} $FQDN $n" >> ${DIR}/hosts
+ echo "$n $ZADD" >> ${DIR}/zone-file
+ ZADD="IN CNAME $DEFAULT"
+
+ # Create and populate a docroot for this host.
+ #
+ mkdir -p ${DIR}/htdocs/$n || exit 1
+ echo We are $FQDN $INFO > ${DIR}/htdocs/$n/index.html || exit 1
+
+ # And change the info text - so that only the default/fallback site
+ # gets marked as such.
+ #
+ INFO="and you'd normally only see this site when there is proper SNI support."
+
+ # And create a configuration snipped.
+ #
+ cat >> ${DIR}/httpd-sni.conf << EOM
+<VirtualHost ${IP}:443>
+ SSLEngine On
+ ServerName $FQDN:443
+ DocumentRoot ${DIR}/htdocs/$n
+ SSLCertificateChainFile ${DIR}/root.pem
+ SSLCertificateFile ${DIR}/ssl/$n.crt
+
+ # Uncomment the following lines if you
+ # want to only allow access to clients with
+ # a certificate issued/signed by some
+ # selection of the issuing authorities
+ #
+    # SSLCACertificateFile ${CDIR}/xs-root-1.pem     # just root 1
+    # SSLCACertificateFile ${CDIR}/xs-root-2.pem     # just root 2
+    # SSLCACertificateFile ${CDIR}/xs-root-chain.pem # 1 & 2
+    # SSLCACertificatePath ${CDIR}/xs-root-dir       # 1 & 2 - but as a directory.
+ #
+ # SSLVerifyClient require
+ # SSLVerifyDepth 2
+ #
+ TransferLog ${DIR}/logs/access_$n
+</VirtualHost>
+
+EOM
+
+done
+
+cat << EOM
+SNI Files generated
+===================
+
+The directory ${DIR} has been populated with the following:
+
+- root.key|pem Certificate authority root and key. (You could
+                     import the root.pem certificate into your browser to
+ quell warnings about an unknown authority).
+
+- hosts /etc/hosts file with fake entries for the hosts
+
+- htdocs directory with one docroot for each domain,
+ each with a small sample file.
+
+- ssl directory with an ssl cert (signed by root)
+                     for each of the domains.
+
+- logs logfiles, one for each domain and an
+ access_log for any misses.
+
+The directory ${CDIR} contains optional test files to allow client
+authentication testing:
+
+- client*pem/p12 Files for client authentication testing. These
+ need to be imported into the browser.
+
+- xs-root-1/2 Certificate authority which has issued above
+ client authentication certificates.
+
+- xs-root-dir        A directory specific for the SSLCACertificatePath
+                     directive.
+
+- xs-root-chain A chain of the two client xs authorities for the
+                     SSLCACertificateFile directive.
+
+SNI Test
+========
+
+The directory ${DIR} has been created. Run an apache
+server against it with
+
+ .../httpd -f ${DIR}/httpd-sni.conf
+
+and keep an eye on ${DIR}/logs/error_log. When everything
+is fine you will see entries like:
+
+  [Mon Feb 11 16:12:26 2008] [debug] Init:
+ SSL server IP/port overlap: ape.*:443 (httpd-sni.conf:24) vs. jane.*:443 (httpd-sni.conf:42)
+
+for each vhost configured and a concluding warning:
+
+ [Mon Feb 11 16:12:26 2008] [warn] Init:
+ Name-based SSL virtual hosts only work for clients with TLS server name indication support (RFC 4366)
+
+HOWEVER - If you see an entry like:
+
+ [Mon Feb 11 15:41:41 2008] [warn] Init:
+ You should not use name-based virtual hosts in conjunction with SSL!!
+
+then you are either using an OpenSSL which is too old and/or you need to ensure that the
+TLS Extensions are compiled into openssl with the 'enable-tlsext' flag. Once you have
+recompiled or reinstalled OpenSSL with TLS Extensions you will have to recompile mod_ssl
+to allow it to recognize SNI support.
+
+Meanwhile add 'hosts' to your c:\windows\system32\drivers\etc\hosts
+or /etc/hosts file so as to point the various URLs to your server:
+$LST
+
+and verify that each returns its own name (and an entry in its
+own ${DIR}/logs file).
+
+NOTE
+====
+
+Note that in the generated example the 'first' domain is special - and is the
+catch all for non-SNI browsers. Depending on your circumstances it may make
+sense to use a generic name - and have each of the SNI domains as subdirectories
+(and hence URI's under this generic name). Thus allowing non SNI browsers also
+access to those sites.
+EOM
+exit 0
diff --git a/test/modules/core/__init__.py b/test/modules/core/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/test/modules/core/__init__.py
@@ -0,0 +1 @@
+
diff --git a/test/modules/core/conftest.py b/test/modules/core/conftest.py
new file mode 100644
index 0000000..439cd22
--- /dev/null
+++ b/test/modules/core/conftest.py
@@ -0,0 +1,44 @@
+import logging
+import os
+
+import pytest
+import sys
+
+from pyhttpd.env import HttpdTestEnv
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+
+def pytest_report_header(config, startdir):
+ env = HttpdTestEnv()
+ return f"core [apache: {env.get_httpd_version()}, mpm: {env.mpm_module}, {env.prefix}]"
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> HttpdTestEnv:
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = HttpdTestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ env.httpd_error_log.set_ignored_lognos([
+ 'AH10244', # core: invalid URI path
+ 'AH01264', # mod_cgid script not found
+ ])
+ yield
+ assert env.apache_stop() == 0
+ errors, warnings = env.httpd_error_log.get_missed()
+ assert (len(errors), len(warnings)) == (0, 0),\
+ f"apache logged {len(errors)} errors and {len(warnings)} warnings: \n"\
+ "{0}\n{1}\n".format("\n".join(errors), "\n".join(warnings))
+
diff --git a/test/modules/core/test_001_encoding.py b/test/modules/core/test_001_encoding.py
new file mode 100644
index 0000000..b7ffbaa
--- /dev/null
+++ b/test/modules/core/test_001_encoding.py
@@ -0,0 +1,93 @@
+import pytest
+
+from pyhttpd.conf import HttpdConf
+
+
+class TestEncoding:
+
+ EXP_AH10244_ERRS = 0
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = HttpdConf(env, extras={
+ 'base': f"""
+ <Directory "{env.gen_dir}">
+ AllowOverride None
+ Options +ExecCGI -MultiViews +SymLinksIfOwnerMatch
+ Require all granted
+ </Directory>
+ """,
+ f"test2.{env.http_tld}": "AllowEncodedSlashes on",
+ f"test1.{env.http_tld}": f"ScriptAlias /cgi-bin/ {env.gen_dir}",
+ })
+ conf.add_vhost_test1()
+ conf.add_vhost_test2()
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check handling of url encodings that are accepted
+ @pytest.mark.parametrize("path", [
+ "/006/006.css",
+ "/%30%30%36/%30%30%36.css",
+ "/nothing/../006/006.css",
+ "/nothing/./../006/006.css",
+ "/nothing/%2e%2e/006/006.css",
+ "/nothing/%2e/%2e%2e/006/006.css",
+ "/nothing/%2e/%2e%2e/006/006%2ecss",
+ ])
+ def test_core_001_01(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+
+ # check handling of / normalization
+ @pytest.mark.parametrize("path", [
+ "/006//006.css",
+ "/006//////////006.css",
+ "/006////.//////006.css",
+ "/006////%2e//////006.css",
+ "/006////%2e//////006%2ecss",
+ "/006/../006/006.css",
+ "/006/%2e%2e/006/006.css",
+ ])
+ def test_core_001_03(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+
+ # check path traversals
+ @pytest.mark.parametrize(["path", "status"], [
+ ["/../echo.py", 400],
+ ["/nothing/../../echo.py", 400],
+ ["/cgi-bin/../../echo.py", 400],
+ ["/nothing/%2e%2e/%2e%2e/echo.py", 400],
+ ["/cgi-bin/%2e%2e/%2e%2e/echo.py", 400],
+ ["/nothing/%%32%65%%32%65/echo.py", 400],
+ ["/cgi-bin/%%32%65%%32%65/echo.py", 400],
+ ["/nothing/%%32%65%%32%65/%%32%65%%32%65/h2_env.py", 400],
+ ["/cgi-bin/%%32%65%%32%65/%%32%65%%32%65/h2_env.py", 400],
+ ["/nothing/%25%32%65%25%32%65/echo.py", 404],
+ ["/cgi-bin/%25%32%65%25%32%65/echo.py", 404],
+ ["/nothing/%25%32%65%25%32%65/%25%32%65%25%32%65/h2_env.py", 404],
+ ["/cgi-bin/%25%32%65%25%32%65/%25%32%65%25%32%65/h2_env.py", 404],
+ ])
+ def test_core_001_04(self, env, path, status):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url)
+ assert r.response["status"] == status
+ if status == 400:
+ TestEncoding.EXP_AH10244_ERRS += 1
+ # the log will have a core:err about invalid URI path
+
+ # check handling of %2f url encodings that are not decoded by default
+ @pytest.mark.parametrize(["host", "path", "status"], [
+ ["test1", "/006%2f006.css", 404],
+ ["test2", "/006%2f006.css", 200],
+ ["test2", "/x%252f.test", 200],
+ ["test2", "/10%25abnormal.txt", 200],
+ ])
+ def test_core_001_20(self, env, host, path, status):
+ url = env.mkurl("https", host, path)
+ r = env.curl_get(url)
+ assert r.response["status"] == status
diff --git a/test/modules/http2/.gitignore b/test/modules/http2/.gitignore
new file mode 100644
index 0000000..d68cd09
--- /dev/null
+++ b/test/modules/http2/.gitignore
@@ -0,0 +1,3 @@
+gen
+config.ini
+__pycache__
diff --git a/test/modules/http2/Makefile.in b/test/modules/http2/Makefile.in
new file mode 100644
index 0000000..15d404d
--- /dev/null
+++ b/test/modules/http2/Makefile.in
@@ -0,0 +1,20 @@
+
+# no targets: we don't want to build anything by default. if you want the
+# test programs, then "make test"
+TARGETS =
+
+bin_PROGRAMS =
+
+PROGRAM_LDADD = $(EXTRA_LDFLAGS) $(PROGRAM_DEPENDENCIES) $(EXTRA_LIBS)
+PROGRAM_DEPENDENCIES = \
+ $(top_srcdir)/srclib/apr-util/libaprutil.la \
+ $(top_srcdir)/srclib/apr/libapr.la
+
+include $(top_builddir)/build/rules.mk
+
+test: $(bin_PROGRAMS)
+
+# example for building a test proggie
+# dbu_OBJECTS = dbu.lo
+# dbu: $(dbu_OBJECTS)
+# $(LINK) $(dbu_OBJECTS) $(PROGRAM_LDADD)
diff --git a/test/modules/http2/__init__.py b/test/modules/http2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/http2/__init__.py
diff --git a/test/modules/http2/conftest.py b/test/modules/http2/conftest.py
new file mode 100644
index 0000000..55d0c3a
--- /dev/null
+++ b/test/modules/http2/conftest.py
@@ -0,0 +1,40 @@
+import logging
+import os
+
+import pytest
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+from .env import H2TestEnv
+
+
+def pytest_report_header(config, startdir):
+ env = H2TestEnv()
+ return f"mod_h2 [apache: {env.get_httpd_version()}, mpm: {env.mpm_module}, {env.prefix}]"
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> H2TestEnv:
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = H2TestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ yield
+ assert env.apache_stop() == 0
+ errors, warnings = env.httpd_error_log.get_missed()
+ assert (len(errors), len(warnings)) == (0, 0),\
+ f"apache logged {len(errors)} errors and {len(warnings)} warnings: \n"\
+ "{0}\n{1}\n".format("\n".join(errors), "\n".join(warnings))
+
diff --git a/test/modules/http2/env.py b/test/modules/http2/env.py
new file mode 100644
index 0000000..34d196d
--- /dev/null
+++ b/test/modules/http2/env.py
@@ -0,0 +1,169 @@
+import inspect
+import logging
+import os
+import re
+import subprocess
+from typing import Dict, Any
+
+from pyhttpd.certs import CertificateSpec
+from pyhttpd.conf import HttpdConf
+from pyhttpd.env import HttpdTestEnv, HttpdTestSetup
+
+log = logging.getLogger(__name__)
+
+
+class H2TestSetup(HttpdTestSetup):
+
+ def __init__(self, env: 'HttpdTestEnv'):
+ super().__init__(env=env)
+ self.add_source_dir(os.path.dirname(inspect.getfile(H2TestSetup)))
+ self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl", "include"])
+
+ def make(self):
+ super().make()
+ self._add_h2test()
+ self._setup_data_1k_1m()
+
+ def _add_h2test(self):
+ local_dir = os.path.dirname(inspect.getfile(H2TestSetup))
+ p = subprocess.run([self.env.apxs, '-c', 'mod_h2test.c'],
+ capture_output=True,
+ cwd=os.path.join(local_dir, 'mod_h2test'))
+ rv = p.returncode
+ if rv != 0:
+            log.error(f"compiling mod_h2test failed: {p.stderr}")
+            raise Exception(f"compiling mod_h2test failed: {p.stderr}")
+
+ modules_conf = os.path.join(self.env.server_dir, 'conf/modules.conf')
+ with open(modules_conf, 'a') as fd:
+ # load our test module which is not installed
+ fd.write(f"LoadModule h2test_module \"{local_dir}/mod_h2test/.libs/mod_h2test.so\"\n")
+
+ def _setup_data_1k_1m(self):
+ s90 = "01234567890123456789012345678901234567890123456789012345678901234567890123456789012345678\n"
+ with open(os.path.join(self.env.gen_dir, "data-1k"), 'w') as f:
+ for i in range(10):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-10k"), 'w') as f:
+ for i in range(100):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-100k"), 'w') as f:
+ for i in range(1000):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-1m"), 'w') as f:
+ for i in range(10000):
+ f.write(f"{i:09d}-{s90}")
+
+
+class H2TestEnv(HttpdTestEnv):
+
+ @classmethod
+ @property
+ def is_unsupported(cls):
+ mpm_module = f"mpm_{os.environ['MPM']}" if 'MPM' in os.environ else 'mpm_event'
+ return mpm_module == 'mpm_prefork'
+
+ def __init__(self, pytestconfig=None):
+ super().__init__(pytestconfig=pytestconfig)
+ self.add_httpd_conf([
+ "H2MinWorkers 1",
+ "H2MaxWorkers 64",
+ "Protocols h2 http/1.1 h2c",
+ ])
+ self.add_httpd_log_modules(["http2", "proxy_http2", "h2test", "proxy", "proxy_http"])
+ self.add_cert_specs([
+ CertificateSpec(domains=[
+ f"push.{self._http_tld}",
+ f"hints.{self._http_tld}",
+ f"ssl.{self._http_tld}",
+ f"pad0.{self._http_tld}",
+ f"pad1.{self._http_tld}",
+ f"pad2.{self._http_tld}",
+ f"pad3.{self._http_tld}",
+ f"pad8.{self._http_tld}",
+ ]),
+ CertificateSpec(domains=[f"noh2.{self.http_tld}"], key_type='rsa2048'),
+ ])
+
+ self.httpd_error_log.set_ignored_lognos([
+ 'AH02032',
+ 'AH01276',
+ 'AH01630',
+ 'AH00135',
+ 'AH02261', # Re-negotiation handshake failed (our test_101)
+ 'AH03490', # scoreboard full, happens on limit tests
+ 'AH02429', # invalid chars in response header names, see test_h2_200
+ 'AH02430', # invalid chars in response header values, see test_h2_200
+ 'AH10373', # SSL errors on uncompleted handshakes, see test_h2_105
+ 'AH01247', # mod_cgid sometimes freaks out on load tests
+ 'AH01110', # error by proxy reading response
+            'AH10400',  # warning that 'enablereuse' has no effect in certain configs, see test_h2_600
+ 'AH00045', # child did not exit in time, SIGTERM was sent
+ ])
+ self.httpd_error_log.add_ignored_patterns([
+ re.compile(r'.*malformed header from script \'hecho.py\': Bad header: x.*'),
+ re.compile(r'.*:tls_post_process_client_hello:.*'),
+ # OSSL 3 dropped the function name from the error description. Use the code instead:
+ # 0A0000C1 = no shared cipher -- Too restrictive SSLCipherSuite or using DSA server certificate?
+ re.compile(r'.*SSL Library Error: error:0A0000C1:.*'),
+ re.compile(r'.*:tls_process_client_certificate:.*'),
+ # OSSL 3 dropped the function name from the error description. Use the code instead:
+ # 0A0000C7 = peer did not return a certificate -- No CAs known to server for verification?
+ re.compile(r'.*SSL Library Error: error:0A0000C7:.*'),
+ re.compile(r'.*have incompatible TLS configurations.'),
+ ])
+
+ def setup_httpd(self, setup: HttpdTestSetup = None):
+ super().setup_httpd(setup=H2TestSetup(env=self))
+
+
+class H2Conf(HttpdConf):
+
+ def __init__(self, env: HttpdTestEnv, extras: Dict[str, Any] = None):
+ super().__init__(env=env, extras=HttpdConf.merge_extras(extras, {
+ f"cgi.{env.http_tld}": [
+ "SSLOptions +StdEnvVars",
+ "AddHandler cgi-script .py",
+ "<Location \"/h2test/echo\">",
+ " SetHandler h2test-echo",
+ "</Location>",
+ "<Location \"/h2test/delay\">",
+ " SetHandler h2test-delay",
+ "</Location>",
+ "<Location \"/h2test/error\">",
+ " SetHandler h2test-error",
+ "</Location>",
+ ]
+ }))
+
+ def start_vhost(self, domains, port=None, doc_root="htdocs", with_ssl=None,
+ ssl_module=None, with_certificates=None):
+ super().start_vhost(domains=domains, port=port, doc_root=doc_root,
+ with_ssl=with_ssl, ssl_module=ssl_module,
+ with_certificates=with_certificates)
+ if f"noh2.{self.env.http_tld}" in domains:
+ protos = ["http/1.1"]
+ elif port == self.env.https_port or with_ssl is True:
+ protos = ["h2", "http/1.1"]
+ else:
+ protos = ["h2c", "http/1.1"]
+ if f"test2.{self.env.http_tld}" in domains:
+ protos = reversed(protos)
+ self.add(f"Protocols {' '.join(protos)}")
+ return self
+
+ def add_vhost_noh2(self):
+ domains = [f"noh2.{self.env.http_tld}", f"noh2-alias.{self.env.http_tld}"]
+ self.start_vhost(domains=domains, port=self.env.https_port, doc_root="htdocs/noh2")
+ self.add(["Protocols http/1.1", "SSLOptions +StdEnvVars"])
+ self.end_vhost()
+ self.start_vhost(domains=domains, port=self.env.http_port, doc_root="htdocs/noh2")
+ self.add(["Protocols http/1.1", "SSLOptions +StdEnvVars"])
+ self.end_vhost()
+ return self
+
+ def add_vhost_test1(self, proxy_self=False, h2proxy_self=False):
+ return super().add_vhost_test1(proxy_self=proxy_self, h2proxy_self=h2proxy_self)
+
+ def add_vhost_test2(self):
+ return super().add_vhost_test2()
diff --git a/test/modules/http2/htdocs/cgi/alive.json b/test/modules/http2/htdocs/cgi/alive.json
new file mode 100644
index 0000000..defe2c2
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/alive.json
@@ -0,0 +1,4 @@
+{
+ "host" : "cgi",
+ "alive" : true
+}
diff --git a/test/modules/http2/htdocs/cgi/echo.py b/test/modules/http2/htdocs/cgi/echo.py
new file mode 100644
index 0000000..c9083e1
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/echo.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+import os, sys
+import multipart
+
+status = '200 Ok'
+
+content = ''
+for line in sys.stdin:
+ content += line
+
+# Just echo what we get
+print("Status: 200")
+print(f"Request-Length: {len(content)}")
+print("Content-Type: application/data\n")
+sys.stdout.write(content)
+
diff --git a/test/modules/http2/htdocs/cgi/echohd.py b/test/modules/http2/htdocs/cgi/echohd.py
new file mode 100644
index 0000000..a85a4e3
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/echohd.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+name = forms['name'] if 'name' in forms else None
+
+if name:
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+ print("""%s: %s""" % (name, os.environ['HTTP_'+name]))
+else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+<html><body>
+<p>No name was specified</p>
+</body></html>""")
+
+
diff --git a/test/modules/http2/htdocs/cgi/env.py b/test/modules/http2/htdocs/cgi/env.py
new file mode 100644
index 0000000..455c623
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/env.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+try:
+ ename = forms['name']
+
+ # Test if the file was uploaded
+ if ename is not None:
+ val = os.environ[ename] if ename in os.environ else ""
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+ print(f"{ename}={val}")
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No name was specified: name</p>
+ </body></html>""")
+
+except KeyError:
+ print("Status: 200 Ok")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="name">
+ <button type="submit">submit</button></form>
+ </body></html>""")
+ pass
+
+
+
diff --git a/test/modules/http2/htdocs/cgi/files/empty.txt b/test/modules/http2/htdocs/cgi/files/empty.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/files/empty.txt
diff --git a/test/modules/http2/htdocs/cgi/hecho.py b/test/modules/http2/htdocs/cgi/hecho.py
new file mode 100644
index 0000000..abffd33
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/hecho.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+try:
+
+ # A nested FieldStorage instance holds the file
+ name = forms['name']
+ value = ''
+
+ try:
+ value = forms['value']
+ except KeyError:
+ value = os.environ.get("HTTP_"+name, "unset")
+
+ # Test if a value was given
+ if name:
+ print("Status: 200")
+ print("%s: %s" % (name, value,))
+ print ("""\
+Content-Type: text/plain\n""")
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No name and value was specified: %s %s</p>
+ </body></html>""" % (name, value))
+
+except KeyError:
+ print("Status: 200 Ok")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="name">
+ <input type="text" name="value">
+ <button type="submit">Echo</button></form>
+ </body></html>""")
+ pass
+
+
diff --git a/test/modules/http2/htdocs/cgi/hello.py b/test/modules/http2/htdocs/cgi/hello.py
new file mode 100644
index 0000000..a96da8a
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/hello.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import os
+import json
+
+resp = {
+ 'https': os.getenv('HTTPS', ''),
+ 'host': os.getenv('X_HOST', '') if 'X_HOST' in os.environ else os.getenv('SERVER_NAME', ''),
+ 'server': os.getenv('SERVER_NAME', ''),
+ 'h2_original_host': os.getenv('H2_ORIGINAL_HOST', ''),
+ 'port': os.getenv('SERVER_PORT', ''),
+ 'protocol': os.getenv('SERVER_PROTOCOL', ''),
+ 'ssl_protocol': os.getenv('SSL_PROTOCOL', ''),
+ 'h2': os.getenv('HTTP2', ''),
+ 'h2push': os.getenv('H2PUSH', ''),
+ 'h2_stream_id': os.getenv('H2_STREAM_ID', ''),
+ 'x-forwarded-for': os.getenv('HTTP_X_FORWARDED_FOR', ''),
+ 'x-forwarded-host': os.getenv('HTTP_X_FORWARDED_HOST', ''),
+ 'x-forwarded-server': os.getenv('HTTP_X_FORWARDED_SERVER', ''),
+}
+
+print("Content-Type: application/json")
+print()
+print(json.JSONEncoder(indent=2).encode(resp))
+
diff --git a/test/modules/http2/htdocs/cgi/mnot164.py b/test/modules/http2/htdocs/cgi/mnot164.py
new file mode 100644
index 0000000..43a86ea
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/mnot164.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+text = forms['text'] if 'text' in forms else "a"
+count = int(forms['count']) if 'count' in forms else 77784
+
+print("Status: 200 OK")
+print("Content-Type: text/html")
+print()
+sys.stdout.flush()
+for _ in range(count):
+ sys.stdout.write(text)
+
diff --git a/test/modules/http2/htdocs/cgi/necho.py b/test/modules/http2/htdocs/cgi/necho.py
new file mode 100644
index 0000000..715904b
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/necho.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+import time
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+status = '200 Ok'
+
+try:
+ count = forms['count']
+ text = forms['text']
+
+ waitsec = float(forms['wait1']) if 'wait1' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ if int(count):
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+
+ waitsec = float(forms['wait2']) if 'wait2' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ i = 0;
+ for i in range(0, int(count)):
+ print("%s" % (text))
+
+ waitsec = float(forms['wait3']) if 'wait3' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No count was specified: %s</p>
+ </body></html>""" % (count))
+
+except KeyError as ex:
+ print("Status: 200 Ok")
+ print(f"""\
+Content-Type: text/html\n
+ <html><body>uri: uri={os.environ['REQUEST_URI']} ct={os.environ['CONTENT_TYPE']} ex={ex}
+ forms={forms}
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="count">
+ <input type="text" name="text">
+ <button type="submit">Echo</button></form>
+ </body></html>""")
+ pass
+
+
diff --git a/test/modules/http2/htdocs/cgi/requestparser.py b/test/modules/http2/htdocs/cgi/requestparser.py
new file mode 100644
index 0000000..c7e0648
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/requestparser.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+import os
+import sys
+from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
+import shutil
+
+
+try: # Windows needs stdio set for binary mode.
+ import msvcrt
+
+ msvcrt.setmode(0, os.O_BINARY) # stdin = 0
+ msvcrt.setmode(1, os.O_BINARY) # stdout = 1
+except ImportError:
+ pass
+
+
+class FileItem:
+
+ def __init__(self, mparse_item):
+ self.item = mparse_item
+
+ @property
+ def file_name(self):
+ return os.path.basename(self.item.file_name.decode())
+
+ def save_to(self, destpath: str):
+ fsrc = self.item.file_object
+ fsrc.seek(0)
+ with open(destpath, 'wb') as fd:
+ shutil.copyfileobj(fsrc, fd)
+
+
+def get_request_params():
+ oforms = {}
+ ofiles = {}
+ if "REQUEST_URI" in os.environ:
+ qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ if "CONTENT_TYPE" in os.environ:
+ ctype = os.environ["CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ s = sys.stdin.read()
+ qforms = parse.parse_qs(s)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name.decode()] = field.value.decode()
+ def on_file(file):
+ ofiles[file.field_name.decode()] = FileItem(file)
+ multipart.parse_form(headers={"Content-Type": ctype},
+ input_stream=sys.stdin.buffer,
+ on_field=on_field, on_file=on_file)
+ return oforms, ofiles
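+
+# Illustrative usage from a CGI script in this directory (cf. upload.py;
+# the destination path below is only an example):
+#   forms, files = get_request_params()
+#   name = forms['name'] if 'name' in forms else None
+#   if 'file' in files:
+#       files['file'].save_to('/tmp/example-upload.bin')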
+
diff --git a/test/modules/http2/htdocs/cgi/ssi/include.inc b/test/modules/http2/htdocs/cgi/ssi/include.inc
new file mode 100644
index 0000000..8bd8689
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/include.inc
@@ -0,0 +1 @@
+Hello include<br>
diff --git a/test/modules/http2/htdocs/cgi/ssi/test.html b/test/modules/http2/htdocs/cgi/ssi/test.html
new file mode 100644
index 0000000..1782358
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/test.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ <!--#include virtual="./include.inc"-->
+ hello<br>
+</body>
+</html>
diff --git a/test/modules/http2/htdocs/cgi/upload.py b/test/modules/http2/htdocs/cgi/upload.py
new file mode 100644
index 0000000..fa1e5d6
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/upload.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+import os
+import sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+# Test if the file was uploaded
+if 'file' in files:
+ fitem = files['file']
+ # strip leading path from file name to avoid directory traversal attacks
+ fname = os.path.basename(fitem.file_name)
+ fpath = f'{os.environ["DOCUMENT_ROOT"]}/files/{fname}'
+ fitem.save_to(fpath)
+ message = "The file %s was uploaded successfully" % (fname)
+ print("Status: 201 Created")
+ print("Content-Type: text/html")
+ print("Location: %s://%s/files/%s" % (os.environ["REQUEST_SCHEME"], os.environ["HTTP_HOST"], fname))
+ print("")
+ print("<html><body><p>%s</p></body></html>" % (message))
+
+elif 'remove' in forms:
+ remove = forms['remove']
+ try:
+ fname = os.path.basename(remove)
+ os.remove('./files/' + fname)
+ message = 'The file "' + fname + '" was removed successfully'
+ except OSError as e:
+ message = 'Error removing ' + fname + ': ' + e.strerror
+ status = '404 File Not Found'
+ print("Status: %s" % (status))
+ print("""
+Content-Type: text/html
+
+<html><body>
+<p>%s</p>
+</body></html>""" % (message))
+
+else:
+ message = '''\
+ Upload File<form method="POST" enctype="multipart/form-data">
+ <input type="file" name="file">
+ <button type="submit">Upload</button></form>
+ '''
+ print("Status: %s" % (status))
+ print("""\
+Content-Type: text/html
+
+<html><body>
+<p>%s</p>
+</body></html>""" % (message))
+
diff --git a/test/modules/http2/htdocs/cgi/xxx/test.json b/test/modules/http2/htdocs/cgi/xxx/test.json
new file mode 100644
index 0000000..ceafd0a
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/xxx/test.json
@@ -0,0 +1 @@
+{"name": "test.json"} \ No newline at end of file
diff --git a/test/modules/http2/htdocs/noh2/alive.json b/test/modules/http2/htdocs/noh2/alive.json
new file mode 100644
index 0000000..7b54893
--- /dev/null
+++ b/test/modules/http2/htdocs/noh2/alive.json
@@ -0,0 +1,5 @@
+{
+ "host" : "noh2",
+ "alive" : true
+}
+
diff --git a/test/modules/http2/htdocs/noh2/index.html b/test/modules/http2/htdocs/noh2/index.html
new file mode 100644
index 0000000..696068e
--- /dev/null
+++ b/test/modules/http2/htdocs/noh2/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>mod_h2 test site noh2</title>
+ </head>
+ <body>
+ <h1>mod_h2 test site noh2</h1>
+ </body>
+</html>
+
diff --git a/test/modules/http2/mod_h2test/mod_h2test.c b/test/modules/http2/mod_h2test/mod_h2test.c
new file mode 100644
index 0000000..f20b954
--- /dev/null
+++ b/test/modules/http2/mod_h2test/mod_h2test.c
@@ -0,0 +1,588 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <apr_optional.h>
+#include <apr_optional_hooks.h>
+#include <apr_strings.h>
+#include <apr_cstr.h>
+#include <apr_time.h>
+#include <apr_want.h>
+
+#include <httpd.h>
+#include <http_protocol.h>
+#include <http_request.h>
+#include <http_log.h>
+
+#include "mod_h2test.h"
+
+static void h2test_hooks(apr_pool_t *pool);
+
+AP_DECLARE_MODULE(h2test) = {
+ STANDARD20_MODULE_STUFF,
+ NULL, /* func to create per dir config */
+ NULL, /* func to merge per dir config */
+ NULL, /* func to create per server config */
+ NULL, /* func to merge per server config */
+ NULL, /* command handlers */
+ h2test_hooks,
+#if defined(AP_MODULE_FLAG_NONE)
+ AP_MODULE_FLAG_ALWAYS_MERGE
+#endif
+};
+
+#define SECS_PER_HOUR (60*60)
+#define SECS_PER_DAY (24*SECS_PER_HOUR)
+
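+/* Parse a duration string into an apr_interval_time_t (microseconds).
+ * Illustrative inputs: "30" (interpreted via def_unit, "s" by default),
+ * "500ms", "10min", "2h", "1d". Unrecognized units give APR_EGENERAL. */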
+static apr_status_t duration_parse(apr_interval_time_t *ptimeout, const char *value,
+ const char *def_unit)
+{
+ char *endp;
+ apr_int64_t n;
+
+ n = apr_strtoi64(value, &endp, 10);
+ if (errno) {
+ return errno;
+ }
+ if (!endp || !*endp) {
+ if (!def_unit) def_unit = "s";
+ }
+ else if (endp == value) {
+ return APR_EINVAL;
+ }
+ else {
+ def_unit = endp;
+ }
+
+ switch (*def_unit) {
+ case 'D':
+ case 'd':
+ *ptimeout = apr_time_from_sec(n * SECS_PER_DAY);
+ break;
+ case 's':
+ case 'S':
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n);
+ break;
+ case 'h':
+ case 'H':
+ /* Time is in hours */
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * SECS_PER_HOUR);
+ break;
+ case 'm':
+ case 'M':
+ switch (*(++def_unit)) {
+ /* Time is in milliseconds */
+ case 's':
+ case 'S':
+ *ptimeout = (apr_interval_time_t) n * 1000;
+ break;
+ /* Time is in minutes */
+ case 'i':
+ case 'I':
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * 60);
+ break;
+ default:
+ return APR_EGENERAL;
+ }
+ break;
+ default:
+ return APR_EGENERAL;
+ }
+ return APR_SUCCESS;
+}
+
+static int h2test_post_config(apr_pool_t *p, apr_pool_t *plog,
+ apr_pool_t *ptemp, server_rec *s)
+{
+ void *data = NULL;
+ const char *mod_h2_init_key = "mod_h2test_init_counter";
+
+ (void)plog;(void)ptemp;
+
+ apr_pool_userdata_get(&data, mod_h2_init_key, s->process->pool);
+ if ( data == NULL ) {
+ /* dry run */
+ apr_pool_userdata_set((const void *)1, mod_h2_init_key,
+ apr_pool_cleanup_null, s->process->pool);
+ return APR_SUCCESS;
+ }
+
+
+ return APR_SUCCESS;
+}
+
+static void h2test_child_init(apr_pool_t *pool, server_rec *s)
+{
+ (void)pool;
+ (void)s;
+}
+
+static int h2test_echo_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ const char *ct;
+ long l;
+ int i;
+ apr_time_t chunk_delay = 0;
+ apr_array_header_t *args = NULL;
+ apr_size_t blen, fail_after = 0;
+ int fail_requested = 0, error_bucket = 1;
+
+ if (strcmp(r->handler, "h2test-echo")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
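+    /* Query parameters understood here (see parsing below): id=<anything>
+     * (accepted and ignored), chunk_delay=<duration> sleeps between echoed
+     * chunks, fail_after=<bytes> truncates the echo after that many bytes
+     * and then fails the request. */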
+ if(r->args) {
+ args = apr_cstr_split(r->args, "&", 1, r->pool);
+ for(i = 0; i < args->nelts; ++i) {
+ char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+ s = strchr(arg, '=');
+ if(s) {
+ *s = '\0';
+ val = s + 1;
+ if(!strcmp("id", arg)) {
+ /* accepted, but not processed */
+ continue;
+ }
+ else if(!strcmp("chunk_delay", arg)) {
+ rv = duration_parse(&chunk_delay, val, "s");
+ if(APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ else if(!strcmp("fail_after", arg)) {
+ fail_after = (int)apr_atoi64(val);
+ if(fail_after >= 0) {
+ fail_requested = 1;
+ continue;
+ }
+ }
+ }
+ ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
+ "understood: '%s' in %s",
+ arg, r->args);
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing request");
+ r->status = 200;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ct = apr_table_get(r->headers_in, "content-type");
+ ap_set_content_type(r, ct? ct : "application/octet-stream");
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ /* copy any request body into the response */
+ if ((rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK))) goto cleanup;
+ if (ap_should_client_block(r)) {
+ while (0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "echo_handler: copying %ld bytes from request body", l);
+ blen = (apr_size_t)l;
+ if (fail_requested) {
+ if (blen > fail_after) {
+ blen = fail_after;
+ }
+ fail_after -= blen;
+ }
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, blen);
+ if (APR_SUCCESS != rv) goto cleanup;
+ if (chunk_delay) {
+ apr_sleep(chunk_delay);
+ }
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "echo_handler: passed %ld bytes from request body", l);
+ if (fail_requested && fail_after == 0) {
+ rv = APR_EINVAL;
+ goto cleanup;
+ }
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: request read");
+
+ if (r->trailers_in && !apr_is_empty_table(r->trailers_in)) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE2, 0, r,
+ "echo_handler: seeing incoming trailers");
+ apr_table_setn(r->trailers_out, "h2test-trailers-in",
+ apr_itoa(r->pool, 1));
+ }
+
+ rv = ap_pass_brigade(r->output_filters, bb);
+
+cleanup:
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: request handled");
+ return OK;
+ }
+ else if (error_bucket) {
+ int status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+ b = ap_bucket_error_create(status, NULL, r->pool, c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_pass_brigade(r->output_filters, bb);
+ }
+ else {
+ /* no way to know what type of error occurred */
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "h2test_echo_handler failed");
+ return AP_FILTER_ERROR;
+ }
+ return DECLINED;
+}
+
+static int h2test_delay_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ int i, chunks = 3;
+ long l;
+ apr_time_t delay = 0;
+
+ if (strcmp(r->handler, "h2test-delay")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if (r->args) {
+ rv = duration_parse(&delay, r->args, "s");
+ if (APR_SUCCESS != rv) {
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "delay_handler: processing request, %ds delay",
+ (int)apr_time_sec(delay));
+ r->status = 200;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ap_set_content_type(r, "application/octet-stream");
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ /* copy any request body into the response */
+ if ((rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK))) goto cleanup;
+ if (ap_should_client_block(r)) {
+ do {
+ l = ap_get_client_block(r, &buffer[0], sizeof(buffer));
+ if (l > 0) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "delay_handler: reading %ld bytes from request body", l);
+ }
+ } while (l > 0);
+ if (l < 0) {
+ return AP_FILTER_ERROR;
+ }
+ }
+
+ memset(buffer, 0, sizeof(buffer));
+ l = sizeof(buffer);
+ for (i = 0; i < chunks; ++i) {
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "delay_handler: passed %ld bytes as response body", l);
+ if (delay) {
+ apr_sleep(delay);
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "delay_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+                  "delay_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ return OK;
+ }
+ return AP_FILTER_ERROR;
+}
+
+static int h2test_trailer_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ long l;
+ int body_len = 0;
+
+ if (strcmp(r->handler, "h2test-trailer")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if (r->args) {
+ body_len = (int)apr_atoi64(r->args);
+ if (body_len < 0) body_len = 0;
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "trailer_handler: processing request, %d body length",
+ body_len);
+ r->status = 200;
+ r->clength = body_len;
+ ap_set_content_length(r, body_len);
+
+ ap_set_content_type(r, "application/octet-stream");
+ apr_table_mergen(r->headers_out, "Trailer", "trailer-content-length");
+ apr_table_set(r->trailers_out, "trailer-content-length",
+ apr_psprintf(r->pool, "%d", body_len));
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ memset(buffer, 0, sizeof(buffer));
+ while (body_len > 0) {
+ l = (sizeof(buffer) > body_len)? body_len : sizeof(buffer);
+ body_len -= l;
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "trailer_handler: passed %ld bytes as response body", l);
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "trailer_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+                  "trailer_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ return OK;
+ }
+ return AP_FILTER_ERROR;
+}
+
+static int status_from_str(const char *s, apr_status_t *pstatus)
+{
+ if (!strcmp("timeout", s)) {
+ *pstatus = APR_TIMEUP;
+ return 1;
+ }
+ else if (!strcmp("reset", s)) {
+ *pstatus = APR_ECONNRESET;
+ return 1;
+ }
+ return 0;
+}
+
+static int h2test_error_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ int i, chunks = 3, error_bucket = 1;
+ long l;
+ apr_time_t delay = 0, body_delay = 0;
+ apr_array_header_t *args = NULL;
+ int http_status = 200;
+ apr_status_t error = APR_SUCCESS, body_error = APR_SUCCESS;
+
+ if (strcmp(r->handler, "h2test-error")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
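+    /* Query parameters understood here (see parsing below): status=<code>,
+     * error=timeout|reset (fail before any body), error_bucket=0|1,
+     * body_error=timeout|reset (fail while sending the body),
+     * delay=<duration> before the response, body_delay=<duration>
+     * between body chunks. */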
+ if (r->args) {
+ args = apr_cstr_split(r->args, "&", 1, r->pool);
+ for (i = 0; i < args->nelts; ++i) {
+ char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+ s = strchr(arg, '=');
+ if (s) {
+ *s = '\0';
+ val = s + 1;
+ if (!strcmp("status", arg)) {
+ http_status = (int)apr_atoi64(val);
+                    if (http_status > 0) {
+ continue;
+ }
+ }
+ else if (!strcmp("error", arg)) {
+ if (status_from_str(val, &error)) {
+ continue;
+ }
+ }
+ else if (!strcmp("error_bucket", arg)) {
+ error_bucket = (int)apr_atoi64(val);
+                    if (error_bucket >= 0) {
+ continue;
+ }
+ }
+ else if (!strcmp("body_error", arg)) {
+ if (status_from_str(val, &body_error)) {
+ continue;
+ }
+ }
+ else if (!strcmp("delay", arg)) {
+ rv = duration_parse(&delay, val, "s");
+ if (APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ else if (!strcmp("body_delay", arg)) {
+ rv = duration_parse(&body_delay, val, "s");
+ if (APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ }
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: "
+ "did not understand '%s'", arg);
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: processing request, %s",
+ r->args? r->args : "(no args)");
+ r->status = http_status;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ap_set_content_type(r, "application/octet-stream");
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+
+ if (delay) {
+ apr_sleep(delay);
+ }
+ if (error != APR_SUCCESS) {
+ return ap_map_http_request_error(error, HTTP_BAD_REQUEST);
+ }
+ /* flush response */
+ b = apr_bucket_flush_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+
+ memset(buffer, 'X', sizeof(buffer));
+ l = sizeof(buffer);
+ for (i = 0; i < chunks; ++i) {
+ if (body_delay) {
+ apr_sleep(body_delay);
+ }
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "error_handler: passed %ld bytes as response body", l);
+ if (body_error != APR_SUCCESS) {
+ rv = body_error;
+ goto cleanup;
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "error_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+ "error_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS) {
+ return OK;
+ }
+ if (error_bucket) {
+ http_status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+ b = ap_bucket_error_create(http_status, NULL, r->pool, c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_pass_brigade(r->output_filters, bb);
+ }
+ return AP_FILTER_ERROR;
+}
+
+/* Install this module into the apache2 infrastructure.
+ */
+static void h2test_hooks(apr_pool_t *pool)
+{
+ static const char *const mod_h2[] = { "mod_h2.c", NULL};
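+ /* predecessor list: our post_config hook is to run after mod_h2's */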
+
+ ap_log_perror(APLOG_MARK, APLOG_TRACE1, 0, pool, "installing hooks and handlers");
+
+ /* Run once after configuration is set, but before mpm children initialize.
+ */
+ ap_hook_post_config(h2test_post_config, mod_h2, NULL, APR_HOOK_MIDDLE);
+
+ /* Run once after a child process has been created.
+ */
+ ap_hook_child_init(h2test_child_init, NULL, NULL, APR_HOOK_MIDDLE);
+
+ /* test h2 handlers */
+ ap_hook_handler(h2test_echo_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_delay_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_trailer_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_error_handler, NULL, NULL, APR_HOOK_MIDDLE);
+}
+
diff --git a/test/modules/http2/mod_h2test/mod_h2test.h b/test/modules/http2/mod_h2test/mod_h2test.h
new file mode 100644
index 0000000..a886d29
--- /dev/null
+++ b/test/modules/http2/mod_h2test/mod_h2test.h
@@ -0,0 +1,21 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MOD_H2TEST_H__
+#define __MOD_H2TEST_H__
+
+
+#endif
diff --git a/test/modules/http2/test_001_httpd_alive.py b/test/modules/http2/test_001_httpd_alive.py
new file mode 100644
index 0000000..b5708d2
--- /dev/null
+++ b/test/modules/http2/test_001_httpd_alive.py
@@ -0,0 +1,22 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestBasicAlive:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # we expect to see the document from the generic server
+ def test_h2_001_01(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.exit_code == 0, r.stderr + r.stdout
+ assert r.response["json"]
+ assert r.response["json"]["alive"] is True
+ assert r.response["json"]["host"] == "test1"
+
diff --git a/test/modules/http2/test_002_curl_basics.py b/test/modules/http2/test_002_curl_basics.py
new file mode 100644
index 0000000..91be772
--- /dev/null
+++ b/test/modules/http2/test_002_curl_basics.py
@@ -0,0 +1,71 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestCurlBasics:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_test1()
+ conf.add_vhost_test2()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that we see the correct documents when using the test1 server name over http:
+ def test_h2_002_01(self, env):
+ url = env.mkurl("http", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["protocol"]
+ assert r.response["json"]["alive"] is True
+ assert r.response["json"]["host"] == "test1"
+
+ # check that we see the correct documents when using the test1 server name over https:
+ def test_h2_002_02(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["alive"] is True
+ assert "test1" == r.response["json"]["host"]
+ assert r.response["header"]["content-type"] == "application/json"
+
+ # enforce HTTP/1.1
+ def test_h2_002_03(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--http1.1"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+
+ # enforce HTTP/2
+ def test_h2_002_04(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--http2"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/2"
+
+ # default is HTTP/2 on this host
+ def test_h2_002_04b(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/2"
+ assert r.response["json"]["host"] == "test1"
+
+ # although, without ALPN, we cannot select it
+ def test_h2_002_05(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--no-alpn"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+ assert r.response["json"]["host"] == "test1"
+
+ # default is HTTP/1.1 on the other
+ def test_h2_002_06(self, env):
+ url = env.mkurl("https", "test2", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+ assert r.response["json"]["host"] == "test2"
diff --git a/test/modules/http2/test_003_get.py b/test/modules/http2/test_003_get.py
new file mode 100644
index 0000000..572c4fb
--- /dev/null
+++ b/test/modules/http2/test_003_get.py
@@ -0,0 +1,265 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestGet:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ # check SSL environment variables from CGI script
+ def test_h2_003_01(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ r = env.curl_get(url, 5, options=["--tlsv1.2"])
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == "on"
+ tls_version = r.response["json"]["ssl_protocol"]
+ assert tls_version in ["TLSv1.2", "TLSv1.3"]
+ assert r.response["json"]["h2"] == "on"
+ assert r.response["json"]["h2push"] == "off"
+
+ r = env.curl_get(url, 5, options=["--http1.1", "--tlsv1.2"])
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert "on" == r.response["json"]["https"]
+ tls_version = r.response["json"]["ssl_protocol"]
+ assert tls_version in ["TLSv1.2", "TLSv1.3"]
+ assert "" == r.response["json"]["h2"]
+ assert "" == r.response["json"]["h2push"]
+
+ # retrieve a html file from the server and compare it to its source
+ def test_h2_003_02(self, env):
+ with open(env.htdocs_src("test1/index.html"), mode='rb') as file:
+ src = file.read()
+
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert src == r.response["body"]
+
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5, options=["--http1.1"])
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["protocol"]
+ assert src == r.response["body"]
+
+ # retrieve chunked content from a cgi script
+ def check_necho(self, env, n, text):
+ url = env.mkurl("https", "cgi", "/necho.py")
+ r = env.curl_get(url, 5, options=["-F", f"count={n}", "-F", f"text={text}"])
+ assert r.response["status"] == 200
+ exp = ""
+ for i in range(n):
+ exp += text + "\n"
+ assert exp == r.response["body"].decode('utf-8')
+
+ def test_h2_003_10(self, env):
+ self.check_necho(env, 10, "0123456789")
+
+ def test_h2_003_11(self, env):
+ self.check_necho(env, 100, "0123456789")
+
+ def test_h2_003_12(self, env):
+ self.check_necho(env, 1000, "0123456789")
+
+ def test_h2_003_13(self, env):
+ self.check_necho(env, 10000, "0123456789")
+
+ def test_h2_003_14(self, env):
+ self.check_necho(env, 100000, "0123456789")
+
+ # github issue #126
+ def test_h2_003_20(self, env):
+ url = env.mkurl("https", "test1", "/006/")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ body = r.response["body"].decode('utf-8')
+ # our doctype varies between branches and over time, let's not compare it
+ body = re.sub(r'^<!DOCTYPE[^>]+>', '', body)
+ assert '''
+<html>
+ <head>
+ <title>Index of /006</title>
+ </head>
+ <body>
+<h1>Index of /006</h1>
+<ul><li><a href="/"> Parent Directory</a></li>
+<li><a href="006.css"> 006.css</a></li>
+<li><a href="006.js"> 006.js</a></li>
+<li><a href="header.html"> header.html</a></li>
+</ul>
+</body></html>
+''' == body
+
+ # github issue #133
+ def clean_header(self, s):
+ s = re.sub(r'\r\n', '\n', s, flags=re.MULTILINE)
+ s = re.sub(r'^date:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^server:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^last-modified:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^etag:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^vary:.*\n', '', s, flags=re.MULTILINE)
+ return re.sub(r'^accept-ranges:.*\n', '', s, flags=re.MULTILINE)
+
+ def test_h2_003_21(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5, options=["-I"])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ s = self.clean_header(r.response["body"].decode('utf-8'))
+ assert '''HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+''' == s
+
+ r = env.curl_get(url, 5, options=["-I", url])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ s = self.clean_header(r.response["body"].decode('utf-8'))
+ assert '''HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+''' == s
+
+ # test conditionals: if-modified-since
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_30(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "last-modified" in h
+ lastmod = h["last-modified"]
+ r = env.curl_get(url, 5, options=['-H', ("if-modified-since: %s" % lastmod)])
+ assert 304 == r.response["status"]
+
+ # test conditionals: if-etag
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_31(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "etag" in h
+ etag = h["etag"]
+ r = env.curl_get(url, 5, options=['-H', ("if-none-match: %s" % etag)])
+ assert 304 == r.response["status"]
+
+ # test that various response body lengths are served correctly
+ def test_h2_003_40(self, env):
+ n = 1001
+ while n <= 1025024:
+ url = env.mkurl("https", "cgi", f"/mnot164.py?count={n}&text=X")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert n == len(r.response["body"])
+ n *= 2
+
+ # test that various response body lengths are served correctly
+ @pytest.mark.parametrize("n", [
+ 0, 1, 1291, 1292, 80000, 80123, 81087, 98452
+ ])
+ def test_h2_003_41(self, env, n):
+ url = env.mkurl("https", "cgi", f"/mnot164.py?count={n}&text=X")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert n == len(r.response["body"])
+
+ # test ranges
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_50(self, env, path, repeat):
+ # check that the resource supports ranges and we see its raw content-length
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "accept-ranges" in h
+ assert "bytes" == h["accept-ranges"]
+ assert "content-length" in h
+ clen = h["content-length"]
+ # get the first 1024 bytes of the resource, 206 status, but content-length as original
+ r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+ assert 206 == r.response["status"]
+ assert "HTTP/2" == r.response["protocol"]
+ assert 1024 == len(r.response["body"])
+ assert "content-length" in h
+ assert clen == h["content-length"]
+
+ # use an invalid scheme
+ def test_h2_003_51(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:scheme: invalid"]
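+ # nghttp's -H permits overriding the ':scheme' request pseudo header directly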
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 400
+
+ # use a differing scheme, but one that is acceptable
+ def test_h2_003_52(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:scheme: http"]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 200
+
+ # Test that we get proper `Date` and `Server` headers on responses
+ def test_h2_003_60(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 200
+ assert 'date' in r.response['header']
+ assert 'server' in r.response['header']
+
+ # let's do some error tests
+ def test_h2_003_70(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_003_71(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=reset")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_003_72(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
diff --git a/test/modules/http2/test_004_post.py b/test/modules/http2/test_004_post.py
new file mode 100644
index 0000000..295f989
--- /dev/null
+++ b/test/modules/http2/test_004_post.py
@@ -0,0 +1,201 @@
+import difflib
+import email.parser
+import inspect
+import json
+import os
+import re
+import sys
+import time
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPost:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestPost._local_dir = os.path.dirname(inspect.getfile(TestPost))
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'<Directory {env.server_docs_dir}/cgi/xxx>',
+ ' RewriteEngine On',
+ ' RewriteRule .* /proxy/echo.py [QSA]',
+ '</Directory>',
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def local_src(self, fname):
+ return os.path.join(TestPost._local_dir, fname)
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0, f"{r}"
+ assert 200 <= r.response["status"] < 300
+
+ r2 = env.curl_get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
+ def test_h2_004_01(self, env):
+ self.curl_upload_and_verify(env, "data-1k", ["-vvv", "--http1.1"])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2"])
+
+ def test_h2_004_02(self, env):
+ self.curl_upload_and_verify(env, "data-10k", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-10k", ["--http2"])
+
+ def test_h2_004_03(self, env):
+ self.curl_upload_and_verify(env, "data-100k", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-100k", ["--http2"])
+
+ def test_h2_004_04(self, env):
+ self.curl_upload_and_verify(env, "data-1m", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-1m", ["--http2"])
+
+ def test_h2_004_05(self, env):
+ self.curl_upload_and_verify(env, "data-1k", ["-v", "--http1.1", "-H", "Expect: 100-continue"])
+ self.curl_upload_and_verify(env, "data-1k", ["-v", "--http2", "-H", "Expect: 100-continue"])
+
+ def test_h2_004_06(self, env):
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
+
+ @pytest.mark.parametrize("name, value", [
+ ("HTTP2", "on"),
+ ("H2PUSH", "off"),
+ ("H2_PUSHED", ""),
+ ("H2_PUSHED_ON", ""),
+ ("H2_STREAM_ID", "1"),
+ ("H2_STREAM_TAG", r'\d+-\d+-1'),
+ ])
+ def test_h2_004_07(self, env, name, value):
+ url = env.mkurl("https", "cgi", "/env.py")
+ r = env.curl_post_value(url, "name", name)
+ assert r.exit_code == 0
+ assert r.response["status"] == 200
+ m = re.match("{0}=(.*)".format(name), r.response["body"].decode('utf-8'))
+ assert m
+ assert re.match(value, m.group(1))
+
+ # POST some data using nghttp and see it echo'ed properly back
+ def nghttp_post_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert 'request-length' in r.response["header"]
+ assert int(r.response["header"]['request-length']) == len(src)
+ if len(r.response["body"]) != len(src):
+ sys.stderr.writelines(difflib.unified_diff(
+ src.decode().splitlines(True),
+ r.response["body"].decode().splitlines(True),
+ fromfile='source',
+ tofile='response'
+ ))
+ assert len(r.response["body"]) == len(src)
+ assert r.response["body"] == src, f"expected '{src}', got '{r.response['body']}'"
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_21(self, env, name):
+ self.nghttp_post_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_004_22(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, ["--no-content-length"])
+
+ # upload and GET again using nghttp, compare to original content
+ def nghttp_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+ assert 'location' in r.response["header"], f'{r}'
+ assert r.response["header"]["location"]
+
+ r2 = env.nghttp().get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"], f'GET {r.response["header"]["location"]}'
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_23(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_24(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, ["--expect-continue"])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_25(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
+
+ def test_h2_004_40(self, env):
+ # echo content using h2test_module "echo" handler
+ def post_and_verify(fname, options=None):
+ url = env.mkurl("https", "cgi", "/h2test/echo")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+
+ ct = r.response["header"]["content-type"]
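+ # email.parser wants a complete MIME document, so prepend the response
+ # content-type as a header before parsing the multipart body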
+ mail_hd = "Content-Type: " + ct + "\r\nMIME-Version: 1.0\r\n\r\n"
+ mime_msg = mail_hd.encode() + r.response["body"]
+ # this MIME API is from hell
+ body = email.parser.BytesParser().parsebytes(mime_msg)
+ assert body
+ assert body.is_multipart()
+ filepart = None
+ for part in body.walk():
+ if fname == part.get_filename():
+ filepart = part
+ assert filepart
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == filepart.get_payload(decode=True)
+
+ post_and_verify("data-1k", [])
+
+ def test_h2_004_41(self, env):
+ # reproduce PR66597, double chunked encoding on redirects
+ url = env.mkurl("https", "cgi", "/xxx/test.json")
+ r = env.curl_post_data(url, data="0123456789", options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
+ r = env.curl_post_data(url, data="0123456789", options=["-H", "Content-Length:"])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
diff --git a/test/modules/http2/test_005_files.py b/test/modules/http2/test_005_files.py
new file mode 100644
index 0000000..e761836
--- /dev/null
+++ b/test/modules/http2/test_005_files.py
@@ -0,0 +1,48 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
+ t110 = ""
+ for _ in range(11):
+ t110 += "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFiles:
+
+ URI_PATHS = []
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+ uris = []
+ file_count = 32
+ file_sizes = [1, 10, 100, 10000]
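+ # sizes are in kBytes: mk_text_file writes 128 bytes per line, 8 lines per kByte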
+ for i in range(file_count):
+ fsize = file_sizes[i % len(file_sizes)]
+ if fsize is None:
+ raise Exception("file sizes?: {0} {1}".format(i, fsize))
+ fname = "{0}-{1}k.txt".format(i, fsize)
+ mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+ self.URI_PATHS.append(f"/files/{fname}")
+
+ H2Conf(env).add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ def test_h2_005_01(self, env):
+ url = env.mkurl("https", "cgi", self.URI_PATHS[2])
+ r = env.curl_get(url)
+ assert r.response, r.stderr + r.stdout
+ assert r.response["status"] == 200
diff --git a/test/modules/http2/test_006_assets.py b/test/modules/http2/test_006_assets.py
new file mode 100644
index 0000000..778314e
--- /dev/null
+++ b/test/modules/http2/test_006_assets.py
@@ -0,0 +1,75 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestAssets:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # single page without any assets
+ def test_h2_006_01(self, env):
+ url = env.mkurl("https", "test1", "/001.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 1 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "251", "path": "/001.html"}
+ ]
+
+ # single image without any assets
+ def test_h2_006_02(self, env):
+ url = env.mkurl("https", "test1", "/002.jpg")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 1 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "88K", "path": "/002.jpg"}
+ ]
+
+ # gophertiles, yea!
+ def test_h2_006_03(self, env):
+ # create the tile files we originally had checked in
+ exp_assets = [
+ {"status": 200, "size": "10K", "path": "/004.html"},
+ {"status": 200, "size": "742", "path": "/004/gophertiles.jpg"},
+ ]
+ for i in range(2, 181):
+ with open(f"{env.server_docs_dir}/test1/004/gophertiles_{i:03d}.jpg", "w") as fd:
+ fd.write("0123456789\n")
+ exp_assets.append(
+ {"status": 200, "size": "11", "path": f"/004/gophertiles_{i:03d}.jpg"},
+ )
+
+ url = env.mkurl("https", "test1", "/004.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 181 == len(r.assets)
+ assert r.assets == exp_assets
+
+ # page with js and css
+ def test_h2_006_04(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 3 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "543", "path": "/006.html"},
+ {"status": 200, "size": "216", "path": "/006/006.css"},
+ {"status": 200, "size": "839", "path": "/006/006.js"}
+ ]
+
+ # page with image, try different window size
+ def test_h2_006_05(self, env):
+ url = env.mkurl("https", "test1", "/003.html")
+ r = env.nghttp().assets(url, options=["--window-bits=24", "-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 2 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "316", "path": "/003.html"},
+ {"status": 200, "size": "88K", "path": "/003/003_img.jpg"}
+ ]
diff --git a/test/modules/http2/test_007_ssi.py b/test/modules/http2/test_007_ssi.py
new file mode 100644
index 0000000..97e38df
--- /dev/null
+++ b/test/modules/http2/test_007_ssi.py
@@ -0,0 +1,43 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestSSI:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ 'AddOutputFilter INCLUDES .html',
+ '<Location "/ssi">',
+ ' Options +Includes',
+ '</Location>',
+ ],
+ })
+ conf.add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ # SSI test from https://bz.apache.org/bugzilla/show_bug.cgi?id=66483
+ def test_h2_007_01(self, env):
+ url = env.mkurl("https", "cgi", "/ssi/test.html")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.stdout == '''<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ Hello include<br>
+
+ hello<br>
+</body>
+</html>
+''', f'{r}'
+
diff --git a/test/modules/http2/test_008_ranges.py b/test/modules/http2/test_008_ranges.py
new file mode 100644
index 0000000..4dcdcc8
--- /dev/null
+++ b/test/modules/http2/test_008_ranges.py
@@ -0,0 +1,189 @@
+import inspect
+import json
+import os
+import re
+import time
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRanges:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestRanges.LOGFILE = os.path.join(env.server_logs_dir, "test_008")
+ TestRanges.SRCDIR = os.path.dirname(inspect.getfile(TestRanges))
+ if os.path.isfile(TestRanges.LOGFILE):
+ os.remove(TestRanges.LOGFILE)
+ destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
+ env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
+ conf = H2Conf(env=env, extras={
+ 'base': [
+ 'CustomLog logs/test_008 combined'
+ ],
+ f'test1.{env.http_tld}': [
+ '<Location /status>',
+ ' SetHandler server-status',
+ '</Location>',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_h2_008_01(self, env):
+ # issue: #203
+ resource = "data-1k"
+ full_length = 1000
+ chunk = 200
+ self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01full")
+ r = env.curl_get(url, 5, options=["--http2"])
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01range")
+ r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ detected = {}
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /files/{resource}?01full HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == full_length
+ assert e['bytes_tx_O'] > full_length
+ detected['h2full'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == chunk
+ assert e['bytes_tx_O'] > chunk
+ assert e['bytes_tx_O'] < chunk + 256 # response + frame stuff
+ detected['h2range'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/1.1':
+ assert e['bytes_rx_I'] > 0 # input bytes received
+ assert e['bytes_resp_B'] == chunk # response bytes sent (payload)
+ assert e['bytes_tx_O'] > chunk # output bytes sent
+ detected['h1range'] = 1
+ assert 'h1range' in detected, f'HTTP/1.1 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2range' in detected, f'HTTP/2 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2full' in detected, f'HTTP/2 full request not found in {TestRanges.LOGFILE}'
+
+ def test_h2_008_02(self, env, repeat):
+ path = '/002.jpg'
+ res_len = 90364
+ url = env.mkurl("https", "test1", f'{path}?02full')
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "accept-ranges" in h
+ assert "bytes" == h["accept-ranges"]
+ assert "content-length" in h
+ clen = h["content-length"]
+ assert int(clen) == res_len
+ # get the first 1024 bytes of the resource, 206 status, but content-length as original
+ url = env.mkurl("https", "test1", f'{path}?02range')
+ r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+ assert 206 == r.response["status"]
+ assert "HTTP/2" == r.response["protocol"]
+ assert 1024 == len(r.response["body"])
+ assert "content-length" in h
+ assert clen == h["content-length"]
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?02range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 1024
+ assert e['bytes_tx_O'] > 1024
+ assert e['bytes_tx_O'] < 1024 + 256 # response and frame stuff
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # send a paced curl download that aborts in the middle of the transfer
+ def test_h2_008_03(self, env, repeat):
+ path = '/data-100m'
+ url = env.mkurl("https", "test1", f'{path}?03broken')
+ r = env.curl_get(url, 5, options=[
+ '--limit-rate', '2k', '-m', '2'
+ ])
+ assert r.exit_code != 0, f'{r}'
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?03broken HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 100*1024*1024
+ assert e['bytes_tx_O'] > 1024
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # test server-status reporting
+ # see <https://bz.apache.org/bugzilla/show_bug.cgi?id=66801>
+ def test_h2_008_04(self, env, repeat):
+ path = '/data-100m'
+ assert env.apache_restart() == 0
+ stats = self.get_server_status(env)
+ # we see the server uptime check request here
+ assert 1 == int(stats['Total Accesses']), f'{stats}'
+ assert 1 == int(stats['Total kBytes']), f'{stats}'
+ count = 10
+ url = env.mkurl("https", "test1", f'/data-100m?[0-{count-1}]')
+ r = env.curl_get(url, 5, options=['--http2', '-H', f'Range: bytes=0-{4096}'])
+ assert r.exit_code == 0, f'{r}'
+ for _ in range(10):
+ # a slow cpu might not succeed on the first read
+ stats = self.get_server_status(env)
+ if (4*count)+1 <= int(stats['Total kBytes']):
+ break
+ time.sleep(0.1)
+ # amount reported is larger than (count * 4k), the net payload
+ # but does not exceed an additional 4k
+ assert (4*count)+1 <= int(stats['Total kBytes'])
+ assert (4*(count+1))+1 > int(stats['Total kBytes'])
+ # total requests is now at 1 from the start, plus the stat check,
+ # plus the count transfers we did.
+ assert (2+count) == int(stats['Total Accesses'])
+
+ def get_server_status(self, env):
+ status_url = env.mkurl("https", "test1", '/status?auto')
+ r = env.curl_get(status_url, 5)
+ assert r.exit_code == 0, f'{r}'
+ stats = {}
+ for line in r.stdout.splitlines():
+ m = re.match(r'([^:]+): (.*)', line)
+ if m:
+ stats[m.group(1)] = m.group(2)
+ return stats
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0, f"{r}"
+ assert 200 <= r.response["status"] < 300
+
+ r2 = env.curl_get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(os.path.join(TestRanges.SRCDIR, fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
diff --git a/test/modules/http2/test_009_timing.py b/test/modules/http2/test_009_timing.py
new file mode 100644
index 0000000..2c62bb0
--- /dev/null
+++ b/test/modules/http2/test_009_timing.py
@@ -0,0 +1,74 @@
+import inspect
+import json
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestTiming:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestTiming.LOGFILE = os.path.join(env.server_logs_dir, "test_009")
+ if os.path.isfile(TestTiming.LOGFILE):
+ os.remove(TestTiming.LOGFILE)
+ conf = H2Conf(env=env)
+ conf.add([
+ "CustomLog logs/test_009 combined"
+ ])
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that we get a positive time_taken reported on a simple GET
+ def test_h2_009_01(self, env):
+ path = '/002.jpg'
+ url = env.mkurl("https", "test1", f'{path}?01')
+ args = [
+ env.h2load, "-n", "1", "-c", "1", "-m", "1",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={url}", url
+ ]
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?01 HTTP/2.0':
+ assert e['time_taken'] > 0
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
+
+ # test issue #253, where time_taken in a keepalive situation is not
+ # reported until the next request arrives
+ def test_h2_009_02(self, env):
+ baseurl = env.mkurl("https", "test1", '/')
+ tscript = os.path.join(env.gen_dir, 'h2load-timing-009_02')
+ with open(tscript, 'w') as fd:
+ fd.write('\n'.join([
+ f'0.0\t/002.jpg?02a', # 1st request right away
+ f'1000.0\t/002.jpg?02b', # 2nd a second later
+ ]))
+ args = [
+ env.h2load,
+ f'--timing-script-file={tscript}',
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={baseurl}"
+ ]
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /002.jpg?02a HTTP/2.0':
+ assert e['time_taken'] > 0
+ assert e['time_taken'] < 500 * 1000, f'time for 1st request not reported correctly'
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
diff --git a/test/modules/http2/test_100_conn_reuse.py b/test/modules/http2/test_100_conn_reuse.py
new file mode 100644
index 0000000..3ebac24
--- /dev/null
+++ b/test/modules/http2/test_100_conn_reuse.py
@@ -0,0 +1,57 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestConnReuse:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_noh2().add_vhost_test1().add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ # make sure the protocol selection on the different hosts works as expected
+ def test_h2_100_01(self, env):
+ # this host defaults to h2, but we can request h1
+ url = env.mkurl("https", "cgi", "/hello.py")
+ assert "2" == env.curl_protocol_version( url )
+ assert "1.1" == env.curl_protocol_version( url, options=[ "--http1.1" ] )
+
+ # this host does not enable h2; it always falls back to h1
+ url = env.mkurl("https", "noh2", "/hello.py")
+ assert "1.1" == env.curl_protocol_version( url )
+ assert "1.1" == env.curl_protocol_version( url, options=[ "--http2" ] )
+
+ # access a ServerAlias, after using ServerName in SNI
+ def test_h2_100_02(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("cgi-alias.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=["-H", f"Host: {hostname}"])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert hostname == r.response["json"]["host"]
+
+ # access another vhost, after using ServerName in SNI, that uses the same SSL setup
+ def test_h2_100_03(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ hostname = ("test1.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert "text/html" == r.response["header"]["content-type"]
+
+ # access another vhost, after using ServerName in SNI,
+ # that has a different SSL certificate. This triggers a 421 (misdirected request) response.
+ def test_h2_100_04(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("noh2.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert 421 == r.response["status"]
+
+ # access an unknown vhost, after using ServerName in SNI
+ def test_h2_100_05(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("unknown.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert 421 == r.response["status"]
diff --git a/test/modules/http2/test_101_ssl_reneg.py b/test/modules/http2/test_101_ssl_reneg.py
new file mode 100644
index 0000000..528002f
--- /dev/null
+++ b/test/modules/http2/test_101_ssl_reneg.py
@@ -0,0 +1,138 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(H2TestEnv.get_ssl_module() != "mod_ssl", reason="only for mod_ssl")
+class TestSslRenegotiation:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ domain = f"ssl.{env.http_tld}"
+ conf = H2Conf(env, extras={
+ 'base': [
+ "SSLCipherSuite ECDHE-RSA-AES256-GCM-SHA384",
+ f"<Directory \"{env.server_dir}/htdocs/ssl-client-verify\">",
+ " Require all granted",
+ " SSLVerifyClient require",
+ " SSLVerifyDepth 0",
+ "</Directory>"
+ ],
+ domain: [
+ "Protocols h2 http/1.1",
+ "<Location /renegotiate/cipher>",
+ " SSLCipherSuite ECDHE-RSA-CHACHA20-POLY1305",
+ "</Location>",
+ "<Location /renegotiate/err-doc-cipher>",
+ " SSLCipherSuite ECDHE-RSA-CHACHA20-POLY1305",
+ " ErrorDocument 403 /forbidden.html",
+ "</Location>",
+ "<Location /renegotiate/verify>",
+ " SSLVerifyClient require",
+ "</Location>",
+ f"<Directory \"{env.server_dir}/htdocs/sslrequire\">",
+ " SSLRequireSSL",
+ "</Directory>",
+ f"<Directory \"{env.server_dir}/htdocs/requiressl\">",
+ " Require ssl",
+ "</Directory>",
+ ]})
+ conf.add_vhost(domains=[domain], port=env.https_port,
+ doc_root=f"{env.server_dir}/htdocs")
+ conf.install()
+ # the dir needs to exist for the configuration to have effect
+ env.mkpath("%s/htdocs/ssl-client-verify" % env.server_dir)
+ env.mkpath("%s/htdocs/renegotiate/cipher" % env.server_dir)
+ env.mkpath("%s/htdocs/sslrequire" % env.server_dir)
+ env.mkpath("%s/htdocs/requiressl" % env.server_dir)
+ assert env.apache_restart() == 0
+
+ # access a resource with SSL renegotiation, using HTTP/1.1
+ def test_h2_101_01(self, env):
+ url = env.mkurl("https", "ssl", "/renegotiate/cipher/")
+ r = env.curl_get(url, options=["-v", "--http1.1", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 == r.exit_code, f"{r}"
+ assert r.response
+ assert 403 == r.response["status"]
+
+ # try to renegotiate the cipher, should fail with correct code
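+ # (HTTP/2 forbids TLS renegotiation, see RFC 7540 section 9.2.1, so the server
+ # answers with the HTTP_1_1_REQUIRED error code, 'err 13' in the check below)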
+ def test_h2_101_02(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/cipher/")
+ r = env.curl_get(url, options=[
+ "-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
+ ])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # try to renegotiate a client certificate from Location
+ # needs to fail with correct code
+ def test_h2_101_03(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/verify/")
+ r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # try to renegotiate a client certificate from Directory
+ # needs to fail with correct code
+ def test_h2_101_04(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/ssl-client-verify/index.html")
+ r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 != r.exit_code, f"{r}"
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # make 10 requests on the same connection, none should produce a status code
+ # reported by erki@example.ee
+ def test_h2_101_05(self, env):
+ r = env.run([env.h2load, "-n", "10", "-c", "1", "-m", "1", "-vvvv",
+ f"{env.https_base_url}/ssl-client-verify/index.html"])
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert 10 == r.results["h2load"]["requests"]["total"]
+ assert 10 == r.results["h2load"]["requests"]["started"]
+ assert 10 == r.results["h2load"]["requests"]["done"]
+ assert 0 == r.results["h2load"]["requests"]["succeeded"]
+ assert 0 == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # Check that "SSLRequireSSL" works on h2 connections
+ # See <https://bz.apache.org/bugzilla/show_bug.cgi?id=62654>
+ def test_h2_101_10a(self, env):
+ url = env.mkurl("https", "ssl", "/sslrequire/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ # Check that "require ssl" works on h2 connections
+ # See <https://bz.apache.org/bugzilla/show_bug.cgi?id=62654>
+ def test_h2_101_10b(self, env):
+ url = env.mkurl("https", "ssl", "/requiressl/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ # Check that status works with ErrorDoc, see pull #174, fixes #172
+ def test_h2_101_11(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/err-doc-cipher")
+ r = env.curl_get(url, options=[
+ "-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
+ ])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
diff --git a/test/modules/http2/test_102_require.py b/test/modules/http2/test_102_require.py
new file mode 100644
index 0000000..b7e4eae
--- /dev/null
+++ b/test/modules/http2/test_102_require.py
@@ -0,0 +1,41 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRequire:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ domain = f"ssl.{env.http_tld}"
+ conf = H2Conf(env)
+ conf.start_vhost(domains=[domain], port=env.https_port)
+ conf.add("""
+ Protocols h2 http/1.1
+ SSLOptions +StdEnvVars
+ <Location /h2only.html>
+ Require expr \"%{HTTP2} == 'on'\"
+ </Location>
+ <Location /noh2.html>
+ Require expr \"%{HTTP2} == 'off'\"
+ </Location>""")
+ conf.end_vhost()
+ conf.install()
+ # the dir needs to exist for the configuration to have effect
+ env.mkpath(f"{env.server_dir}/htdocs/ssl-client-verify")
+ assert env.apache_restart() == 0
+
+ def test_h2_102_01(self, env):
+ url = env.mkurl("https", "ssl", "/h2only.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ def test_h2_102_02(self, env):
+ url = env.mkurl("https", "ssl", "/noh2.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 403 == r.response["status"]
diff --git a/test/modules/http2/test_103_upgrade.py b/test/modules/http2/test_103_upgrade.py
new file mode 100644
index 0000000..2fa7d1d
--- /dev/null
+++ b/test/modules/http2/test_103_upgrade.py
@@ -0,0 +1,118 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestUpgrade:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().add_vhost_test2().add_vhost_noh2(
+ ).start_vhost(domains=[f"test3.{env.http_tld}"], port=env.https_port, doc_root="htdocs/test1"
+ ).add(
+ """
+ Protocols h2 http/1.1
+ Header unset Upgrade"""
+ ).end_vhost(
+ ).start_vhost(domains=[f"test1b.{env.http_tld}"], port=env.http_port, doc_root="htdocs/test1"
+ ).add(
+ """
+ Protocols h2c http/1.1
+ H2Upgrade off
+ <Location /006.html>
+ H2Upgrade on
+ </Location>"""
+ ).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ # accessing http://test1 will not try h2, but will advertise h2c in the response
+ def test_h2_103_01(self, env):
+ url = env.mkurl("http", "test1", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" in r.response["header"]
+ assert "h2c" == r.response["header"]["upgrade"]
+
+ # accessing http://noh2 will not advertise, because the noh2 host does not have it enabled
+ def test_h2_103_02(self, env):
+ url = env.mkurl("http", "noh2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing http://test2 will not advertise, because h2 has lower preference than http/1.1
+ def test_h2_103_03(self, env):
+ url = env.mkurl("http", "test2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://noh2 will not advertise, because the noh2 host does not have it enabled
+ def test_h2_103_04(self, env):
+ url = env.mkurl("https", "noh2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://test2 will not advertise, because h2 has lower preference than http/1.1
+ def test_h2_103_05(self, env):
+ url = env.mkurl("https", "test2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://test1 will advertise h2 in the response
+ def test_h2_103_06(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, options=["--http1.1"])
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" in r.response["header"]
+ assert "h2" == r.response["header"]["upgrade"]
+
+ # accessing https://test3 will not send Upgrade since it is suppressed
+ def test_h2_103_07(self, env):
+ url = env.mkurl("https", "test3", "/index.html")
+ r = env.curl_get(url, options=["--http1.1"])
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # upgrade to h2c for a request, where h2c is preferred
+ def test_h2_103_20(self, env):
+ url = env.mkurl("http", "test1", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert r.response["status"] == 200
+
+ # upgrade to h2c for a request where http/1.1 is preferred, but the client's upgrade
+ # wish is honored nevertheless
+ def test_h2_103_21(self, env):
+ url = env.mkurl("http", "test2", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert 404 == r.response["status"]
+
+ # upgrade to h2c on a host where h2c is not enabled will fail
+ def test_h2_103_22(self, env):
+ url = env.mkurl("http", "noh2", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert not r.response
+
+ # upgrade to h2c on a host where h2c is preferred, but Upgrade is disabled
+ def test_h2_103_23(self, env):
+ url = env.mkurl("http", "test1b", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert not r.response
+
+ # upgrade to h2c on a host where h2c is preferred and Upgrade is disabled on the server,
+ # but allowed for a specific location
+ def test_h2_103_24(self, env):
+ url = env.mkurl("http", "test1b", "/006.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert r.response["status"] == 200
diff --git a/test/modules/http2/test_104_padding.py b/test/modules/http2/test_104_padding.py
new file mode 100644
index 0000000..401804a
--- /dev/null
+++ b/test/modules/http2/test_104_padding.py
@@ -0,0 +1,104 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def frame_padding(payload, padbits):
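+ # pad so that the payload plus the 9 byte frame header ends on a multiple of
+ # 2^padbits; returns the number of padding bytes needed for that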
+ mask = (1 << padbits) - 1
+ return ((payload + 9 + mask) & ~mask) - (payload + 9)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPadding:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ def add_echo_handler(conf):
+ conf.add([
+ "<Location \"/h2test/echo\">",
+ " SetHandler h2test-echo",
+ "</Location>",
+ ])
+
+ conf = H2Conf(env)
+ conf.start_vhost(domains=[f"ssl.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad0.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 0")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad1.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 1")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad2.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 2")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad3.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 3")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad8.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 8")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # default padding settings: 0 bits
+ def test_h2_104_01(self, env, repeat):
+ url = env.mkurl("https", "ssl", "/h2test/echo")
+ # we get 2 frames back: one with data and an empty one with EOF
+ # check the number of padding bytes is as expected
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i == frame_padding(len(data)+1, 0)
+
+ # 0 bits of padding
+ def test_h2_104_02(self, env):
+ url = env.mkurl("https", "pad0", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i == 0
+
+ # 1 bit of padding
+ def test_h2_104_03(self, env):
+ url = env.mkurl("https", "pad1", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 2)
+
+ # 2 bits of padding
+ def test_h2_104_04(self, env):
+ url = env.mkurl("https", "pad2", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 4)
+
+ # 3 bits of padding
+ def test_h2_104_05(self, env):
+ url = env.mkurl("https", "pad3", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 8)
+
+ # 8 bits of padding
+ def test_h2_104_06(self, env):
+ url = env.mkurl("https", "pad8", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 256)
diff --git a/test/modules/http2/test_105_timeout.py b/test/modules/http2/test_105_timeout.py
new file mode 100644
index 0000000..f7d3859
--- /dev/null
+++ b/test/modules/http2/test_105_timeout.py
@@ -0,0 +1,152 @@
+import socket
+import time
+
+import pytest
+
+from .env import H2Conf
+from pyhttpd.curl import CurlPiper
+
+
+class TestTimeout:
+
+ # Check that the base server's 'Timeout' setting is observed on SSL handshake
+ def test_h2_105_01(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ AcceptFilter http none
+ Timeout 1.5
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ host = 'localhost'
+ # read with a longer timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ # on some OS, the server does not see our connection until there is
+ # something incoming
+ sock.send(b'0')
+ sock.settimeout(4)
+ buff = sock.recv(1024)
+ assert buff == b''
+ except Exception as ex:
+ print(f"server did not close in time: {ex}")
+ assert False
+ sock.close()
+ # read with a shorter timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ sock.settimeout(0.5)
+ sock.recv(1024)
+ assert False
+ except Exception as ex:
+ print(f"as expected: {ex}")
+ sock.close()
+
+ # Check that mod_reqtimeout handshake setting takes effect
+ def test_h2_105_02(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ AcceptFilter http none
+ Timeout 10
+ RequestReadTimeout handshake=1 header=5 body=10
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ host = 'localhost'
+ # read with a longer timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ # on some OS, the server does not see our connection until there is
+ # something incoming
+ sock.send(b'0')
+ sock.settimeout(4)
+ buff = sock.recv(1024)
+ assert buff == b''
+ except Exception as ex:
+ print(f"server did not close in time: {ex}")
+ assert False
+ sock.close()
+ # read with a shorter timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ sock.settimeout(0.5)
+ sock.recv(1024)
+ assert False
+ except Exception as ex:
+ print(f"as expected: {ex}")
+ sock.close()
+
+ # Check that mod_reqtimeout handshake settings no longer apply to handshaked
+ # connections. See <https://github.com/icing/mod_h2/issues/196>.
+ def test_h2_105_03(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ Timeout 10
+ RequestReadTimeout handshake=1 header=5 body=10
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/necho.py")
+ r = env.curl_get(url, 5, options=[
+ "-vvv",
+ "-F", ("count=%d" % 100),
+ "-F", ("text=%s" % "abcdefghijklmnopqrstuvwxyz"),
+ "-F", ("wait1=%f" % 1.5),
+ ])
+ assert r.response["status"] == 200
+
+ def test_h2_105_10(self, env):
+ # just a check, without delays, that all is fine
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ stdout, stderr = piper.close()
+ assert piper.exitcode == 0
+ assert len("".join(stdout)) == 3 * 8192
+
+ def test_h2_105_11(self, env):
+ # short connection timeout, longer stream delay
+ # connection timeout must not abort ongoing streams
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.add("Timeout 1")
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay?1200ms")
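+ # note: the delay handler is asked for a 1200ms delay, longer than the 1s connection 'Timeout'; the stream must still deliver its full output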
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ stdout, stderr = piper.close()
+ assert len("".join(stdout)) == 3 * 8192
+
+ def test_h2_105_12(self, env):
+ # long connection timeout, short stream timeout
+ # sending a slow POST
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ if not env.httpd_is_at_least("2.5.0"):
+ pytest.skip(f'need at least httpd 2.5.0 for this')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.add("Timeout 10")
+ conf.add("H2StreamTimeout 1")
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay?5")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
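+ # note: keep writing slowly; once the 1s H2StreamTimeout expires, the server should reset the stream and further writes fail with BrokenPipeError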
+ for _ in range(3):
+ time.sleep(2)
+ try:
+ piper.send("0123456789\n")
+ except BrokenPipeError:
+ break
+ piper.close()
+ assert piper.response, f'{piper}'
+ assert piper.response['status'] == 408, f"{piper.response}"
diff --git a/test/modules/http2/test_106_shutdown.py b/test/modules/http2/test_106_shutdown.py
new file mode 100644
index 0000000..83e143c
--- /dev/null
+++ b/test/modules/http2/test_106_shutdown.py
@@ -0,0 +1,75 @@
+#
+# mod-h2 test suite
+# check HTTP/2 timeout behaviour
+#
+import time
+from threading import Thread
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+from pyhttpd.result import ExecResult
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestShutdown:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_h2_106_01(self, env):
+ url = env.mkurl("https", "cgi", "/necho.py")
+ lines = 100000
+ text = "123456789"
+ wait2 = 1.0
+ self.r = None
+ def long_request():
+ args = ["-vvv",
+ "-F", f"count={lines}",
+ "-F", f"text={text}",
+ "-F", f"wait2={wait2}",
+ ]
+ self.r = env.curl_get(url, 5, options=args)
+
+ t = Thread(target=long_request)
+ t.start()
+ time.sleep(0.5)
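+ # note: reload (graceful restart) while the request is still streaming; it must be allowed to finish undisturbed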
+ assert env.apache_reload() == 0
+ t.join()
+ # noinspection PyTypeChecker
+ time.sleep(1)
+ r: ExecResult = self.r
+ assert r.exit_code == 0
+ assert r.response, f"no response via {r.args} in {r.stderr}\nstdout: {len(r.stdout)} bytes"
+ assert r.response["status"] == 200, f"{r}"
+ assert len(r.response["body"]) == (lines * (len(text)+1)), f"{r}"
+
+ def test_h2_106_02(self, env):
+ # PR65731: invalid GOAWAY frame at session start when
+ # MaxRequestsPerChild is reached
+ # Create a low limit and only 2 children, so we'll encounter this easily
+ conf = H2Conf(env, extras={
+ 'base': [
+ "ServerLimit 2",
+ "MaxRequestsPerChild 3"
+ ]
+ })
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "test1", "/index.html")
+ for i in range(7):
+ r = env.curl_get(url, options=['-v'])
+ # requests should succeed, but rarely connections get closed
+ # before the response is received
+ if r.exit_code in [16, 55]:
+ # curl send error
+ assert r.response is None
+ else:
+ assert r.exit_code == 0, f"failed on {i}. request: {r.stdout} {r.stderr}"
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"] \ No newline at end of file
diff --git a/test/modules/http2/test_107_frame_lengths.py b/test/modules/http2/test_107_frame_lengths.py
new file mode 100644
index 0000000..d636093
--- /dev/null
+++ b/test/modules/http2/test_107_frame_lengths.py
@@ -0,0 +1,51 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
+ t110 = ""
+ for _ in range(11):
+ t110 += "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFrameLengths:
+
+ URI_PATHS = []
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+ for fsize in [10, 100]:
+ fname = f'0-{fsize}k.txt'
+ mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+ self.URI_PATHS.append(f"/files/{fname}")
+
+ @pytest.mark.parametrize("data_frame_len", [
+ 99, 1024, 8192
+ ])
+ def test_h2_107_01(self, env, data_frame_len):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'H2MaxDataFrameLen {data_frame_len}',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ for p in self.URI_PATHS:
+ url = env.mkurl("https", "cgi", p)
+ r = env.nghttp().get(url, options=[
+ '--header=Accept-Encoding: none',
+ ])
+ assert r.response["status"] == 200
+ assert len(r.results["data_lengths"]) > 0, f'{r}'
+ too_large = [ x for x in r.results["data_lengths"] if x > data_frame_len]
+ assert len(too_large) == 0, f'{p}: {r.results["data_lengths"]}'
diff --git a/test/modules/http2/test_200_header_invalid.py b/test/modules/http2/test_200_header_invalid.py
new file mode 100644
index 0000000..5b3aafd
--- /dev/null
+++ b/test/modules/http2/test_200_header_invalid.py
@@ -0,0 +1,207 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestInvalidHeaders:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ # let the hecho.py CGI echo chars < 0x20 in field name
+ # for almost all such characters, the stream returns a 500
+ # or, in httpd >= 2.5.0, gets aborted with an h2 error
+ # CR is handled specially
+ def test_h2_200_01(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for x in range(1, 32):
+ data = f'name=x%{x:02x}x&value=yz'
+ r = env.curl_post_data(url, data)
+ if x in [13]:
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 200 == r.response["status"], f'unexpected status for char 0x{x:02x}'
+ elif x in [10] or env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02x}'
+ else:
+ assert 0 != r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+
+ # let the hecho.py CGI echo chars < 0x20 in field value
+ # for almost all such characters, the stream returns a 500
+ # or, in httpd >= 2.5.0, gets aborted with an h2 error
+ # CR and LF are handled specially
+ def test_h2_200_02(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for x in range(1, 32):
+ if 9 != x:
+ r = env.curl_post_data(url, "name=x&value=y%%%02x" % x)
+ if x in [10, 13]:
+ assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
+ assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+ elif env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02x}'
+ else:
+ assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
+
+ # let the hecho.py CGI echo 0x10 and 0x7f in field name and value
+ def test_h2_200_03(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for h in ["10", "7f"]:
+ r = env.curl_post_data(url, "name=x%%%s&value=yz" % h)
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+ assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+ else:
+ assert 0 != r.exit_code
+ r = env.curl_post_data(url, "name=x&value=y%%%sz" % h)
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+ assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+ else:
+ assert 0 != r.exit_code
+
+ # test request line length check, LimitRequestLine
+ def test_h2_200_10(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestLine 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ val = 200*"1234567890"
+ url = env.mkurl("https", "cgi", f'/?{val[:1022]}')
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", f'/?{val[:1023]}')
+ r = env.curl_get(url)
+ # URI too long
+ assert 414 == r.response["status"]
+
+ # test header field lengths check, LimitRequestFieldSize (default 8190)
+ def test_h2_200_11(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFieldSize 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ val = 200*"1234567890"
+ # two fields, concatenated with ', '
+ # LimitRequestFieldSize, one more char -> 400 in HTTP/1.1
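+ # note: merged, the field is 'x: <500 chars>, <519 chars>', presumably 3 + 500 + 2 + 519 = 1024 bytes, exactly at the limit; one more char exceeds it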
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:519]}'
+ ])
+ assert r.exit_code == 0, f'{r}'
+ assert r.response["status"] == 200, f'{r}'
+ r = env.curl_get(url, options=[
+ '--http1.1', '-H', f'x: {val[:500]}', '-H', f'x: {val[:523]}'
+ ])
+ assert 400 == r.response["status"]
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:520]}'
+ ])
+ assert 431 == r.response["status"]
+
+ # test header field count, LimitRequestFields (default 100)
+ # see #201: several headers with the same name are merged and count only once
+ def test_h2_200_12(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ # curl sends 3 headers itself (user-agent, accept, and our AP-Test-Name)
+ for i in range(97):
+ opt += ["-H", "x: 1"]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+ r = env.curl_get(url, options=(opt + ["-H", "y: 2"]))
+ assert r.response["status"] == 200
+
+ # test header field count, LimitRequestFields (default 100)
+ # different header names each count separately
+ def test_h2_200_13(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ # curl sends 3 headers itself (user-agent, accept, and our AP-Test-Name)
+ for i in range(97):
+ opt += ["-H", f"x{i}: 1"]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+ r = env.curl_get(url, options=(opt + ["-H", "y: 2"]))
+ assert 431 == r.response["status"]
+
+ # test "LimitRequestFields 0" setting, see #200
+ def test_h2_200_14(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFields 20
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ for i in range(21):
+ opt += ["-H", "x{0}: 1".format(i)]
+ r = env.curl_get(url, options=opt)
+ assert 431 == r.response["status"]
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFields 0
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ for i in range(100):
+ opt += ["-H", "x{0}: 1".format(i)]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+
+ # the uri limits
+ def test_h2_200_15(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestLine 48
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", "/" + (48*"x"))
+ r = env.curl_get(url)
+ assert 414 == r.response["status"]
+ # nghttp sends the :method: header first (so far)
+ # trigger a too long request line on it
+ # the stream will RST and we get no response
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:method: {0}".format(100*"x")]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert not r.response
+
+ # invalid chars in method
+ def test_h2_200_16(self, env):
+ if not env.h2load_is_at_least('1.45.0'):
+ pytest.skip('nghttp2 version too old')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/hello.py")
+ opt = ["-H:method: GET /hello.py"]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response is None
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response is None
diff --git a/test/modules/http2/test_201_header_conditional.py b/test/modules/http2/test_201_header_conditional.py
new file mode 100644
index 0000000..f103268
--- /dev/null
+++ b/test/modules/http2/test_201_header_conditional.py
@@ -0,0 +1,70 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestConditionalHeaders:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add(
+ """
+ KeepAlive on
+ MaxKeepAliveRequests 30
+ KeepAliveTimeout 30"""
+ ).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # check handling of 'if-modified-since' header
+ def test_h2_201_01(self, env):
+ url = env.mkurl("https", "test1", "/006/006.css")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ lm = r.response["header"]["last-modified"]
+ assert lm
+ r = env.curl_get(url, options=["-H", "if-modified-since: %s" % lm])
+ assert 304 == r.response["status"]
+ r = env.curl_get(url, options=["-H", "if-modified-since: Tue, 04 Sep 2010 11:51:59 GMT"])
+ assert r.response["status"] == 200
+
+ # check handling of 'if-none-match' header
+ def test_h2_201_02(self, env):
+ url = env.mkurl("https", "test1", "/006/006.css")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ etag = r.response["header"]["etag"]
+ assert etag
+ r = env.curl_get(url, options=["-H", "if-none-match: %s" % etag])
+ assert 304 == r.response["status"]
+ r = env.curl_get(url, options=["-H", "if-none-match: dummy"])
+ assert r.response["status"] == 200
+
+ @pytest.mark.skipif(True, reason="304 misses the Vary header in trunk and 2.4.x")
+ def test_h2_201_03(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.curl_get(url, options=["-H", "Accept-Encoding: gzip"])
+ assert r.response["status"] == 200
+ for h in r.response["header"]:
+ print("%s: %s" % (h, r.response["header"][h]))
+ lm = r.response["header"]["last-modified"]
+ assert lm
+ assert "gzip" == r.response["header"]["content-encoding"]
+ assert "Accept-Encoding" in r.response["header"]["vary"]
+
+ r = env.curl_get(url, options=["-H", "if-modified-since: %s" % lm,
+ "-H", "Accept-Encoding: gzip"])
+ assert 304 == r.response["status"]
+ for h in r.response["header"]:
+ print("%s: %s" % (h, r.response["header"][h]))
+ assert "vary" in r.response["header"]
+
+ # Check if "Keep-Alive" response header is removed in HTTP/2.
+ def test_h2_201_04(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.curl_get(url, options=["--http1.1", "-H", "Connection: keep-alive"])
+ assert r.response["status"] == 200
+ assert "timeout=30, max=30" == r.response["header"]["keep-alive"]
+ r = env.curl_get(url, options=["-H", "Connection: keep-alive"])
+ assert r.response["status"] == 200
+ assert "keep-alive" not in r.response["header"]
diff --git a/test/modules/http2/test_202_trailer.py b/test/modules/http2/test_202_trailer.py
new file mode 100644
index 0000000..4b4fc42
--- /dev/null
+++ b/test/modules/http2/test_202_trailer.py
@@ -0,0 +1,92 @@
+import os
+import pytest
+
+from .env import H2Conf
+
+
+def setup_data(env):
+ s100 = "012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678\n"
+ with open(os.path.join(env.gen_dir, "data-1k"), 'w') as f:
+ for i in range(10):
+ f.write(s100)
+
+
+# The trailer tests depend on "nghttp" as no other client seems to be able to send those
+# rare things.
+class TestTrailers:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ setup_data(env)
+ conf = H2Conf(env, extras={
+ f"cgi.{env.http_tld}": [
+ "<Location \"/h2test/trailer\">",
+ " SetHandler h2test-trailer",
+ "</Location>"
+ ],
+ })
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check if the server survives a trailer or two
+ def test_h2_202_01(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, "data-1k")
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 1"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 1b", "--trailer", "XXX: test"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ # check if the server survives a trailer without content-length
+ def test_h2_202_02(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, "data-1k")
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 2", "--no-content-length"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ # check if echoing request headers in response from GET works
+ def test_h2_202_03(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().get(url, options=["--header", "X: 3"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 3\n"
+
+ # check if echoing request headers in response from POST works
+ def test_h2_202_03b(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().post_name(url, "Y", options=["--header", "X: 3b"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 3b\n"
+
+ # check if echoing request headers in response from POST works, but trailers are not seen
+ # This is the way CGI invocation works.
+ def test_h2_202_04(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().post_name(url, "Y", options=["--header", "X: 4a", "--trailer", "X: 4b"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 4a\n"
+
+ # check that our h2test-trailer handler works
+ def test_h2_202_10(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/trailer?1024")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ assert len(r.response["body"]) == 1024
+ assert 'trailer' in r.response
+ assert 'trailer-content-length' in r.response['trailer']
+ assert r.response['trailer']['trailer-content-length'] == '1024'
+
+ # check that trailers also work with empty bodies
+ def test_h2_202_11(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/trailer?0")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ assert len(r.response["body"]) == 0, f'{r.response["body"]}'
+ assert 'trailer' in r.response
+ assert 'trailer-content-length' in r.response['trailer']
+ assert r.response['trailer']['trailer-content-length'] == '0'
diff --git a/test/modules/http2/test_203_rfc9113.py b/test/modules/http2/test_203_rfc9113.py
new file mode 100644
index 0000000..9fc8f3b
--- /dev/null
+++ b/test/modules/http2/test_203_rfc9113.py
@@ -0,0 +1,56 @@
+import pytest
+
+from pyhttpd.env import HttpdTestEnv
+from .env import H2Conf
+
+
+class TestRfc9113:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # by default, we accept leading/trailing ws in request fields
+ def test_h2_203_01_ws_ignore(self, env):
+ url = env.mkurl("https", "test1", "/")
+ r = env.curl_get(url, options=['-H', 'trailing-space: must not '])
+ assert r.exit_code == 0, f'curl output: {r.stderr}'
+ assert r.response["status"] == 200, f'curl output: {r.stdout}'
+ r = env.curl_get(url, options=['-H', 'trailing-space: must not\t'])
+ assert r.exit_code == 0, f'curl output: {r.stderr}'
+ assert r.response["status"] == 200, f'curl output: {r.stdout}'
+
+ # response headers are also handled, but we strip ws before sending
+ @pytest.mark.parametrize(["hvalue", "expvalue", "status"], [
+ ['"123"', '123', 200],
+ ['"123 "', '123', 200], # trailing space stripped
+ ['"123\t"', '123', 200], # trailing tab stripped
+ ['" 123"', '123', 200], # leading space is stripped
+ ['" 123"', '123', 200], # leading spaces are stripped
+ ['"\t123"', '123', 200], # leading tab is stripped
+ ['"expr=%{unescape:123%0A 123}"', '', 500], # illegal char
+ ['" \t "', '', 200], # just ws
+ ])
+ def test_h2_203_02(self, env, hvalue, expvalue, status):
+ hname = 'ap-test-007'
+ conf = H2Conf(env, extras={
+ f'test1.{env.http_tld}': [
+ '<Location /index.html>',
+ f'Header add {hname} {hvalue}',
+ '</Location>',
+ ]
+ })
+ conf.add_vhost_test1(proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, options=['--http2'])
+ if status == 500 and r.exit_code != 0:
+ # in 2.4.x we fail late on control chars in a response
+ # and RST_STREAM. That's also ok
+ return
+ assert r.response["status"] == status
+ if int(status) < 400:
+ assert r.response["header"][hname] == expvalue
+
diff --git a/test/modules/http2/test_300_interim.py b/test/modules/http2/test_300_interim.py
new file mode 100644
index 0000000..774ab88
--- /dev/null
+++ b/test/modules/http2/test_300_interim.py
@@ -0,0 +1,40 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestInterimResponses:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ # check that we normally do not see an interim response
+ def test_h2_300_01(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_post_data(url, 'XYZ')
+ assert r.response["status"] == 200
+ assert "previous" not in r.response
+
+ # check that we see an interim response when we ask for it
+ def test_h2_300_02(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ r = env.curl_post_data(url, 'XYZ', options=["-H", "expect: 100-continue"])
+ assert r.response["status"] == 200
+ assert "previous" in r.response
+ assert 100 == r.response["previous"]["status"]
+
+ # check proper answer on an unexpected 'expect' value
+ def test_h2_300_03(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ r = env.curl_post_data(url, 'XYZ', options=["-H", "expect: the-unexpected"])
+ assert 417 == r.response["status"]
+ assert "previous" not in r.response
diff --git a/test/modules/http2/test_400_push.py b/test/modules/http2/test_400_push.py
new file mode 100644
index 0000000..9c61608
--- /dev/null
+++ b/test/modules/http2/test_400_push.py
@@ -0,0 +1,200 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+# The push tests depend on "nghttp"
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPush:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).start_vhost(domains=[f"push.{env.http_tld}"],
+ port=env.https_port, doc_root="htdocs/test1"
+ ).add(r"""
+ RewriteEngine on
+ RewriteRule ^/006-push(.*)?\.html$ /006.html
+ <Location /006-push.html>
+ Header add Link "</006/006.css>;rel=preload"
+ Header add Link "</006/006.js>;rel=preloadX"
+ </Location>
+ <Location /006-push2.html>
+ Header add Link "</006/006.css>;rel=preloadX, </006/006.js>; rel=preload"
+ </Location>
+ <Location /006-push3.html>
+ Header add Link "</006/006.css>;rel=preloa,</006/006.js>;rel=preload"
+ </Location>
+ <Location /006-push4.html>
+ Header add Link "</006/006.css;rel=preload, </006/006.js>; preload"
+ </Location>
+ <Location /006-push5.html>
+ Header add Link '</006/006.css>;rel="preload push"'
+ </Location>
+ <Location /006-push6.html>
+ Header add Link '</006/006.css>;rel="push preload"'
+ </Location>
+ <Location /006-push7.html>
+ Header add Link '</006/006.css>;rel="abc preload push"'
+ </Location>
+ <Location /006-push8.html>
+ Header add Link '</006/006.css>;rel="preload"; nopush'
+ </Location>
+ <Location /006-push20.html>
+ H2PushResource "/006/006.css" critical
+ H2PushResource "/006/006.js"
+ </Location>
+ <Location /006-push30.html>
+ H2Push off
+ Header add Link '</006/006.css>;rel="preload"'
+ </Location>
+ <Location /006-push31.html>
+ H2PushResource "/006/006.css" critical
+ </Location>
+ <Location /006-push32.html>
+ Header add Link "</006/006.css>;rel=preload"
+ </Location>
+ """).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ ############################
+ # Link: header handling, various combinations
+
+ # plain resource without configured pushes
+ def test_h2_400_00(self, env):
+ url = env.mkurl("https", "push", "/006.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # 2 link headers configured, only 1 triggers push
+ def test_h2_400_01(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=["-Haccept-encoding: none"])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+ assert 216 == len(promises[0]["response"]["body"])
+
+ # Same as 400_01, but with single header line configured
+ def test_h2_400_02(self, env):
+ url = env.mkurl("https", "push", "/006-push2.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.js' == promises[0]["request"]["header"][":path"]
+
+ # 2 Links, only one with correct rel attribute
+ def test_h2_400_03(self, env):
+ url = env.mkurl("https", "push", "/006-push3.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.js' == promises[0]["request"]["header"][":path"]
+
+ # Missing > in Link header, PUSH not triggered
+ def test_h2_400_04(self, env):
+ url = env.mkurl("https", "push", "/006-push4.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # More than one value in "rel" parameter
+ def test_h2_400_05(self, env):
+ url = env.mkurl("https", "push", "/006-push5.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Another "rel" parameter variation
+ def test_h2_400_06(self, env):
+ url = env.mkurl("https", "push", "/006-push6.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Another "rel" parameter variation
+ def test_h2_400_07(self, env):
+ url = env.mkurl("https", "push", "/006-push7.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Pushable link header with "nopush" attribute
+ def test_h2_400_08(self, env):
+ url = env.mkurl("https", "push", "/006-push8.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # 2 H2PushResource configs trigger on GET, but not on POST
+ def test_h2_400_20(self, env):
+ url = env.mkurl("https", "push", "/006-push20.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 2 == len(promises)
+
+ fpath = os.path.join(env.gen_dir, "data-400-20")
+ with open(fpath, 'w') as f:
+ f.write("test upload data")
+ r = env.nghttp().upload(url, fpath)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # H2Push configured Off in location
+ def test_h2_400_30(self, env):
+ url = env.mkurl("https", "push", "/006-push30.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # - suppress PUSH
+ def test_h2_400_50(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: none'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # - default pushes desired
+ def test_h2_400_51(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: default'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+
+ # - HEAD pushes desired
+ def test_h2_400_52(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: head'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+ assert b"" == promises[0]["response"]["body"]
+ assert 0 == len(promises[0]["response"]["body"])
+
+ # - fast-load pushes desired
+ def test_h2_400_53(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: fast-load'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
diff --git a/test/modules/http2/test_401_early_hints.py b/test/modules/http2/test_401_early_hints.py
new file mode 100644
index 0000000..5704305
--- /dev/null
+++ b/test/modules/http2/test_401_early_hints.py
@@ -0,0 +1,83 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+# The push tests depend on "nghttp"
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestEarlyHints:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ H2Conf(env).start_vhost(domains=[f"hints.{env.http_tld}"],
+ port=env.https_port, doc_root="htdocs/test1"
+ ).add("""
+ H2EarlyHints on
+ RewriteEngine on
+ RewriteRule ^/006-(.*)?\\.html$ /006.html
+ <Location /006-hints.html>
+ H2PushResource "/006/006.css" critical
+ </Location>
+ <Location /006-nohints.html>
+ Header add Link "</006/006.css>;rel=preload"
+ </Location>
+ <Location /006-early.html>
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
+ <Location /006-early-no-push.html>
+ H2Push off
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
+ """).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ # H2EarlyHints enabled in general, check that it works for H2PushResource
+ def test_h2_401_31(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-hints.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"]
+
+ # H2EarlyHints enabled in general, but does not trigger on added response headers
+ def test_h2_401_32(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-nohints.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert "previous" not in r.response
+
+ # H2EarlyHints enabled in general, check that it works for H2EarlyHint
+ def test_h2_401_33(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-early.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
+ # H2EarlyHints enabled, no PUSH, check that it works for H2EarlyHint
+ def test_h2_401_34(self, env, repeat):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ url = env.mkurl("https", "hints", "/006-early-no-push.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
diff --git a/test/modules/http2/test_500_proxy.py b/test/modules/http2/test_500_proxy.py
new file mode 100644
index 0000000..88a8ece
--- /dev/null
+++ b/test/modules/http2/test_500_proxy.py
@@ -0,0 +1,157 @@
+import inspect
+import os
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxy:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def local_src(self, fname):
+ return os.path.join(os.path.dirname(inspect.getfile(TestProxy)), fname)
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ def test_h2_500_01(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert r.response["json"]["https"] == ""
+ assert r.response["json"]["ssl_protocol"] == ""
+ assert r.response["json"]["h2"] == ""
+ assert r.response["json"]["h2push"] == ""
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+
+ # why is the scheme wrong?
+ r2 = env.curl_get(re.sub(r'http:', 'https:', r.response["header"]["location"]))
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert r2.response["body"] == src
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_10(self, env, name, repeat):
+ self.curl_upload_and_verify(env, name, ["--http2"])
+
+ def test_h2_500_11(self, env):
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
+
+ # POST some data using nghttp and see it echoed properly back
+ def nghttp_post_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/echo.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.nghttp().upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ if r.response["body"] != src:
+ with open(os.path.join(env.gen_dir, "nghttp.out"), 'w') as fd:
+ fd.write(r.outraw.decode())
+ fd.write("\nstderr:\n")
+ fd.write(r.stderr)
+ assert r.response["body"] == src
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_20(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_21(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, ["--no-content-length"])
+
+ # upload and GET again using nghttp, compare to original content
+ def nghttp_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response["header"]["location"]
+
+ # why is the scheme wrong?
+ r2 = env.nghttp().get(re.sub(r'http:', 'https:', r.response["header"]["location"]))
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_22(self, env, name):
+ self.nghttp_upload_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_23(self, env, name):
+ self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
+
+ # upload using nghttp and check returned status
+ def nghttp_upload_stat(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response["header"]["location"]
+
+ def test_h2_500_24(self, env):
+ for i in range(50):
+ self.nghttp_upload_stat(env, "data-1k", ["--no-content-length"])
+
+ # let's do some error tests
+ def test_h2_500_30(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_500_31(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, r
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_500_32(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, r
diff --git a/test/modules/http2/test_501_proxy_serverheader.py b/test/modules/http2/test_501_proxy_serverheader.py
new file mode 100644
index 0000000..0d7c188
--- /dev/null
+++ b/test/modules/http2/test_501_proxy_serverheader.py
@@ -0,0 +1,36 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyServerHeader:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "Header unset Server",
+ "Header always set Server cgi",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True, h2proxy_self=False)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ def test_h2_501_01(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert "" == r.response["json"]["https"]
+ assert "" == r.response["json"]["ssl_protocol"]
+ assert "" == r.response["json"]["h2"]
+ assert "" == r.response["json"]["h2push"]
+ assert "cgi" == r.response["header"]["server"]
diff --git a/test/modules/http2/test_502_proxy_port.py b/test/modules/http2/test_502_proxy_port.py
new file mode 100644
index 0000000..f6c6db1
--- /dev/null
+++ b/test/modules/http2/test_502_proxy_port.py
@@ -0,0 +1,41 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyPort:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ 'base': [
+ f'Listen {env.proxy_port}',
+ 'Protocols h2c http/1.1',
+ 'LogLevel proxy_http2:trace2 proxy:trace2',
+ ],
+ f'cgi.{env.http_tld}': [
+ "Header unset Server",
+ "Header always set Server cgi",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=False, h2proxy_self=False)
+ conf.start_vhost(domains=[f"test1.{env.http_tld}"], port=env.proxy_port)
+ conf.add([
+ 'Protocols h2c',
+ 'RewriteEngine On',
+ 'RewriteRule "^/(.*)$" "h2c://%{HTTP_HOST}/$1"[NC,P]',
+ 'ProxyPassMatch / "h2c://$1/"',
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # Test PR 65881
+ # h2c upgraded request via a dynamic proxy onto another port
+ def test_h2_502_01(self, env):
+ url = f'http://localhost:{env.http_port}/hello.py'
+ r = env.curl_get(url, 5, options=['--http2',
+ '--proxy', f'localhost:{env.proxy_port}'])
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
diff --git a/test/modules/http2/test_503_proxy_fwd.py b/test/modules/http2/test_503_proxy_fwd.py
new file mode 100644
index 0000000..478a52d
--- /dev/null
+++ b/test/modules/http2/test_503_proxy_fwd.py
@@ -0,0 +1,79 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyFwd:
+
+ @classmethod
+ def config_fwd_proxy(cls, env, h2_enabled=False):
+ conf = H2Conf(env, extras={
+ 'base': [
+ f'Listen {env.proxy_port}',
+ 'Protocols h2c http/1.1',
+ 'LogLevel proxy_http2:trace2 proxy:trace2',
+ ],
+ })
+ conf.add_vhost_cgi(proxy_self=False, h2proxy_self=False)
+ conf.start_vhost(domains=[f"test1.{env.http_tld}"],
+ port=env.proxy_port, with_ssl=True)
+ conf.add([
+ 'Protocols h2c http/1.1',
+ 'ProxyRequests on',
+ f'H2ProxyRequests {"on" if h2_enabled else "off"}',
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(cls, env):
+ cls.config_fwd_proxy(env)
+
+ # test the HTTP/1.1 setup working
+ def test_h2_503_01_proxy_fwd_h1(self, env):
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
+
+ def test_h2_503_02_fwd_proxy_h2_off(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 404
+
+ # test the HTTP/2 setup working
+ def test_h2_503_03_proxy_fwd_h2_on(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ self.config_fwd_proxy(env, h2_enabled=True)
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
diff --git a/test/modules/http2/test_600_h2proxy.py b/test/modules/http2/test_600_h2proxy.py
new file mode 100644
index 0000000..040aef6
--- /dev/null
+++ b/test/modules/http2/test_600_h2proxy.py
@@ -0,0 +1,201 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2Proxy:
+
+ def test_h2_600_01(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ ]
+ })
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == "on"
+ assert r.response["json"]["ssl_protocol"] != ""
+ assert r.response["json"]["h2"] == "on"
+ assert r.response["json"]["h2push"] == "off"
+ assert r.response["json"]["host"] == f"cgi.{env.http_tld}:{env.https_port}"
+
+ def test_h2_600_02(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost on",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == ""
+ # with ProxyPreserveHost on, the backend sees the Host header passed in on the frontend connection
+ assert r.response["json"]["host"] == f"cgi.{env.http_tld}:{env.https_port}"
+ assert r.response["json"]["h2_original_host"] == ""
+
+ def test_h2_600_03(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost off",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == ""
+ # with ProxyPreserveHost off, the backend sees the Host used in connecting to it
+ assert r.response["json"]["host"] == f"127.0.0.1:{env.http_port}"
+ assert r.response["json"]["h2_original_host"] == ""
+
+ # check that connection reuse actually happens as configured
+ @pytest.mark.parametrize("enable_reuse", [ "on", "off" ])
+ def test_h2_600_04(self, env, enable_reuse):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f"ProxyPassMatch ^/h2proxy/([0-9]+)/(.*)$ "
+ f" h2c://127.0.0.1:$1/$2 enablereuse={enable_reuse} keepalive=on",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port}/hello.py")
+ # httpd 2.5.0 disables reuse, no matter the config
+ if enable_reuse == "on" and not env.httpd_is_at_least("2.5.0"):
+ # reuse is not guaranteed for each request, but we expect some
+ # to do it and run on a h2 stream id > 1
+ reused = False
+ count = 10
+ r = env.curl_raw([url] * count, 5)
+ response = r.response
+ for n in range(count):
+ assert response["status"] == 200
+ if n == (count - 1):
+ break
+ response = response["previous"]
+ assert r.json[0]["h2_stream_id"] == "1"
+ for n in range(1, count):
+ if int(r.json[n]["h2_stream_id"]) > 1:
+ reused = True
+ break
+ assert reused
+ else:
+ r = env.curl_raw([url, url], 5)
+ assert r.response["previous"]["status"] == 200
+ assert r.response["status"] == 200
+ assert r.json[0]["h2_stream_id"] == "1"
+ assert r.json[1]["h2_stream_id"] == "1"
+
+ # do the more flexible setup from #235 to check proper connection selection
+ @pytest.mark.parametrize("enable_reuse", [ "on", "off" ])
+ def test_h2_600_05(self, env, enable_reuse):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f"ProxyPassMatch ^/h2proxy/([0-9]+)/(.*)$ "
+ f" h2c://127.0.0.1:$1/$2 enablereuse={enable_reuse} keepalive=on",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.add([
+ f'Listen {env.http_port2}',
+ 'UseCanonicalName On',
+ 'UseCanonicalPhysicalPort On'
+ ])
+ conf.start_vhost(domains=[f'cgi.{env.http_tld}'],
+ port=5004, doc_root="htdocs/cgi")
+ conf.add("AddHandler cgi-script .py")
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port}/hello.py")
+ url2 = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port2}/hello.py")
+ r = env.curl_raw([url, url2], 5)
+ assert r.response["previous"]["status"] == 200
+ assert int(r.json[0]["port"]) == env.http_port
+ assert r.response["status"] == 200
+ exp_port = env.http_port if enable_reuse == "on" \
+ and not env.httpd_is_at_least("2.5.0")\
+ else env.http_port2
+ assert int(r.json[1]["port"]) == exp_port
+
+ # test X-Forwarded-* headers
+ def test_h2_600_06(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost on",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ f"ProxyPass /h1c/ http://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h1c/hello.py")
+ r1 = env.curl_get(url, 5)
+ assert r1.response["status"] == 200
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r2 = env.curl_get(url, 5)
+ assert r2.response["status"] == 200
+ for key in ['x-forwarded-for', 'x-forwarded-host','x-forwarded-server']:
+ assert r1.json[key] == r2.json[key], f'{key} differs proxy_http != proxy_http2'
+
+ # let's do some error tests
+ def test_h2_600_30(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_600_31(self, env, repeat):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+ # depending on when the error is detected in proxying, it may RST the
+ # stream (exit_code != 0) or give a 502 response.
+ if r.exit_code == 0:
+ assert r.response['status'] == 502
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_600_32(self, env, repeat):
+ pytest.skip('only works reliably with r1911964 from trunk')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+ # depending on when the error is detected in proxying, it may RST the
+ # stream (exit_code != 0) or give a 502/503 response.
+ if r.exit_code == 0:
+ assert r.response['status'] in [502, 503]
diff --git a/test/modules/http2/test_601_h2proxy_twisted.py b/test/modules/http2/test_601_h2proxy_twisted.py
new file mode 100644
index 0000000..60f5f7d
--- /dev/null
+++ b/test/modules/http2/test_601_h2proxy_twisted.py
@@ -0,0 +1,99 @@
+import json
+import logging
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2ProxyTwisted:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_01_echo_uploads(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_02_echo_delayed(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_03_echo_fail_early(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?fail_after=512")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ assert r.exit_code == 92 or r.response["status"] == 502
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_04_echo_fail_late(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?fail_after={os.path.getsize(fpath)}")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ if r.exit_code != 0:
+ # H2 stream or partial file error
+ assert r.exit_code == 92 or r.exit_code == 18, f'{r}'
+ else:
+ assert r.response["status"] == 502, f'{r}'
+
+ def test_h2_601_05_echo_fail_many(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ count = 200
+ fpath = os.path.join(env.gen_dir, "data-100k")
+ args = [env.curl, '--parallel', '--parallel-max', '20']
+ for i in range(count):
+ if i > 0:
+ args.append('--next')
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?id={i}&fail_after={os.path.getsize(fpath)}")
+ args.extend(env.curl_resolve_args(url=url))
+ args.extend([
+ '-o', '/dev/null', '-w', '%{json}\\n', '--form', f'file=@{fpath}', url
+ ])
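+        # with '-w %{json}', curl emits one JSON stats object per transfer;
+        # these are collected below to check each request's exitcode/http_code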
+ log.error(f'run: {args}')
+ r = env.run(args)
+ stats = []
+ for line in r.stdout.splitlines():
+ stats.append(json.loads(line))
+ assert len(stats) == count
+ for st in stats:
+ if st['exitcode'] != 0:
+ # H2 stream or partial file error
+ assert st['exitcode'] == 92 or st['exitcode'] == 18, f'{r}'
+ else:
+ assert st['http_code'] == 502, f'{r}'
diff --git a/test/modules/http2/test_700_load_get.py b/test/modules/http2/test_700_load_get.py
new file mode 100644
index 0000000..78760fb
--- /dev/null
+++ b/test/modules/http2/test_700_load_get.py
@@ -0,0 +1,63 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(not H2TestEnv().h2load_is_at_least('1.41.0'),
+ reason="h2load misses --connect-to option")
+class TestLoadGet:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi().add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
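+        # h2load_status() parses h2load's summary output into r.results["h2load"]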
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["started"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["done"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["succeeded"], f'{r.results}'
+ assert n == r.results["h2load"]["status"]["2xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["3xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["4xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["5xx"], f'{r.results}'
+
+ # test load on cgi script, single connection, different sizes
+ @pytest.mark.parametrize("start", [
+ 1000, 80000
+ ])
+ def test_h2_700_10(self, env, start):
+ assert env.is_live()
+ text = "X"
+ chunk = 32
+ for n in range(0, 5):
+ args = [env.h2load, "-n", "%d" % chunk, "-c", "1", "-m", "10",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={env.mkurl('https', 'cgi', '/')}",
+ ]
+ for i in range(0, chunk):
+ args.append(env.mkurl("https", "cgi", ("/mnot164.py?count=%d&text=%s" % (start+(n*chunk)+i, text))))
+ r = env.run(args)
+ self.check_h2load_ok(env, r, chunk)
+
+    # test load on cgi script, varying number of connections
+ @pytest.mark.parametrize("conns", [
+ 1, 2, 16
+ ])
+ def test_h2_700_11(self, env, conns):
+ assert env.is_live()
+ text = "X"
+ start = 1200
+ chunk = 64
+ for n in range(0, 5):
+ args = [env.h2load, "-n", "%d" % chunk, "-c", "%d" % conns, "-m", "10",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={env.mkurl('https', 'cgi', '/')}",
+ ]
+ for i in range(0, chunk):
+ args.append(env.mkurl("https", "cgi", ("/mnot164.py?count=%d&text=%s" % (start+(n*chunk)+i, text))))
+ r = env.run(args)
+ self.check_h2load_ok(env, r, chunk)
diff --git a/test/modules/http2/test_710_load_post_static.py b/test/modules/http2/test_710_load_post_static.py
new file mode 100644
index 0000000..ad8ae96
--- /dev/null
+++ b/test/modules/http2/test_710_load_post_static.py
@@ -0,0 +1,65 @@
+import pytest
+import os
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestLoadPostStatic:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"]
+ assert n == r.results["h2load"]["requests"]["started"]
+ assert n == r.results["h2load"]["requests"]["done"]
+ assert n == r.results["h2load"]["requests"]["succeeded"]
+ assert n == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # test POST on static file, slurped in by server
+ def test_h2_710_00(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 10
+ m = 1
+ conn = 1
+ fname = "data-10k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ def test_h2_710_01(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 1000
+ m = 100
+ conn = 1
+ fname = "data-1k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ def test_h2_710_02(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 100
+ m = 50
+ conn = 1
+ fname = "data-100k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
diff --git a/test/modules/http2/test_711_load_post_cgi.py b/test/modules/http2/test_711_load_post_cgi.py
new file mode 100644
index 0000000..82529d1
--- /dev/null
+++ b/test/modules/http2/test_711_load_post_cgi.py
@@ -0,0 +1,73 @@
+import pytest
+import os
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestLoadCgi:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"]
+ assert n == r.results["h2load"]["requests"]["started"]
+ assert n == r.results["h2load"]["requests"]["done"]
+ assert n == r.results["h2load"]["requests"]["succeeded"]
+ assert n == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # test POST on cgi, where input is read
+ def test_h2_711_10(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ # test POST on cgi via http/1.1 proxy, where input is read
+ def test_h2_711_11(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/proxy/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ # test POST on cgi via h2proxy, where input is read
+ def test_h2_711_12(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/h2proxy/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
diff --git a/test/modules/http2/test_712_buffering.py b/test/modules/http2/test_712_buffering.py
new file mode 100644
index 0000000..6658441
--- /dev/null
+++ b/test/modules/http2/test_712_buffering.py
@@ -0,0 +1,48 @@
+from datetime import timedelta
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+from pyhttpd.curl import CurlPiper
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestBuffering:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.skip(reason="this test shows unreliable jitter")
+ def test_h2_712_01(self, env):
+        # test gRPC-like requests that do not end, but give answers, see #207
+ #
+ # this test works like this:
+ # - use curl to POST data to the server /h2test/echo
+ # - feed curl the data in chunks, wait a bit between chunks
+ # - since some buffering on curl's stdout to Python is involved,
+ # we will see the response data only at the end.
+ # - therefore, we enable tracing with timestamps in curl on stderr
+ # and see when the response chunks arrive
+ # - if the server sends the incoming data chunks back right away,
+ # as it should, we see receiving timestamps separated roughly by the
+ # wait time between sends.
+ #
+ url = env.mkurl("https", "cgi", "/h2test/echo")
+ base_chunk = "0123456789"
+ chunks = ["chunk-{0:03d}-{1}\n".format(i, base_chunk) for i in range(5)]
+ stutter = timedelta(seconds=0.2) # this is short, but works on my machine (tm)
+ piper = CurlPiper(env=env, url=url)
+ piper.stutter_check(chunks, stutter)
+
+ def test_h2_712_02(self, env):
+ # same as 712_01 but via mod_proxy_http2
+ #
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+ base_chunk = "0123456789"
+ chunks = ["chunk-{0:03d}-{1}\n".format(i, base_chunk) for i in range(3)]
+ stutter = timedelta(seconds=1) # need a bit more delay since we have the extra connection
+ piper = CurlPiper(env=env, url=url)
+ piper.stutter_check(chunks, stutter)
diff --git a/test/modules/http2/test_800_websockets.py b/test/modules/http2/test_800_websockets.py
new file mode 100644
index 0000000..5b46da8
--- /dev/null
+++ b/test/modules/http2/test_800_websockets.py
@@ -0,0 +1,363 @@
+import inspect
+import logging
+import os
+import shutil
+import subprocess
+import time
+from datetime import timedelta, datetime
+from typing import Tuple, List
+import packaging.version
+
+import pytest
+import websockets
+from pyhttpd.result import ExecResult
+from pyhttpd.ws_util import WsFrameReader, WsFrame
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+ws_version = packaging.version.parse(websockets.version.version)
+ws_version_min = packaging.version.Version('10.4')
+
+
+def ws_run(env: H2TestEnv, path, authority=None, do_input=None, inbytes=None,
+ send_close=True, timeout=5, scenario='ws-stdin',
+ wait_close: float = 0.0) -> Tuple[ExecResult, List[str], List[WsFrame]]:
+ """ Run the h2ws test client in various scenarios with given input and
+ timings.
+ :param env: the test environment
+    :param path: the path on the Apache server to CONNECT to
+    :param authority: the host:port to use as authority in the request URL
+ :param do_input: a Callable for sending input to h2ws
+ :param inbytes: fixed bytes to send to h2ws, unless do_input is given
+ :param send_close: send a CLOSE WebSockets frame at the end
+ :param timeout: timeout for waiting on h2ws to finish
+ :param scenario: name of scenario h2ws should run in
+ :param wait_close: time to wait before closing input
+    :return: tuple of (ExecResult of the run, response info lines, received WebSocket frames)
+ """
+ h2ws = os.path.join(env.clients_dir, 'h2ws')
+ if not os.path.exists(h2ws):
+        pytest.fail(f'test client not built: {h2ws}')
+ if authority is None:
+ authority = f'cgi.{env.http_tld}:{env.http_port}'
+ args = [
+ h2ws, '-vv', '-c', f'localhost:{env.http_port}',
+ f'ws://{authority}{path}',
+ scenario
+ ]
+ # we write all output to files, because we manipulate input timings
+ # and would run in deadlock situations with h2ws blocking operations
+ # because its output is not consumed
+ start = datetime.now()
+ with open(f'{env.gen_dir}/h2ws.stdout', 'w') as fdout:
+ with open(f'{env.gen_dir}/h2ws.stderr', 'w') as fderr:
+ proc = subprocess.Popen(args=args, stdin=subprocess.PIPE,
+ stdout=fdout, stderr=fderr)
+ if do_input is not None:
+ do_input(proc)
+ elif inbytes is not None:
+ proc.stdin.write(inbytes)
+ proc.stdin.flush()
+
+ if wait_close > 0:
+ time.sleep(wait_close)
+ try:
+ inbytes = WsFrame.client_close(code=1000).to_network() if send_close else None
+ proc.communicate(input=inbytes, timeout=timeout)
+ except subprocess.TimeoutExpired:
+ log.error(f'ws_run: timeout expired')
+ proc.kill()
+ proc.communicate(timeout=timeout)
+ end = datetime.now()
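+    # h2ws prefixes per-stream info lines with '[1] '; all other stdout lines
+    # are a hex dump of the DATA received, which is parsed into WebSocket
+    # frames below when the CONNECT got a 200 response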
+ lines = open(f'{env.gen_dir}/h2ws.stdout').read().splitlines()
+ infos = [line for line in lines if line.startswith('[1] ')]
+ hex_content = ' '.join([line for line in lines if not line.startswith('[1] ')])
+ if len(infos) > 0 and infos[0] == '[1] :status: 200':
+ frames = WsFrameReader.parse(bytearray.fromhex(hex_content))
+ else:
+ frames = bytearray.fromhex(hex_content)
+ return ExecResult(args=args, exit_code=proc.returncode,
+ stdout=b'', stderr=b'', duration=end - start), infos, frames
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(condition=not H2TestEnv().httpd_is_at_least("2.4.58"),
+ reason=f'need at least httpd 2.4.58 for this')
+@pytest.mark.skipif(condition=ws_version < ws_version_min,
+ reason=f'websockets is {ws_version}, need at least {ws_version_min}')
+class TestWebSockets:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # Apache config that CONNECT proxies a WebSocket server for paths starting
+ # with '/ws/'
+ # The WebSocket server is started in pytest fixture 'ws_server' below.
+ conf = H2Conf(env, extras={
+ 'base': [
+ 'Timeout 1',
+ ],
+ f'cgi.{env.http_tld}': [
+ f' H2WebSockets on',
+ f' ProxyPass /ws/ http://127.0.0.1:{env.ws_port}/ \\',
+ f' upgrade=websocket timeout=10',
+ f' ReadBufferSize 65535'
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ conf.add_vhost_test1(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def ws_check_alive(self, env, timeout=5):
+ url = f'http://localhost:{env.ws_port}/'
+ end = datetime.now() + timedelta(seconds=timeout)
+ while datetime.now() < end:
+ r = env.curl_get(url, 5)
+ if r.exit_code == 0:
+ return True
+ time.sleep(.1)
+ return False
+
+ def _mkpath(self, path):
+ if not os.path.exists(path):
+ return os.makedirs(path)
+
+ def _rmrf(self, path):
+ if os.path.exists(path):
+ return shutil.rmtree(path)
+
+ @pytest.fixture(autouse=True, scope='class')
+ def ws_server(self, env):
+        # Run our python websockets server that has some special behaviour
+        # for the different paths to CONNECT to.
+ run_dir = os.path.join(env.gen_dir, 'ws-server')
+ err_file = os.path.join(run_dir, 'stderr')
+ self._rmrf(run_dir)
+ self._mkpath(run_dir)
+ with open(err_file, 'w') as cerr:
+ cmd = os.path.join(os.path.dirname(inspect.getfile(TestWebSockets)),
+ 'ws_server.py')
+ args = ['python3', cmd, '--port', str(env.ws_port)]
+ p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr,
+ stdout=cerr)
+ if not self.ws_check_alive(env):
+ p.kill()
+ p.wait()
+ pytest.fail(f'ws_server did not start. stderr={open(err_file).readlines()}')
+ yield
+ p.terminate()
+
+ # CONNECT with invalid :protocol header, must fail
+ def test_h2_800_01_fail_proto(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='fail-proto')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+ env.httpd_error_log.ignore_recent()
+
+ # a correct CONNECT, send CLOSE, expect CLOSE, basic success
+ def test_h2_800_02_ws_empty(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a URL path that does not exist on the server
+ def test_h2_800_03_not_found(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/does-not-exist')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 404', '[1] EOF'], f'{r}'
+
+ # CONNECT to a URL path that is a normal HTTP file resource
+ # we do not want to receive the body of that
+ def test_h2_800_04_non_ws_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/alive.json')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+ # CONNECT to a URL path that sends a delayed HTTP response body
+ # we do not want to receive the body of that
+ def test_h2_800_05_non_ws_delay_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/h2test/error?body_delay=100ms')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+    # CONNECT missing the Sec-WebSocket-Version header
+ def test_h2_800_06_miss_version(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-version')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 400', '[1] EOF'], f'{r}'
+
+ # CONNECT missing the :path header
+ def test_h2_800_07_miss_path(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-path')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :scheme header
+ def test_h2_800_08_miss_scheme(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-scheme')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :authority header
+ def test_h2_800_09a_miss_authority(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-authority')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT to authority with disabled websockets
+ def test_h2_800_09b_unsupported(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/',
+ authority=f'test1.{env.http_tld}:{env.http_port}')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+
+ # CONNECT and exchange a PING
+ def test_h2_800_10_ws_ping(self, env: H2TestEnv, ws_server):
+ ping = WsFrame.client_ping(b'12345')
+ r, infos, frames = ws_run(env, path='/ws/echo/', inbytes=ping.to_network())
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[0].data == ping.data, f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT and send several PINGs with a delay of 200ms
+ def test_h2_800_11_ws_timed_pings(self, env: H2TestEnv, ws_server):
+ frame_count = 5
+ ping = WsFrame.client_ping(b'12345')
+
+ def do_send(proc):
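+            # waiting on the process with a short timeout doubles as the
+            # 200ms pause between pings; TimeoutExpired means h2ws still runs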
+ for _ in range(frame_count):
+ try:
+ proc.stdin.write(ping.to_network())
+ proc.stdin.flush()
+ proc.wait(timeout=0.2)
+ except subprocess.TimeoutExpired:
+ pass
+
+ r, infos, frames = ws_run(env, path='/ws/echo/', do_input=do_send)
+ assert r.exit_code == 0
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == frame_count + 1, f'{frames}'
+ assert frames[-1].opcode == WsFrame.CLOSE, f'{frames}'
+ for i in range(frame_count):
+ assert frames[i].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[i].data == ping.data, f'{frames}'
+
+ # CONNECT to path that closes immediately
+ def test_h2_800_12_ws_unknown(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/unknown')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ # expect a CLOSE with code=4999, reason='path unknown'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+ assert frames[0].data[2:].decode() == 'path unknown', f'{frames}'
+
+ # CONNECT to a path that sends us 1 TEXT frame
+ def test_h2_800_13_ws_text(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/text/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.TEXT, f'{frames}'
+ assert frames[0].data.decode() == 'hello!', f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a path that sends us a named file in BINARY frames
+ @pytest.mark.parametrize("fname,flen", [
+ ("data-1k", 1000),
+ ("data-10k", 10000),
+ ("data-100k", 100*1000),
+ ("data-1m", 1000*1000),
+ ])
+ def test_h2_800_14_ws_file(self, env: H2TestEnv, ws_server, fname, flen):
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with 1MB file and trigger varying BINARY frame lengths
+ @pytest.mark.parametrize("frame_len", [
+ 1000 * 1024,
+ 100 * 1024,
+ 10 * 1024,
+ 1 * 1024,
+ 512,
+ ])
+ def test_h2_800_15_ws_frame_len(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with 1MB file and trigger delays between BINARY frame writes
+ @pytest.mark.parametrize("frame_delay", [
+ 1,
+ 10,
+ 50,
+ 100,
+ ])
+ def test_h2_800_16_ws_frame_delay(self, env: H2TestEnv, ws_server, frame_delay):
+ fname = "data-1m"
+ flen = 1000*1000
+ # adjust frame_len to allow for 1 second overall duration
+ frame_len = int(flen / (1000 / frame_delay))
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=1.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
+ # CONNECT to path with 1MB file and trigger delays between BINARY frame writes
+ @pytest.mark.parametrize("frame_len", [
+ 64 * 1024,
+ 16 * 1024,
+ 1 * 1024,
+ ])
+ def test_h2_800_17_ws_throughput(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ ncount = 5
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/0/{ncount}',
+ wait_close=0.1, send_close=False, timeout=30)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == ncount * flen, f'{frames}\n{r}'
+ # to see these logged, invoke: `pytest -o log_cli=true`
+        log.info(f'throughput (frame-len={frame_len}): '
+                 f'{(total_len / (1024*1024)) / r.duration.total_seconds():0.2f} MB/s')
+
+    # Check that the tunnel timeout is observed, e.g. the longer tunnel timeout
+    # holds and the 1 sec client connection timeout does not trigger
+ def test_h2_800_18_timeout(self, env: H2TestEnv, ws_server):
+ fname = "data-10k"
+ frame_delay = 1500
+ flen = 10*1000
+ frame_len = 8192
+        # the 1500ms delay between frames exceeds the 1 sec base Timeout; the
+        # transfer must still complete under the longer tunnel timeout
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=2)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
diff --git a/test/modules/http2/ws_server.py b/test/modules/http2/ws_server.py
new file mode 100644
index 0000000..99fb9cf
--- /dev/null
+++ b/test/modules/http2/ws_server.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+import argparse
+import asyncio
+import logging
+import os
+import sys
+import time
+
+import websockets.server as ws_server
+from websockets.exceptions import ConnectionClosedError
+
+log = logging.getLogger(__name__)
+
+logging.basicConfig(
+ format="[%(asctime)s] %(message)s",
+ level=logging.DEBUG,
+)
+
+
+async def echo(websocket):
+ try:
+ async for message in websocket:
+ try:
+ log.info(f'got request {message}')
+ except Exception as e:
+ log.error(f'error {e} getting path from {message}')
+ await websocket.send(message)
+ except ConnectionClosedError:
+ pass
+
+
+async def on_async_conn(conn):
+ rpath = str(conn.path)
+ pcomps = rpath[1:].split('/')
+ if len(pcomps) == 0:
+ pcomps = ['echo'] # default handler
+ log.info(f'connection for {pcomps}')
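+    # request paths: /echo, /text, or /file/<name>[/<bufsize>[/<delay_ms>[/<count>]]]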
+ if pcomps[0] == 'echo':
+ log.info(f'/echo endpoint')
+        async for message in conn:
+            await conn.send(message)
+ elif pcomps[0] == 'text':
+ await conn.send('hello!')
+ elif pcomps[0] == 'file':
+ if len(pcomps) < 2:
+            await conn.close(code=4999, reason='unknown file')
+ return
+ fpath = os.path.join('../', pcomps[1])
+ if not os.path.exists(fpath):
+            await conn.close(code=4999, reason='file not found')
+ return
+ bufsize = 0
+ if len(pcomps) > 2:
+ bufsize = int(pcomps[2])
+ if bufsize <= 0:
+ bufsize = 16*1024
+ delay_ms = 0
+ if len(pcomps) > 3:
+ delay_ms = int(pcomps[3])
+ n = 1
+ if len(pcomps) > 4:
+ n = int(pcomps[4])
+ for _ in range(n):
+ with open(fpath, 'r+b') as fd:
+ while True:
+ buf = fd.read(bufsize)
+ if buf is None or len(buf) == 0:
+ break
+ await conn.send(buf)
+ if delay_ms > 0:
+ time.sleep(delay_ms/1000)
+ else:
+ log.info(f'unknown endpoint: {rpath}')
+ await conn.close(code=4999, reason='path unknown')
+ await conn.close(code=1000, reason='')
+
+
+async def run_server(port):
+ log.info(f'starting server on port {port}')
+ async with ws_server.serve(ws_handler=on_async_conn,
+ host="localhost", port=port):
+ await asyncio.Future()
+
+
+async def main():
+    parser = argparse.ArgumentParser(prog='ws_server',
+ description="Run a websocket echo server.")
+ parser.add_argument("--port", type=int,
+ default=0, help="port to listen on")
+ args = parser.parse_args()
+
+ if args.port == 0:
+ sys.stderr.write('need --port\n')
+ sys.exit(1)
+
+ logging.basicConfig(
+ format="%(asctime)s %(message)s",
+ level=logging.DEBUG,
+ )
+ await run_server(args.port)
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/test/modules/md/__init__.py b/test/modules/md/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/md/__init__.py
diff --git a/test/modules/md/conftest.py b/test/modules/md/conftest.py
new file mode 100755
index 0000000..04165a2
--- /dev/null
+++ b/test/modules/md/conftest.py
@@ -0,0 +1,92 @@
+import logging
+import os
+import re
+import sys
+import pytest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+from .md_conf import HttpdConf
+from .md_env import MDTestEnv
+from .md_acme import MDPebbleRunner, MDBoulderRunner
+
+
+def pytest_report_header(config, startdir):
+ env = MDTestEnv()
+ return "mod_md: [apache: {aversion}({prefix}), mod_{ssl}, ACME server: {acme}]".format(
+ prefix=env.prefix,
+ aversion=env.get_httpd_version(),
+ ssl=env.ssl_module,
+ acme=env.acme_server,
+ )
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> MDTestEnv:
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = MDTestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ # we'd like to check the httpd error logs after the test suite has
+ # run to catch anything unusual. For this, we setup the ignore list
+ # of errors and warnings that we do expect.
+ env.httpd_error_log.set_ignored_lognos([
+ 'AH10040', # mod_md, setup complain
+ 'AH10045', # mod_md complains that there is no vhost for an MDomain
+ 'AH10056', # mod_md, invalid params
+ 'AH10105', # mod_md does not find a vhost with SSL enabled for an MDomain
+ 'AH10085', # mod_ssl complains about fallback certificates
+ 'AH01909', # mod_ssl, cert alt name complains
+ 'AH10170', # mod_md, wrong config, tested
+ 'AH10171', # mod_md, wrong config, tested
+ 'AH10373', # SSL errors on uncompleted handshakes
+ 'AH10398', # test on global store lock
+ ])
+
+ env.httpd_error_log.add_ignored_patterns([
+ re.compile(r'.*urn:ietf:params:acme:error:.*'),
+ re.compile(r'.*None of the ACME challenge methods configured for this domain are suitable.*'),
+ re.compile(r'.*problem\[(challenge-mismatch|challenge-setup-failure|apache:eab-hmac-invalid)].*'),
+ re.compile(r'.*CA considers answer to challenge invalid.].*'),
+ re.compile(r'.*problem\[urn:org:apache:httpd:log:AH\d+:].*'),
+ re.compile(r'.*Unsuccessful in contacting ACME server at :*'),
+ re.compile(r'.*test-md-720-002-\S+.org: dns-01 setup command failed .*'),
+ re.compile(r'.*AH\d*: unable to obtain global registry lock, .*'),
+ ])
+ if env.lacks_ocsp():
+ env.httpd_error_log.add_ignored_patterns([
+ re.compile(r'.*certificate with serial \S+ has no OCSP responder URL.*'),
+ ])
+ yield
+ assert env.apache_stop() == 0
+ errors, warnings = env.httpd_error_log.get_missed()
+ assert (len(errors), len(warnings)) == (0, 0),\
+ f"apache logged {len(errors)} errors and {len(warnings)} warnings: \n"\
+ "{0}\n{1}\n".format("\n".join(errors), "\n".join(warnings))
+
+
+@pytest.fixture(scope="package")
+def acme(env):
+ acme_server = None
+ if env.acme_server == 'pebble':
+ acme_server = MDPebbleRunner(env, configs={
+ 'default': os.path.join(env.gen_dir, 'pebble/pebble.json'),
+ 'eab': os.path.join(env.gen_dir, 'pebble/pebble-eab.json'),
+ })
+ elif env.acme_server == 'boulder':
+ acme_server = MDBoulderRunner(env)
+ yield acme_server
+ if acme_server is not None:
+ acme_server.stop()
+
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.json b/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.json
new file mode 100644
index 0000000..37c23c3
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.json
@@ -0,0 +1,6 @@
+{
+ "disabled": false,
+ "url": "http://localhost:4000/acme/reg/494",
+ "ca-url": "http://localhost:4000/directory",
+ "id": "ACME-localhost-0000"
+} \ No newline at end of file
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.pem b/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.pem
new file mode 100644
index 0000000..c4da46b
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/accounts/ACME-localhost-0000/account.pem
@@ -0,0 +1,54 @@
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIJnzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQI0s8pf5rIPTECAggA
+MB0GCWCGSAFlAwQBKgQQ2u9SobgmVMhhZxYkXf9kpwSCCVD04Xywr0m+b5f+2aE5
+qjGr8y6xlf4NC/+QL6mBCw+9tlsgt7Z9bBt7PR1eMUQ0Bz5a9veBT2JwqGFU8XLv
+Anfd4a8ciKRx4kdP7JL08rkKAqPxuwkzMin3TeOJwsoghyvt8zFrXrWEcHyhHd4L
+HAoA3ccCxDHH7ydORd7rhEQUOkcjbaJkZi6pzvv+C7kgSTMKYBaI1mlNzX5Oxm6I
+ziwmDcOtRgKb17z26zOYWjzbKHGopPlFe9/l32JxTr5UuCihR4NoPGiK08280OWQ
+HIwRxQ900AKyJZM1q3RkH4r0xtiik0lX0isx+UIiNEefA4Za/kXLCM7hHVCGwF1z
+eE8oX2yNcsX/sw7aVLhRyVDzrT8C5T7+s+K0eV/hfyYXXAZ0z0H+l3f3TRbMlLuq
+1FQnOmEtQy0CbfPGNlzbiK3glp2fc2ZHubTkprMoRTkEKWNiXD0Suhnsll9eV3d2
+cHZgsCQyD3LRz+Xj2v6P+fDOcu7IuM7om9GEjNQB1e7dzo6HOSTG2mIsQo6VByJw
+syoK1zzC70Jhj/G6aFALTh4dMceoBDyHZzOfiVwC3dGX1QEnNvGD7Za/woMNIx8S
+hiqjntDhlXPXCRX/Z/Zvg///6+Ip9FqkCVk74DRWjH9iUzdP7/E1GCyAH2BSdsdc
+PnK15p79Ff5TMV91IQmnVV37s57VqXIez2RtuLd530iUk4RtkJ1/PphybHd+JW/n
+avMj8gsuWB7RqaBsmbjLmSudSl0DNgy0IJKZs11UifrZmSkaUJH+JJ1W2hLHR980
+X75IujUmZasWYkVqq0nvdy8JConCaLd3TT8r8DcO73vZqjFnN+EEHENaEg7F7ig8
+xkp0wk4F3u1BEnkwd34aLonZ9DtSK3miDRqlWXqQGESMaQLYQvHUn9q4X57Tyz4T
+9ZVPeLJiuHwCGq6z2BJhgkAlGs7Eqra0pMpjVnRdylTQzx0Q2vLQbrZasyBpReeM
+zGdadxRR84PyhAGDGdLKR8VCVFhWX32ZBfqJQOjpyAT30Wu11ZDvEPASuTL4GdcD
+o5seucpUZdgzrivvjUhYLkRd0WOjgJyuvtWdillpSiweeGfDAnZvUZUFLd4EMmwH
+W+IUr7yIsjNuGZU3NW0pW/L9d9GuwgljP61WKhS6B7hRmx22YU3z2Y7islXiey3m
+kZ37mAqdK4EIQca2j9GmBQk7oUz+boYdm4vtk7tJI07LEDI79U95B8x1MpzjuIbj
+zlYmH1yw8UefsFrOfjJ4BpkDjVux+J2DmSqCFb5XBcjwWsYiY17niW6Qfrypd6vq
+bew1HgbBhdBNQoL1P8uS1fNNwoHmhJc6PNHFFxU3NP91yqB8Igj3khqk9+/VBcCt
+8xRc/1jR5mfAgvaCWyQgIZAsCgTLnvEXy91MG/DKR0ZdOLZJNas+1W9fjhcFvP6S
+nNmeMMrIAxaI85RVvnLqPEZhsb9AOlyaf6tKFJiCteyQlie6MOQTKSp4jjSOVW+w
+q/WtSZup9zXo8Ek+TnLhD0IJhpIbfR5is5iZaVY7lbcg4pc3Csh/SiMUJ4TJgiPS
+/End7LPoRIabRnw4PBtJRNCwf3ilsWUmi95HU3wLAmLpI1AtnbfQi+zva4UJdOTV
+HJxNN84ZGuey1gG7qZb3U6WpwzQDKvqTm5jK32nIS/LuNv1qpv0FdAmvulV9wBar
+M19CcD5kOlTvNZcf6B4Fkrr+x+Anji/kUV4slIvUbAaU9P4lMO0ORCTg1es7QvI7
+v0KRYYSULrO+G2CNYL7fN8Vf5tRpBZ3H1o6u3plw/P86MTQPOskppjK1VKsBBmL2
+isdeumWjLpFVr1vWxTm68f88f+iau3BRUkCDQXFEVTN7YuOhpexb6Js0T220HYTS
+9hmeVUnNlXii1BpnxLhBx/0O3heVOLc/C7b7vASg5PljieUQmpuyeJSUAJm1vKrI
+p2G/46MgBl+3/NkzLRGepzAH2IGAhhtXEk/zePdRptbVr29+vGDX6IzEWqZ5UYHG
+P5JYzaojrmLd0BNYwEbCrRBRHyM4jYFkRERs/kwCh5/Kle/eZpb+bjvIsAs0xcOC
+/uRF8RfHW1h8M8Bm9tR+rUX8CTxaIF3IY+N5qSPstNt8xGYLv7uvd+KoK0xVHAm+
+FAreqql7koa5D0ncLjTpQGnHiLBKsYmJWC4+TKC+a5m0eKmRgO/r5o+7mmoB9qCZ
+bI9GB9HoYeVW/QVWfmoH0W6rbQCmK/VcSB1dGwvz9rKU1DXHhXvGU2k1IAfPX11t
+RfwUmmLtrM9tjOWdBh74N4G8UvTk5FGygzJ+Eclm/ABeAChIFU7mLJFejOue/bKq
+CRAQul45+CskNyVyZWZvWTFT0UMN290b4E4sjUKoLbFZiA1Y/aU+ruG9iwPJ3yVS
+s09VqogNwKBLWYW5TclUzgf71AQTlnZpTudkqwr36ogIAXXaQpE1f6/HLQz3k1PA
+WmTaxoM//X00WvTq2UxxSmKf7mNPEg9UZ9m4ZTKe35a//ONxXVjBjtK23yN5MuHY
+YrgWF84xlLRPY3Um2ukCsRGb7yZRhlPmOBeYQvRod7BqEA0UmIR+ctnBWDwzSZw7
+JWuR+AZdjIfM+Ilh15fokpLI5IFnTAqvTYDoF0185kqYPkjtI2STAWpALA9XJp70
+aF/rbdbSrRPFI1+izTIvQjffYftro7EOfCFv62XZm6tj5RLHalfgTcWoUWw81ylL
+DOZZaKsv4bOW7HCM47pitFojwzNf9OaHd5VTaSPWts49siF/qCxcG8bwu51picbc
+96H1h3/npNhxDUA5qKzkBK9Bs7panzXt2kNJxPzHEiCjVVGq7t/ei4TZGoSw806D
+kNPFhztVoM1k2m7F7lu1EYOwJH/yXKJUgJYIycIoQyRMX7h0jb76U0oOHrdkw3A2
+9Helksl8kqz10td2PZyoj3K/EWu+33cFKgLtC9JrDATR3Lhdo2N3BQQAotW2+Tht
+HqHj/UzUoIWcEkzCZeJhRn9WRRbbLeWKwdXBxGl0ZESpJJ2+Ml6QkMkdZSUzDURD
+kxYl04U9JXk6vC2hT6780OBLnLivBqIaSUJ72DSkOFnifFoP/OeglWFVkJHWQjQP
+aGMcPD/xLLYhdRQlJND9K12FXtsazW2K/V+861y4rJOt6zJGSZwPrQBkLf7QBNAC
+DWiLOvp6tLT58pX8TSlplbITcQ==
+-----END ENCRYPTED PRIVATE KEY-----
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/archive/7007-1502285564.org.1/md.json b/test/modules/md/data/store_migrate/1.0/sample1/archive/7007-1502285564.org.1/md.json
new file mode 100644
index 0000000..33c771b
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/archive/7007-1502285564.org.1/md.json
@@ -0,0 +1,18 @@
+{
+ "name": "7007-1502285564.org",
+ "domains": [
+ "7007-1502285564.org"
+ ],
+ "contacts": [
+ "mailto:admin@7007-1502285564.org"
+ ],
+ "transitive": 0,
+ "ca": {
+ "proto": "ACME",
+ "url": "http://localhost:4000/directory",
+ "agreement": "http://boulder:4000/terms/v1"
+ },
+ "state": 1,
+ "renew-mode": 2,
+ "renew-window": 1209600
+}
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/cert.pem b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/cert.pem
new file mode 100644
index 0000000..1a72637
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/cert.pem
@@ -0,0 +1,32 @@
+-----BEGIN CERTIFICATE-----
+MIIFkDCCBHigAwIBAgITAP8PGcftT0j60OOjL+Er/XuHrzANBgkqhkiG9w0BAQsF
+ADAfMR0wGwYDVQQDDBRoMnBweSBoMmNrZXIgZmFrZSBDQTAeFw0xNzA4MDkxMjMz
+MDBaFw0xNzExMDcxMjMzMDBaME0xHDAaBgNVBAMTEzcwMDctMTUwMjI4NTU2NC5v
+cmcxLTArBgNVBAUTJGZmMGYxOWM3ZWQ0ZjQ4ZmFkMGUzYTMyZmUxMmJmZDdiODdh
+ZjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHuhVxT9Jpc6EpNAhrq
+RqzDJ4tWSG9BtguKZzh3sbY92EE5rqym7wpdb5DG5gwew4iD1R+YizY+99+00qlB
+3kNBUVsJCBnew0apmhPq4jjF8v8t3Qqq0ISn2Sdv5bt5mB9NWeO83h3zT1LW0rTm
+847nwxUuGxlIjLXxsibUvPunMfyGJUshflN5V9/Q3YQBOCnDWy5s4FKN2N34cHFE
+IgJo5ToBKZLp9eUaLm03mlfhTFc3/h0AtWwMZ5P2tRRB9EiijqI9nkrVzqyi1QTN
+Hn/XfgDgKRCyMp6i5kcK3hCXo4GjOIU0KA91ttf3IeKhXHKzC7ybc4hdJH2rWzoN
+srYq6tNZ+cOaa1E/H+v+OMSeIRaRrpM56c3nUssIzbneMIXuLHuOluaaL4baCjYp
+Pdc80bUlps06XcnVHysAbsfbtWAtUdzj2l4flVySruGoaqVDudl1GqYoYa+0oReM
+Zqd09Q+pCQvDNE+jiVq3An+JA4msux9EMMz7jkAwnl8iiWy0GMuQPsL5gp3TEXGY
+Cp1wQlzpmxZSdUZ+J6f4UkFOS/Zn6gS6nSxN8nj3XKbRYRbebPQMwRGYGttCyeZO
+dHiUY/3gQBUdpcMBJhAa5GFoabK0J5XPmK2E1P9cGQo7DbNn+Skojnz2WuUtCuyo
+m9la14Ruca9V8NmjBsu+4mXvAgMBAAGjggGVMIIBkTAOBgNVHQ8BAf8EBAMCBaAw
+HQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYD
+VR0OBBYEFH426IYgY0KXUe9cLMZ3d8tipsDkMB8GA1UdIwQYMBaAFPt4TxL5YBWD
+LJ8XfzQZsy426kGJMGYGCCsGAQUFBwEBBFowWDAiBggrBgEFBQcwAYYWaHR0cDov
+LzEyNy4wLjAuMTo0MDAyLzAyBggrBgEFBQcwAoYmaHR0cDovLzEyNy4wLjAuMTo0
+MDAwL2FjbWUvaXNzdWVyLWNlcnQwHgYDVR0RBBcwFYITNzAwNy0xNTAyMjg1NTY0
+Lm9yZzAnBgNVHR8EIDAeMBygGqAYhhZodHRwOi8vZXhhbXBsZS5jb20vY3JsMGEG
+A1UdIARaMFgwCAYGZ4EMAQIBMEwGAyoDBDBFMCIGCCsGAQUFBwIBFhZodHRwOi8v
+ZXhhbXBsZS5jb20vY3BzMB8GCCsGAQUFBwICMBMMEURvIFdoYXQgVGhvdSBXaWx0
+MA0GCSqGSIb3DQEBCwUAA4IBAQBfqLXSJZ5Izs2I44cXWrAto631aTylValp0Fiy
+Zz1dj00FS6XN5DGtfIyq7Ymd3MMiOZCLkTOMMb7BrJAvcgeJteKwdk3ffXEDyKH0
+1ttXK7l46trEyGOB+f9PMMKxVMyhDhGKyb6ro4Y5WTK/w4862soqKcP1SjHvk65u
+lIkFws1fWYYzqPLKLij2ILm+4NjdGIl8qPQWP2PtbOaDTFspJBz6hvLmqRgmjVVv
+cENwBUML4LCkVY3TUqoBHXDhpocTZlVeAVRVsroosboQJlY5nIKz6cOjilILn4cT
+hgEKa5IRwK5lUveCoeQtYUyLoyp5ncbota+UxNxCnkl/0veK
+-----END CERTIFICATE-----
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/chain.pem b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/chain.pem
new file mode 100644
index 0000000..267866e
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/chain.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEijCCA3KgAwIBAgICEk0wDQYJKoZIhvcNAQELBQAwKzEpMCcGA1UEAwwgY2Fj
+a2xpbmcgY3J5cHRvZ3JhcGhlciBmYWtlIFJPT1QwHhcNMTUxMDIxMjAxMTUyWhcN
+MjAxMDE5MjAxMTUyWjAfMR0wGwYDVQQDExRoYXBweSBoYWNrZXIgZmFrZSBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMIKR3maBcUSsncXYzQT13D5
+Nr+Z3mLxMMh3TUdt6sACmqbJ0btRlgXfMtNLM2OU1I6a3Ju+tIZSdn2v21JBwvxU
+zpZQ4zy2cimIiMQDZCQHJwzC9GZn8HaW091iz9H0Go3A7WDXwYNmsdLNRi00o14U
+joaVqaPsYrZWvRKaIRqaU0hHmS0AWwQSvN/93iMIXuyiwywmkwKbWnnxCQ/gsctK
+FUtcNrwEx9Wgj6KlhwDTyI1QWSBbxVYNyUgPFzKxrSmwMO0yNff7ho+QT9x5+Y/7
+XE59S4Mc4ZXxcXKew/gSlN9U5mvT+D2BhDtkCupdfsZNCQWp27A+b/DmrFI9NqsC
+AwEAAaOCAcIwggG+MBIGA1UdEwEB/wQIMAYBAf8CAQAwQwYDVR0eBDwwOqE4MAaC
+BC5taWwwCocIAAAAAAAAAAAwIocgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAwDgYDVR0PAQH/BAQDAgGGMH8GCCsGAQUFBwEBBHMwcTAyBggrBgEFBQcw
+AYYmaHR0cDovL2lzcmcudHJ1c3RpZC5vY3NwLmlkZW50cnVzdC5jb20wOwYIKwYB
+BQUHMAKGL2h0dHA6Ly9hcHBzLmlkZW50cnVzdC5jb20vcm9vdHMvZHN0cm9vdGNh
+eDMucDdjMB8GA1UdIwQYMBaAFOmkP+6epeby1dd5YDyTpi4kjpeqMFQGA1UdIARN
+MEswCAYGZ4EMAQIBMD8GCysGAQQBgt8TAQEBMDAwLgYIKwYBBQUHAgEWImh0dHA6
+Ly9jcHMucm9vdC14MS5sZXRzZW5jcnlwdC5vcmcwPAYDVR0fBDUwMzAxoC+gLYYr
+aHR0cDovL2NybC5pZGVudHJ1c3QuY29tL0RTVFJPT1RDQVgzQ1JMLmNybDAdBgNV
+HQ4EFgQU+3hPEvlgFYMsnxd/NBmzLjbqQYkwDQYJKoZIhvcNAQELBQADggEBAA0Y
+AeLXOklx4hhCikUUl+BdnFfn1g0W5AiQLVNIOL6PnqXu0wjnhNyhqdwnfhYMnoy4
+idRh4lB6pz8Gf9pnlLd/DnWSV3gS+/I/mAl1dCkKby6H2V790e6IHmIK2KYm3jm+
+U++FIdGpBdsQTSdmiX/rAyuxMDM0adMkNBwTfQmZQCz6nGHw1QcSPZMvZpsC8Skv
+ekzxsjF1otOrMUPNPQvtTWrVx8GlR2qfx/4xbQa1v2frNvFBCmO59goz+jnWvfTt
+j2NjwDZ7vlMBsPm16dbKYC840uvRoZjxqsdc3ChCZjqimFqlNG/xoPA8+dTicZzC
+XE9ijPIcvW6y1aa3bGw=
+-----END CERTIFICATE-----
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/md.json b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/md.json
new file mode 100644
index 0000000..12e4e48
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/md.json
@@ -0,0 +1,23 @@
+{
+ "name": "7007-1502285564.org",
+ "domains": [
+ "7007-1502285564.org"
+ ],
+ "contacts": [
+ "mailto:admin@7007-1502285564.org"
+ ],
+ "transitive": 0,
+ "ca": {
+ "account": "ACME-localhost-0000",
+ "proto": "ACME",
+ "url": "http://localhost:4000/directory",
+ "agreement": "http://boulder:4000/terms/v1"
+ },
+ "cert": {
+ "url": "http://localhost:4000/acme/cert/ff0f19c7ed4f48fad0e3a32fe12bfd7b87af",
+ "expires": "Tue, 07 Nov 2017 12:33:00 GMT"
+ },
+ "state": 2,
+ "renew-mode": 2,
+ "renew-window": 1209600
+}
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/pkey.pem b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/pkey.pem
new file mode 100644
index 0000000..0438ddd
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/domains/7007-1502285564.org/pkey.pem
@@ -0,0 +1,52 @@
+-----BEGIN PRIVATE KEY-----
+MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDB7oVcU/SaXOhK
+TQIa6kaswyeLVkhvQbYLimc4d7G2PdhBOa6spu8KXW+QxuYMHsOIg9UfmIs2Pvff
+tNKpQd5DQVFbCQgZ3sNGqZoT6uI4xfL/Ld0KqtCEp9knb+W7eZgfTVnjvN4d809S
+1tK05vOO58MVLhsZSIy18bIm1Lz7pzH8hiVLIX5TeVff0N2EATgpw1subOBSjdjd
++HBxRCICaOU6ASmS6fXlGi5tN5pX4UxXN/4dALVsDGeT9rUUQfRIoo6iPZ5K1c6s
+otUEzR5/134A4CkQsjKeouZHCt4Ql6OBoziFNCgPdbbX9yHioVxyswu8m3OIXSR9
+q1s6DbK2KurTWfnDmmtRPx/r/jjEniEWka6TOenN51LLCM253jCF7ix7jpbmmi+G
+2go2KT3XPNG1JabNOl3J1R8rAG7H27VgLVHc49peH5Vckq7hqGqlQ7nZdRqmKGGv
+tKEXjGandPUPqQkLwzRPo4latwJ/iQOJrLsfRDDM+45AMJ5fIolstBjLkD7C+YKd
+0xFxmAqdcEJc6ZsWUnVGfien+FJBTkv2Z+oEup0sTfJ491ym0WEW3mz0DMERmBrb
+QsnmTnR4lGP94EAVHaXDASYQGuRhaGmytCeVz5ithNT/XBkKOw2zZ/kpKI589lrl
+LQrsqJvZWteEbnGvVfDZowbLvuJl7wIDAQABAoICAQCVSZob0v1O/wpKeDGQqpwx
+TiHY31jvXHRZOffvviRtl/ora84NVoxZPEgv+Q0Kc3wuUN31bqZr4dlKupYYeX4x
+48xO+grkb1l/wfu8LWpsLeW7joDEP245UESYWUlOInJ6Vj9GUxPhlnWP3ZNicw83
+CS5h1ZZCxlibjy2HOukoCDMwo8t9pJDsjVKaFt0PSykC7UH54RJmOo+hgCh+6OYN
+WNZs6owobjY+YQMwTEdiMytjUNUrWmpOfNYXTyliKMt2RrzqI+kAzspElyzIf2Zl
+H2v+HJFAKw1QlTITqkf8Gd9iYlWWJOpZzFIuui25mmHiYfY9AKXVaW4313tomzbg
+L9Muc0pCmR8ge/hsC+C2QkVhHRFThakd5zU8rOEeXClzLKg1tjSVwcyNllXwd3Uy
+gQRtDqAWcWhXj2pqPzLc4v/wobjPE+xEpAbvDBvEof1fMy1PBeyKq7T4mIxswuWF
+takm9/Bt15K2TNBc7qNQV2x+MCS0Bi2Hd1yjLbIHllBDQR2ZsHRw1D38ckbL7ATE
+yDwnzI2gxlYYV7K/iQG9XkM54Ra5tNOFYv9GiCw+JPrLcQ5qmGsCCu6lfktMC8pN
+7VQRbHt60ZKaunE1muwWDmyYzP106qUXMw6nIVMyqX0ywTEPAgtRgWcucLWR33DD
+k1OBcq2tOceaZjA5Pbi4sQKCAQEA+MbI4HEbROlsPeQ7VMOoAHjJPWuhDNXqnz4Q
+c4z3X+W61TAWZINRENYDZd3c7D7wOWb9VBA+o62xrzYviul9qhTAjZ8dRfxagJpH
+OxNY348HNj+IxONj3RXr/7tfOXtzcjiFwzn85oPLRM56XfjYZ5lUgQBSEauXOue5
++bpNBvrYZLPm7i5BM8RpBElH2wtCizLAE9BrKYUqTYWyl76miPfpeSVMv2JOpUwp
+josVrAWAOoQHeIrCLmSF43oqmtzJ9Aq1r/VeOQB/3TT4E0RhWhDWOg3zNuA20w+E
+VuKyl4J/XLo6T86Zc/PM4+vb8zPztjZHQVJj58Iq7N4/y5cBfQKCAQEAx5AP10sw
+C4kCwU/yXORhimMPlRldKx2h+8Ha/0whTkehXaJ0synCV0ZLh7jSgfe81Zx5/3RK
+KKRWVx7+wmQiOqfSIBJN4xWdpVDS7yndk/FW8sYqT1v2sgr2t1u41bQAY3qzezsK
+elNsjbRsUCVvVu9HZ5zH7Pvmf0Ma8P2t8EioQWJ2ptgF6imTXIrQORJPBqDEzp6W
+EjiHC9kuZ2E+uPGl+6oQcxRUjtFkxnI9LgpOQCjNNIhW6cEhJxV3z8YIUnUyd7vd
+i0eEfhKF+DXzrqbtve63iGGU7TFMiiNF59hPxKHkPvHnUlXNZjJ8om9M579i/9fm
+OHYWaWFuzb6g2wKCAQAIZ37FxkxriY80kA9JD8sPKQVzY71vF5Lzij84CB0bSkGD
+jjpTbvRAI1q+CD68ZGvtJIOOYXYcRXPpPWVhxf2Oz2Cp6CQvBxVvnsalQkQQWV6f
+AIp4TE5FW8Y7P3M6F+eQhkROkhjvGKi3TFpp7kwxQ8bNDNu46RkUzltECn0rrTG+
+RS2aAkoFm68IjAk3Zyv6U96VTMcyAeOp9shPxAsQOX/TreTn2kRZ5TbKL/ytcQoh
+7+/orJdexdqYErp5vNe9vNbieOGT/2ZSbMWssPSw/DygfXQn+G8htjZ8UPBDmg7/
+bPMnWw1oE2ZqlL87ehfTogXKOSRS4gZdNizljdZpAoIBADxSfZdUcOdruNt6MQaH
+Ojy8iN9G1XTM9kPFa080UfT5jfthuejWPJpo8zfJVEhY/EmNjQr8udXjJv4armNQ
+JVCZndh37/cud4KbFceZXhL0JpYn9G4cnEthKQZvwUVHrb5kPpCHXjlvsiZ7XSo0
+xpz+oxTcvUoTMq9RN3mVFNjG/aUWAEuajN8lRhf5FcvKjvyv6A2UvkQvthKMyYwS
+RwVcdhHGbEZ85Lpu7QlXSsr57oFSVAUHGU57RGwt/xNdBvL13hV3QhZxvcjmDHzk
+wg4PA1ogKHYfGQdBmaM/2kekiSgkz3t/X67xpK65oBbxkcuTfHddaYezmj6sZvPm
+JXUCggEBAO37OxP7B66FQghuBkfui8sPymY2oSFQIb3IRO5A17/wp9yW1f9X4Bu4
+dh7ln+6IEURZyldAZcVRSHbjrL8VWXtS86eDttnKD7L46BbqAytckc/pebA/5bu0
+tjsM8ulayPGuJzEl/g1F1bU1eduXkmq/O7636S0Q1KCVHldn9qNgkowfjpzANHNs
+ksSwxMIY8n4U2kckMmfCj2B6UrnqQ6Bs7IaijQJ5u/mGYke+gKEGQ99esx2Ts1Vl
+w8WDaDUOwHEywuFyqtGJzizX8BazIzwmSCh8hpedDtFVVnfjszLnf3Y+FOrb9XlM
+Wc8hH7giOwSubI2D2mauspM5CZlez7A=
+-----END PRIVATE KEY-----
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/httpd.json b/test/modules/md/data/store_migrate/1.0/sample1/httpd.json
new file mode 100644
index 0000000..a5bd7fb
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/httpd.json
@@ -0,0 +1,6 @@
+{
+ "proto": {
+ "http": true,
+ "https": true
+ }
+} \ No newline at end of file
diff --git a/test/modules/md/data/store_migrate/1.0/sample1/md_store.json b/test/modules/md/data/store_migrate/1.0/sample1/md_store.json
new file mode 100644
index 0000000..157782b
--- /dev/null
+++ b/test/modules/md/data/store_migrate/1.0/sample1/md_store.json
@@ -0,0 +1,7 @@
+{
+ "version": "0.6.1-git",
+ "store": {
+ "version": 1.0
+ },
+ "key": "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXphYmNkZWZnaGlqa2xtbm9wcXJzdHV2"
+} \ No newline at end of file
diff --git a/test/modules/md/data/test_920/002.pubcert b/test/modules/md/data/test_920/002.pubcert
new file mode 100644
index 0000000..02c9e87
--- /dev/null
+++ b/test/modules/md/data/test_920/002.pubcert
@@ -0,0 +1,58 @@
+-----BEGIN CERTIFICATE-----
+MIIFYDCCBEigAwIBAgISAwOcRk1FTt55/NLK6Fn2aPJpMA0GCSqGSIb3DQEBCwUA
+MEoxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MSMwIQYDVQQD
+ExpMZXQncyBFbmNyeXB0IEF1dGhvcml0eSBYMzAeFw0xOTA1MzExNjA2MzVaFw0x
+OTA4MjkxNjA2MzVaMBYxFDASBgNVBAMTC2Vpc3Npbmcub3JnMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA9d5xZdknImIPfmiUaiiRhHLx4bvazWRTgA2+
+etRNKr42MRjkuLbAhvxGjhw4El0GJlbngKTfiSK0Vq0idW/ehUr++czRSDrRVfqq
+qcI/F4NXLIbIZfmR7/vG0IP8Xc8D9VyQCX0uDapCvw+A/U46p0VOZz4bIB/bl0BW
+/mqBvVhBU9owskUcPjwwI/tK6My933CUVKXuFpPZ4V7zoY0/8Xa6JmWC2q1+7XmE
+h51hPnU35dYH1bA7WblX8rVxnEPCyCOgABVLKb6NhWfTCEqy+yzr32KsoSR1xqe4
+T2EeTcoamwF2yhz2zRC4glX0LM4inJ1/ZOQ+nKbFZTOPVWEnLQIDAQABo4ICcjCC
+Am4wDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjAMBgNVHRMBAf8EAjAAMB0GA1UdDgQWBBTfO7pZGPLsa0NuPZMG4NGlr1TaWjAf
+BgNVHSMEGDAWgBSoSmpjBH3duubRObemRWXv86jsoTBvBggrBgEFBQcBAQRjMGEw
+LgYIKwYBBQUHMAGGImh0dHA6Ly9vY3NwLmludC14My5sZXRzZW5jcnlwdC5vcmcw
+LwYIKwYBBQUHMAKGI2h0dHA6Ly9jZXJ0LmludC14My5sZXRzZW5jcnlwdC5vcmcv
+MCcGA1UdEQQgMB6CC2Vpc3Npbmcub3Jngg93d3cuZWlzc2luZy5vcmcwTAYDVR0g
+BEUwQzAIBgZngQwBAgEwNwYLKwYBBAGC3xMBAQEwKDAmBggrBgEFBQcCARYaaHR0
+cDovL2Nwcy5sZXRzZW5jcnlwdC5vcmcwggEFBgorBgEEAdZ5AgQCBIH2BIHzAPEA
+dwB0ftqDMa0zEJEhnM4lT0Jwwr/9XkIgCMY3NXnmEHvMVgAAAWsO24QlAAAEAwBI
+MEYCIQD8yd2uHl2DNgvnBkSiA8vsK5pOv204NixI9F89LWERwgIhAPMLLiZkFG2h
+DTpEwF50BbZ+laYH8VP03Teq5csk2lX0AHYAKTxRllTIOWW6qlD8WAfUt2+/WHop
+ctykwwz05UVH9HgAAAFrDtuEFgAABAMARzBFAiEA3bYpKSNigSe0HuDyH/kerTW2
+55ugvODp6d+vNbNmgZoCIGTd4cio769BTKfLJTqNbjc9sKK9T7XkHUO4JgQdY6Nq
+MA0GCSqGSIb3DQEBCwUAA4IBAQBeatZxh8leVmeFE/IYTKKqHyZqTccJKdugXIOr
+uIF6sLup/8Fv/2N0wZc+edkj+NCyWhxxkZULyW6xhlL7rtzcwLYbQBSxKvT4Utur
+01a5bwhM62MdMjzkFgCCa5nRKPQ7bc684RrUFNi94d0KSb5ArFv8wovqPW7jbmFp
+X50dYKCE+wohFPHcsQapnV0lXK4+5qJZSZkp/pHANdndLCvFfzRHhV4nqRA12G2T
+VVWjdHN6ShL2uykJVAnSBhu/XD4mh79Yq9TQtS1DHfP3HcKstLqR0nrwBFaB6087
+jXfIpJ46yObq001qHeUMhT+B3WI2YPp/hY7u8A9+hCmDyyq8
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIEkjCCA3qgAwIBAgIQCgFBQgAAAVOFc2oLheynCDANBgkqhkiG9w0BAQsFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTE2MDMxNzE2NDA0NloXDTIxMDMxNzE2NDA0Nlow
+SjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUxldCdzIEVuY3J5cHQxIzAhBgNVBAMT
+GkxldCdzIEVuY3J5cHQgQXV0aG9yaXR5IFgzMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAnNMM8FrlLke3cl03g7NoYzDq1zUmGSXhvb418XCSL7e4S0EF
+q6meNQhY7LEqxGiHC6PjdeTm86dicbp5gWAf15Gan/PQeGdxyGkOlZHP/uaZ6WA8
+SMx+yk13EiSdRxta67nsHjcAHJyse6cF6s5K671B5TaYucv9bTyWaN8jKkKQDIZ0
+Z8h/pZq4UmEUEz9l6YKHy9v6Dlb2honzhT+Xhq+w3Brvaw2VFn3EK6BlspkENnWA
+a6xK8xuQSXgvopZPKiAlKQTGdMDQMc2PMTiVFrqoM7hD8bEfwzB/onkxEz0tNvjj
+/PIzark5McWvxI0NHWQWM6r6hCm21AvA2H3DkwIDAQABo4IBfTCCAXkwEgYDVR0T
+AQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAYYwfwYIKwYBBQUHAQEEczBxMDIG
+CCsGAQUFBzABhiZodHRwOi8vaXNyZy50cnVzdGlkLm9jc3AuaWRlbnRydXN0LmNv
+bTA7BggrBgEFBQcwAoYvaHR0cDovL2FwcHMuaWRlbnRydXN0LmNvbS9yb290cy9k
+c3Ryb290Y2F4My5wN2MwHwYDVR0jBBgwFoAUxKexpHsscfrb4UuQdf/EFWCFiRAw
+VAYDVR0gBE0wSzAIBgZngQwBAgEwPwYLKwYBBAGC3xMBAQEwMDAuBggrBgEFBQcC
+ARYiaHR0cDovL2Nwcy5yb290LXgxLmxldHNlbmNyeXB0Lm9yZzA8BgNVHR8ENTAz
+MDGgL6AthitodHRwOi8vY3JsLmlkZW50cnVzdC5jb20vRFNUUk9PVENBWDNDUkwu
+Y3JsMB0GA1UdDgQWBBSoSmpjBH3duubRObemRWXv86jsoTANBgkqhkiG9w0BAQsF
+AAOCAQEA3TPXEfNjWDjdGBX7CVW+dla5cEilaUcne8IkCJLxWh9KEik3JHRRHGJo
+uM2VcGfl96S8TihRzZvoroed6ti6WqEBmtzw3Wodatg+VyOeph4EYpr/1wXKtx8/
+wApIvJSwtmVi4MFU5aMqrSDE6ea73Mj2tcMyo5jMd6jmeWUHK8so/joWUoHOUgwu
+X4Po1QYz+3dszkDqMp4fklxBwXRsW10KXzPMTZ+sOPAveyxindmjkW8lGy+QsRlG
+PfZ+G6Z6h7mjem0Y+iWlkYcV4PIWL1iwBi8saCbGS5jN2p8M+X+Q7UNKEkROb3N6
+KOqkqm57TH2H3eDJAkSnh6/DNFu0Qg==
+-----END CERTIFICATE-----
diff --git a/test/modules/md/data/test_conf_validate/test_014.conf b/test/modules/md/data/test_conf_validate/test_014.conf
new file mode 100644
index 0000000..c1a8e06
--- /dev/null
+++ b/test/modules/md/data/test_conf_validate/test_014.conf
@@ -0,0 +1,8 @@
+# global server name as managed domain name
+
+MDomain resistance.fritz.box www.example2.org
+
+<VirtualHost *:12346>
+ ServerName www.example2.org
+
+</VirtualHost>
diff --git a/test/modules/md/data/test_drive/test1.example.org.conf b/test/modules/md/data/test_drive/test1.example.org.conf
new file mode 100644
index 0000000..dd42072
--- /dev/null
+++ b/test/modules/md/data/test_drive/test1.example.org.conf
@@ -0,0 +1,6 @@
+# A setup that requires manual driving, e.g. invoking a2md outside apache
+#
+MDRenewMode manual
+
+MDomain test1.not-forbidden.org www.test1.not-forbidden.org mail.test1.not-forbidden.org
+
diff --git a/test/modules/md/data/test_roundtrip/temp.conf b/test/modules/md/data/test_roundtrip/temp.conf
new file mode 100644
index 0000000..eb7b75f
--- /dev/null
+++ b/test/modules/md/data/test_roundtrip/temp.conf
@@ -0,0 +1,27 @@
+ MDDriveMode manual
+ MDCertificateAuthority http://localhost:4000/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://boulder:4000/terms/v1
+
+ ServerAdmin mailto:admin@test102-1499953506.org
+
+ ManagedDomain test102-1499953506.org test-a.test102-1499953506.org test-b.test102-1499953506.org
+
+<VirtualHost *:5001>
+ ServerName test-a.test102-1499953506.org
+ DocumentRoot htdocs/a
+
+ SSLEngine on
+ SSLCertificateFile /Users/sei/projects/mod_md/test/gen/apache/md/domains/test102-1499953506.org/cert.pem
+ SSLCertificateKeyFile /Users/sei/projects/mod_md/test/gen/apache/md/domains/test102-1499953506.org/pkey.pem
+</VirtualHost>
+
+<VirtualHost *:5001>
+ ServerName test-b.test102-1499953506.org
+ DocumentRoot htdocs/b
+
+ SSLEngine on
+ SSLCertificateFile /Users/sei/projects/mod_md/test/gen/apache/md/domains/test102-1499953506.org/cert.pem
+ SSLCertificateKeyFile /Users/sei/projects/mod_md/test/gen/apache/md/domains/test102-1499953506.org/pkey.pem
+</VirtualHost>
+
diff --git a/test/modules/md/dns01.py b/test/modules/md/dns01.py
new file mode 100755
index 0000000..3afa467
--- /dev/null
+++ b/test/modules/md/dns01.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+curl = "curl"
+challtestsrv = "localhost:8055"
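+# management API of pebble-challtestsrv (default port 8055)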
+
+
+def run(args):
+ sys.stderr.write(f"run: {' '.join(args)}\n")
+ p = subprocess.Popen(args, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output, errput = p.communicate(None)
+ rv = p.wait()
+ if rv != 0:
+ sys.stderr.write(errput.decode())
+ sys.stdout.write(output.decode())
+ return rv
+
+
+def teardown(domain):
+ rv = run([curl, '-s', '-d', f'{{"host":"_acme-challenge.{domain}"}}',
+ f'{challtestsrv}/clear-txt'])
+ if rv == 0:
+ rv = run([curl, '-s', '-d', f'{{"host":"{domain}"}}',
+ f'{challtestsrv}/set-txt'])
+ return rv
+
+
+def setup(domain, challenge):
+ teardown(domain)
+ rv = run([curl, '-s', '-d', f'{{"host":"{domain}", "addresses":["127.0.0.1"]}}',
+ f'{challtestsrv}/set-txt'])
+ if rv == 0:
+ rv = run([curl, '-s', '-d', f'{{"host":"_acme-challenge.{domain}.", "value":"{challenge}"}}',
+ f'{challtestsrv}/set-txt'])
+ return rv
+
+
+def main(argv):
+ if len(argv) > 1:
+ if argv[1] == 'setup':
+ if len(argv) != 4:
+ sys.stderr.write("wrong number of arguments: dns01.py setup <domain> <challenge>\n")
+ sys.exit(2)
+ rv = setup(argv[2], argv[3])
+ elif argv[1] == 'teardown':
+ if len(argv) != 3:
+ sys.stderr.write("wrong number of arguments: dns01.py teardown <domain>\n")
+ sys.exit(1)
+ rv = teardown(argv[2])
+ else:
+ sys.stderr.write(f"unknown option {argv[1]}\n")
+ rv = 2
+ else:
+ sys.stderr.write("dns01.py wrong number of arguments\n")
+ rv = 2
+ sys.exit(rv)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/http_challenge_foobar.py b/test/modules/md/http_challenge_foobar.py
new file mode 100755
index 0000000..557f907
--- /dev/null
+++ b/test/modules/md/http_challenge_foobar.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+import os
+import re
+import sys
+
+
+def main(argv):
+ if len(argv) < 4:
+ sys.stderr.write(f"{argv[0]} without too few arguments")
+ sys.exit(7)
+ store_dir = argv[1]
+ event = argv[2]
+ mdomain = argv[3]
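+    # events are expected as '<action>:<challenge-type>:<dns-name>',
+    # e.g. 'challenge-setup:http-01:test.example.org'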
+ m = re.match(r'(\S+):(\S+):(\S+)', event)
+ if m and 'challenge-setup' == m.group(1) and 'http-01' == m.group(2):
+ dns_name = m.group(3)
+ challenge_file = f"{store_dir}/challenges/{dns_name}/acme-http-01.txt"
+ if not os.path.isfile(challenge_file):
+ sys.stderr.write(f"{argv[0]} does not exist: {challenge_file}")
+ sys.exit(8)
+ with open(challenge_file, 'w') as fd:
+ fd.write('this_is_an_invalidated_http-01_challenge')
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/md_acme.py b/test/modules/md/md_acme.py
new file mode 100755
index 0000000..36be347
--- /dev/null
+++ b/test/modules/md/md_acme.py
@@ -0,0 +1,125 @@
+import logging
+import os
+import shutil
+import subprocess
+import time
+from abc import ABCMeta, abstractmethod
+from datetime import datetime, timedelta
+from threading import Thread
+from typing import Dict
+
+from .md_env import MDTestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+def monitor_proc(env: MDTestEnv, proc):
+ _env = env
+ proc.wait()
+
+
+class ACMEServer:
+ __metaclass__ = ABCMeta
+
+ @abstractmethod
+ def start(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def stop(self):
+ raise NotImplementedError
+
+ @abstractmethod
+ def install_ca_bundle(self, dest):
+ raise NotImplementedError
+
+
+class MDPebbleRunner(ACMEServer):
+
+ def __init__(self, env: MDTestEnv, configs: Dict[str, str]):
+ self.env = env
+ self.configs = configs
+ self._current = 'default'
+ self._pebble = None
+ self._challtestsrv = None
+ self._log = None
+
+ def start(self, config: str = None):
+ if config is not None and config != self._current:
+ # change, tear down and start again
+ assert config in self.configs
+ self.stop()
+ self._current = config
+ elif self._pebble is not None:
+ # already running
+ return
+ args = ['pebble', '-config', self.configs[self._current], '-dnsserver', ':8053']
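+        # '-dnsserver :8053' points pebble's resolver at the DNS server run
+        # by pebble-challtestsrv (started below)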
+ env = {}
+ env.update(os.environ)
+ env['PEBBLE_VA_NOSLEEP'] = '1'
+ self._log = open(f'{self.env.gen_dir}/pebble.log', 'w')
+ self._pebble = subprocess.Popen(args=args, env=env,
+ stdout=self._log, stderr=self._log)
+ t = Thread(target=monitor_proc, args=(self.env, self._pebble))
+ t.start()
+
+ args = ['pebble-challtestsrv', '-http01', '', '-https01', '', '-tlsalpn01', '']
+ self._challtestsrv = subprocess.Popen(args, stdout=self._log, stderr=self._log)
+ t = Thread(target=monitor_proc, args=(self.env, self._challtestsrv))
+ t.start()
+ self.install_ca_bundle(self.env.acme_ca_pemfile)
+        # wait for pebble-challtestsrv to become reachable
+        end = datetime.now() + timedelta(seconds=5)
+        while True:
+            r = self.env.run(['curl', 'localhost:8055/'])
+            if r.exit_code == 0:
+                break
+            if datetime.now() > end:
+                raise TimeoutError('unable to contact pebble-challtestsrv on localhost:8055')
+            time.sleep(.1)
+        # disable ipv6 default address, this gives trouble inside docker
+ r = self.env.run(['curl', '-d', f'{{"ip":""}}',
+ 'localhost:8055/set-default-ipv6'])
+ assert r.exit_code == 0, f"{r}"
+
+ def stop(self):
+ if self._pebble:
+ self._pebble.terminate()
+ self._pebble = None
+ if self._challtestsrv:
+ self._challtestsrv.terminate()
+ self._challtestsrv = None
+ if self._log:
+ self._log.close()
+ self._log = None
+
+ def install_ca_bundle(self, dest):
+ shutil.copyfile(self.env.ca.cert_file, dest)
+ end = datetime.now() + timedelta(seconds=20)
+ while datetime.now() < end:
+ r = self.env.curl_get('https://localhost:15000/roots/0', insecure=True)
+ if r.exit_code == 0:
+ with open(dest, 'a') as fd:
+ fd.write(r.stdout)
+ break
+
+
+class MDBoulderRunner(ACMEServer):
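+    # Uses an externally running boulder instance (docker) and only installs its CA bundle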
+
+ def __init__(self, env: MDTestEnv):
+ self.env = env
+ self.install_ca_bundle(self.env.acme_ca_pemfile)
+
+ def start(self, config=None):
+ pass
+
+ def stop(self):
+ pass
+
+ def install_ca_bundle(self, dest):
+ r = self.env.run([
+ 'docker', 'exec', 'boulder_boulder_1', 'bash', '-c', "cat /tmp/root*.pem"
+ ])
+ assert r.exit_code == 0
+ with open(dest, 'w') as fd:
+ fd.write(r.stdout)
diff --git a/test/modules/md/md_cert_util.py b/test/modules/md/md_cert_util.py
new file mode 100755
index 0000000..8cd99aa
--- /dev/null
+++ b/test/modules/md/md_cert_util.py
@@ -0,0 +1,239 @@
+import logging
+import re
+import os
+import socket
+import OpenSSL
+import time
+import sys
+
+from datetime import datetime
+from datetime import tzinfo
+from datetime import timedelta
+from http.client import HTTPConnection
+from urllib.parse import urlparse
+
+
+SEC_PER_DAY = 24 * 60 * 60
+
+
+log = logging.getLogger(__name__)
+
+
+class MDCertUtil(object):
+ # Utility class for inspecting certificates in test cases
+ # Uses PyOpenSSL: https://pyopenssl.org/en/stable/index.html
+
+ @classmethod
+ def create_self_signed_cert(cls, path, name_list, valid_days, serial=1000):
+ domain = name_list[0]
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+ cert_file = os.path.join(path, 'pubcert.pem')
+ pkey_file = os.path.join(path, 'privkey.pem')
+ # create a key pair
+ if os.path.exists(pkey_file):
+ key_buffer = open(pkey_file, 'rt').read()
+ k = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, key_buffer)
+ else:
+ k = OpenSSL.crypto.PKey()
+ k.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
+
+ # create a self-signed cert
+ cert = OpenSSL.crypto.X509()
+ cert.get_subject().C = "DE"
+ cert.get_subject().ST = "NRW"
+ cert.get_subject().L = "Muenster"
+ cert.get_subject().O = "greenbytes GmbH"
+ cert.get_subject().CN = domain
+ cert.set_serial_number(serial)
+ cert.gmtime_adj_notBefore(valid_days["notBefore"] * SEC_PER_DAY)
+ cert.gmtime_adj_notAfter(valid_days["notAfter"] * SEC_PER_DAY)
+ cert.set_issuer(cert.get_subject())
+
+ cert.add_extensions([OpenSSL.crypto.X509Extension(
+ b"subjectAltName", False, b", ".join(map(lambda n: b"DNS:" + n.encode(), name_list))
+ )])
+ cert.set_pubkey(k)
+ cert.sign(k, 'sha1')
+
+ open(cert_file, "wt").write(
+ OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert).decode('utf-8'))
+ open(pkey_file, "wt").write(
+ OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, k).decode('utf-8'))
+
+ @classmethod
+ def load_server_cert(cls, host_ip, host_port, host_name, tls=None, ciphers=None):
+ ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
+ if tls is not None and tls != 1.0:
+ ctx.set_options(OpenSSL.SSL.OP_NO_TLSv1)
+ if tls is not None and tls != 1.1:
+ ctx.set_options(OpenSSL.SSL.OP_NO_TLSv1_1)
+ if tls is not None and tls != 1.2:
+ ctx.set_options(OpenSSL.SSL.OP_NO_TLSv1_2)
+ if tls is not None and tls != 1.3 and hasattr(OpenSSL.SSL, "OP_NO_TLSv1_3"):
+ ctx.set_options(OpenSSL.SSL.OP_NO_TLSv1_3)
+ if ciphers is not None:
+ ctx.set_cipher_list(ciphers)
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ connection = OpenSSL.SSL.Connection(ctx, s)
+ connection.connect((host_ip, int(host_port)))
+ connection.setblocking(1)
+ connection.set_tlsext_host_name(host_name.encode('utf-8'))
+ connection.do_handshake()
+ peer_cert = connection.get_peer_certificate()
+ return MDCertUtil(None, cert=peer_cert)
+
+ @classmethod
+ def parse_pem_cert(cls, text):
+ cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, text.encode('utf-8'))
+ return MDCertUtil(None, cert=cert)
+
+ @classmethod
+ def get_plain(cls, url, timeout):
+ server = urlparse(url)
+ try_until = time.time() + timeout
+ while time.time() < try_until:
+ # noinspection PyBroadException
+ try:
+ c = HTTPConnection(server.hostname, server.port, timeout=timeout)
+ c.request('GET', server.path)
+ resp = c.getresponse()
+ data = resp.read()
+ c.close()
+ return data
+ except IOError:
+                log.debug("connect error: %s", sys.exc_info()[0])
+ time.sleep(.1)
+ except:
+                log.error("Unexpected error: %s", sys.exc_info()[0])
+ log.error("Unable to contact server after %d sec" % timeout)
+ return None
+
+    def __init__(self, cert_path, cert=None):
+        self.cert = None
+        self.error = None
+        if cert_path is not None:
+            self.cert_path = cert_path
+            # load certificate and private key
+            if cert_path.startswith("http"):
+                cert_data = self.get_plain(cert_path, 1)
+            else:
+                cert_data = MDCertUtil._load_binary_file(cert_path)
+
+            for file_type in (OpenSSL.crypto.FILETYPE_PEM, OpenSSL.crypto.FILETYPE_ASN1):
+                try:
+                    self.cert = OpenSSL.crypto.load_certificate(file_type, cert_data)
+                    break
+                except Exception as error:
+                    self.error = error
+        if cert is not None:
+            self.cert = cert
+
+        if self.cert is None:
+            raise self.error
+
+ def get_issuer(self):
+ return self.cert.get_issuer()
+
+ def get_serial(self):
+ # the string representation of a serial number is not unique. Some
+ # add leading 0s to align with word boundaries.
+ return ("%lx" % (self.cert.get_serial_number())).upper()
+
+ def same_serial_as(self, other):
+ if isinstance(other, MDCertUtil):
+ return self.cert.get_serial_number() == other.cert.get_serial_number()
+ elif isinstance(other, OpenSSL.crypto.X509):
+ return self.cert.get_serial_number() == other.get_serial_number()
+ elif isinstance(other, str):
+ # assume a hex number
+ return self.cert.get_serial_number() == int(other, 16)
+ elif isinstance(other, int):
+ return self.cert.get_serial_number() == other
+ return False
+
+ def get_not_before(self):
+ tsp = self.cert.get_notBefore()
+ return self._parse_tsp(tsp)
+
+ def get_not_after(self):
+ tsp = self.cert.get_notAfter()
+ return self._parse_tsp(tsp)
+
+ def get_cn(self):
+ return self.cert.get_subject().CN
+
+ def get_key_length(self):
+ return self.cert.get_pubkey().bits()
+
+ def get_san_list(self):
+ text = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_TEXT, self.cert).decode("utf-8")
+ m = re.search(r"X509v3 Subject Alternative Name:\s*(.*)", text)
+ sans_list = []
+ if m:
+ sans_list = m.group(1).split(",")
+
+ def _strip_prefix(s):
+ return s.split(":")[1] if s.strip().startswith("DNS:") else s.strip()
+ return list(map(_strip_prefix, sans_list))
+
+ def get_must_staple(self):
+ text = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_TEXT, self.cert).decode("utf-8")
+ m = re.search(r"1.3.6.1.5.5.7.1.24:\s*\n\s*0....", text)
+ if not m:
+ # Newer openssl versions print this differently
+ m = re.search(r"TLS Feature:\s*\n\s*status_request\s*\n", text)
+ return m is not None
+
+ @classmethod
+ def validate_privkey(cls, privkey_path, passphrase=None):
+ privkey_data = cls._load_binary_file(privkey_path)
+ if passphrase:
+ privkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, privkey_data, passphrase)
+ else:
+ privkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, privkey_data)
+ return privkey.check()
+
+ def validate_cert_matches_priv_key(self, privkey_path):
+ # Verifies that the private key and cert match.
+ privkey_data = MDCertUtil._load_binary_file(privkey_path)
+ privkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, privkey_data)
+ context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
+ context.use_privatekey(privkey)
+ context.use_certificate(self.cert)
+ context.check_privatekey()
+
+ # --------- _utils_ ---------
+
+ def astr(self, s):
+ return s.decode('utf-8')
+
+ def _parse_tsp(self, tsp):
+        # timestamps returned by PyOpenSSL are bytes
+ # parse date and time part
+ s = ("%s-%s-%s %s:%s:%s" % (self.astr(tsp[0:4]), self.astr(tsp[4:6]), self.astr(tsp[6:8]),
+ self.astr(tsp[8:10]), self.astr(tsp[10:12]), self.astr(tsp[12:14])))
+ timestamp = datetime.strptime(s, '%Y-%m-%d %H:%M:%S')
+ # adjust timezone
+ tz_h, tz_m = 0, 0
+ m = re.match(r"([+\-]\d{2})(\d{2})", self.astr(tsp[14:]))
+ if m:
+            tz_h = int(m.group(1))
+            tz_m = int(m.group(2)) if tz_h > 0 else -1 * int(m.group(2))
+ return timestamp.replace(tzinfo=self.FixedOffset(60 * tz_h + tz_m))
+
+ @classmethod
+ def _load_binary_file(cls, path):
+ with open(path, mode="rb") as file:
+ return file.read()
+
+ class FixedOffset(tzinfo):
+
+ def __init__(self, offset):
+ self.__offset = timedelta(minutes=offset)
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return None
+
+ def dst(self, dt):
+ return timedelta(0)
diff --git a/test/modules/md/md_certs.py b/test/modules/md/md_certs.py
new file mode 100755
index 0000000..2501d25
--- /dev/null
+++ b/test/modules/md/md_certs.py
@@ -0,0 +1,444 @@
+import os
+import re
+from datetime import timedelta, datetime
+from typing import List, Any, Optional
+
+from cryptography import x509
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key
+from cryptography.x509 import ExtendedKeyUsageOID, NameOID
+
+
+EC_SUPPORTED = {}
+EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [
+ ec.SECP192R1,
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+]])
+
+
+def _private_key(key_type):
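+    # key_type may be an int or a 'rsaNNNN' string for RSA key sizes, or an EC curve name from EC_SUPPORTED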
+ if isinstance(key_type, str):
+ key_type = key_type.upper()
+ m = re.match(r'^(RSA)?(\d+)$', key_type)
+ if m:
+ key_type = int(m.group(2))
+
+ if isinstance(key_type, int):
+ return rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=key_type,
+ backend=default_backend()
+ )
+ if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED:
+ key_type = EC_SUPPORTED[key_type]
+ return ec.generate_private_key(
+ curve=key_type,
+ backend=default_backend()
+ )
+
+
+class CertificateSpec:
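+    # Describes a certificate to issue: name, SAN domains, key type, validity window and optional sub-certs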
+
+ def __init__(self, name: str = None, domains: List[str] = None,
+ email: str = None,
+ key_type: str = None, single_file: bool = False,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ client: bool = False,
+ sub_specs: List['CertificateSpec'] = None):
+ self._name = name
+ self.domains = domains
+ self.client = client
+ self.email = email
+ self.key_type = key_type
+ self.single_file = single_file
+ self.valid_from = valid_from
+ self.valid_to = valid_to
+ self.sub_specs = sub_specs
+
+ @property
+ def name(self) -> Optional[str]:
+ if self._name:
+ return self._name
+ elif self.domains:
+ return self.domains[0]
+ return None
+
+
+class Credentials:
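+    # A certificate with its private key; attached to a CertStore it can issue and persist further certs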
+
+ def __init__(self, name: str, cert: Any, pkey: Any):
+ self._name = name
+ self._cert = cert
+ self._pkey = pkey
+ self._cert_file = None
+ self._pkey_file = None
+ self._store = None
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ @property
+ def subject(self) -> x509.Name:
+ return self._cert.subject
+
+ @property
+ def key_type(self):
+ if isinstance(self._pkey, RSAPrivateKey):
+ return f"rsa{self._pkey.key_size}"
+ elif isinstance(self._pkey, EllipticCurvePrivateKey):
+ return f"{self._pkey.curve.name}"
+ else:
+ raise Exception(f"unknown key type: {self._pkey}")
+
+ @property
+ def private_key(self) -> Any:
+ return self._pkey
+
+ @property
+ def certificate(self) -> Any:
+ return self._cert
+
+ @property
+ def cert_pem(self) -> bytes:
+ return self._cert.public_bytes(Encoding.PEM)
+
+ @property
+ def pkey_pem(self) -> bytes:
+ return self._pkey.private_bytes(
+ Encoding.PEM,
+ PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8,
+ NoEncryption())
+
+ def set_store(self, store: 'CertStore'):
+ self._store = store
+
+ def set_files(self, cert_file: str, pkey_file: str = None):
+ self._cert_file = cert_file
+ self._pkey_file = pkey_file
+
+ @property
+ def cert_file(self) -> str:
+ return self._cert_file
+
+ @property
+ def pkey_file(self) -> Optional[str]:
+ return self._pkey_file
+
+ def get_first(self, name) -> Optional['Credentials']:
+ creds = self._store.get_credentials_for_name(name) if self._store else []
+ return creds[0] if len(creds) else None
+
+ def get_credentials_for_name(self, name) -> List['Credentials']:
+ return self._store.get_credentials_for_name(name) if self._store else []
+
+ def issue_certs(self, specs: List[CertificateSpec],
+ chain: List['Credentials'] = None) -> List['Credentials']:
+ return [self.issue_cert(spec=spec, chain=chain) for spec in specs]
+
+ def issue_cert(self, spec: CertificateSpec, chain: List['Credentials'] = None) -> 'Credentials':
+ key_type = spec.key_type if spec.key_type else self.key_type
+ creds = self._store.load_credentials(name=spec.name, key_type=key_type, single_file=spec.single_file) \
+ if self._store else None
+ if creds is None:
+ creds = MDTestCA.create_credentials(spec=spec, issuer=self, key_type=key_type,
+ valid_from=spec.valid_from, valid_to=spec.valid_to)
+ if self._store:
+ self._store.save(creds, single_file=spec.single_file)
+
+ if spec.sub_specs:
+ if self._store:
+ sub_store = CertStore(fpath=os.path.join(self._store.path, creds.name))
+ creds.set_store(sub_store)
+ subchain = chain.copy() if chain else []
+ subchain.append(self)
+ creds.issue_certs(spec.sub_specs, chain=subchain)
+ return creds
+
+
+class CertStore:
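+    # Saves certificates and private keys as PEM files under a directory and loads them back on demand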
+
+ def __init__(self, fpath: str):
+ self._store_dir = fpath
+ if not os.path.exists(self._store_dir):
+ os.makedirs(self._store_dir)
+ self._creds_by_name = {}
+
+ @property
+ def path(self) -> str:
+ return self._store_dir
+
+ def save(self, creds: Credentials, name: str = None,
+ chain: List[Credentials] = None,
+ single_file: bool = False) -> None:
+ name = name if name is not None else creds.name
+ cert_file = self.get_cert_file(name=name, key_type=creds.key_type)
+ pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type)
+ if single_file:
+ pkey_file = None
+ with open(cert_file, "wb") as fd:
+ fd.write(creds.cert_pem)
+ if chain:
+ for c in chain:
+ fd.write(c.cert_pem)
+ if pkey_file is None:
+ fd.write(creds.pkey_pem)
+ if pkey_file is not None:
+ with open(pkey_file, "wb") as fd:
+ fd.write(creds.pkey_pem)
+ creds.set_files(cert_file, pkey_file)
+ self._add_credentials(name, creds)
+
+ def _add_credentials(self, name: str, creds: Credentials):
+ if name not in self._creds_by_name:
+ self._creds_by_name[name] = []
+ self._creds_by_name[name].append(creds)
+
+ def get_credentials_for_name(self, name) -> List[Credentials]:
+ return self._creds_by_name[name] if name in self._creds_by_name else []
+
+ def get_cert_file(self, name: str, key_type=None) -> str:
+ key_infix = ".{0}".format(key_type) if key_type is not None else ""
+ return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem')
+
+ def get_pkey_file(self, name: str, key_type=None) -> str:
+ key_infix = ".{0}".format(key_type) if key_type is not None else ""
+ return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem')
+
+ def load_pem_cert(self, fpath: str) -> x509.Certificate:
+ with open(fpath) as fd:
+ return x509.load_pem_x509_certificate("".join(fd.readlines()).encode())
+
+ def load_pem_pkey(self, fpath: str):
+ with open(fpath) as fd:
+ return load_pem_private_key("".join(fd.readlines()).encode(), password=None)
+
+ def load_credentials(self, name: str, key_type=None, single_file: bool = False):
+ cert_file = self.get_cert_file(name=name, key_type=key_type)
+ pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type)
+ if os.path.isfile(cert_file) and os.path.isfile(pkey_file):
+ cert = self.load_pem_cert(cert_file)
+ pkey = self.load_pem_pkey(pkey_file)
+ creds = Credentials(name=name, cert=cert, pkey=pkey)
+ creds.set_store(self)
+ creds.set_files(cert_file, pkey_file)
+ self._add_credentials(name, creds)
+ return creds
+ return None
+
+
+class MDTestCA:
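+    # Creates a self-signed root CA and issues intermediate, server and client certificates for the tests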
+
+ @classmethod
+ def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials:
+ store = CertStore(fpath=store_dir)
+ creds = store.load_credentials(name="ca", key_type=key_type)
+ if creds is None:
+ creds = MDTestCA._make_ca_credentials(name=name, key_type=key_type)
+ store.save(creds, name="ca")
+ creds.set_store(store)
+ return creds
+
+ @staticmethod
+ def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ """Create a certificate signed by this CA for the given domains.
+        :returns: the issued Credentials (certificate and private key)
+ """
+ if spec.domains and len(spec.domains):
+ creds = MDTestCA._make_server_credentials(name=spec.name, domains=spec.domains,
+ issuer=issuer, valid_from=valid_from,
+ valid_to=valid_to, key_type=key_type)
+ elif spec.client:
+ creds = MDTestCA._make_client_credentials(name=spec.name, issuer=issuer,
+ email=spec.email, valid_from=valid_from,
+ valid_to=valid_to, key_type=key_type)
+ elif spec.name:
+ creds = MDTestCA._make_ca_credentials(name=spec.name, issuer=issuer,
+ valid_from=valid_from, valid_to=valid_to,
+ key_type=key_type)
+ else:
+ raise Exception(f"unrecognized certificate specification: {spec}")
+ return creds
+
+ @staticmethod
+ def _make_x509_name(org_name: str = None, common_name: str = None, parent: x509.Name = None) -> x509.Name:
+ name_pieces = []
+ if org_name:
+ oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
+ name_pieces.append(x509.NameAttribute(oid, org_name))
+ elif common_name:
+ name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
+ if parent:
+ name_pieces.extend([rdn for rdn in parent])
+ return x509.Name(name_pieces)
+
+ @staticmethod
+ def _make_csr(
+ subject: x509.Name,
+ pkey: Any,
+            issuer_subject: Optional[x509.Name],
+ valid_from_delta: timedelta = None,
+ valid_until_delta: timedelta = None
+ ):
+ pubkey = pkey.public_key()
+ issuer_subject = issuer_subject if issuer_subject is not None else subject
+
+ valid_from = datetime.now()
+        if valid_from_delta is not None:
+            valid_from += valid_from_delta
+ valid_until = datetime.now()
+ if valid_until_delta is not None:
+ valid_until += valid_until_delta
+
+ return (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(issuer_subject)
+ .public_key(pubkey)
+ .not_valid_before(valid_from)
+ .not_valid_after(valid_until)
+ .serial_number(x509.random_serial_number())
+ .add_extension(
+ x509.SubjectKeyIdentifier.from_public_key(pubkey),
+ critical=False,
+ )
+ )
+
+ @staticmethod
+ def _add_ca_usages(csr: Any) -> Any:
+ return csr.add_extension(
+ x509.BasicConstraints(ca=True, path_length=9),
+ critical=True,
+ ).add_extension(
+ x509.KeyUsage(
+ digital_signature=True,
+ content_commitment=False,
+ key_encipherment=False,
+ data_encipherment=False,
+ key_agreement=False,
+ key_cert_sign=True,
+ crl_sign=True,
+ encipher_only=False,
+ decipher_only=False),
+ critical=True
+ ).add_extension(
+ x509.ExtendedKeyUsage([
+ ExtendedKeyUsageOID.CLIENT_AUTH,
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ExtendedKeyUsageOID.CODE_SIGNING,
+ ]),
+ critical=True
+ )
+
+ @staticmethod
+ def _add_leaf_usages(csr: Any, domains: List[str], issuer: Credentials) -> Any:
+ return csr.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None),
+ critical=True,
+ ).add_extension(
+ x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
+ issuer.certificate.extensions.get_extension_for_class(
+ x509.SubjectKeyIdentifier).value),
+ critical=False
+ ).add_extension(
+ x509.SubjectAlternativeName([x509.DNSName(domain) for domain in domains]),
+ critical=True,
+ ).add_extension(
+ x509.ExtendedKeyUsage([
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ]),
+ critical=True
+ )
+
+ @staticmethod
+ def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: str = None) -> Any:
+ cert = csr.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None),
+ critical=True,
+ ).add_extension(
+ x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
+ issuer.certificate.extensions.get_extension_for_class(
+ x509.SubjectKeyIdentifier).value),
+ critical=False
+ )
+        if rfc82name:
+            cert = cert.add_extension(
+                x509.SubjectAlternativeName([x509.RFC822Name(rfc82name)]),
+                critical=True,
+            )
+        # CertificateBuilder.add_extension returns a new builder, so the result must be reassigned
+        cert = cert.add_extension(
+            x509.ExtendedKeyUsage([
+                ExtendedKeyUsageOID.CLIENT_AUTH,
+            ]),
+            critical=True
+        )
+        return cert
+
+ @staticmethod
+ def _make_ca_credentials(name, key_type: Any,
+ issuer: Credentials = None,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ if issuer is not None:
+ issuer_subject = issuer.certificate.subject
+ issuer_key = issuer.private_key
+ else:
+ issuer_subject = None
+ issuer_key = pkey
+ subject = MDTestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None)
+ csr = MDTestCA._make_csr(subject=subject,
+ issuer_subject=issuer_subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = MDTestCA._add_ca_usages(csr)
+ cert = csr.sign(private_key=issuer_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey)
+
+ @staticmethod
+ def _make_server_credentials(name: str, domains: List[str], issuer: Credentials,
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ subject = MDTestCA._make_x509_name(common_name=name, parent=issuer.subject)
+ csr = MDTestCA._make_csr(subject=subject,
+ issuer_subject=issuer.certificate.subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = MDTestCA._add_leaf_usages(csr, domains=domains, issuer=issuer)
+ cert = csr.sign(private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey)
+
+ @staticmethod
+ def _make_client_credentials(name: str,
+ issuer: Credentials, email: Optional[str],
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ subject = MDTestCA._make_x509_name(common_name=name, parent=issuer.subject)
+ csr = MDTestCA._make_csr(subject=subject,
+ issuer_subject=issuer.certificate.subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = MDTestCA._add_client_usages(csr, issuer=issuer, rfc82name=email)
+ cert = csr.sign(private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey)
diff --git a/test/modules/md/md_conf.py b/test/modules/md/md_conf.py
new file mode 100755
index 0000000..19d4977
--- /dev/null
+++ b/test/modules/md/md_conf.py
@@ -0,0 +1,83 @@
+from .md_env import MDTestEnv
+from pyhttpd.conf import HttpdConf
+
+
+class MDConf(HttpdConf):
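+    # Builds httpd configurations with the mod_md directives needed by the test cases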
+
+ def __init__(self, env: MDTestEnv, text=None, std_ports=True,
+ local_ca=True, std_vhosts=True, proxy=False,
+ admin=None):
+ super().__init__(env=env)
+
+ if admin is None:
+ admin = f"admin@{env.http_tld}"
+ if len(admin.strip()):
+ self.add_admin(admin)
+ self.add([
+ "MDRetryDelay 1s", # speed up testing a little
+ ])
+ if local_ca:
+ self.add([
+ f"MDCertificateAuthority {env.acme_url}",
+ f"MDCertificateAgreement accepted",
+ f"MDCACertificateFile {env.server_dir}/acme-ca.pem",
+ "",
+ ])
+ if std_ports:
+ self.add(f"MDPortMap 80:{env.http_port} 443:{env.https_port}")
+ if env.ssl_module == "mod_tls":
+ self.add(f"TLSListen {env.https_port}")
+ self.add([
+ "<Location /server-status>",
+ " SetHandler server-status",
+ "</Location>",
+ "<Location /md-status>",
+ " SetHandler md-status",
+ "</Location>",
+ ])
+ if std_vhosts:
+ self.add_vhost_test1()
+ if proxy:
+ self.add([
+ f"Listen {self.env.proxy_port}",
+ f"<VirtualHost *:{self.env.proxy_port}>",
+ " ProxyRequests On",
+ " ProxyVia On",
+ " # be totally open",
+                "  AllowCONNECT 0-65535",
+ " <Proxy *>",
+ " # No require or other restrictions, this is just a test server",
+ " </Proxy>",
+ "</VirtualHost>",
+ ])
+ if text is not None:
+ self.add(text)
+
+ def add_drive_mode(self, mode):
+ self.add("MDRenewMode \"%s\"\n" % mode)
+
+ def add_renew_window(self, window):
+ self.add("MDRenewWindow %s\n" % window)
+
+ def add_private_key(self, key_type, key_params):
+ self.add("MDPrivateKeys %s %s\n" % (key_type, " ".join(map(lambda p: str(p), key_params))))
+
+ def add_admin(self, email):
+ self.add(f"ServerAdmin mailto:{email}")
+
+ def add_md(self, domains):
+ dlist = " ".join(domains) # without quotes
+ self.add(f"MDomain {dlist}\n")
+
+ def start_md(self, domains):
+ dlist = " ".join([f"\"{d}\"" for d in domains]) # with quotes, #257
+ self.add(f"<MDomain {dlist}>\n")
+
+ def end_md(self):
+ self.add("</MDomain>\n")
+
+ def start_md2(self, domains):
+ self.add("<MDomainSet %s>\n" % " ".join(domains))
+
+ def end_md2(self):
+ self.add("</MDomainSet>\n")
diff --git a/test/modules/md/md_env.py b/test/modules/md/md_env.py
new file mode 100755
index 0000000..e8e36e5
--- /dev/null
+++ b/test/modules/md/md_env.py
@@ -0,0 +1,613 @@
+import copy
+import inspect
+import json
+import logging
+from configparser import ConfigParser, ExtendedInterpolation
+
+import pytest
+import re
+import os
+import shutil
+import subprocess
+import time
+
+from datetime import datetime, timedelta
+from typing import Dict, Optional
+
+from pyhttpd.certs import CertificateSpec
+from .md_cert_util import MDCertUtil
+from pyhttpd.env import HttpdTestSetup, HttpdTestEnv
+from pyhttpd.result import ExecResult
+
+log = logging.getLogger(__name__)
+
+
+class MDTestSetup(HttpdTestSetup):
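+    # Extends the common httpd test setup with the md module and, for pebble, its generated configuration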
+
+ def __init__(self, env: 'MDTestEnv'):
+ super().__init__(env=env)
+ self.mdenv = env
+ self.add_modules(["watchdog", "proxy_connect", "md"])
+
+ def make(self):
+ super().make()
+ if "pebble" == self.mdenv.acme_server:
+ self._make_pebble_conf()
+ self.mdenv.clear_store()
+
+ def _make_pebble_conf(self):
+ our_dir = os.path.dirname(inspect.getfile(MDTestSetup))
+ conf_src_dir = os.path.join(our_dir, 'pebble')
+ conf_dest_dir = os.path.join(self.env.gen_dir, 'pebble')
+ if not os.path.exists(conf_dest_dir):
+ os.makedirs(conf_dest_dir)
+ for name in os.listdir(conf_src_dir):
+ src_path = os.path.join(conf_src_dir, name)
+ m = re.match(r'(.+).template', name)
+ if m:
+ self._make_template(src_path, os.path.join(conf_dest_dir, m.group(1)))
+ elif os.path.isfile(src_path):
+ shutil.copy(src_path, os.path.join(conf_dest_dir, name))
+
+
+class MDTestEnv(HttpdTestEnv):
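+    # Environment for the mod_md tests: ACME server in use, md store layout and a2md invocation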
+
+ MD_S_UNKNOWN = 0
+ MD_S_INCOMPLETE = 1
+ MD_S_COMPLETE = 2
+ MD_S_EXPIRED = 3
+ MD_S_ERROR = 4
+
+ EMPTY_JOUT = {'status': 0, 'output': []}
+
+ DOMAIN_SUFFIX = "%d.org" % time.time()
+ LOG_FMT_TIGHT = '%(levelname)s: %(message)s'
+
+ @classmethod
+ def get_acme_server(cls):
+ return os.environ['ACME'] if 'ACME' in os.environ else "pebble"
+
+ @classmethod
+ def has_acme_server(cls):
+ return cls.get_acme_server() != 'none'
+
+ @classmethod
+ def has_acme_eab(cls):
+ return cls.get_acme_server() == 'pebble'
+
+ @classmethod
+ def is_pebble(cls) -> bool:
+ return cls.get_acme_server() == 'pebble'
+
+ @classmethod
+ def lacks_ocsp(cls):
+ return cls.is_pebble()
+
+ @classmethod
+ def has_a2md(cls):
+ d = os.path.dirname(inspect.getfile(HttpdTestEnv))
+ config = ConfigParser(interpolation=ExtendedInterpolation())
+ config.read(os.path.join(d, 'config.ini'))
+ bin_dir = config.get('global', 'bindir')
+ a2md_bin = os.path.join(bin_dir, 'a2md')
+ return os.path.isfile(a2md_bin)
+
+ def __init__(self, pytestconfig=None):
+ super().__init__(pytestconfig=pytestconfig)
+ self.add_httpd_log_modules(["md"])
+ self._acme_server = self.get_acme_server()
+ self._acme_tos = "accepted"
+ self._acme_ca_pemfile = os.path.join(self.gen_dir, "apache/acme-ca.pem")
+ if "pebble" == self._acme_server:
+ self._acme_url = "https://localhost:14000/dir"
+ self._acme_eab_url = "https://localhost:14001/dir"
+ elif "boulder" == self._acme_server:
+ self._acme_url = "http://localhost:4001/directory"
+ self._acme_eab_url = None
+ else:
+ raise Exception(f"unknown ACME server type: {self._acme_server}")
+ self._acme_server_down = False
+ self._acme_server_ok = False
+
+ self._a2md_bin = os.path.join(self.bin_dir, 'a2md')
+ self._default_domain = f"test1.{self.http_tld}"
+ self._tailscale_domain = "test.headless-chicken.ts.net"
+ self._store_dir = "./md"
+ self.set_store_dir_default()
+
+ self.add_cert_specs([
+ CertificateSpec(domains=[f"expired.{self._http_tld}"],
+ valid_from=timedelta(days=-100),
+ valid_to=timedelta(days=-10)),
+ CertificateSpec(domains=["localhost"], key_type='rsa2048'),
+ CertificateSpec(domains=[self._tailscale_domain]),
+ ])
+
+ def setup_httpd(self, setup: HttpdTestSetup = None):
+ super().setup_httpd(setup=MDTestSetup(env=self))
+
+ def set_store_dir_default(self):
+ dirpath = "md"
+ if self.httpd_is_at_least("2.5.0"):
+ dirpath = os.path.join("state", dirpath)
+ self.set_store_dir(dirpath)
+
+ def set_store_dir(self, dirpath):
+ self._store_dir = os.path.join(self.server_dir, dirpath)
+ if self.acme_url:
+ self.a2md_stdargs([self.a2md_bin, "-a", self.acme_url,
+ "-d", self._store_dir, "-C", self.acme_ca_pemfile, "-j"])
+ self.a2md_rawargs([self.a2md_bin, "-a", self.acme_url,
+ "-d", self._store_dir, "-C", self.acme_ca_pemfile])
+
+ def get_apxs_var(self, name: str) -> str:
+ p = subprocess.run([self._apxs, "-q", name], capture_output=True, text=True)
+ if p.returncode != 0:
+ return ""
+ return p.stdout.strip()
+
+ @property
+ def acme_server(self):
+ return self._acme_server
+
+ @property
+ def acme_url(self):
+ return self._acme_url
+
+ @property
+ def acme_tos(self):
+ return self._acme_tos
+
+ @property
+ def a2md_bin(self):
+ return self._a2md_bin
+
+ @property
+ def acme_ca_pemfile(self):
+ return self._acme_ca_pemfile
+
+ @property
+ def store_dir(self):
+ return self._store_dir
+
+ @property
+ def tailscale_domain(self):
+ return self._tailscale_domain
+
+ def get_request_domain(self, request):
+ name = request.node.originalname if request.node.originalname else request.node.name
+ return "%s-%s" % (re.sub(r'[_]', '-', name), MDTestEnv.DOMAIN_SUFFIX)
+
+ def get_method_domain(self, method):
+ return "%s-%s" % (re.sub(r'[_]', '-', method.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
+
+ def get_module_domain(self, module):
+ return "%s-%s" % (re.sub(r'[_]', '-', module.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
+
+ def get_class_domain(self, c):
+ return "%s-%s" % (re.sub(r'[_]', '-', c.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
+
+ # --------- cmd execution ---------
+
+ _a2md_args = []
+ _a2md_args_raw = []
+
+ def a2md_stdargs(self, args):
+ self._a2md_args = [] + args
+
+ def a2md_rawargs(self, args):
+ self._a2md_args_raw = [] + args
+
+ def a2md(self, args, raw=False) -> ExecResult:
+ preargs = self._a2md_args
+ if raw:
+ preargs = self._a2md_args_raw
+ log.debug("running: {0} {1}".format(preargs, args))
+ return self.run(preargs + args)
+
+ def check_acme(self):
+ if self._acme_server_ok:
+ return True
+ if self._acme_server_down:
+            pytest.skip("ACME server not running")
+ return False
+ if self.is_live(self.acme_url, timeout=timedelta(seconds=0.5)):
+ self._acme_server_ok = True
+ return True
+ else:
+ self._acme_server_down = True
+            pytest.fail("ACME server not running", pytrace=False)
+ return False
+
+ def get_ca_pem_file(self, hostname: str) -> Optional[str]:
+ pem_file = super().get_ca_pem_file(hostname)
+ if pem_file is None:
+ pem_file = self.acme_ca_pemfile
+ return pem_file
+
+ # --------- access local store ---------
+
+ def purge_store(self):
+ log.debug("purge store dir: %s" % self._store_dir)
+ assert len(self._store_dir) > 1
+ if os.path.exists(self._store_dir):
+ shutil.rmtree(self._store_dir, ignore_errors=False)
+ os.makedirs(self._store_dir)
+
+ def clear_store(self):
+ log.debug("clear store dir: %s" % self._store_dir)
+ assert len(self._store_dir) > 1
+ if not os.path.exists(self._store_dir):
+ os.makedirs(self._store_dir)
+ for dirpath in ["challenges", "tmp", "archive", "domains", "accounts", "staging", "ocsp"]:
+ shutil.rmtree(os.path.join(self._store_dir, dirpath), ignore_errors=True)
+
+ def clear_ocsp_store(self):
+ assert len(self._store_dir) > 1
+ dirpath = os.path.join(self._store_dir, "ocsp")
+        log.debug("clear ocsp store dir: %s" % dirpath)
+ if os.path.exists(dirpath):
+ shutil.rmtree(dirpath, ignore_errors=True)
+
+ def authz_save(self, name, content):
+ dirpath = os.path.join(self._store_dir, 'staging', name)
+ os.makedirs(dirpath)
+ open(os.path.join(dirpath, 'authz.json'), "w").write(content)
+
+ def path_store_json(self):
+ return os.path.join(self._store_dir, 'md_store.json')
+
+ def path_account(self, acct):
+ return os.path.join(self._store_dir, 'accounts', acct, 'account.json')
+
+ def path_account_key(self, acct):
+ return os.path.join(self._store_dir, 'accounts', acct, 'account.pem')
+
+ def store_domains(self):
+ return os.path.join(self._store_dir, 'domains')
+
+ def store_archives(self):
+ return os.path.join(self._store_dir, 'archive')
+
+ def store_stagings(self):
+ return os.path.join(self._store_dir, 'staging')
+
+ def store_challenges(self):
+ return os.path.join(self._store_dir, 'challenges')
+
+ def store_domain_file(self, domain, filename):
+ return os.path.join(self.store_domains(), domain, filename)
+
+ def store_archived_file(self, domain, version, filename):
+ return os.path.join(self.store_archives(), "%s.%d" % (domain, version), filename)
+
+ def store_staged_file(self, domain, filename):
+ return os.path.join(self.store_stagings(), domain, filename)
+
+ def path_fallback_cert(self, domain):
+ return os.path.join(self._store_dir, 'domains', domain, 'fallback-pubcert.pem')
+
+ def path_job(self, domain):
+ return os.path.join(self._store_dir, 'staging', domain, 'job.json')
+
+ def replace_store(self, src):
+ shutil.rmtree(self._store_dir, ignore_errors=False)
+ shutil.copytree(src, self._store_dir)
+
+ def list_accounts(self):
+ return os.listdir(os.path.join(self._store_dir, 'accounts'))
+
+ def check_md(self, domain, md=None, state=-1, ca=None, protocol=None, agreement=None, contacts=None):
+ domains = None
+ if isinstance(domain, list):
+ domains = domain
+ domain = domains[0]
+ if md:
+ domain = md
+ path = self.store_domain_file(domain, 'md.json')
+ with open(path) as f:
+ md = json.load(f)
+ assert md
+ if domains:
+ assert md['domains'] == domains
+ if state >= 0:
+ assert md['state'] == state
+ if ca:
+ assert len(md['ca']['urls']) == 1
+ assert md['ca']['urls'][0] == ca
+ if protocol:
+ assert md['ca']['proto'] == protocol
+ if agreement:
+ assert md['ca']['agreement'] == agreement
+ if contacts:
+ assert md['contacts'] == contacts
+
+ def pkey_fname(self, pkeyspec=None):
+ if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
+ return "privkey.{0}.pem".format(pkeyspec.lower())
+ return 'privkey.pem'
+
+ def cert_fname(self, pkeyspec=None):
+ if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
+ return "pubcert.{0}.pem".format(pkeyspec.lower())
+ return 'pubcert.pem'
+
+ def check_md_complete(self, domain, pkey=None):
+ md = self.get_md_status(domain)
+ assert md
+ assert 'state' in md, "md is unexpected: {0}".format(md)
+        assert md['state'] == MDTestEnv.MD_S_COMPLETE, f"unexpected state: {md['state']}"
+ pkey_file = self.store_domain_file(domain, self.pkey_fname(pkey))
+ cert_file = self.store_domain_file(domain, self.cert_fname(pkey))
+ r = self.run(['ls', os.path.dirname(pkey_file)])
+ if not os.path.isfile(pkey_file):
+ assert False, f"pkey missing: {pkey_file}: {r.stdout}"
+ if not os.path.isfile(cert_file):
+ assert False, f"cert missing: {cert_file}: {r.stdout}"
+ return md
+
+ def check_md_credentials(self, domain):
+ if isinstance(domain, list):
+ domains = domain
+ domain = domains[0]
+ else:
+ domains = [domain]
+ # check private key, validate certificate, etc
+ MDCertUtil.validate_privkey(self.store_domain_file(domain, 'privkey.pem'))
+ cert = MDCertUtil(self.store_domain_file(domain, 'pubcert.pem'))
+ cert.validate_cert_matches_priv_key(self.store_domain_file(domain, 'privkey.pem'))
+ # check SANs and CN
+ assert cert.get_cn() == domain
+ # compare lists twice in opposite directions: SAN may not respect ordering
+ san_list = list(cert.get_san_list())
+ assert len(san_list) == len(domains)
+ assert set(san_list).issubset(domains)
+ assert set(domains).issubset(san_list)
+ # check valid dates interval
+ not_before = cert.get_not_before()
+ not_after = cert.get_not_after()
+ assert not_before < datetime.now(not_before.tzinfo)
+ assert not_after > datetime.now(not_after.tzinfo)
+
+ # --------- check utilities ---------
+
+ def check_json_contains(self, actual, expected):
+ # write all expected key:value bindings to a copy of the actual data ...
+ # ... assert it stays unchanged
+ test_json = copy.deepcopy(actual)
+ test_json.update(expected)
+ assert actual == test_json
+
+ def check_file_access(self, path, exp_mask):
+ actual_mask = os.lstat(path).st_mode & 0o777
+ assert oct(actual_mask) == oct(exp_mask)
+
+ def check_dir_empty(self, path):
+ assert os.listdir(path) == []
+
+ def get_http_status(self, domain, path, use_https=True):
+ r = self.get_meta(domain, path, use_https, insecure=True)
+ return r.response['status']
+
+ def get_cert(self, domain, tls=None, ciphers=None):
+ return MDCertUtil.load_server_cert(self._httpd_addr, self.https_port,
+ domain, tls=tls, ciphers=ciphers)
+
+ def get_server_cert(self, domain, proto=None, ciphers=None):
+ args = [
+ "openssl", "s_client", "-status",
+ "-connect", "%s:%s" % (self._httpd_addr, self.https_port),
+ "-CAfile", self.acme_ca_pemfile,
+ "-servername", domain,
+ "-showcerts"
+ ]
+ if proto is not None:
+ args.extend(["-{0}".format(proto)])
+ if ciphers is not None:
+ args.extend(["-cipher", ciphers])
+ r = self.run(args)
+ # noinspection PyBroadException
+ try:
+ return MDCertUtil.parse_pem_cert(r.stdout)
+ except:
+ return None
+
+ def verify_cert_key_lenghts(self, domain, pkeys):
+ for p in pkeys:
+ cert = self.get_server_cert(domain, proto="tls1_2", ciphers=p['ciphers'])
+ if 0 == p['keylen']:
+ assert cert is None
+ else:
+ assert cert, "no cert returned for cipher: {0}".format(p['ciphers'])
+ assert cert.get_key_length() == p['keylen'], "key length, expected {0}, got {1}".format(
+ p['keylen'], cert.get_key_length()
+ )
+
+ def get_meta(self, domain, path, use_https=True, insecure=False):
+ schema = "https" if use_https else "http"
+ port = self.https_port if use_https else self.http_port
+ r = self.curl_get(f"{schema}://{domain}:{port}{path}", insecure=insecure)
+ assert r.exit_code == 0
+ assert r.response
+ assert r.response['header']
+ return r
+
+ def get_content(self, domain, path, use_https=True):
+ schema = "https" if use_https else "http"
+ port = self.https_port if use_https else self.http_port
+ r = self.curl_get(f"{schema}://{domain}:{port}{path}")
+ assert r.exit_code == 0
+ return r.stdout
+
+ def get_json_content(self, domain, path, use_https=True, insecure=False):
+ schema = "https" if use_https else "http"
+ port = self.https_port if use_https else self.http_port
+ url = f"{schema}://{domain}:{port}{path}"
+ r = self.curl_get(url, insecure=insecure)
+ if r.exit_code != 0:
+ log.error(f"curl get on {url} returned {r.exit_code}"
+ f"\nstdout: {r.stdout}"
+ f"\nstderr: {r.stderr}")
+ assert r.exit_code == 0, r.stderr
+ return r.json
+
+ def get_certificate_status(self, domain) -> Dict:
+ return self.get_json_content(domain, "/.httpd/certificate-status", insecure=True)
+
+ def get_md_status(self, domain, via_domain=None, use_https=True) -> Dict:
+ if via_domain is None:
+ via_domain = self._default_domain
+ return self.get_json_content(via_domain, f"/md-status/{domain}",
+ use_https=use_https)
+
+ def get_server_status(self, query="/", via_domain=None, use_https=True):
+ if via_domain is None:
+ via_domain = self._default_domain
+ return self.get_content(via_domain, "/server-status%s" % query, use_https=use_https)
+
+ def await_completion(self, names, must_renew=False, restart=True, timeout=60,
+ via_domain=None, use_https=True):
+ try_until = time.time() + timeout
+ renewals = {}
+ names = names.copy()
+ while len(names) > 0:
+ if time.time() >= try_until:
+ return False
+            for name in list(names):  # iterate over a copy, the list is modified below
+ mds = self.get_md_status(name, via_domain=via_domain, use_https=use_https)
+ if mds is None:
+ log.debug("not managed by md: %s" % name)
+ return False
+
+ if 'renewal' in mds:
+ renewal = mds['renewal']
+ renewals[name] = True
+ if 'finished' in renewal and renewal['finished'] is True:
+ if (not must_renew) or (name in renewals):
+ log.debug(f"domain cert was renewed: {name}")
+ names.remove(name)
+
+ if len(names) != 0:
+ time.sleep(0.1)
+ if restart:
+ time.sleep(0.1)
+ return self.apache_restart() == 0
+ return True
+
+ def is_renewing(self, name):
+ stat = self.get_certificate_status(name)
+ return 'renewal' in stat
+
+ def await_renewal(self, names, timeout=60):
+ try_until = time.time() + timeout
+ while len(names) > 0:
+ if time.time() >= try_until:
+ return False
+            for name in list(names):  # iterate over a copy, the list is modified below
+ md = self.get_md_status(name)
+ if md is None:
+ log.debug("not managed by md: %s" % name)
+ return False
+
+ if 'renewal' in md:
+ names.remove(name)
+
+ if len(names) != 0:
+ time.sleep(0.1)
+ return True
+
+ def await_error(self, domain, timeout=60, via_domain=None, use_https=True, errors=1):
+ try_until = time.time() + timeout
+ while True:
+ if time.time() >= try_until:
+ return False
+ md = self.get_md_status(domain, via_domain=via_domain, use_https=use_https)
+ if md:
+ if 'state' in md and md['state'] == MDTestEnv.MD_S_ERROR:
+ return md
+ if 'renewal' in md and 'errors' in md['renewal'] \
+ and md['renewal']['errors'] >= errors:
+ return md
+ time.sleep(0.1)
+
+ def await_file(self, fpath, timeout=60):
+ try_until = time.time() + timeout
+ while True:
+ if time.time() >= try_until:
+ return False
+ if os.path.isfile(fpath):
+ return True
+ time.sleep(0.1)
+
+ def check_file_permissions(self, domain):
+ dpath = os.path.join(self.store_dir, 'domains', domain)
+ assert os.path.isdir(dpath)
+ md = json.load(open(os.path.join(dpath, 'md.json')))
+ assert md
+ acct = md['ca']['account']
+ assert acct
+ self.check_file_access(self.path_store_json(), 0o600)
+ # domains
+ self.check_file_access(self.store_domains(), 0o700)
+ self.check_file_access(os.path.join(self.store_domains(), domain), 0o700)
+ self.check_file_access(self.store_domain_file(domain, 'privkey.pem'), 0o600)
+ self.check_file_access(self.store_domain_file(domain, 'pubcert.pem'), 0o600)
+ self.check_file_access(self.store_domain_file(domain, 'md.json'), 0o600)
+ # archive
+ self.check_file_access(self.store_archived_file(domain, 1, 'md.json'), 0o600)
+ # accounts
+ self.check_file_access(os.path.join(self._store_dir, 'accounts'), 0o755)
+ self.check_file_access(os.path.join(self._store_dir, 'accounts', acct), 0o755)
+ self.check_file_access(self.path_account(acct), 0o644)
+ self.check_file_access(self.path_account_key(acct), 0o644)
+ # staging
+ self.check_file_access(self.store_stagings(), 0o755)
+
+ def get_ocsp_status(self, domain, proto=None, cipher=None, ca_file=None):
+ stat = {}
+ args = [
+ "openssl", "s_client", "-status",
+ "-connect", "%s:%s" % (self._httpd_addr, self.https_port),
+ "-CAfile", ca_file if ca_file else self.acme_ca_pemfile,
+ "-servername", domain,
+ "-showcerts"
+ ]
+ if proto is not None:
+ args.extend(["-{0}".format(proto)])
+ if cipher is not None:
+ args.extend(["-cipher", cipher])
+ r = self.run(args, debug_log=False)
+ ocsp_regex = re.compile(r'OCSP response: +([^=\n]+)\n')
+ matches = ocsp_regex.finditer(r.stdout)
+ for m in matches:
+ if m.group(1) != "":
+ stat['ocsp'] = m.group(1)
+ if 'ocsp' not in stat:
+ ocsp_regex = re.compile(r'OCSP Response Status:\s*(.+)')
+ matches = ocsp_regex.finditer(r.stdout)
+ for m in matches:
+ if m.group(1) != "":
+ stat['ocsp'] = m.group(1)
+ verify_regex = re.compile(r'Verify return code:\s*(.+)')
+ matches = verify_regex.finditer(r.stdout)
+ for m in matches:
+ if m.group(1) != "":
+ stat['verify'] = m.group(1)
+ return stat
+
+ def await_ocsp_status(self, domain, timeout=10, ca_file=None):
+ try_until = time.time() + timeout
+ while True:
+ if time.time() >= try_until:
+ break
+ stat = self.get_ocsp_status(domain, ca_file=ca_file)
+ if 'ocsp' in stat and stat['ocsp'] != "no response sent":
+ return stat
+ time.sleep(0.1)
+        raise TimeoutError(f"ocsp response not available: {domain}")
+
+ def create_self_signed_cert(self, name_list, valid_days, serial=1000, path=None):
+ dirpath = path
+ if not path:
+ dirpath = os.path.join(self.store_domains(), name_list[0])
+ return MDCertUtil.create_self_signed_cert(dirpath, name_list, valid_days, serial)
diff --git a/test/modules/md/message.py b/test/modules/md/message.py
new file mode 100755
index 0000000..578289c
--- /dev/null
+++ b/test/modules/md/message.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
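+# Test helper: appends its command line (and MD_VERSION/MD_STORE from the environment) to the
+# log file given as first argument, skipping the initial 'renewing' event.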
+
+import os
+import sys
+
+
+def main(argv):
+ if len(argv) > 2:
+ cmd = argv[2]
+ if 'renewing' != cmd:
+ f1 = open(argv[1], 'a+')
+ f1.write(f'{argv}\n')
+ if 'MD_VERSION' in os.environ:
+ f1.write(f'MD_VERSION={os.environ["MD_VERSION"]}\n')
+ if 'MD_STORE' in os.environ:
+ f1.write(f'MD_STORE={os.environ["MD_STORE"]}\n')
+ f1.close()
+ sys.stderr.write("done, all fine.\n")
+ sys.exit(0)
+ else:
+        sys.stderr.write(f"{argv[0]} called with too few arguments")
+ sys.exit(7)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/msg_fail_on.py b/test/modules/md/msg_fail_on.py
new file mode 100755
index 0000000..fec95d4
--- /dev/null
+++ b/test/modules/md/msg_fail_on.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
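+# Test helper: logs non-'renewing' events to the given log file and exits with an error
+# when the event starts with the configured 'fail_on' prefix.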
+
+import os
+import sys
+
+
+def main(argv):
+    if len(argv) > 4:  # needs log, fail_on, cmd and domain
+ log = argv[1]
+ fail_on = argv[2]
+ cmd = argv[3]
+ domain = argv[4]
+ if 'renewing' != cmd:
+ f1 = open(log, 'a+')
+ f1.write(f"{[argv[0], log, cmd, domain]}\n")
+ f1.close()
+ if cmd.startswith(fail_on):
+ sys.stderr.write(f"failing on: {cmd}\n")
+ sys.exit(1)
+ sys.stderr.write("done, all fine.\n")
+ sys.exit(0)
+ else:
+        sys.stderr.write("%s called with too few arguments" % (argv[0]))
+ sys.exit(7)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/notifail.py b/test/modules/md/notifail.py
new file mode 100755
index 0000000..a02cd39
--- /dev/null
+++ b/test/modules/md/notifail.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
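+# Test helper: exits with an error for every notification except the initial 'renewing' one.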
+
+import sys
+
+
+def main(argv):
+ if len(argv) > 1:
+ msg = argv[2] if len(argv) > 2 else None
+ # fail on later messaging stages, not the initial 'renewing' one.
+        # we have test_901_030 which checks that later stages are not invoked
+ # when misconfigurations are detected early.
+ sys.exit(1 if msg != "renewing" else 0)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/notify.py b/test/modules/md/notify.py
new file mode 100755
index 0000000..c5971c8
--- /dev/null
+++ b/test/modules/md/notify.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
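+# Test helper: appends its command line to the log file given as first argument.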
+
+import sys
+
+
+def main(argv):
+ if len(argv) > 2:
+ with open(argv[1], 'a+') as f1:
+ f1.write(f"{argv}\n")
+ sys.stderr.write("done, all fine.\n")
+ sys.exit(0)
+ else:
+        sys.stderr.write(f"{argv[0]} called with too few arguments")
+ sys.exit(7)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/test/modules/md/pebble/pebble-eab.json.template b/test/modules/md/pebble/pebble-eab.json.template
new file mode 100644
index 0000000..dd5bee5
--- /dev/null
+++ b/test/modules/md/pebble/pebble-eab.json.template
@@ -0,0 +1,16 @@
+{
+ "pebble": {
+ "listenAddress": "0.0.0.0:14000",
+ "managementListenAddress": "0.0.0.0:15000",
+ "certificate": "${server_dir}/ca/localhost.rsa2048.cert.pem",
+ "privateKey": "${server_dir}/ca/localhost.rsa2048.pkey.pem",
+ "httpPort": ${http_port},
+ "tlsPort": ${https_port},
+ "ocspResponderURL": "",
+ "externalAccountBindingRequired": true,
+ "externalAccountMACKeys": {
+ "kid-1": "zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W",
+ "kid-2": "b10lLJs8l1GPIzsLP0s6pMt8O0XVGnfTaCeROxQM0BIt2XrJMDHJZBM5NuQmQJQH"
+ }
+ }
+}
diff --git a/test/modules/md/pebble/pebble.json.template b/test/modules/md/pebble/pebble.json.template
new file mode 100644
index 0000000..9c41271
--- /dev/null
+++ b/test/modules/md/pebble/pebble.json.template
@@ -0,0 +1,12 @@
+{
+ "pebble": {
+ "listenAddress": "0.0.0.0:14000",
+ "managementListenAddress": "0.0.0.0:15000",
+ "certificate": "${server_dir}/ca/localhost.rsa2048.cert.pem",
+ "privateKey": "${server_dir}/ca/localhost.rsa2048.pkey.pem",
+ "httpPort": ${http_port},
+ "tlsPort": ${https_port},
+ "ocspResponderURL": "",
+ "externalAccountBindingRequired": false
+ }
+}
diff --git a/test/modules/md/test_001_store.py b/test/modules/md/test_001_store.py
new file mode 100644
index 0000000..995d40d
--- /dev/null
+++ b/test/modules/md/test_001_store.py
@@ -0,0 +1,213 @@
+# test mod_md store handling via the a2md command line tool
+
+import re
+
+import pytest
+
+from .md_env import MDTestEnv
+
+
+def md_name(md):
+ return md['name']
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+class TestStore:
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env):
+ env.purge_store()
+
+ # verify expected binary version
+ def test_md_001_001(self, env: MDTestEnv):
+ r = env.run([env.a2md_bin, "-V"])
+ m = re.match(r'version: (\d+\.\d+\.\d+)(-git)?$', r.stdout)
+ assert m, f"expected version info in '{r.stdout}'"
+
+ # verify that store is clean
+ def test_md_001_002(self, env: MDTestEnv):
+ r = env.run(["find", env.store_dir])
+ assert re.match(env.store_dir, r.stdout)
+
+ # test case: add a single dns managed domain
+ def test_md_001_100(self, env: MDTestEnv):
+ dns = "greenbytes.de"
+ env.check_json_contains(
+ env.a2md(["store", "add", dns]).json['output'][0],
+ {
+ "name": dns,
+ "domains": [dns],
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": 0
+ })
+
+ # test case: add > 1 dns managed domain
+ def test_md_001_101(self, env: MDTestEnv):
+ dns = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ env.check_json_contains(
+ env.a2md(["store", "add"] + dns).json['output'][0],
+ {
+ "name": dns[0],
+ "domains": dns,
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": 0
+ })
+
+ # test case: add second managed domain
+ def test_md_001_102(self, env: MDTestEnv):
+ dns1 = ["test000-102.com", "test000-102a.com", "test000-102b.com"]
+ assert env.a2md(["store", "add"] + dns1).exit_code == 0
+ #
+ # add second managed domain
+ dns2 = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ jout = env.a2md(["store", "add"] + dns2).json
+ # assert: output covers only changed md
+ assert len(jout['output']) == 1
+ env.check_json_contains(jout['output'][0], {
+ "name": dns2[0],
+ "domains": dns2,
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": 0
+ })
+
+ # test case: add existing domain
+ def test_md_001_103(self, env: MDTestEnv):
+ dns = "greenbytes.de"
+ assert env.a2md(["store", "add", dns]).exit_code == 0
+ # add same domain again
+ assert env.a2md(["store", "add", dns]).exit_code == 1
+
+ # test case: add without CA URL
+ def test_md_001_104(self, env: MDTestEnv):
+ dns = "greenbytes.de"
+ args = [env.a2md_bin, "-d", env.store_dir, "-j", "store", "add", dns]
+ jout = env.run(args).json
+ assert len(jout['output']) == 1
+ env.check_json_contains(jout['output'][0], {
+ "name": dns,
+ "domains": [dns],
+ "contacts": [],
+ "ca": {
+ "proto": "ACME"
+ },
+ "state": 0
+ })
+
+ # test case: list empty store
+ def test_md_001_200(self, env: MDTestEnv):
+ assert env.a2md(["store", "list"]).json == env.EMPTY_JOUT
+
+ # test case: list two managed domains
+ def test_md_001_201(self, env: MDTestEnv):
+ domains = [
+ ["test000-201.com", "test000-201a.com", "test000-201b.com"],
+ ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ ]
+ for dns in domains:
+ assert env.a2md(["store", "add"] + dns).exit_code == 0
+ #
+ # list all store content
+ jout = env.a2md(["store", "list"]).json
+ assert len(jout['output']) == len(domains)
+ domains.reverse()
+ jout['output'] = sorted(jout['output'], key=md_name)
+ for i in range(0, len(jout['output'])):
+ env.check_json_contains(jout['output'][i], {
+ "name": domains[i][0],
+ "domains": domains[i],
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": 0
+ })
+
+ # test case: remove managed domain
+ def test_md_001_300(self, env: MDTestEnv):
+ dns = "test000-300.com"
+ assert env.a2md(["store", "add", dns]).exit_code == 0
+ assert env.a2md(["store", "remove", dns]).json == env.EMPTY_JOUT
+ assert env.a2md(["store", "list"]).json == env.EMPTY_JOUT
+
+ # test case: remove from list of managed domains
+ def test_md_001_301(self, env: MDTestEnv):
+ dns1 = ["test000-301.com", "test000-301a.com", "test000-301b.com"]
+ assert env.a2md(["store", "add"] + dns1).exit_code == 0
+ #
+ dns2 = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ jout1 = env.a2md(["store", "add"] + dns2).json
+ # remove managed domain
+ assert env.a2md(["store", "remove", "test000-301.com"]).json == env.EMPTY_JOUT
+ # list store content
+ assert env.a2md(["store", "list"]).json == jout1
+
+ # test case: remove nonexisting managed domain
+ def test_md_001_302(self, env: MDTestEnv):
+ dns1 = "test000-302.com"
+ r = env.a2md(["store", "remove", dns1])
+ assert r.exit_code == 1
+ assert r.json == {
+ 'status': 2, 'description': 'No such file or directory', 'output': []
+ }
+
+ # test case: force remove nonexisting managed domain
+ def test_md_001_303(self, env: MDTestEnv):
+ dns1 = "test000-303.com"
+ assert env.a2md(["store", "remove", "-f", dns1]).json == env.EMPTY_JOUT
+
+ # test case: null change
+ def test_md_001_400(self, env: MDTestEnv):
+ dns = "test000-400.com"
+ r1 = env.a2md(["store", "add", dns])
+ assert env.a2md(["store", "update", dns]).json == r1.json
+
+ # test case: add dns to managed domain
+ def test_md_001_401(self, env: MDTestEnv):
+ dns1 = "test000-401.com"
+ env.a2md(["store", "add", dns1])
+ dns2 = "test-101.com"
+ args = ["store", "update", dns1, "domains", dns1, dns2]
+ assert env.a2md(args).json['output'][0]['domains'] == [dns1, dns2]
+
+ # test case: change CA URL
+ def test_md_001_402(self, env: MDTestEnv):
+ dns = "test000-402.com"
+ args = ["store", "add", dns]
+ assert env.a2md(args).json['output'][0]['ca']['urls'][0] == env.acme_url
+ nurl = "https://foo.com/"
+ args = [env.a2md_bin, "-a", nurl, "-d", env.store_dir, "-j", "store", "update", dns]
+ assert env.run(args).json['output'][0]['ca']['urls'][0] == nurl
+
+ # test case: update nonexisting managed domain
+ def test_md_001_403(self, env: MDTestEnv):
+ dns = "test000-403.com"
+ assert env.a2md(["store", "update", dns]).exit_code == 1
+
+ # test case: update domains, throw away md name
+ def test_md_001_404(self, env: MDTestEnv):
+ dns1 = "test000-404.com"
+ dns2 = "greenbytes.com"
+ args = ["store", "add", dns1]
+ assert env.a2md(args).json['output'][0]['domains'] == [dns1]
+ # override domains list
+ args = ["store", "update", dns1, "domains", dns2]
+ assert env.a2md(args).json['output'][0]['domains'] == [dns2]
+
+ # test case: update domains with empty dns list
+ def test_md_001_405(self, env: MDTestEnv):
+ dns1 = "test000-405.com"
+ assert env.a2md(["store", "add", dns1]).exit_code == 0
+ assert env.a2md(["store", "update", dns1, "domains"]).exit_code == 1
diff --git a/test/modules/md/test_010_store_migrate.py b/test/modules/md/test_010_store_migrate.py
new file mode 100644
index 0000000..d734b29
--- /dev/null
+++ b/test/modules/md/test_010_store_migrate.py
@@ -0,0 +1,43 @@
+# test mod_md store migration from the 1.0 file layout
+
+import os
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+class TestStoreMigrate:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ # install old store, start a2md list, check files afterwards
+ def test_md_010_000(self, env):
+ domain = "7007-1502285564.org"
+ env.replace_store(os.path.join(env.test_dir, "../modules/md/data/store_migrate/1.0/sample1"))
+ #
+ # use 1.0 file name for private key
+ fpkey_1_0 = os.path.join(env.store_dir, 'domains', domain, 'pkey.pem')
+ fpkey_1_1 = os.path.join(env.store_dir, 'domains', domain, 'privkey.pem')
+ cert_1_0 = os.path.join(env.store_dir, 'domains', domain, 'cert.pem')
+ cert_1_1 = os.path.join(env.store_dir, 'domains', domain, 'pubcert.pem')
+ chain_1_0 = os.path.join(env.store_dir, 'domains', domain, 'chain.pem')
+ #
+ assert os.path.exists(fpkey_1_0)
+ assert os.path.exists(cert_1_0)
+ assert os.path.exists(chain_1_0)
+ assert not os.path.exists(fpkey_1_1)
+ assert not os.path.exists(cert_1_1)
+ #
+ md = env.a2md(["-vvv", "list", domain]).json['output'][0]
+ assert domain == md["name"]
+ #
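+        # the 'list' call above migrated the store: the 1.0 private key file is gone,
+        # the 1.1 privkey/pubcert files exist now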
+ assert not os.path.exists(fpkey_1_0)
+ assert os.path.exists(cert_1_0)
+ assert os.path.exists(chain_1_0)
+ assert os.path.exists(fpkey_1_1)
+ assert os.path.exists(cert_1_1)
diff --git a/test/modules/md/test_100_reg_add.py b/test/modules/md/test_100_reg_add.py
new file mode 100644
index 0000000..1a6d3fe
--- /dev/null
+++ b/test/modules/md/test_100_reg_add.py
@@ -0,0 +1,152 @@
+# test mod_md managed domain registration (a2md add)
+
+import pytest
+
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestRegAdd:
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env):
+ env.purge_store()
+
+ # test case: add a single dns managed domain
+ def test_md_100_000(self, env):
+ dns = "greenbytes.de"
+ jout1 = env.a2md(["add", dns]).json
+ env.check_json_contains(jout1['output'][0], {
+ "name": dns,
+ "domains": [dns],
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ assert env.a2md(["list"]).json == jout1
+
+ # test case: add > 1 dns managed domain
+ def test_md_100_001(self, env):
+ dns = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ jout1 = env.a2md(["add"] + dns).json
+ env.check_json_contains(jout1['output'][0], {
+ "name": dns[0],
+ "domains": dns,
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ assert env.a2md(["list"]).json == jout1
+
+ # test case: add second managed domain
+ def test_md_100_002(self, env):
+ dns1 = ["test100-002.com", "test100-002a.com", "test100-002b.com"]
+ env.a2md(["add"] + dns1)
+ # add second managed domain
+ dns2 = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ jout = env.a2md(["add"] + dns2).json
+ # assert: output covers only changed md
+ assert len(jout['output']) == 1
+ env.check_json_contains(jout['output'][0], {
+ "name": dns2[0],
+ "domains": dns2,
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ assert len(env.a2md(["list"]).json['output']) == 2
+
+ # test case: add existing domain
+ def test_md_100_003(self, env):
+ dns = "greenbytes.de"
+ assert env.a2md(["add", dns]).exit_code == 0
+ assert env.a2md(["add", dns]).exit_code == 1
+
+ # test case: add without CA URL
+ def test_md_100_004(self, env):
+ dns = "greenbytes.de"
+ jout1 = env.run([env.a2md_bin, "-d", env.store_dir, "-j", "add", dns]).json
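+        # no '-a <url>' given, so the stored md carries no CA url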
+ assert len(jout1['output']) == 1
+ env.check_json_contains(jout1['output'][0], {
+ "name": dns,
+ "domains": [dns],
+ "contacts": [],
+ "ca": {
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ assert env.a2md(["list"]).json == jout1
+
+ # test case: add with invalid DNS
+ @pytest.mark.parametrize("invalid_dns", [
+ "tld", "white sp.ace", "invalid.*.wildcard.com", "k\xc3ller.idn.com"
+ ])
+ def test_md_100_005(self, env, invalid_dns):
+ assert env.a2md(["add", invalid_dns]).exit_code == 1
+ assert env.a2md(["add", "test-100.de", invalid_dns]).exit_code == 1
+
+ # test case: add with invalid ACME URL
+ @pytest.mark.parametrize("invalid_url", [
+ "no.schema/path", "http://white space/path", "http://bad.port:-1/path"])
+ def test_md_100_006(self, env, invalid_url):
+ args = [env.a2md_bin, "-a", invalid_url, "-d", env.store_dir, "-j"]
+ dns = "greenbytes.de"
+ args.extend(["add", dns])
+ assert env.run(args).exit_code == 1
+
+ # test case: add overlapping dns names
+ def test_md_100_007(self, env):
+ assert env.a2md(["add", "test-100.com", "test-101.com"]).exit_code == 0
+ # 1: alternate DNS exists as primary name
+ assert env.a2md(["add", "greenbytes2.de", "test-100.com"]).exit_code == 1
+ # 2: alternate DNS exists as alternate DNS
+ assert env.a2md(["add", "greenbytes2.de", "test-101.com"]).exit_code == 1
+ # 3: primary name exists as alternate DNS
+ assert env.a2md(["add", "test-101.com"]).exit_code == 1
+
+ # test case: add subdomains as separate managed domain
+ def test_md_100_008(self, env):
+ assert env.a2md(["add", "test-100.com"]).exit_code == 0
+ assert env.a2md(["add", "sub.test-100.com"]).exit_code == 0
+
+ # test case: add duplicate domain
+ def test_md_100_009(self, env):
+ dns1 = "test-100.com"
+ dns2 = "test-101.com"
+ jout = env.a2md(["add", dns1, dns2, dns1, dns2]).json
+ # DNS is only listed once
+ assert len(jout['output']) == 1
+ md = jout['output'][0]
+ assert md['domains'] == [dns1, dns2]
+
+    # test case: add punycode name
+ def test_md_100_010(self, env):
+ assert env.a2md(["add", "xn--kller-jua.punycode.de"]).exit_code == 0
+
+ # test case: don't sort alternate names
+ def test_md_100_011(self, env):
+ dns = ["test-100.com", "test-xxx.com", "test-aaa.com"]
+ jout = env.a2md(["add"] + dns).json
+        # DNS names are listed in the order given
+ assert len(jout['output']) == 1
+ md = jout['output'][0]
+ assert md['domains'] == dns
+
+ # test case: add DNS wildcard
+ @pytest.mark.parametrize("wild_dns", [
+ "*.wildcard.com"
+ ])
+ def test_md_100_012(self, env, wild_dns):
+ assert env.a2md(["add", wild_dns]).exit_code == 0
diff --git a/test/modules/md/test_110_reg_update.py b/test/modules/md/test_110_reg_update.py
new file mode 100644
index 0000000..71b50f8
--- /dev/null
+++ b/test/modules/md/test_110_reg_update.py
@@ -0,0 +1,273 @@
+# test mod_md managed domain updates (a2md update)
+
+import pytest
+
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestRegUpdate:
+
+ NAME1 = "greenbytes2.de"
+ NAME2 = "test-100.com"
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env):
+ env.clear_store()
+ # add managed domains
+ domains = [
+ [self.NAME1, "www.greenbytes2.de", "mail.greenbytes2.de"],
+ [self.NAME2, "test-101.com", "test-102.com"]
+ ]
+ for dns in domains:
+ env.a2md(["-a", env.acme_url, "add"] + dns)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ # test case: update domains
+ def test_md_110_000(self, env):
+ dns = ["foo.de", "bar.de"]
+ output1 = env.a2md(["-vvvv", "update", self.NAME1, "domains"] + dns).json['output']
+ assert len(output1) == 1
+ env.check_json_contains(output1[0], {
+ "name": self.NAME1,
+ "domains": dns,
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ assert env.a2md(["list"]).json['output'][0] == output1[0]
+
+ # test case: remove all domains
+ def test_md_110_001(self, env):
+ assert env.a2md(["update", self.NAME1, "domains"]).exit_code == 1
+
+ # test case: update domains with invalid DNS
+ @pytest.mark.parametrize("invalid_dns", [
+ "tld", "white sp.ace", "invalid.*.wildcard.com", "k\xc3ller.idn.com"
+ ])
+ def test_md_110_002(self, env, invalid_dns):
+ assert env.a2md(["update", self.NAME1, "domains", invalid_dns]).exit_code == 1
+
+ # test case: update domains with overlapping DNS list
+ def test_md_110_003(self, env):
+ dns = [self.NAME1, self.NAME2]
+ assert env.a2md(["update", self.NAME1, "domains"] + dns).exit_code == 1
+
+ # test case: update with subdomains
+ def test_md_110_004(self, env):
+ dns = ["test-foo.com", "sub.test-foo.com"]
+ md = env.a2md(["update", self.NAME1, "domains"] + dns).json['output'][0]
+ assert md['name'] == self.NAME1
+ assert md['domains'] == dns
+
+ # test case: update domains with duplicates
+ def test_md_110_005(self, env):
+ dns = [self.NAME1, self.NAME1, self.NAME1]
+ md = env.a2md(["update", self.NAME1, "domains"] + dns).json['output'][0]
+ assert md['name'] == self.NAME1
+ assert md['domains'] == [self.NAME1]
+
+ # test case: remove domains with punycode
+ def test_md_110_006(self, env):
+ dns = [self.NAME1, "xn--kller-jua.punycode.de"]
+ md = env.a2md(["update", self.NAME1, "domains"] + dns).json['output'][0]
+ assert md['name'] == self.NAME1
+ assert md['domains'] == dns
+
+ # test case: update non-existing managed domain
+ def test_md_110_007(self, env):
+ assert env.a2md(["update", "test-foo.com", "domains", "test-foo.com"]).exit_code == 1
+
+ # test case: update domains with DNS wildcard
+ @pytest.mark.parametrize("wild_dns", [
+ "*.wildcard.com"
+ ])
+ def test_md_110_008(self, env, wild_dns):
+ assert env.a2md(["update", self.NAME1, "domains", wild_dns]).exit_code == 0
+
+ # --------- update ca ---------
+
+ # test case: update CA URL
+ def test_md_110_100(self, env):
+ url = "http://localhost.com:9999"
+ output = env.a2md(["update", self.NAME1, "ca", url]).json['output']
+ assert len(output) == 1
+ env.check_json_contains(output[0], {
+ "name": self.NAME1,
+ "domains": [self.NAME1, "www.greenbytes2.de", "mail.greenbytes2.de"],
+ "contacts": [],
+ "ca": {
+ "urls": [url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+
+ # test case: update CA with invalid URL
+ @pytest.mark.parametrize("invalid_url", [
+ "no.schema/path", "http://white space/path", "http://bad.port:-1/path"
+ ])
+ def test_md_110_101(self, env, invalid_url):
+ assert env.a2md(["update", self.NAME1, "ca", invalid_url]).exit_code == 1
+
+ # test case: update ca protocol
+ def test_md_110_102(self, env):
+ md = env.a2md(["update", self.NAME1, "ca", env.acme_url, "FOO"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "urls": [env.acme_url],
+ "proto": "FOO"
+ })
+ assert md['state'] == 1
+
+ # test case: update account ID
+ def test_md_110_200(self, env):
+ acc_id = "test.account.id"
+ output = env.a2md(["update", self.NAME1, "account", acc_id]).json['output']
+ assert len(output) == 1
+ env.check_json_contains(output[0], {
+ "name": self.NAME1,
+ "domains": [self.NAME1, "www.greenbytes2.de", "mail.greenbytes2.de"],
+ "contacts": [],
+ "ca": {
+ "account": acc_id,
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+
+ # test case: remove account ID
+ def test_md_110_201(self, env):
+ assert env.a2md(["update", self.NAME1, "account", "test.account.id"]).exit_code == 0
+ md = env.a2md(["update", self.NAME1, "account"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ })
+ assert md['state'] == 1
+
+ # test case: change existing account ID
+ def test_md_110_202(self, env):
+ assert env.a2md(["update", self.NAME1, "account", "test.account.id"]).exit_code == 0
+ md = env.a2md(["update", self.NAME1, "account", "foo.test.com"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "account": "foo.test.com",
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ })
+ assert md['state'] == 1
+
+ # test case: ignore additional argument
+ def test_md_110_203(self, env):
+ md = env.a2md(["update", self.NAME1, "account", "test.account.id",
+ "test2.account.id"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "account": "test.account.id",
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ })
+ assert md['state'] == 1
+
+ # test case: add contact info
+ def test_md_110_300(self, env):
+ mail = "test@greenbytes.de"
+ output = env.a2md(["update", self.NAME1, "contacts", mail]).json['output']
+ assert len(output) == 1
+ env.check_json_contains(output[0], {
+ "name": self.NAME1,
+ "domains": [self.NAME1, "www.greenbytes2.de", "mail.greenbytes2.de"],
+ "contacts": ["mailto:" + mail],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+
+ # test case: add multiple contact info, preserve order
+ def test_md_110_301(self, env):
+ mail = ["xxx@greenbytes.de", "aaa@greenbytes.de"]
+ md = env.a2md(["update", self.NAME1, "contacts"] + mail).json['output'][0]
+ assert md['contacts'] == ["mailto:" + mail[0], "mailto:" + mail[1]]
+ assert md['state'] == 1
+
+ # test case: must not remove contact info
+ def test_md_110_302(self, env):
+ assert env.a2md(["update", self.NAME1, "contacts", "test@greenbytes.de"]).exit_code == 0
+ assert env.a2md(["update", self.NAME1, "contacts"]).exit_code == 1
+
+ # test case: replace existing contact info
+ def test_md_110_303(self, env):
+ assert env.a2md(["update", self.NAME1, "contacts", "test@greenbytes.de"]).exit_code == 0
+ md = env.a2md(["update", self.NAME1, "contacts", "xxx@greenbytes.de"]).json['output'][0]
+ assert md['contacts'] == ["mailto:xxx@greenbytes.de"]
+ assert md['state'] == 1
+
+ # test case: use invalid mail address
+ @pytest.mark.parametrize("invalid_mail", [
+ "no.at.char", "with blank@test.com", "missing.host@", "@missing.localpart.de",
+ "double..dot@test.com", "double@at@test.com"
+ ])
+ def test_md_110_304(self, env, invalid_mail):
+        # SEI: Hmm, it does not really make sense to build a complete verification of
+        # https://tools.ietf.org/html/rfc822 here, does it?
+ assert env.a2md(["update", self.NAME1, "contacts", invalid_mail]).exit_code == 1
+
+ # test case: respect urls as given
+ @pytest.mark.parametrize("url", [
+ "mailto:test@greenbytes.de", "wrong://schema@test.com"])
+ def test_md_110_305(self, env, url):
+ md = env.a2md(["update", self.NAME1, "contacts", url]).json['output'][0]
+ assert md['contacts'] == [url]
+ assert md['state'] == 1
+
+ # test case: add tos agreement
+ def test_md_110_400(self, env):
+ output = env.a2md(["update", self.NAME1, "agreement", env.acme_tos]).json['output']
+ assert len(output) == 1
+ env.check_json_contains(output[0], {
+ "name": self.NAME1,
+ "domains": [self.NAME1, "www.greenbytes2.de", "mail.greenbytes2.de"],
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME",
+ "agreement": env.acme_tos
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+
+ # test case: remove tos agreement
+ def test_md_110_402(self, env):
+ assert env.a2md(["update", self.NAME1, "agreement", env.acme_tos]).exit_code == 0
+ md = env.a2md(["update", self.NAME1, "agreement"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ })
+ assert md['state'] == 1
+
+ # test case: ignore additional arguments
+ def test_md_110_403(self, env):
+ md = env.a2md(["update", self.NAME1, "agreement",
+ env.acme_tos, "http://invalid.tos/"]).json['output'][0]
+ env.check_json_contains(md['ca'], {
+ "urls": [env.acme_url],
+ "proto": "ACME",
+ "agreement": env.acme_tos
+ })
+ assert md['state'] == 1
+
+ # test case: update agreement with invalid URL
+ @pytest.mark.parametrize("invalid_url", [
+ "no.schema/path", "http://white space/path", "http://bad.port:-1/path"
+ ])
+ def test_md_110_404(self, env, invalid_url):
+ assert env.a2md(["update", self.NAME1, "agreement", invalid_url]).exit_code == 1
diff --git a/test/modules/md/test_120_reg_list.py b/test/modules/md/test_120_reg_list.py
new file mode 100644
index 0000000..82e109f
--- /dev/null
+++ b/test/modules/md/test_120_reg_list.py
@@ -0,0 +1,87 @@
+# test mod_md managed domain listing (a2md list)
+
+from shutil import copyfile
+
+import pytest
+
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestRegList:
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env):
+ env.clear_store()
+
+ # test case: list empty store
+ def test_md_120_000(self, env):
+ assert env.a2md(["list"]).json == env.EMPTY_JOUT
+
+ # test case: list two managed domains
+ def test_md_120_001(self, env):
+ domains = [
+ ["test120-001.com", "test120-001a.com", "test120-001b.com"],
+ ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ ]
+ for dns in domains:
+ assert env.a2md(["add"] + dns).exit_code == 0
+ #
+ # list all store content
+ jout = env.a2md(["list"]).json
+ assert len(jout['output']) == len(domains)
+ domains.reverse()
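+        # 'list' is expected to show the most recently added md first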
+ for i in range(0, len(jout['output'])):
+ env.check_json_contains(jout['output'][i], {
+ "name": domains[i][0],
+ "domains": domains[i],
+ "contacts": [],
+ "ca": {
+ "urls": [env.acme_url],
+ "proto": "ACME"
+ },
+ "state": env.MD_S_INCOMPLETE
+ })
+ # list md by name
+ for dns in ["test120-001.com", "greenbytes2.de"]:
+ md = env.a2md(["list", dns]).json['output'][0]
+ assert md['name'] == dns
+
+ # test case: validate md state in store
+ def test_md_120_002(self, env):
+ # check: md without pkey/cert -> INCOMPLETE
+ domain = f"test1.{env.http_tld}"
+ assert env.a2md(["add", domain]).exit_code == 0
+ assert env.a2md(["update", domain, "contacts", "admin@" + domain]).exit_code == 0
+ assert env.a2md(["update", domain, "agreement", env.acme_tos]).exit_code == 0
+ assert env.a2md(["list", domain]).json['output'][0]['state'] == env.MD_S_INCOMPLETE
+ # check: valid pkey/cert -> COMPLETE
+ cred = env.get_credentials_for_name(domain)[0]
+ copyfile(cred.pkey_file, env.store_domain_file(domain, 'privkey.pem'))
+ copyfile(cred.cert_file, env.store_domain_file(domain, 'pubcert.pem'))
+ assert env.a2md(["list", domain]).json['output'][0]['state'] == env.MD_S_COMPLETE
+        # check: expired cert -> INCOMPLETE, renewal due
+ cred = env.get_credentials_for_name(f"expired.{env.http_tld}")[0]
+ copyfile(cred.pkey_file, env.store_domain_file(domain, 'privkey.pem'))
+ copyfile(cred.cert_file, env.store_domain_file(domain, 'pubcert.pem'))
+ out = env.a2md(["list", domain]).json['output'][0]
+ assert out['state'] == env.MD_S_INCOMPLETE
+ assert out['renew'] is True
+
+ # test case: broken cert file
+ def test_md_120_003(self, env):
+ domain = f"test1.{env.http_tld}"
+ assert env.a2md(["add", domain]).exit_code == 0
+ assert env.a2md(["update", domain, "contacts", "admin@" + domain]).exit_code == 0
+ assert env.a2md(["update", domain, "agreement", env.acme_tos]).exit_code == 0
+ # check: valid pkey/cert -> COMPLETE
+ cred = env.get_credentials_for_name(domain)[0]
+ copyfile(cred.pkey_file, env.store_domain_file(domain, 'privkey.pem'))
+ copyfile(cred.cert_file, env.store_domain_file(domain, 'pubcert.pem'))
+ assert env.a2md(["list", domain]).json['output'][0]['state'] == env.MD_S_COMPLETE
+        # check: replace cert with a broken file -> back to INCOMPLETE
+ with open(env.store_domain_file(domain, 'pubcert.pem'), 'w') as fd:
+ fd.write("dummy\n")
+ assert env.a2md(["list", domain]).json['output'][0]['state'] == env.MD_S_INCOMPLETE
diff --git a/test/modules/md/test_202_acmev2_regs.py b/test/modules/md/test_202_acmev2_regs.py
new file mode 100644
index 0000000..97f093e
--- /dev/null
+++ b/test/modules/md/test_202_acmev2_regs.py
@@ -0,0 +1,132 @@
+# test mod_md ACMEv2 registrations
+
+import re
+import json
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestAcmeAcc:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.APACHE_CONF_SRC = "data/test_drive"
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env):
+ env.check_acme()
+ env.clear_store()
+
+ # test case: register a new account, vary length to check base64 encoding
+ @pytest.mark.parametrize("contact", [
+ "x@not-forbidden.org", "xx@not-forbidden.org", "xxx@not-forbidden.org"
+ ])
+ def test_md_202_000(self, env, contact):
+ r = env.a2md(["-t", "accepted", "acme", "newreg", contact], raw=True)
+ assert r.exit_code == 0, r
+ m = re.match("registered: (.*)$", r.stdout)
+ assert m, "did not match: {0}".format(r.stdout)
+ acct = m.group(1)
+ print("newreg: %s" % m.group(1))
+ self._check_account(env, acct, ["mailto:" + contact])
+
+ # test case: register a new account without accepting ToS, must fail
+ def test_md_202_000b(self, env):
+ r = env.a2md(["acme", "newreg", "x@not-forbidden.org"], raw=True)
+ assert r.exit_code == 1
+ m = re.match(".*must agree to terms of service.*", r.stderr)
+ if m is None:
+ # the pebble variant
+ m = re.match(".*account did not agree to the terms of service.*", r.stderr)
+ assert m, "did not match: {0}".format(r.stderr)
+
+ # test case: respect 'mailto:' prefix in contact url
+ def test_md_202_001(self, env):
+ contact = "mailto:xx@not-forbidden.org"
+ r = env.a2md(["-t", "accepted", "acme", "newreg", contact], raw=True)
+ assert r.exit_code == 0
+ m = re.match("registered: (.*)$", r.stdout)
+ assert m
+ acct = m.group(1)
+ self._check_account(env, acct, [contact])
+
+ # test case: fail on invalid contact url
+ @pytest.mark.parametrize("invalid_contact", [
+ "mehlto:xxx@not-forbidden.org", "no.at.char", "with blank@test.com",
+ "missing.host@", "@missing.localpart.de",
+ "double..dot@test.com", "double@at@test.com"
+ ])
+ def test_md_202_002(self, env, invalid_contact):
+ assert env.a2md(["acme", "newreg", invalid_contact]).exit_code == 1
+
+ # test case: use contact list
+ def test_md_202_003(self, env):
+ contact = ["xx@not-forbidden.org", "aa@not-forbidden.org"]
+ r = env.a2md(["-t", "accepted", "acme", "newreg"] + contact, raw=True)
+ assert r.exit_code == 0
+ m = re.match("registered: (.*)$", r.stdout)
+ assert m
+ acct = m.group(1)
+ self._check_account(env, acct, ["mailto:" + contact[0], "mailto:" + contact[1]])
+
+ # test case: validate new account
+ def test_md_202_100(self, env):
+ acct = self._prepare_account(env, ["tmp@not-forbidden.org"])
+ assert env.a2md(["acme", "validate", acct]).exit_code == 0
+
+ # test case: fail on non-existing account
+ def test_md_202_101(self, env):
+ assert env.a2md(["acme", "validate", "ACME-localhost-1000"]).exit_code == 1
+
+    # test case: report failure on a request signing problem
+ def test_md_202_102(self, env):
+ acct = self._prepare_account(env, ["tmp@not-forbidden.org"])
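+        # corrupt the stored account url; the following 'validate' must then fail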
+ with open(env.path_account(acct)) as f:
+ acctj = json.load(f)
+ acctj['url'] = acctj['url'] + "0"
+ open(env.path_account(acct), "w").write(json.dumps(acctj))
+ assert env.a2md(["acme", "validate", acct]).exit_code == 1
+
+    # test case: register and try to delete an account, will fail without persistence
+ def test_md_202_200(self, env):
+ acct = self._prepare_account(env, ["tmp@not-forbidden.org"])
+ assert env.a2md(["delreg", acct]).exit_code == 1
+
+    # test case: register and try to delete an account with persistence
+ def test_md_202_201(self, env):
+ acct = self._prepare_account(env, ["tmp@not-forbidden.org"])
+ assert env.a2md(["acme", "delreg", acct]).exit_code == 0
+ # check that store is clean
+ r = env.run(["find", env.store_dir])
+ assert re.match(env.store_dir, r.stdout)
+
+ # test case: delete a persisted account without specifying url
+ def test_md_202_202(self, env):
+ acct = self._prepare_account(env, ["tmp@not-forbidden.org"])
+ assert env.run([env.a2md_bin, "-d", env.store_dir, "acme", "delreg", acct]).exit_code == 0
+
+ # test case: delete, then validate an account
+ def test_md_202_203(self, env):
+ acct = self._prepare_account(env, ["test014@not-forbidden.org"])
+ assert env.a2md(["acme", "delreg", acct]).exit_code == 0
+ # validate on deleted account fails
+ assert env.a2md(["acme", "validate", acct]).exit_code == 1
+
+ def _check_account(self, env, acct, contact):
+ with open(env.path_account(acct)) as f:
+ acctj = json.load(f)
+ assert acctj['registration']['contact'] == contact
+
+ def _prepare_account(self, env, contact):
+ r = env.a2md(["-t", "accepted", "acme", "newreg"] + contact, raw=True)
+ assert r.exit_code == 0
+ return re.match("registered: (.*)$", r.stdout).group(1)
diff --git a/test/modules/md/test_300_conf_validate.py b/test/modules/md/test_300_conf_validate.py
new file mode 100644
index 0000000..85371ba
--- /dev/null
+++ b/test/modules/md/test_300_conf_validate.py
@@ -0,0 +1,390 @@
+# test mod_md basic configurations
+
+import re
+import time
+from datetime import datetime, timedelta
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestConf:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ env.clear_store()
+
+ # test case: just one MDomain definition
+ def test_md_300_001(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: two MDomain definitions, non-overlapping
+ def test_md_300_002(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org
+ MDomain example2.org www.example2.org mail.example2.org
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: two MDomain definitions, exactly the same
+ def test_md_300_003(self, env):
+ assert env.apache_stop() == 0
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ """).install()
+ assert env.apache_fail() == 0
+
+ # test case: two MDomain definitions, overlapping
+ def test_md_300_004(self, env):
+ assert env.apache_stop() == 0
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ MDomain example2.org test3.not-forbidden.org www.example2.org mail.example2.org
+ """).install()
+ assert env.apache_fail() == 0
+
+ # test case: two MDomains, one inside a virtual host
+ def test_md_300_005(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ <VirtualHost *:12346>
+ MDomain example2.org www.example2.org www.example3.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: two MDomains, one correct vhost name
+ def test_md_300_006(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ <VirtualHost *:12346>
+ ServerName example2.org
+ MDomain example2.org www.example2.org www.example3.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: two MDomains, two correct vhost names
+ def test_md_300_007(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ <VirtualHost *:12346>
+ ServerName example2.org
+ MDomain example2.org www.example2.org www.example3.org
+ </VirtualHost>
+ <VirtualHost *:12346>
+ ServerName www.example2.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: two MDomains, overlapping vhosts
+ def test_md_300_008(self, env):
+ MDConf(env, text="""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ <VirtualHost *:12346>
+ ServerName example2.org
+ ServerAlias www.example3.org
+ MDomain example2.org www.example2.org www.example3.org
+ </VirtualHost>
+
+ <VirtualHost *:12346>
+ ServerName www.example2.org
+ ServerAlias example2.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: vhosts with overlapping MDs
+ def test_md_300_009(self, env):
+ assert env.apache_stop() == 0
+ conf = MDConf(env)
+ conf.add("""
+ MDMembers manual
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ MDomain example2.org www.example2.org www.example3.org
+ """)
+ conf.add_vhost(port=12346, domains=["example2.org", "www.example3.org"], with_ssl=True)
+ conf.add_vhost(port=12346, domains=["www.example2.org", "example2.org"], with_ssl=True)
+ conf.add_vhost(port=12346, domains=["not-forbidden.org", "example2.org"], with_ssl=True)
+ conf.install()
+ assert env.apache_fail() == 0
+ env.apache_stop()
+ env.httpd_error_log.ignore_recent()
+
+ # test case: MDomain, vhost with matching ServerAlias
+ def test_md_300_010(self, env):
+ conf = MDConf(env)
+ conf.add("""
+ MDomain not-forbidden.org www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+
+ <VirtualHost *:12346>
+ ServerName not-forbidden.org
+ ServerAlias test3.not-forbidden.org
+ </VirtualHost>
+ """)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # test case: MDomain, misses one ServerAlias
+ def test_md_300_011a(self, env):
+ env.apache_stop()
+ conf = MDConf(env, text="""
+ MDomain not-forbidden.org manual www.not-forbidden.org mail.not-forbidden.org test3.not-forbidden.org
+ """)
+ conf.add_vhost(port=env.https_port, domains=[
+ "not-forbidden.org", "test3.not-forbidden.org", "test4.not-forbidden.org"
+ ])
+ conf.install()
+ assert env.apache_fail() == 0
+ env.apache_stop()
+
+ # test case: MDomain, misses one ServerAlias, but auto add enabled
+ def test_md_300_011b(self, env):
+ env.apache_stop()
+ MDConf(env, text="""
+ MDomain not-forbidden.org auto mail.not-forbidden.org
+
+ <VirtualHost *:%s>
+ ServerName not-forbidden.org
+ ServerAlias test3.not-forbidden.org
+ ServerAlias test4.not-forbidden.org
+ </VirtualHost>
+ """ % env.https_port).install()
+ assert env.apache_restart() == 0
+
+ # test case: MDomain does not match any vhost
+ def test_md_300_012(self, env):
+ MDConf(env, text="""
+ MDomain example012.org www.example012.org
+ <VirtualHost *:12346>
+ ServerName not-forbidden.org
+ ServerAlias test3.not-forbidden.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: one md covers two vhosts
+ def test_md_300_013(self, env):
+ MDConf(env, text="""
+ MDomain example2.org test-a.example2.org test-b.example2.org
+ <VirtualHost *:12346>
+ ServerName test-a.example2.org
+ </VirtualHost>
+ <VirtualHost *:12346>
+ ServerName test-b.example2.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: global server name as managed domain name
+ def test_md_300_014(self, env):
+ MDConf(env, text=f"""
+ MDomain www.{env.http_tld} www.example2.org
+
+ <VirtualHost *:12346>
+ ServerName www.example2.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: valid pkey specification
+ def test_md_300_015(self, env):
+ MDConf(env, text="""
+ MDPrivateKeys Default
+ MDPrivateKeys RSA
+ MDPrivateKeys RSA 2048
+ MDPrivateKeys RSA 3072
+ MDPrivateKeys RSA 4096
+ """).install()
+ assert env.apache_restart() == 0
+
+ # test case: invalid pkey specification
+ @pytest.mark.parametrize("line,exp_err_msg", [
+ ("MDPrivateKeys", "needs to specify the private key type"),
+ ("MDPrivateKeys Default RSA 1024", "'Default' allows no other parameter"),
+ ("MDPrivateKeys RSA 1024", "must be 2048 or higher"),
+ ("MDPrivateKeys RSA 1024", "must be 2048 or higher"),
+ ("MDPrivateKeys rsa 2048 rsa 4096", "two keys of type 'RSA' are not possible"),
+ ("MDPrivateKeys p-256 secp384r1 P-256", "two keys of type 'P-256' are not possible"),
+ ])
+ def test_md_300_016(self, env, line, exp_err_msg):
+ MDConf(env, text=line).install()
+ assert env.apache_fail() == 0
+ assert exp_err_msg in env.apachectl_stderr
+
+ # test case: invalid renew window directive
+ @pytest.mark.parametrize("line,exp_err_msg", [
+ ("MDRenewWindow dec-31", "has unrecognized format"),
+ ("MDRenewWindow 1y", "has unrecognized format"),
+ ("MDRenewWindow 10 d", "takes one argument"),
+ ("MDRenewWindow 102%", "a length of 100% or more is not allowed.")])
+ def test_md_300_017(self, env, line, exp_err_msg):
+ MDConf(env, text=line).install()
+ assert env.apache_fail() == 0
+ assert exp_err_msg in env.apachectl_stderr
+
+    # test case: invalid uri for MDHttpProxy
+ @pytest.mark.parametrize("line,exp_err_msg", [
+ ("MDHttpProxy", "takes one argument"),
+ ("MDHttpProxy localhost:8080", "scheme must be http or https"),
+ ("MDHttpProxy https://127.0.0.1:-443", "invalid port"),
+ ("MDHttpProxy HTTP localhost 8080", "takes one argument")])
+ def test_md_300_018(self, env, line, exp_err_msg):
+ MDConf(env, text=line).install()
+ assert env.apache_fail() == 0, "Server accepted test config {}".format(line)
+ assert exp_err_msg in env.apachectl_stderr
+
+ # test case: invalid parameter for MDRequireHttps
+ @pytest.mark.parametrize("line,exp_err_msg", [
+ ("MDRequireHTTPS yes", "supported parameter values are 'temporary' and 'permanent'"),
+ ("MDRequireHTTPS", "takes one argument")])
+ def test_md_300_019(self, env, line, exp_err_msg):
+ MDConf(env, text=line).install()
+ assert env.apache_fail() == 0, "Server accepted test config {}".format(line)
+ assert exp_err_msg in env.apachectl_stderr
+
+ # test case: invalid parameter for MDMustStaple
+ @pytest.mark.parametrize("line,exp_err_msg", [
+ ("MDMustStaple", "takes one argument"),
+ ("MDMustStaple yes", "supported parameter values are 'on' and 'off'"),
+ ("MDMustStaple true", "supported parameter values are 'on' and 'off'")])
+ def test_md_300_020(self, env, line, exp_err_msg):
+ MDConf(env, text=line).install()
+ assert env.apache_fail() == 0, "Server accepted test config {}".format(line)
+ assert exp_err_msg in env.apachectl_stderr
+ env.httpd_error_log.ignore_recent()
+
+    # test case: alt-names incomplete detection, github issue #68
+ def test_md_300_021(self, env):
+ env.apache_stop()
+ conf = MDConf(env, text="""
+ MDMembers manual
+ MDomain secret.com
+ """)
+ conf.add_vhost(port=12344, domains=[
+ "not.secret.com", "secret.com"
+ ])
+ conf.install()
+ assert env.apache_fail() == 0
+ # this is unreliable on debian
+ #assert env.httpd_error_log.scan_recent(
+ # re.compile(r'.*Virtual Host not.secret.com:0 matches Managed Domain \'secret.com\', '
+ # 'but the name/alias not.secret.com itself is not managed. A requested '
+ # 'MD certificate will not match ServerName.*'), timeout=10
+ #)
+
+    # test case: MDRequireHttps inside an <If> construct is accepted (unlike in <Directory>)
+ def test_md_300_022(self, env):
+ MDConf(env, text="""
+ MDomain secret.com
+ <If "1 == 1">
+ MDRequireHttps temporary
+ </If>
+ <VirtualHost *:12344>
+ ServerName secret.com
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+
+    # test case: MDRequireHttps inside a <Directory> section is rejected
+ def test_md_300_023(self, env):
+ conf = MDConf(env, text="""
+ MDomain secret.com
+ <Directory /tmp>
+ MDRequireHttps temporary
+ </Directory>
+ """)
+ conf.add_vhost(port=12344, domains=["secret.com"])
+ conf.install()
+ assert env.apache_fail() == 0
+
+ # test case: invalid parameter for MDCertificateAuthority
+ @pytest.mark.parametrize("ca,exp_err_msg", [
+ ("", "takes one argument"),
+ ("yes", "The CA name 'yes' is not known "),
+ ])
+ def test_md_300_024(self, env, ca, exp_err_msg):
+ conf = MDConf(env, text=f"""
+ MDCertificateAuthority {ca}
+ MDRenewMode manual # lets not contact these in testing
+ """)
+ conf.install()
+ assert env.apache_fail() == 0
+ assert exp_err_msg in env.apachectl_stderr
+
+ # test case: valid parameter for MDCertificateAuthority
+ @pytest.mark.parametrize("ca, url", [
+ ("LetsEncrypt", "https://acme-v02.api.letsencrypt.org/directory"),
+ ("letsencrypt", "https://acme-v02.api.letsencrypt.org/directory"),
+ ("letsencrypt-test", "https://acme-staging-v02.api.letsencrypt.org/directory"),
+ ("LETSEncrypt-TESt", "https://acme-staging-v02.api.letsencrypt.org/directory"),
+ ("buypass", "https://api.buypass.com/acme/directory"),
+ ("buypass-test", "https://api.test4.buypass.no/acme/directory"),
+ ])
+ def test_md_300_025(self, env, ca, url):
+ domain = f"test1.{env.http_tld}"
+ conf = MDConf(env, text=f"""
+ MDCertificateAuthority {ca}
+ MDRenewMode manual
+ """)
+ conf.add_md([domain])
+ conf.install()
+        assert env.apache_restart() == 0, "Server did not accept CA '{}'".format(ca)
+ md = env.get_md_status(domain)
+ assert md['ca']['urls'][0] == url, f"CA url '{url}' not set in {md}"
+
+ # vhost on another address, see #278
+ def test_md_300_026(self, env):
+ assert env.apache_stop() == 0
+ conf = MDConf(env)
+ domain = f"t300_026.{env.http_tld}"
+ conf.add(f"""
+ MDomain {domain}
+ """)
+ conf.add_vhost(port=env.http_port, domains=[domain], with_ssl=False)
+ conf.add(f"""
+ <VirtualHost 10.0.0.1:{env.https_port}>
+ ServerName {domain}
+ ServerAlias xxx.{env.http_tld}
+ SSLEngine on
+ </VirtualHost>
+ <VirtualHost 10.0.0.1:12345>
+ ServerName {domain}
+ SSLEngine on
+ </VirtualHost>
+ """)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # test case: configure more than 1 CA
+ @pytest.mark.parametrize("cas, should_work", [
+ (["https://acme-v02.api.letsencrypt.org/directory"], True),
+ (["https://acme-v02.api.letsencrypt.org/directory", "buypass"], True),
+ (["x", "buypass"], False),
+ (["letsencrypt", "abc"], False),
+ (["letsencrypt", "buypass"], True),
+ ])
+ def test_md_300_027(self, env, cas, should_work):
+ domain = f"test1.{env.http_tld}"
+ conf = MDConf(env, text=f"""
+ MDCertificateAuthority {' '.join(cas)}
+ MDRenewMode manual
+ """)
+ conf.add_md([domain])
+ conf.install()
+ rv = env.apache_restart()
+ if should_work:
+            assert rv == 0, "Server did not accept CAs '{}'".format(cas)
+ md = env.get_md_status(domain)
+ assert len(md['ca']['urls']) == len(cas)
+ else:
+ assert rv != 0, "Server should not have accepted CAs '{}'".format(cas)
diff --git a/test/modules/md/test_310_conf_store.py b/test/modules/md/test_310_conf_store.py
new file mode 100644
index 0000000..f2bb9c7
--- /dev/null
+++ b/test/modules/md/test_310_conf_store.py
@@ -0,0 +1,850 @@
+# test mod_md basic configurations
+import time
+
+import pytest
+import os
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+SEC_PER_DAY = 24 * 60 * 60
+MS_PER_DAY = SEC_PER_DAY * 1000
+NS_PER_DAY = MS_PER_DAY * 1000
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestConf:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ # test case: no md definitions in config
+ def test_md_310_001(self, env):
+ MDConf(env, text="").install()
+ assert env.apache_restart() == 0
+ r = env.a2md(["list"])
+ assert 0 == len(r.json["output"])
+
+ # test case: add md definitions on empty store
+ @pytest.mark.parametrize("confline,dns_lists,md_count", [
+ ("MDomain testdomain.org www.testdomain.org mail.testdomain.org",
+ [["testdomain.org", "www.testdomain.org", "mail.testdomain.org"]], 1),
+ ("""MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDomain testdomain2.org www.testdomain2.org mail.testdomain2.org""",
+ [["testdomain.org", "www.testdomain.org", "mail.testdomain.org"],
+ ["testdomain2.org", "www.testdomain2.org", "mail.testdomain2.org"]], 2)
+ ])
+ def test_md_310_100(self, env, confline, dns_lists, md_count):
+ MDConf(env, text=confline).install()
+ assert env.apache_restart() == 0
+ for i in range(0, len(dns_lists)):
+ env.check_md(dns_lists[i], state=1)
+
+ # test case: add managed domains as separate steps
+ def test_md_310_101(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDomain testdomain2.org www.testdomain2.org mail.testdomain2.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+ env.check_md(["testdomain2.org", "www.testdomain2.org", "mail.testdomain2.org"], state=1)
+
+ # test case: add dns to existing md
+ def test_md_310_102(self, env):
+ assert env.a2md(["add", "testdomain.org", "www.testdomain.org"]).exit_code == 0
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+
+ # test case: add new md definition with acme url, acme protocol, acme agreement
+ def test_md_310_103(self, env):
+ MDConf(env, text="""
+ MDCertificateAuthority http://acme.test.org:4000/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://acme.test.org:4000/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """, local_ca=False).install()
+ assert env.apache_restart() == 0
+ name = "testdomain.org"
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ ca="http://acme.test.org:4000/directory", protocol="ACME",
+ agreement="http://acme.test.org:4000/terms/v1")
+
+ # test case: add to existing md: acme url, acme protocol
+ def test_md_310_104(self, env):
+ name = "testdomain.org"
+ MDConf(env, local_ca=False, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ ca="https://acme-v02.api.letsencrypt.org/directory", protocol="ACME")
+ MDConf(env, local_ca=False, text="""
+ MDCertificateAuthority http://acme.test.org:4000/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://acme.test.org:4000/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ ca="http://acme.test.org:4000/directory", protocol="ACME",
+ agreement="http://acme.test.org:4000/terms/v1")
+
+ # test case: add new md definition with server admin
+ def test_md_310_105(self, env):
+ MDConf(env, admin="admin@testdomain.org", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ name = "testdomain.org"
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ contacts=["mailto:admin@testdomain.org"])
+
+ # test case: add to existing md: server admin
+ def test_md_310_106(self, env):
+ name = "testdomain.org"
+ assert env.a2md(["add", name, "www.testdomain.org", "mail.testdomain.org"]).exit_code == 0
+ MDConf(env, admin="admin@testdomain.org", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ contacts=["mailto:admin@testdomain.org"])
+
+ # test case: assign separate contact info based on VirtualHost
+ def test_md_310_107(self, env):
+ MDConf(env, admin="", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDomain testdomain2.org www.testdomain2.org mail.testdomain2.org
+
+ <VirtualHost *:12346>
+ ServerName testdomain.org
+ ServerAlias www.testdomain.org
+ ServerAdmin mailto:admin@testdomain.org
+ </VirtualHost>
+
+ <VirtualHost *:12346>
+ ServerName testdomain2.org
+ ServerAlias www.testdomain2.org
+ ServerAdmin mailto:admin@testdomain2.org
+ </VirtualHost>
+ """).install()
+ assert env.apache_restart() == 0
+ name1 = "testdomain.org"
+ name2 = "testdomain2.org"
+ env.check_md([name1, "www." + name1, "mail." + name1], state=1, contacts=["mailto:admin@" + name1])
+ env.check_md([name2, "www." + name2, "mail." + name2], state=1, contacts=["mailto:admin@" + name2])
+
+ # test case: normalize names - lowercase
+ def test_md_310_108(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org WWW.testdomain.org MAIL.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+
+ # test case: default drive mode - auto
+ def test_md_310_109(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 1
+
+ # test case: drive mode manual
+ def test_md_310_110(self, env):
+ MDConf(env, text="""
+ MDRenewMode manual
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 0
+
+ # test case: drive mode auto
+ def test_md_310_111(self, env):
+ MDConf(env, text="""
+ MDRenewMode auto
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 1
+
+ # test case: drive mode always
+ def test_md_310_112(self, env):
+ MDConf(env, text="""
+ MDRenewMode always
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 2
+
+ # test case: renew window - 14 days
+ def test_md_310_113a(self, env):
+ MDConf(env, text="""
+ MDRenewWindow 14d
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-window'] == '14d'
+
+ # test case: renew window - 10 percent
+ def test_md_310_113b(self, env):
+ MDConf(env, text="""
+ MDRenewWindow 10%
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-window'] == '10%'
+
+ # test case: ca challenge type - http-01
+ def test_md_310_114(self, env):
+ MDConf(env, text="""
+ MDCAChallenges http-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['http-01']
+
+    # test case: ca challenge type - tls-alpn-01
+ def test_md_310_115(self, env):
+ MDConf(env, text="""
+ MDCAChallenges tls-alpn-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['tls-alpn-01']
+
+ # test case: ca challenge type - all
+ def test_md_310_116(self, env):
+ MDConf(env, text="""
+ MDCAChallenges http-01 tls-alpn-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['http-01', 'tls-alpn-01']
+
+ # test case: automatically collect md names from vhost config
+ def test_md_310_117(self, env):
+ conf = MDConf(env, text="""
+ MDMember auto
+ MDomain testdomain.org
+ """)
+ conf.add_vhost(port=12346, domains=[
+ "testdomain.org", "test.testdomain.org", "mail.testdomain.org",
+ ], with_ssl=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['domains'] == \
+ ['testdomain.org', 'test.testdomain.org', 'mail.testdomain.org']
+
+ # add renew window to existing md
+ def test_md_310_118(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ MDConf(env, text="""
+ MDRenewWindow 14d
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status("testdomain.org")
+ assert stat['renew-window'] == '14d'
+
+ # test case: set RSA key length 2048
+ def test_md_310_119(self, env):
+ MDConf(env, text="""
+ MDPrivateKeys RSA 2048
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey'] == {
+ "type": "RSA",
+ "bits": 2048
+ }
+
+ # test case: set RSA key length 4096
+ def test_md_310_120(self, env):
+ MDConf(env, text="""
+ MDPrivateKeys RSA 4096
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey'] == {
+ "type": "RSA",
+ "bits": 4096
+ }
+
+ # test case: require HTTPS
+ def test_md_310_121(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDRequireHttps temporary
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['require-https'] == "temporary"
+
+ # test case: require OCSP stapling
+ def test_md_310_122(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDMustStaple on
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is True
+
+ # test case: remove managed domain from config
+ def test_md_310_200(self, env):
+ dns_list = ["testdomain.org", "www.testdomain.org", "mail.testdomain.org"]
+ env.a2md(["add"] + dns_list)
+ env.check_md(dns_list, state=1)
+ conf = MDConf(env,)
+ conf.install()
+ assert env.apache_restart() == 0
+ # check: md stays in store
+ env.check_md(dns_list, state=1)
+
+ # test case: remove alias DNS from managed domain
+ def test_md_310_201(self, env):
+ dns_list = ["testdomain.org", "test.testdomain.org", "www.testdomain.org", "mail.testdomain.org"]
+ env.a2md(["add"] + dns_list)
+ env.check_md(dns_list, state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # check: DNS has been removed from md in store
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+
+ # test case: remove primary name from managed domain
+ def test_md_310_202(self, env):
+ dns_list = ["name.testdomain.org", "testdomain.org", "www.testdomain.org", "mail.testdomain.org"]
+ env.a2md(["add"] + dns_list)
+ env.check_md(dns_list, state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+        # check: the md is renamed, the previous primary name is dropped
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"],
+ md="testdomain.org", state=1)
+
+ # test case: remove one md, keep another
+ def test_md_310_203(self, env):
+ dns_list1 = ["greenbytes2.de", "www.greenbytes2.de", "mail.greenbytes2.de"]
+ dns_list2 = ["testdomain.org", "www.testdomain.org", "mail.testdomain.org"]
+ env.a2md(["add"] + dns_list1)
+ env.a2md(["add"] + dns_list2)
+ env.check_md(dns_list1, state=1)
+ env.check_md(dns_list2, state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # all mds stay in store
+ env.check_md(dns_list1, state=1)
+ env.check_md(dns_list2, state=1)
+
+ # test case: remove ca info from md, should switch over to new defaults
+ def test_md_310_204(self, env):
+ name = "testdomain.org"
+ MDConf(env, local_ca=False, text="""
+ MDCertificateAuthority http://acme.test.org:4000/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://acme.test.org:4000/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # setup: sync with ca info removed
+ MDConf(env, local_ca=False, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ ca="https://acme-v02.api.letsencrypt.org/directory", protocol="ACME")
+
+ # test case: remove server admin from md
+ def test_md_310_205(self, env):
+ name = "testdomain.org"
+ MDConf(env, admin="admin@testdomain.org", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # setup: sync with admin info removed
+ MDConf(env, admin="", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # check: md stays the same with previous admin info
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ contacts=["mailto:admin@testdomain.org"])
+
+ # test case: remove renew window from conf -> fallback to default
+ def test_md_310_206(self, env):
+ MDConf(env, text="""
+ MDRenewWindow 14d
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-window'] == '14d'
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+        # check: renew window falls back to the default (33%)
+ assert env.a2md(["list"]).json['output'][0]['renew-window'] == '33%'
+
+ # test case: remove drive mode from conf -> fallback to default (auto)
+ @pytest.mark.parametrize("renew_mode,exp_code", [
+ ("manual", 0),
+ ("auto", 1),
+ ("always", 2)
+ ])
+ def test_md_310_207(self, env, renew_mode, exp_code):
+ MDConf(env, text="""
+ MDRenewMode %s
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """ % renew_mode).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == exp_code
+ #
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 1
+
+ # test case: remove challenges from conf -> fallback to default (not set)
+ def test_md_310_208(self, env):
+ MDConf(env, text="""
+ MDCAChallenges http-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['http-01']
+ #
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert 'challenges' not in env.a2md(["list"]).json['output'][0]['ca']
+
+ # test case: specify RSA key
+ @pytest.mark.parametrize("key_size", ["2048", "4096"])
+ def test_md_310_209(self, env, key_size):
+ MDConf(env, text="""
+ MDPrivateKeys RSA %s
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """ % key_size).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey']['type'] == "RSA"
+ #
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert "privkey" not in env.a2md(["list"]).json['output'][0]
+
+ # test case: require HTTPS
+ @pytest.mark.parametrize("mode", ["temporary", "permanent"])
+ def test_md_310_210(self, env, mode):
+ MDConf(env, text="""
+ <MDomainSet testdomain.org>
+ MDMember www.testdomain.org mail.testdomain.org
+ MDRequireHttps %s
+ </MDomainSet>
+ """ % mode).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['require-https'] == mode, \
+ "Unexpected HTTPS require mode in store. config: {}".format(mode)
+ #
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert "require-https" not in env.a2md(["list"]).json['output'][0], \
+ "HTTPS require still persisted in store. config: {}".format(mode)
+
+ # test case: require OCSP stapling
+ def test_md_310_211(self, env):
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDMustStaple on
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is True
+ #
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is False
+
+ # test case: reorder DNS names in md definition
+ def test_md_310_300(self, env):
+ dns_list = ["testdomain.org", "mail.testdomain.org", "www.testdomain.org"]
+ env.a2md(["add"] + dns_list)
+ env.check_md(dns_list, state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # check: dns list changes
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+
+ # test case: move DNS from one md to another
+ def test_md_310_301(self, env):
+ env.a2md(["add", "testdomain.org", "www.testdomain.org", "mail.testdomain.org", "mail.testdomain2.org"])
+ env.a2md(["add", "testdomain2.org", "www.testdomain2.org"])
+ env.check_md(["testdomain.org", "www.testdomain.org",
+ "mail.testdomain.org", "mail.testdomain2.org"], state=1)
+ env.check_md(["testdomain2.org", "www.testdomain2.org"], state=1)
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDomain testdomain2.org www.testdomain2.org mail.testdomain2.org
+ """).install()
+ assert env.apache_restart() == 0
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+ env.check_md(["testdomain2.org", "www.testdomain2.org", "mail.testdomain2.org"], state=1)
+
+ # test case: change ca info
+ def test_md_310_302(self, env):
+ name = "testdomain.org"
+ MDConf(env, local_ca=False, text="""
+ MDCertificateAuthority http://acme.test.org:4000/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://acme.test.org:4000/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # setup: sync with changed ca info
+ MDConf(env, local_ca=False, admin="webmaster@testdomain.org",
+ text="""
+ MDCertificateAuthority http://somewhere.com:6666/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://somewhere.com:6666/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # check: md was updated with the new ca info
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ ca="http://somewhere.com:6666/directory", protocol="ACME",
+ agreement="http://somewhere.com:6666/terms/v1")
+
+ # test case: change server admin
+ def test_md_310_303(self, env):
+ name = "testdomain.org"
+ MDConf(env, admin="admin@testdomain.org", text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # setup: sync with changed admin info
+ MDConf(env, local_ca=False, admin="webmaster@testdomain.org", text="""
+ MDCertificateAuthority http://somewhere.com:6666/directory
+ MDCertificateProtocol ACME
+ MDCertificateAgreement http://somewhere.com:6666/terms/v1
+
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ # check: md was updated with the new admin info
+ env.check_md([name, "www.testdomain.org", "mail.testdomain.org"], state=1,
+ contacts=["mailto:webmaster@testdomain.org"])
+
+ # test case: change drive mode - manual -> auto -> always
+ def test_md_310_304(self, env):
+ MDConf(env, text="""
+ MDRenewMode manual
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 0
+ # test case: drive mode auto
+ MDConf(env, text="""
+ MDRenewMode auto
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 1
+ # test case: drive mode always
+ MDConf(env, text="""
+ MDRenewMode always
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['renew-mode'] == 2
+
+ # test case: change config value for renew window, use various syntax alternatives
+ def test_md_310_305(self, env):
+ MDConf(env, text="""
+ MDRenewWindow 14d
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ md = env.a2md(["list"]).json['output'][0]
+ assert md['renew-window'] == '14d'
+ MDConf(env, text="""
+ MDRenewWindow 10
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ md = env.a2md(["list"]).json['output'][0]
+ assert md['renew-window'] == '10d'
+ MDConf(env, text="""
+ MDRenewWindow 10%
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ md = env.a2md(["list"]).json['output'][0]
+ assert md['renew-window'] == '10%'
+
+ # test case: change challenge types - http-01 -> tls-alpn-01 -> both
+ def test_md_310_306(self, env):
+ MDConf(env, text="""
+ MDCAChallenges http-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['http-01']
+ # switch to tls-alpn-01 only
+ MDConf(env, text="""
+ MDCAChallenges tls-alpn-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['tls-alpn-01']
+ # use both challenge types
+ MDConf(env, text="""
+ MDCAChallenges http-01 tls-alpn-01
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['ca']['challenges'] == ['http-01', 'tls-alpn-01']
+
+ # test case: RSA key length: 4096 -> 2048 -> 4096
+ def test_md_310_307(self, env):
+ MDConf(env, text="""
+ MDPrivateKeys RSA 4096
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey'] == {
+ "type": "RSA",
+ "bits": 4096
+ }
+ MDConf(env, text="""
+ MDPrivateKeys RSA 2048
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey'] == {
+ "type": "RSA",
+ "bits": 2048
+ }
+ MDConf(env, text="""
+ MDPrivateKeys RSA 4096
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['privkey'] == {
+ "type": "RSA",
+ "bits": 4096
+ }
+
+ # test case: change HTTPS require settings on existing md
+ def test_md_310_308(self, env):
+ # setup: nothing set
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert "require-https" not in env.a2md(["list"]).json['output'][0]
+ # test case: temporary redirect
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDRequireHttps temporary
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['require-https'] == "temporary"
+ # test case: permanent redirect
+ MDConf(env, text="""
+ <MDomainSet testdomain.org>
+ MDMember www.testdomain.org mail.testdomain.org
+ MDRequireHttps permanent
+ </MDomainSet>
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['require-https'] == "permanent"
+
+ # test case: change OCSP stapling settings on existing md
+ def test_md_310_309(self, env):
+ # setup: nothing set
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is False
+ # test case: OCSP stapling on
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDMustStaple on
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is True
+ # test case: OCSP stapling off
+ MDConf(env, text="""
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ MDMustStaple off
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'][0]['must-staple'] is False
+
+ # test case: change renew window parameter
+ @pytest.mark.parametrize("window", [
+ "0%", "33d", "40%"
+ ])
+ def test_md_310_310(self, env, window):
+ # non-default renewal setting
+ domain = self.test_domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md([domain])
+ conf.add_drive_mode("manual")
+ conf.add_renew_window(window)
+ conf.end_md()
+ conf.add_vhost(domains=domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain)
+ assert stat["renew-window"] == window
+
+ # test case: add dns name on existing valid md
+ def test_md_310_400(self, env):
+ # setup: create complete md in store
+ domain = self.test_domain
+ name = "www." + domain
+ assert env.a2md(["add", name, "test1." + domain]).exit_code == 0
+ assert env.a2md(["update", name, "contacts", "admin@" + name]).exit_code == 0
+ assert env.a2md(["update", name, "agreement", env.acme_tos]).exit_code == 0
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ # setup: drive it
+ r = env.a2md(["-v", "drive", name])
+ assert r.exit_code == 0, "drive not successful: {0}".format(r.stderr)
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+
+ # remove one domain -> status stays COMPLETE
+ assert env.a2md(["update", name, "domains", name]).exit_code == 0
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+
+ # add other domain -> status INCOMPLETE
+ assert env.a2md(["update", name, "domains", name, "test2." + domain]).exit_code == 0
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_INCOMPLETE
+
+ # test case: change ca info
+ def test_md_310_401(self, env):
+ # setup: create complete md in store
+ domain = self.test_domain
+ name = "www." + domain
+ assert env.a2md(["add", name]).exit_code == 0
+ assert env.a2md(["update", name, "contacts", "admin@" + name]).exit_code == 0
+ assert env.a2md(["update", name, "agreement", env.acme_tos]).exit_code == 0
+ assert env.apache_restart() == 0
+ # setup: drive it
+ assert env.a2md(["drive", name]).exit_code == 0
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+ # setup: change CA URL
+ assert env.a2md(["update", name, "ca", env.acme_url]).exit_code == 0
+ # check: state stays COMPLETE
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+
+ # test case: change the store dir
+ def test_md_310_500(self, env):
+ MDConf(env, text="""
+ MDStoreDir md-other
+ MDomain testdomain.org www.testdomain.org mail.testdomain.org
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list"]).json['output'] == []
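+ # the freshly configured store is empty; point the test env at "md-other" to find the synched MD there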
+ env.set_store_dir("md-other")
+ env.check_md(["testdomain.org", "www.testdomain.org", "mail.testdomain.org"], state=1)
+ env.clear_store()
+ env.set_store_dir_default()
+
+ # test case: place an unexpected file into the store, check startup survival, see #218
+ def test_md_310_501(self, env):
+ # setup: create complete md in store
+ domain = self.test_domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md([domain])
+ conf.end_md()
+ conf.add_vhost(domains=[domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ # add a file at top level
+ assert env.await_completion([domain])
+ fpath = os.path.join(env.store_domains(), "wrong.com")
+ with open(fpath, 'w') as fd:
+ fd.write("this does not belong here\n")
+ assert env.apache_restart() == 0
+
+ # test case: add external account binding
+ def test_md_310_601(self, env):
+ domain = self.test_domain
+ # directly set
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md([domain])
+ conf.add_drive_mode("manual")
+ conf.add("MDExternalAccountBinding k123 hash123")
+ conf.end_md()
+ conf.add_vhost(domains=domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain)
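+ # the HMAC value appears masked as '***' in the status output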
+ assert stat["eab"] == {'kid': 'k123', 'hmac': '***'}
+ # eab inherited
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("MDExternalAccountBinding k456 hash456")
+ conf.start_md([domain])
+ conf.add_drive_mode("manual")
+ conf.end_md()
+ conf.add_vhost(domains=domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain)
+ assert stat["eab"] == {'kid': 'k456', 'hmac': '***'}
+ # override eab inherited
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("MDExternalAccountBinding k456 hash456")
+ conf.start_md([domain])
+ conf.add_drive_mode("manual")
+ conf.add("MDExternalAccountBinding none")
+ conf.end_md()
+ conf.add_vhost(domains=domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain)
+ assert "eab" not in stat
+
diff --git a/test/modules/md/test_502_acmev2_drive.py b/test/modules/md/test_502_acmev2_drive.py
new file mode 100644
index 0000000..eb754f2
--- /dev/null
+++ b/test/modules/md/test_502_acmev2_drive.py
@@ -0,0 +1,549 @@
+# test driving the ACMEv2 protocol
+
+import base64
+import json
+import os.path
+import re
+import time
+
+import pytest
+
+from .md_conf import MDConf
+from .md_cert_util import MDCertUtil
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestDrivev2:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.APACHE_CONF_SRC = "data/test_drive"
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ MDConf(env).install()
+ self.test_domain = env.get_request_domain(request)
+
+ # --------- invalid precondition ---------
+
+ def test_md_502_000(self, env):
+ # test case: md without contact info
+ domain = self.test_domain
+ name = "www." + domain
+ assert env.a2md(["add", name]).exit_code == 0
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 1
+ assert re.search("No contact information", r.stderr)
+
+ def test_md_502_001(self, env):
+ # test case: md with contact, but without TOS
+ domain = self.test_domain
+ name = "www." + domain
+ assert env.a2md(["add", name]).exit_code == 0
+ assert env.a2md(
+ ["update", name, "contacts", "admin@test1.not-forbidden.org"]
+ ).exit_code == 0
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 1
+ assert re.search("the CA requires you to accept the terms-of-service as specified in ", r.stderr)
+
+ # test_102 removed, was based on false assumption
+
+ def test_md_502_003(self, env):
+ # test case: md with unknown protocol FOO
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.a2md(
+ ["update", name, "ca", env.acme_url, "FOO"]
+ ).exit_code == 0
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 1
+ assert re.search("Unknown CA protocol", r.stderr)
+
+ # --------- driving OK ---------
+
+ def test_md_502_100(self, env):
+ # test case: md with one domain
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.apache_restart() == 0
+ # drive
+ prev_md = env.a2md(["list", name]).json['output'][0]
+ r = env.a2md(["-vv", "drive", "-c", "http-01", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ env.check_md_credentials([name])
+ self._check_account_key(env, name)
+
+ # check archive content
+ store_md = json.loads(open(env.store_archived_file(name, 1, 'md.json')).read())
+ for f in ['name', 'ca', 'domains', 'contacts', 'renew-mode', 'renew-window', 'must-staple']:
+ assert store_md[f] == prev_md[f]
+
+ # check file system permissions:
+ env.check_file_permissions(name)
+ # check: challenges removed
+ env.check_dir_empty(env.store_challenges())
+ # check how the challenge resources are answered in several combinations
+ r = env.get_meta(domain, "/.well-known/acme-challenge", False)
+ assert r.exit_code == 0
+ assert r.response['status'] == 404
+ r = env.get_meta(domain, "/.well-known/acme-challenge/", False)
+ assert r.exit_code == 0
+ assert r.response['status'] == 404
+ r = env.get_meta(domain, "/.well-known/acme-challenge/123", False)
+ assert r.exit_code == 0
+ assert r.response['status'] == 404
+ cdir = os.path.join(env.store_challenges(), domain)
+ os.makedirs(cdir)
+ open(os.path.join(cdir, 'acme-http-01.txt'), "w").write("content-of-123")
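+ # with a challenge file in place the request is answered; "content-of-123" is 14 bytes,
+ # hence the content-length expected below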
+ r = env.get_meta(domain, "/.well-known/acme-challenge/123", False)
+ assert r.exit_code == 0
+ assert r.response['status'] == 200
+ assert r.response['header']['content-length'] == '14'
+
+ def test_md_502_101(self, env):
+ # test case: md with 2 domains
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name, "test." + domain])
+ assert env.apache_restart() == 0
+ # drive
+ r = env.a2md(["-vv", "drive", "-c", "http-01", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ env.check_md_credentials([name, "test." + domain])
+
+ # test_502_102 removed, as accounts without ToS are not allowed in ACMEv2
+
+ def test_md_502_103(self, env):
+ # test case: md with one domain, ACME account and TOS agreement on server
+ # setup: create md
+ domain = self.test_domain
+ name = "www." + domain
+ assert env.a2md(["add", name]).exit_code == 0
+ assert env.a2md(["update", name, "contacts", "admin@" + domain]).exit_code == 0
+ assert env.apache_restart() == 0
+ # setup: create account on server
+ r = env.a2md(["-t", "accepted", "acme", "newreg", "admin@" + domain], raw=True)
+ assert r.exit_code == 0
+ acct = re.match("registered: (.*)$", r.stdout).group(1)
+ # setup: link md to account
+ assert env.a2md(["update", name, "account", acct]).exit_code == 0
+ # drive
+ r = env.a2md(["-vv", "drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ env.check_md_credentials([name])
+
+ # test_502_104 removed, order are created differently in ACMEv2
+
+ def test_md_502_105(self, env):
+ # test case: md with one domain, local TOS agreement and ACME account that is deleted (!) on server
+ # setup: create md
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.apache_restart() == 0
+ # setup: create account on server
+ r = env.a2md(["-t", "accepted", "acme", "newreg", "test@" + domain], raw=True)
+ assert r.exit_code == 0
+ acct = re.match("registered: (.*)$", r.stdout).group(1)
+ # setup: link md to account
+ assert env.a2md(["update", name, "account", acct]).exit_code == 0
+ # setup: delete account on server
+ assert env.a2md(["acme", "delreg", acct]).exit_code == 0
+ # drive
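+ # expected to succeed even though the server-side account was deleted,
+ # presumably by registering a fresh account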
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ env.check_md_credentials([name])
+
+ def test_md_502_107(self, env):
+ # test case: drive again on COMPLETE md, then drive --force
+ # setup: prepare md in store
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.apache_restart() == 0
+ # drive
+ r = env.a2md(["-vv", "drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ env.check_md_credentials([name])
+ orig_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+
+ # drive again
+ assert env.a2md(["-vv", "drive", name]).exit_code == 0
+ env.check_md_credentials([name])
+ cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ # check: cert not changed
+ assert cert.same_serial_as(orig_cert)
+
+ # drive --force
+ assert env.a2md(["-vv", "drive", "--force", name]).exit_code == 0
+ env.check_md_credentials([name])
+ cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ # check: cert not changed
+ assert not cert.same_serial_as(orig_cert)
+ # check: previous cert was archived
+ cert = MDCertUtil(env.store_archived_file(name, 2, 'pubcert.pem'))
+ assert cert.same_serial_as(orig_cert)
+
+ def test_md_502_108(self, env):
+ # test case: drive via HTTP proxy
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ conf = MDConf(env, proxy=True)
+ conf.add('LogLevel proxy:trace8')
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # drive it, with wrong proxy url -> FAIL
+ r = env.a2md(["-p", "http://localhost:1", "drive", name])
+ assert r.exit_code == 1
+ assert "Connection refused" in r.stderr
+
+ # drive it, working proxy url -> SUCCESS
+ proxy_url = f"http://localhost:{env.proxy_port}"
+ r = env.a2md(["-vv", "-p", proxy_url, "drive", name])
+ assert 0 == r.exit_code, "a2md failed: {0}".format(r.stderr)
+ env.check_md_credentials([name])
+
+ def test_md_502_109(self, env):
+ # test case: redirect on SSL-only domain
+ # setup: prepare config
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md([name])
+ conf.add_vhost(name, port=env.http_port, doc_root="htdocs/test")
+ conf.add_vhost(name, doc_root="htdocs/test")
+ conf.install()
+ # setup: create resource files
+ self._write_res_file(os.path.join(env.server_docs_dir, "test"), "name.txt", name)
+ self._write_res_file(os.path.join(env.server_docs_dir), "name.txt", "not-forbidden.org")
+ assert env.apache_restart() == 0
+
+ # drive it
+ assert env.a2md(["drive", name]).exit_code == 0
+ assert env.apache_restart() == 0
+ # test HTTP access - no redirect
+ jdata = env.get_json_content(f"test1.{env.http_tld}", "/alive.json", use_https=False)
+ assert jdata['host'] == "test1"
+ assert env.get_content(name, "/name.txt", use_https=False) == name
+ r = env.get_meta(name, "/name.txt", use_https=False)
+ assert int(r.response['header']['content-length']) == len(name)
+ assert "Location" not in r.response['header']
+ # test HTTPS access
+ assert env.get_content(name, "/name.txt", use_https=True) == name
+
+ # test HTTP access again -> redirect to default HTTPS port
+ conf.add("MDRequireHttps temporary")
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.get_meta(name, "/name.txt", use_https=False)
+ assert r.response['status'] == 302
+ exp_location = "https://%s/name.txt" % name
+ assert r.response['header']['location'] == exp_location
+ # should not see this
+ assert 'strict-transport-security' not in r.response['header']
+ # test default HTTP vhost -> still no redirect
+ jdata = env.get_json_content(f"test1.{env.http_tld}", "/alive.json", use_https=False)
+ assert jdata['host'] == "test1"
+ r = env.get_meta(name, "/name.txt", use_https=True)
+ # also not for this
+ assert 'strict-transport-security' not in r.response['header']
+
+ # test HTTP access again -> redirect permanent
+ conf.add("MDRequireHttps permanent")
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.get_meta(name, "/name.txt", use_https=False)
+ assert r.response['status'] == 301
+ exp_location = "https://%s/name.txt" % name
+ assert r.response['header']['location'] == exp_location
+ assert 'strict-transport-security' not in r.response['header']
+ # should see this
+ r = env.get_meta(name, "/name.txt", use_https=True)
+ assert r.response['header']['strict-transport-security'] == 'max-age=15768000'
+
+ def test_md_502_110(self, env):
+ # test case: SSL-only domain, override headers generated by mod_md
+ # setup: prepare config
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add("MDRequireHttps permanent")
+ conf.add_md([name])
+ conf.add_vhost(name, port=env.http_port)
+ conf.add_vhost(name)
+ conf.install()
+ assert env.apache_restart() == 0
+ # drive it
+ assert env.a2md(["drive", name]).exit_code == 0
+ assert env.apache_restart() == 0
+
+ # test override HSTS header
+ conf.add('Header set Strict-Transport-Security "max-age=10886400; includeSubDomains; preload"')
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.get_meta(name, "/name.txt", use_https=True)
+ assert 'strict-transport-security' in r.response['header'], r.response['header']
+ assert r.response['header']['strict-transport-security'] == \
+ 'max-age=10886400; includeSubDomains; preload'
+
+ # test override Location header
+ conf.add(' Redirect /a /name.txt')
+ conf.add(' Redirect seeother /b /name.txt')
+ conf.install()
+ assert env.apache_restart() == 0
+ # check: default redirect by mod_md still works
+ exp_location = "https://%s/name.txt" % name
+ r = env.get_meta(name, "/name.txt", use_https=False)
+ assert r.response['status'] == 301
+ assert r.response['header']['location'] == exp_location
+ # check: redirect as given by mod_alias
+ exp_location = "https://%s/a" % name
+ r = env.get_meta(name, "/a", use_https=False)
+ assert r.response['status'] == 301 # FAIL: mod_alias generates Location header instead of mod_md
+ assert r.response['header']['location'] == exp_location
+
+ def test_md_502_111(self, env):
+ # test case: vhost with parallel HTTP/HTTPS, check mod_alias redirects
+ # setup: prepare config
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md([name])
+ conf.add(" LogLevel alias:debug")
+ conf.add_vhost(name, port=env.http_port)
+ conf.add_vhost(name)
+ conf.install()
+ assert env.apache_restart() == 0
+ # drive it
+ r = env.a2md(["-v", "drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ assert env.apache_restart() == 0
+
+ # setup: place redirect rules
+ conf.add(' Redirect /a /name.txt')
+ conf.add(' Redirect seeother /b /name.txt')
+ conf.install()
+ assert env.apache_restart() == 0
+ # check: redirects on HTTP
+ exp_location = "http://%s:%s/name.txt" % (name, env.http_port)
+ r = env.get_meta(name, "/a", use_https=False)
+ assert r.response['status'] == 302
+ assert r.response['header']['location'] == exp_location
+ r = env.get_meta(name, "/b", use_https=False)
+ assert r.response['status'] == 303
+ assert r.response['header']['location'] == exp_location
+ # check: redirects on HTTPS
+ exp_location = "https://%s:%s/name.txt" % (name, env.https_port)
+ r = env.get_meta(name, "/a", use_https=True)
+ assert r.response['status'] == 302
+ assert r.response['header']['location'] == exp_location # FAIL: expected 'https://...' but found 'http://...'
+ r = env.get_meta(name, "/b", use_https=True)
+ assert r.response['status'] == 303
+ assert r.response['header']['location'] == exp_location
+
+ def test_md_502_120(self, env):
+ # test case: NULL pointer dereference reported by Daniel Caminada <daniel.caminada@ergon.ch>
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md([name])
+ conf.add_vhost(name)
+ conf.install()
+ assert env.apache_restart() == 0
+ env.run(["openssl", "s_client",
+ f"-connect", "localhost:{env.https_port}",
+ "-servername", "example.com", "-crlf"
+ ], intext="GET https:// HTTP/1.1\nHost: example.com\n\n")
+ assert env.apache_restart() == 0
+
+ # --------- critical state change -> drive again ---------
+
+ def test_md_502_200(self, env):
+ # test case: add dns name on existing valid md
+ # setup: create md in store
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.apache_restart() == 0
+ # setup: drive it
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ old_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ # setup: add second domain
+ assert env.a2md(["update", name, "domains", name, "test." + domain]).exit_code == 0
+ # drive
+ r = env.a2md(["-vv", "drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ # check new cert
+ env.check_md_credentials([name, "test." + domain])
+ new_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert not old_cert.same_serial_as(new_cert)
+
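+ # Renewal triggers once the remaining validity falls inside the window:
+ # for "14d", 13 days left renews while 15 does not; for "30%" the window is
+ # presumably 30% of the total lifetime (~54 of 180 days), so 53 days left
+ # renews while 60 does not.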
+ @pytest.mark.parametrize("renew_window,test_data_list", [
+ ("14d", [
+ {"valid": {"notBefore": -5, "notAfter": 180}, "renew": False},
+ {"valid": {"notBefore": -200, "notAfter": 15}, "renew": False},
+ {"valid": {"notBefore": -200, "notAfter": 13}, "renew": True},
+ ]),
+ ("30%", [
+ {"valid": {"notBefore": -0, "notAfter": 180}, "renew": False},
+ {"valid": {"notBefore": -120, "notAfter": 60}, "renew": False},
+ {"valid": {"notBefore": -126, "notAfter": 53}, "renew": True},
+ ])
+ ])
+ def test_md_502_201(self, env, renew_window, test_data_list):
+ # test case: trigger cert renew when entering renew window
+ # setup: prepare COMPLETE md
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_renew_window(renew_window)
+ conf.add_md([name])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_INCOMPLETE
+ # setup: drive it
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ cert1 = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+
+ # replace cert by self-signed one -> check md status
+ print("TRACE: start testing renew window: %s" % renew_window)
+ for tc in test_data_list:
+ print("TRACE: create self-signed cert: %s" % tc["valid"])
+ env.create_self_signed_cert([name], tc["valid"])
+ cert2 = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert not cert2.same_serial_as(cert1)
+ md = env.a2md(["list", name]).json['output'][0]
+ assert md["renew"] == tc["renew"], \
+ "Expected renew == {} indicator in {}, test case {}".format(tc["renew"], md, tc)
+
+ @pytest.mark.parametrize("key_type,key_params,exp_key_length", [
+ ("RSA", [2048], 2048),
+ ("RSA", [3072], 3072),
+ ("RSA", [4096], 4096),
+ ("Default", [], 2048)
+ ])
+ def test_md_502_202(self, env, key_type, key_params, exp_key_length):
+ # test case: specify RSA key length and verify resulting cert key
+ # setup: prepare md
+ domain = self.test_domain
+ name = "www." + domain
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_private_key(key_type, key_params)
+ conf.add_md([name])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_INCOMPLETE
+ # setup: drive it
+ r = env.a2md(["-vv", "drive", name])
+ assert r.exit_code == 0, "drive for MDPrivateKeys {} {}: {}".format(key_type, key_params, r.stderr)
+ assert env.a2md(["list", name]).json['output'][0]['state'] == env.MD_S_COMPLETE
+ # check cert key length
+ cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert cert.get_key_length() == exp_key_length
+
+ # test_502_203 removed, as ToS agreement is not really checked in ACMEv2
+
+ # --------- non-critical state change -> keep data ---------
+
+ def test_md_502_300(self, env):
+ # test case: remove one domain name from existing valid md
+ # setup: create md in store
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name, "test." + domain, "xxx." + domain])
+ assert env.apache_restart() == 0
+ # setup: drive it
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ old_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ # setup: remove one domain
+ assert env.a2md(["update", name, "domains"] + [name, "test." + domain]).exit_code == 0
+ # drive
+ assert env.a2md(["-vv", "drive", name]).exit_code == 0
+ # compare cert serial
+ new_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert old_cert.same_serial_as(new_cert)
+
+ def test_md_502_301(self, env):
+ # test case: change contact info on existing valid md
+ # setup: create md in store
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.apache_restart() == 0
+ # setup: drive it
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 0, "a2md drive failed: {0}".format(r.stderr)
+ old_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ # setup: add second domain
+ assert env.a2md(["update", name, "contacts", "test@" + domain]).exit_code == 0
+ # drive
+ assert env.a2md(["drive", name]).exit_code == 0
+ # compare cert serial
+ new_cert = MDCertUtil(env.store_domain_file(name, 'pubcert.pem'))
+ assert old_cert.same_serial_as(new_cert)
+
+ # --------- network problems ---------
+
+ def test_md_502_400(self, env):
+ # test case: server not reachable
+ domain = self.test_domain
+ name = "www." + domain
+ self._prepare_md(env, [name])
+ assert env.a2md(
+ ["update", name, "ca", "http://localhost:4711/directory"]
+ ).exit_code == 0
+ # drive
+ r = env.a2md(["drive", name])
+ assert r.exit_code == 1
+ assert r.json['status'] != 0
+ assert r.json['description'] == 'Connection refused'
+
+ # --------- _utils_ ---------
+
+ def _prepare_md(self, env, domains):
+ assert env.a2md(["add"] + domains).exit_code == 0
+ assert env.a2md(
+ ["update", domains[0], "contacts", "admin@" + domains[0]]
+ ).exit_code == 0
+ assert env.a2md(
+ ["update", domains[0], "agreement", env.acme_tos]
+ ).exit_code == 0
+
+ def _write_res_file(self, doc_root, name, content):
+ if not os.path.exists(doc_root):
+ os.makedirs(doc_root)
+ open(os.path.join(doc_root, name), "w").write(content)
+
+ RE_MSG_OPENSSL_BAD_DECRYPT = re.compile('.*\'bad decrypt\'.*')
+
+ def _check_account_key(self, env, name):
+ # read encryption key
+ md_store = json.loads(open(env.path_store_json(), 'r').read())
+ encrypt_key = base64.urlsafe_b64decode(str(md_store['key']))
+ # check: key file is encrypted PEM
+ md = env.a2md(["list", name]).json['output'][0]
+ acc = md['ca']['account']
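+ # the lambda presumably serves as the passphrase callback, handing the store
+ # key to the PEM decryption of the account key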
+ MDCertUtil.validate_privkey(env.path_account_key(acc), lambda *args: encrypt_key)
diff --git a/test/modules/md/test_602_roundtrip.py b/test/modules/md/test_602_roundtrip.py
new file mode 100644
index 0000000..9ff87e5
--- /dev/null
+++ b/test/modules/md/test_602_roundtrip.py
@@ -0,0 +1,143 @@
+# test mod_md basic configurations
+
+import os
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_a2md(), reason="no a2md available")
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestRoundtripv2:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.APACHE_CONF_SRC = "data/test_roundtrip"
+ env.clear_store()
+ MDConf(env).install()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.check_acme()
+ self.test_domain = env.get_request_domain(request)
+
+ # --------- add to store ---------
+
+ def test_md_602_000(self, env):
+ # test case: generate config with md -> restart -> drive -> generate config
+ # with vhost and ssl -> restart -> check HTTPS access
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+
+ # - generate config with one md
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md(domains)
+ conf.install()
+ # - restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # - drive
+ assert env.a2md(["-v", "drive", domain]).exit_code == 0
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain)
+ # - append vhost to config
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ # check: SSL is running OK
+ cert = env.get_cert(domain)
+ assert domain in cert.get_san_list()
+
+ # check file system permissions:
+ env.check_file_permissions(domain)
+
+ def test_md_602_001(self, env):
+ # test case: same as test_md_602_000, but with two parallel managed domains
+ domain_a = "a-" + self.test_domain
+ domain_b = "b-" + self.test_domain
+ # - generate config with one md
+ domains_a = [domain_a, "www." + domain_a]
+ domains_b = [domain_b, "www." + domain_b]
+
+ conf = MDConf(env)
+ conf.add_drive_mode("manual")
+ conf.add_md(domains_a)
+ conf.add_md(domains_b)
+ conf.install()
+
+ # - restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains_a)
+ env.check_md(domains_b)
+
+ # - drive
+ assert env.a2md(["drive", domain_a]).exit_code == 0
+ assert env.a2md(["drive", domain_b]).exit_code == 0
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain_a)
+ env.check_md_complete(domain_b)
+
+ # - append vhost to config
+ conf.add_vhost(domains_a)
+ conf.add_vhost(domains_b)
+ conf.install()
+
+ # check: SSL is running OK
+ assert env.apache_restart() == 0
+ cert_a = env.get_cert(domain_a)
+ assert domains_a == cert_a.get_san_list()
+ cert_b = env.get_cert(domain_b)
+ assert domains_b == cert_b.get_san_list()
+
+ def test_md_602_002(self, env):
+ # test case: one md, that covers two vhosts
+ domain = self.test_domain
+ name_a = "a." + domain
+ name_b = "b." + domain
+ domains = [domain, name_a, name_b]
+
+ # - generate config with one md
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md(domains)
+ conf.install()
+
+ # - restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+
+ # - drive
+ assert env.a2md(["drive", domain]).exit_code == 0
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain)
+
+ # - append vhost to config
+ conf.add_vhost(name_a, doc_root="htdocs/a")
+ conf.add_vhost(name_b, doc_root="htdocs/b")
+ conf.install()
+
+ # - create docRoot folder
+ self._write_res_file(os.path.join(env.server_docs_dir, "a"), "name.txt", name_a)
+ self._write_res_file(os.path.join(env.server_docs_dir, "b"), "name.txt", name_b)
+
+ # check: SSL is running OK
+ assert env.apache_restart() == 0
+ cert_a = env.get_cert(name_a)
+ assert name_a in cert_a.get_san_list()
+ cert_b = env.get_cert(name_b)
+ assert name_b in cert_b.get_san_list()
+ assert cert_a.same_serial_as(cert_b)
+ assert env.get_content(name_a, "/name.txt") == name_a
+ assert env.get_content(name_b, "/name.txt") == name_b
+
+ # --------- _utils_ ---------
+
+ def _write_res_file(self, doc_root, name, content):
+ if not os.path.exists(doc_root):
+ os.makedirs(doc_root)
+ open(os.path.join(doc_root, name), "w").write(content)
diff --git a/test/modules/md/test_702_auto.py b/test/modules/md/test_702_auto.py
new file mode 100644
index 0000000..8e8f5f1
--- /dev/null
+++ b/test/modules/md/test_702_auto.py
@@ -0,0 +1,756 @@
+import os
+import time
+
+import pytest
+
+from pyhttpd.conf import HttpdConf
+from pyhttpd.env import HttpdTestEnv
+from .md_cert_util import MDCertUtil
+from .md_env import MDTestEnv
+from .md_conf import MDConf
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestAutov2:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def _write_res_file(self, doc_root, name, content):
+ if not os.path.exists(doc_root):
+ os.makedirs(doc_root)
+ open(os.path.join(doc_root, name), "w").write(content)
+
+ # create a MD not used in any virtual host, auto drive should NOT pick it up
+ def test_md_702_001(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain, "www." + domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.install()
+ #
+ # restart, check that MD is synched to store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ stat = env.get_md_status(domain)
+ assert stat["watched"] == 0
+ #
+ # add vhost for MD, restart should drive it
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ stat = env.get_md_status(domain)
+ assert stat["watched"] == 1
+ cert = env.get_cert(domain)
+ assert domain in cert.get_san_list()
+ #
+ # challenges should have been removed
+ # file system needs to have correct permissions
+ env.check_dir_empty(env.store_challenges())
+ env.check_file_permissions(domain)
+
+ # test case: same as test_702_001, but with two parallel managed domains
+ def test_md_702_002(self, env):
+ domain = self.test_domain
+ domain_a = "a-" + domain
+ domain_b = "b-" + domain
+ #
+ # generate config with two MDs
+ domains_a = [domain_a, "www." + domain_a]
+ domains_b = [domain_b, "www." + domain_b]
+ conf = MDConf(env)
+ conf.add_drive_mode("auto")
+ conf.add_md(domains_a)
+ conf.add_md(domains_b)
+ conf.add_vhost(domains_a)
+ conf.add_vhost(domains_b)
+ conf.install()
+ #
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains_a)
+ env.check_md(domains_b)
+ #
+ # await drive completion, do not restart
+ assert env.await_completion([domain_a, domain_b], restart=False)
+ # staged certificates are now visible on the status resources
+ status = env.get_md_status(domain_a)
+ assert 'renewal' in status
+ assert 'cert' in status['renewal']
+ assert 'rsa' in status['renewal']['cert']
+ assert 'sha256-fingerprint' in status['renewal']['cert']['rsa']
+ # check the non-staged status
+ assert status['state'] == 1
+ assert status['state-descr'] == "certificate(rsa) is missing"
+
+ # restart and activate
+ assert env.apache_restart() == 0
+ # check: SSL is running OK
+ cert_a = env.get_cert(domain_a)
+ assert domains_a == cert_a.get_san_list()
+ cert_b = env.get_cert(domain_b)
+ assert domains_b == cert_b.get_san_list()
+ # check that we created only one account
+ md_a = env.get_md_status(domain_a)
+ md_b = env.get_md_status(domain_b)
+ assert md_a['ca'] == md_b['ca']
+
+ # test case: one MD, that covers two vhosts
+ def test_md_702_003(self, env):
+ domain = self.test_domain
+ name_a = "test-a." + domain
+ name_b = "test-b." + domain
+ domains = [domain, name_a, name_b]
+ #
+ # generate 1 MD and 2 vhosts
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_md(domains)
+ conf.add_vhost(name_a, doc_root="htdocs/a")
+ conf.add_vhost(name_b, doc_root="htdocs/b")
+ conf.install()
+ #
+ # create docRoot folder
+ self._write_res_file(os.path.join(env.server_docs_dir, "a"), "name.txt", name_a)
+ self._write_res_file(os.path.join(env.server_docs_dir, "b"), "name.txt", name_b)
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([domain])
+ md = env.check_md_complete(domain)
+ assert md['ca']['url'], f"URL of CA used not set in md: {md}"
+ #
+ # check: SSL is running OK
+ cert_a = env.get_cert(name_a)
+ assert name_a in cert_a.get_san_list()
+ cert_b = env.get_cert(name_b)
+ assert name_b in cert_b.get_san_list()
+ assert cert_a.same_serial_as(cert_b)
+ #
+ assert env.get_content(name_a, "/name.txt") == name_a
+ assert env.get_content(name_b, "/name.txt") == name_b
+
+ # test case: drive with using single challenge type explicitly
+ @pytest.mark.parametrize("challenge_type", [
+ "tls-alpn-01", "http-01",
+ ])
+ def test_md_702_004(self, env, challenge_type):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add(f"MDCAChallenges {challenge_type}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ #
+ # check SSL running OK
+ cert = env.get_cert(domain)
+ assert domain in cert.get_san_list()
+
+ # test case: drive_mode manual, check that server starts, but requests to domain are 503'd
+ def test_md_702_005(self, env):
+ domain = self.test_domain
+ name_a = "test-a." + domain
+ domains = [domain, name_a]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("manual")
+ conf.add_md(domains)
+ conf.add_vhost(name_a, doc_root="htdocs/a")
+ conf.install()
+ #
+ # create docRoot folder
+ self._write_res_file(os.path.join(env.server_docs_dir, "a"), "name.txt", name_a)
+ #
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ #
+ # check: that request to domains give 503 Service Unavailable
+ cert1 = env.get_cert(name_a)
+ assert name_a in cert1.get_san_list()
+ assert env.get_http_status(name_a, "/name.txt") == 503
+ #
+ # check temporary cert from server
+ cert2 = MDCertUtil(env.path_fallback_cert(domain))
+ assert cert1.same_serial_as(cert2), \
+ "Unexpected temporary certificate on vhost %s. Expected cn: %s , "\
+ "but found cn: %s" % (name_a, cert2.get_cn(), cert1.get_cn())
+
+ # test case: drive MD with only invalid challenges, domains should stay 503'd
+ def test_md_702_006(self, env):
+ domain = self.test_domain
+ name_a = "test-a." + domain
+ domains = [domain, name_a]
+ #
+ # generate 1 MD, 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("MDCAChallenges invalid-01 invalid-02")
+ conf.add_md(domains)
+ conf.add_vhost(name_a, doc_root="htdocs/a")
+ conf.install()
+ #
+ # create docRoot folder
+ self._write_res_file(os.path.join(env.server_docs_dir, "a"), "name.txt", name_a)
+ #
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'challenge-mismatch'
+ assert 'account' not in md['ca']
+ #
+ # check: that request to domains give 503 Service Unavailable
+ cert = env.get_cert(name_a)
+ assert name_a in cert.get_san_list()
+ assert env.get_http_status(name_a, "/name.txt") == 503
+
+ # Specify a non-working http proxy
+ def test_md_702_008(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ #
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("always")
+ conf.add("MDHttpProxy http://localhost:1")
+ conf.add_md(domains)
+ conf.install()
+ #
+ # - restart (-> drive)
+ assert env.apache_restart() == 0
+ # await drive completion
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['status-description'] == 'Connection refused'
+ assert 'account' not in md['ca']
+
+ # Specify a valid http proxy
+ def test_md_702_008a(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ #
+ conf = MDConf(env, admin=f"admin@{domain}", proxy=True)
+ conf.add_drive_mode("always")
+ conf.add(f"MDHttpProxy http://localhost:{env.proxy_port}")
+ conf.add_md(domains)
+ conf.install()
+ #
+ # - restart (-> drive), check that md is in store
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain)
+
+ # Force cert renewal due to critical remaining valid duration
+ # Assert that new cert activation is delayed
+ def test_md_702_009(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ #
+ # prepare md
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("auto")
+ conf.add_renew_window("10d")
+ conf.add_md(domains)
+ conf.add_vhost(domain)
+ conf.install()
+ #
+ # restart (-> drive), check that md+cert is in store, TLS is up
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ cert1 = MDCertUtil(env.store_domain_file(domain, 'pubcert.pem'))
+ # compare with what md reports as status
+ stat = env.get_certificate_status(domain)
+ assert cert1.same_serial_as(stat['rsa']['serial'])
+ #
+ # create self-signed cert, with critical remaining valid duration -> drive again
+ env.create_self_signed_cert([domain], {"notBefore": -120, "notAfter": 2}, serial=7029)
+ cert3 = MDCertUtil(env.store_domain_file(domain, 'pubcert.pem'))
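+ # 7029 decimal equals 0x1B75, the hex form compared below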
+ assert cert3.same_serial_as('1B75')
+ assert env.apache_restart() == 0
+ stat = env.get_certificate_status(domain)
+ assert cert3.same_serial_as(stat['rsa']['serial'])
+ #
+ # cert should renew and be different afterwards
+ assert env.await_completion([domain], must_renew=True)
+ stat = env.get_certificate_status(domain)
+ assert not cert3.same_serial_as(stat['rsa']['serial'])
+
+ # test case: drive with an unsupported challenge due to port availability
+ def test_md_702_010(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost, map port 80 to where the server does not listen
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("auto")
+ conf.add("MDPortMap 80:99")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md["renewal"]["errors"] > 0
+ #
+ # now the same with a 80 mapped to a supported port
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_drive_mode("auto")
+ conf.add("MDCAChallenges http-01")
+ conf.add("MDPortMap 80:%s" % env.http_port)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([domain])
+
+ def test_md_702_011(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost, map port 443 to where the server does not listen
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add("MDPortMap https:99 http:99")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md["renewal"]["errors"] > 0
+ #
+ # now the same with a 443 mapped to a supported port
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add("MDCAChallenges tls-alpn-01")
+ conf.add("MDPortMap https:%s" % env.https_port)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([domain])
+
+ # test case: one MD with several dns names. sign up. remove the *first* name
+ # in the MD. restart. should find and keep the existing MD.
+ # See: https://github.com/icing/mod_md/issues/68
+ def test_md_702_030(self, env):
+ domain = self.test_domain
+ name_x = "test-x." + domain
+ name_a = "test-a." + domain
+ name_b = "test-b." + domain
+ domains = [name_x, name_a, name_b]
+ #
+ # generate 1 MD and 2 vhosts
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_md(domains)
+ conf.add_vhost(name_a)
+ conf.add_vhost(name_b)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([name_x])
+ env.check_md_complete(name_x)
+ #
+ # check: SSL is running OK
+ cert_a = env.get_cert(name_a)
+ assert name_a in cert_a.get_san_list()
+ cert_b = env.get_cert(name_b)
+ assert name_b in cert_b.get_san_list()
+ assert cert_a.same_serial_as(cert_b)
+ #
+ # change MD by removing 1st name
+ new_list = [name_a, name_b]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_md(new_list)
+ conf.add_vhost(name_a)
+ conf.add_vhost(name_b)
+ conf.install()
+ # restart, check that host still works and kept the cert
+ assert env.apache_restart() == 0
+ env.check_md(new_list)
+ status = env.get_certificate_status(name_a)
+ assert cert_a.same_serial_as(status['rsa']['serial'])
+
+ # test case: Same as test_md_702_030, but remove *and* add another at the same time.
+ # restart. should find and keep the existing MD and renew for additional name.
+ # See: https://github.com/icing/mod_md/issues/68
+ def test_md_702_031(self, env):
+ domain = self.test_domain
+ name_x = "test-x." + domain
+ name_a = "test-a." + domain
+ name_b = "test-b." + domain
+ name_c = "test-c." + domain
+ domains = [name_x, name_a, name_b]
+ #
+ # generate 1 MD and 2 vhosts
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_md(domains)
+ conf.add_vhost(name_a)
+ conf.add_vhost(name_b)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_completion([name_x])
+ env.check_md_complete(name_x)
+ #
+ # check: SSL is running OK
+ cert_a = env.get_cert(name_a)
+ assert name_a in cert_a.get_san_list()
+ cert_b = env.get_cert(name_b)
+ assert name_b in cert_b.get_san_list()
+ assert cert_a.same_serial_as(cert_b)
+ #
+ # change MD by removing 1st name and adding another
+ new_list = [name_a, name_b, name_c]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add_md(new_list)
+ conf.add_vhost(name_a)
+ conf.add_vhost(name_b)
+ conf.install()
+ # restart, check that host still works and have new cert
+ assert env.apache_restart() == 0
+ env.check_md(new_list)
+ assert env.await_completion([name_a])
+ #
+ cert_a2 = env.get_cert(name_a)
+ assert name_a in cert_a2.get_san_list()
+ assert not cert_a.same_serial_as(cert_a2)
+
+ # test case: create two MDs, move them into one
+ # see: <https://bz.apache.org/bugzilla/show_bug.cgi?id=62572>
+ def test_md_702_032(self, env):
+ domain = self.test_domain
+ name1 = "server1." + domain
+ name2 = "server2.b" + domain # need a separate TLD to avoid rate limites
+ #
+ # generate 2 MDs and 2 vhosts
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("MDMembers auto")
+ conf.add_md([name1])
+ conf.add_md([name2])
+ conf.add_vhost(name1)
+ conf.add_vhost(name2)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md([name1])
+ env.check_md([name2])
+ assert env.await_completion([name1, name2])
+ env.check_md_complete(name2)
+ #
+ # check: SSL is running OK
+ cert1 = env.get_cert(name1)
+ assert name1 in cert1.get_san_list()
+ cert2 = env.get_cert(name2)
+ assert name2 in cert2.get_san_list()
+ #
+ # remove second md and vhost, add name2 to vhost1
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("MDMembers auto")
+ conf.add_md([name1])
+ conf.add_vhost([name1, name2])
+ conf.install()
+ assert env.apache_restart() == 0
+ env.check_md([name1, name2])
+ assert env.await_completion([name1])
+ #
+ cert1b = env.get_cert(name1)
+ assert name1 in cert1b.get_san_list()
+ assert name2 in cert1b.get_san_list()
+ assert not cert1.same_serial_as(cert1b)
+
+ # test case: test "tls-alpn-01" challenge handling
+ def test_md_702_040(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("LogLevel core:debug")
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add("MDCAChallenges tls-alpn-01")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # check that acme-tls/1 is available for all domains
+ stat = env.get_md_status(domain)
+ assert stat["proto"]["acme-tls/1"] == domains
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ #
+ # check SSL running OK
+ cert = env.get_cert(domain)
+ assert domain in cert.get_san_list()
+
+ # test case: test "tls-alpn-01" without enabling 'acme-tls/1' challenge protocol
+ def test_md_702_041(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("LogLevel core:debug")
+ conf.add_drive_mode("auto")
+ conf.add("MDCAChallenges tls-alpn-01")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ #
+ # restart (-> drive), check that MD job shows errors
+ # and that missing proto is detected
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # check that acme-tls/1 is available for none of the domains
+ stat = env.get_md_status(domain)
+ assert stat["proto"]["acme-tls/1"] == []
+
+ # test case: 2.4.40 mod_ssl stumbles over a SSLCertificateChainFile when installing
+ # a fallback certificate
+ @pytest.mark.skipif(HttpdTestEnv.get_ssl_module() != "mod_ssl", reason="only for mod_ssl")
+ def test_md_702_042(self, env):
+ domain = self.test_domain
+ dns_list = [domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("LogLevel core:debug")
+ cred = env.get_credentials_for_name(f"test1.{env.http_tld}")[0]
+ conf.add(f"SSLCertificateChainFile {cred.cert_file}")
+ conf.add_drive_mode("auto")
+ conf.add_md(dns_list)
+ conf.add_vhost(dns_list)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+
+ # test case: test "tls-alpn-01" without enabling 'acme-tls/1' challenge protocol
+ # and fallback "http-01" configured, see https://github.com/icing/mod_md/issues/255
+ def test_md_702_043(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("LogLevel core:debug")
+ conf.add_drive_mode("auto")
+ conf.add("MDPortMap 80:%s" % env.http_port)
+ conf.add("MDCAChallenges tls-alpn-01 http-01")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ #
+ # restart (-> drive), check that MD job shows errors
+ # and that missing proto is detected
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # check that acme-tls/1 is available for none of the domains
+ stat = env.get_md_status(domain)
+ assert stat["proto"]["acme-tls/1"] == []
+ # but make sure it completes nevertheless
+ assert env.await_completion([domain])
+
+ # test case: drive using a single challenge type explicitly
+ # and make sure that dns names not mapped to a VirtualHost also work
+ @pytest.mark.parametrize("challenge_type", [
+ "tls-alpn-01" # , "http-01",
+ ])
+ def test_md_702_044(self, env, challenge_type):
+ domain = self.test_domain
+ md_domains = [domain, "mail." + domain]
+ domains = [domain]
+ #
+ # generate 1 MD and 1 vhost
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add(f"MDCAChallenges {challenge_type}")
+ conf.add_md(md_domains)
+ conf.add_vhost(domains)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ env.check_md(md_domains)
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ #
+ # check SSL running OK
+ cert = env.get_cert(domain)
+ assert md_domains[0] in cert.get_san_list()
+ assert md_domains[1] in cert.get_san_list()
+
+ # Make a setup using the base server. It will use the http-01 challenge.
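+ # 'MDBaseServer on' lets mod_md also manage the certificate of the global
+ # (base) server, not only of VirtualHosts.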
+ def test_md_702_050(self, env):
+ domain = self.test_domain
+ conf = MDConf(env, admin=f"admin@{domain}")
+ conf.add(f"""
+ MDBaseServer on
+ ServerName {domain}
+ """)
+ conf.add_md([domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+
+ # Make a setup using the base server without http:; this will fail.
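+ # 'MDPortMap http:-' declares that no local port maps to http:, i.e. port 80
+ # is not reachable, which rules out the http-01 challenge.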
+ def test_md_702_051(self, env):
+ domain = self.test_domain
+ conf = MDConf(env, admin=f"admin@{domain}")
+ conf.add(f"""
+ MDBaseServer on
+ MDPortMap http:-
+ ServerName {domain}
+ """)
+ conf.add_md([domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+
+ # Make a setup using the base server without http:, but with acme-tls/1; this should work.
+ def test_md_702_052(self, env):
+ domain = self.test_domain
+ conf = MDConf(env, std_vhosts=False, admin=f"admin@{domain}")
+ conf.add([
+ "MDBaseServer on",
+ "MDPortMap http:-",
+ "Protocols h2 http/1.1 acme-tls/1",
+ f"ServerName {domain}",
+ "<IfModule ssl_module>",
+ " SSLEngine on",
+ "</IfModule>",
+ "<IfModule tls_module>",
+ f" TLSEngine {env.https_port}",
+ "</IfModule>",
+ ])
+ conf.add_md([domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain, via_domain=env.http_addr, use_https=False)
+ assert stat["proto"]["acme-tls/1"] == [domain]
+ assert env.await_completion([domain], via_domain=env.http_addr, use_https=False)
+
+ # Test a domain name longer than 64 chars, but components < 64, see #227
+ # Background: DNS has an official limit of 253 ASCII chars and components must be
+ # of length [1, 63].
+ # However, the CN in a certificate is limited to 64 characters as well, see
+ # <https://github.com/letsencrypt/boulder/issues/2093>.
+ @pytest.mark.skipif(MDTestEnv.is_pebble(), reason="pebble differs here from boulder")
+ @pytest.mark.parametrize("challenge_type", [
+ "tls-alpn-01", "http-01"
+ ])
+ def test_md_702_060(self, env, challenge_type):
+ domain = self.test_domain
+ # use only too-long names; this is expected to fail:
+ # see <https://github.com/jetstack/cert-manager/issues/1462>
+ long_domain = ("x" * (65 - len(domain))) + domain
+ domains = [long_domain, "www." + long_domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add(f"MDCAChallenges {challenge_type}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ assert env.await_error(long_domain)
+ # add a short domain to the SAN list, the CA should now use that one
+ # and issue a cert.
+ long_domain = ("y" * (65 - len(domain))) + domain
+ domains = [long_domain, "www." + long_domain, "xxx." + domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add("Protocols http/1.1 acme-tls/1")
+ conf.add_drive_mode("auto")
+ conf.add(f"MDCAChallenges {challenge_type}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([long_domain])
+ env.check_md_complete(long_domain)
+ #
+ # check SSL running OK
+ cert = env.get_cert(long_domain)
+ assert long_domain in cert.get_san_list()
+
+ # test case: fourth level domain
+ def test_md_702_070(self, env):
+ domain = self.test_domain
+ name_a = "one.test." + domain
+ name_b = "two.test." + domain
+ domains = [name_a, name_b]
+ #
+ # generate 1 MD and 2 vhosts
+ conf = MDConf(env)
+ conf.add_admin("admin@" + domain)
+ conf.add_md(domains)
+ conf.add_vhost(name_a)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ env.check_md_complete(domains[0])
+
+ # test case: fifth level domain
+ def test_md_702_071(self, env):
+ domain = self.test_domain
+ name_a = "one.more.test." + domain
+ name_b = "two.more.test." + domain
+ domains = [name_a, name_b]
+ #
+ # generate 1 MD and 2 vhosts
+ conf = MDConf(env)
+ conf.add_admin("admin@" + domain)
+ conf.add_md(domains)
+ conf.add_vhost(name_a)
+ conf.install()
+ #
+ # restart (-> drive), check that MD was synched and completes
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ env.check_md_complete(domains[0])
+
diff --git a/test/modules/md/test_720_wildcard.py b/test/modules/md/test_720_wildcard.py
new file mode 100644
index 0000000..23b311c
--- /dev/null
+++ b/test/modules/md/test_720_wildcard.py
@@ -0,0 +1,254 @@
+# test wildcard certificates
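+# ACME CAs (like the pebble test server) issue wildcard certificates only via
+# the dns-01 challenge; http-01 and tls-alpn-01 cannot validate '*.' names.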
+import os
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestWildcard:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ # test case: a wildcard certificate with ACMEv2, no dns-01 supported
+ def test_md_720_001(self, env):
+ domain = self.test_domain
+
+ # generate config with DNS wildcard
+ domains = [domain, "*." + domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'challenge-mismatch'
+
+ # test case: a wildcard certificate with ACMEv2, only dns-01 configured, invalid command path
+ def test_md_720_002(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01-not-found.py")
+
+ domain = self.test_domain
+ domains = [domain, "*." + domain]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'challenge-setup-failure'
+
+ # variation, invalid cmd path, other challenges still get certificate for non-wildcard
+ def test_md_720_002b(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01-not-found.py")
+ domain = self.test_domain
+ domains = [domain, "xxx." + domain]
+
+ conf = MDConf(env)
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ # check: SSL is running OK
+ cert_a = env.get_cert(domain)
+ altnames = cert_a.get_san_list()
+ for domain in domains:
+ assert domain in altnames
+
+ # test case: a wildcard certificate with ACMEv2, only dns-01 configured, invalid command option
+ def test_md_720_003(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py fail")
+ domain = self.test_domain
+ domains = [domain, "*." + domain]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'challenge-setup-failure'
+
+ # test case: a wildcard name certificate with ACMEv2, only dns-01 configured
+ def test_md_720_004(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py")
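+ # MDChallengeDns01 names a helper that mod_md runs to publish/remove the DNS
+ # TXT record (roughly '<cmd> setup <domain> <challenge>' and
+ # '<cmd> teardown <domain>'); dns01.py implements this for the test setup.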
+ domain = self.test_domain
+ domains = [domain, "*." + domain]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ # check: SSL is running OK
+ cert_a = env.get_cert(domain)
+ altnames = cert_a.get_san_list()
+ for domain in domains:
+ assert domain in altnames
+
+ # test case: a wildcard name and 2nd normal vhost, not overlapping
+ def test_md_720_005(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py")
+ domain = self.test_domain
+ domain2 = "www.x" + domain
+ domains = [domain, "*." + domain, domain2]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domain2)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ # check: SSL is running OK
+ cert_a = env.get_cert(domain)
+ altnames = cert_a.get_san_list()
+ for domain in domains:
+ assert domain in altnames
+
+ # test case: a wildcard name and 2nd normal vhost, overlapping
+ def test_md_720_006(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py")
+ domain = self.test_domain
+ dwild = "*." + domain
+ domain2 = "www." + domain
+ domains = [domain, dwild, domain2]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domain2)
+ conf.add_vhost([domain, dwild])
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+ # check: SSL is running OK
+ cert_a = env.get_cert(domain)
+ altnames = cert_a.get_san_list()
+ for domain in [domain, dwild]:
+ assert domain in altnames
+
+ # test case: a MDomain with just a wildcard, see #239
+ def test_md_720_007(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py")
+ domain = self.test_domain
+ dwild = "*." + domain
+ wwwdomain = "www." + domain
+ domains = [dwild]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(wwwdomain)
+ conf.install()
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([wwwdomain])
+ env.check_md_complete(dwild)
+ # check: SSL is running OK
+ cert_a = env.get_cert(wwwdomain)
+ altnames = cert_a.get_san_list()
+ assert domains == altnames
+
+ # test case: a plain name, only dns-01 configured,
+ # http-01 should not be intercepted. See #279
+ def test_md_720_008(self, env):
+ dns01cmd = os.path.join(env.test_dir, "../modules/md/dns01.py")
+ domain = self.test_domain
+ domains = [domain]
+
+ conf = MDConf(env)
+ conf.add("MDCAChallenges dns-01")
+ conf.add(f"MDChallengeDns01 {dns01cmd}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.add("LogLevel http:trace4")
+ conf.install()
+
+ challengedir = os.path.join(env.server_dir, "htdocs/test1/.well-known/acme-challenge")
+ env.mkpath(challengedir)
+ content = b'not a challenge'
+ with open(os.path.join(challengedir, "123456"), "wb") as fd:
+ fd.write(content)
+
+ # restart, check that md is in store
+ assert env.apache_restart() == 0
+ env.check_md(domains)
+ # await drive completion
+ assert env.await_completion([domain], restart=False)
+ # access a fake http-01 challenge on the domain
+ r = env.curl_get(f"http://{domain}:{env.http_port}/.well-known/acme-challenge/123456")
+ assert r.response['status'] == 200
+ assert r.response['body'] == content
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain)
diff --git a/test/modules/md/test_730_static.py b/test/modules/md/test_730_static.py
new file mode 100644
index 0000000..f7f7b4b
--- /dev/null
+++ b/test/modules/md/test_730_static.py
@@ -0,0 +1,117 @@
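+# test MDomains configured with static certificate files
+# (MDCertificateFile / MDCertificateKeyFile)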
+import os
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestStatic:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def test_md_730_001(self, env):
+ # MD with static cert files, will not be driven
+ domain = self.test_domain
+ domains = [domain, 'www.%s' % domain]
+ testpath = os.path.join(env.gen_dir, 'test_920_001')
+ # cert that is only 10 more days valid
+ env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
+ serial=730001, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that the domain uses it and appears in our stats; the expiring cert
+ # sets the 'renew' flag, but no renewal job runs for static files
+ cert = env.get_cert(domain)
+ assert cert.same_serial_as(730001)
+ stat = env.get_md_status(domain)
+ assert stat
+ assert 'cert' in stat
+ assert stat['renew'] is True
+ assert 'renewal' not in stat
+
+ def test_md_730_002(self, env):
+ # MD with static cert files, force driving
+ domain = self.test_domain
+ domains = [domain, 'www.%s' % domain]
+ testpath = os.path.join(env.gen_dir, 'test_920_001')
+ # cert that is only 10 more days valid
+ env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
+ serial=730001, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDPrivateKeys secp384r1 rsa3072")
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.add("MDRenewMode always")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ # this should enforce a renewal
+ stat = env.get_md_status(domain)
+ assert stat['renew'] is True, stat
+ assert env.await_completion(domains, restart=False)
+ # and show the newly created certificates
+ stat = env.get_md_status(domain)
+ assert 'renewal' in stat
+ assert 'cert' in stat['renewal']
+ assert 'secp384r1' in stat['renewal']['cert']
+ assert 'rsa' in stat['renewal']['cert']
+
+ def test_md_730_003(self, env):
+ # just configuring one file will not work
+ domain = self.test_domain
+ domains = [domain, 'www.%s' % domain]
+ testpath = os.path.join(env.gen_dir, 'test_920_001')
+ # cert that is only 10 more days valid
+ env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
+ serial=730001, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_fail() == 0
+
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_fail() == 0
diff --git a/test/modules/md/test_740_acme_errors.py b/test/modules/md/test_740_acme_errors.py
new file mode 100644
index 0000000..670c9ab
--- /dev/null
+++ b/test/modules/md/test_740_acme_errors.py
@@ -0,0 +1,72 @@
+# test ACME error responses and their processing
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestAcmeErrors:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ # -----------------------------------------------------------------------------------------------
+ # test case: MD with 2 names, one invalid
+ #
+ def test_md_740_000(self, env):
+ domain = self.test_domain
+ domains = [domain, "invalid!." + domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ if env.acme_server == 'pebble':
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:malformed'
+ assert md['renewal']['last']['detail'] == \
+ "Order included DNS identifier with a value containing an illegal character: '!'"
+ else:
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:rejectedIdentifier'
+ assert md['renewal']['last']['detail'] == (
+ "Error creating new order :: Cannot issue for "
+ "\"%s\": Domain name contains an invalid character" % domains[1])
+
+ # test case: MD with 3 names, 2 invalid
+ #
+ def test_md_740_001(self, env):
+ domain = self.test_domain
+ domains = [domain, "invalid1!." + domain, "invalid2!." + domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ if env.acme_server == 'pebble':
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:malformed'
+ assert md['renewal']['last']['detail'].startswith(
+ "Order included DNS identifier with a value containing an illegal character")
+ else:
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:rejectedIdentifier'
+ assert md['renewal']['last']['detail'].startswith(
+ "Error creating new order :: Cannot issue for")
+ assert md['renewal']['last']['subproblems']
+ assert len(md['renewal']['last']['subproblems']) == 2
diff --git a/test/modules/md/test_741_setup_errors.py b/test/modules/md/test_741_setup_errors.py
new file mode 100644
index 0000000..49b4e78
--- /dev/null
+++ b/test/modules/md/test_741_setup_errors.py
@@ -0,0 +1,48 @@
+# test errors during challenge setup and the retry/backoff handling
+import os
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestSetupErrors:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.mcmd = os.path.join(env.test_dir, "../modules/md/http_challenge_foobar.py")
+ self.test_domain = env.get_request_domain(request)
+
+ def test_md_741_001(self, env):
+ # set up an MD with an MDMessageCmd that makes the http-01 challenge file invalid
+ # before the ACME server is asked to retrieve it. This will result in
+ # an "invalid" domain authorization.
+ # The certificate sign-up will be attempted again after 4 seconds and
+ # of course fail again.
+ # Verify that the error counter for the staging job increments, so
+ # that our retry logic goes into proper delayed backoff.
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDCAChallenges http-01")
+ conf.add(f"MDMessageCmd {self.mcmd} {env.store_dir}")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain, errors=2, timeout=10)
+ assert md
+ assert md['renewal']['errors'] > 0
diff --git a/test/modules/md/test_750_eab.py b/test/modules/md/test_750_eab.py
new file mode 100644
index 0000000..af1be95
--- /dev/null
+++ b/test/modules/md/test_750_eab.py
@@ -0,0 +1,337 @@
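+# test External Account Binding (EAB, RFC 8555 section 7.3.4): binding the
+# ACME account to a pre-existing CA account via a key id and an HMAC key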
+import json.encoder
+import os
+import re
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_eab(),
+ reason="ACME test server does not support External Account Binding")
+class TestEab:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='eab')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def test_md_750_001(self, env):
+ # md without EAB configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_750_002(self, env):
+ # md with known EAB KID and non base64 hmac key configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 äöüß")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'apache:eab-hmac-invalid'
+
+ def test_md_750_003(self, env):
+ # md with empty EAB KID configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding \" \" bm90IGEgdmFsaWQgaG1hYwo=")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
+
+ def test_md_750_004(self, env):
+ # md with unknown EAB KID configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding key-x bm90IGEgdmFsaWQgaG1hYwo=")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
+
+ def test_md_750_005(self, env):
+ # md with known EAB KID but wrong HMAC configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 bm90IGEgdmFsaWQgaG1hYwo=")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
+
+ def test_md_750_010(self, env):
+ # md with correct EAB configured
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ # this is one of the values in conf/pebble-eab.json
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+
+ def test_md_750_011(self, env):
+ # first one md with EAB, then one without, works only for the first
+ # as the second is unable to reuse the account
+ domain_a = f"a{self.test_domain}"
+ domain_b = f"b{self.test_domain}"
+ conf = MDConf(env)
+ conf.start_md([domain_a])
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.end_md()
+ conf.add_vhost(domains=[domain_a])
+ conf.add_md([domain_b])
+ conf.add_vhost(domains=[domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain_a], restart=False)
+ md = env.await_error(domain_b)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_750_012(self, env):
+ # first one md without EAB, then one with
+ # first one fails, second works
+ domain_a = f"a{self.test_domain}"
+ domain_b = f"b{self.test_domain}"
+ conf = MDConf(env)
+ conf.add_md([domain_a])
+ conf.add_vhost(domains=[domain_a])
+ conf.start_md([domain_b])
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.end_md()
+ conf.add_vhost(domains=[domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain_b], restart=False)
+ md = env.await_error(domain_a)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_750_013(self, env):
+ # 2 MDs with the same EAB should create a single account
+ domain_a = f"a{self.test_domain}"
+ domain_b = f"b{self.test_domain}"
+ conf = MDConf(env)
+ conf.start_md([domain_a])
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.end_md()
+ conf.add_vhost(domains=[domain_a])
+ conf.start_md([domain_b])
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.end_md()
+ conf.add_vhost(domains=[domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain_a, domain_b])
+ md_a = env.get_md_status(domain_a)
+ md_b = env.get_md_status(domain_b)
+ assert md_a['ca'] == md_b['ca']
+
+ def test_md_750_014(self, env):
+ # md with correct EAB, get cert, change to another correct EAB
+ # needs to create a new account
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ md_1 = env.get_md_status(domain)
+ conf = MDConf(env)
+ # this is another one of the values in conf/pebble-eab.json
+ # add a dns name to force renewal
+ domains = [domain, f'www.{domain}']
+ conf.add("MDExternalAccountBinding kid-2 b10lLJs8l1GPIzsLP0s6pMt8O0XVGnfTaCeROxQM0BIt2XrJMDHJZBM5NuQmQJQH")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ md_2 = env.get_md_status(domain)
+ assert md_1['ca'] != md_2['ca']
+
+ def test_md_750_015(self, env):
+ # md with correct EAB, get cert, change to no EAB
+ # needs to fail
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ conf = MDConf(env)
+ # add a dns name to force renewal
+ domains = [domain, f'www.{domain}']
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_750_016(self, env):
+ # md with correct EAB, get cert, change to invalid EAB
+ # needs to fail
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ conf = MDConf(env)
+ # add a dns name to force renewal and switch to an invalid EAB
+ domains = [domain, f'www.{domain}']
+ conf.add("MDExternalAccountBinding kid-invalud blablabalbalbla")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
+
+ def test_md_750_017(self, env):
+ # md without EAB explicitly set to none
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding kid-1 zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W")
+ conf.start_md(domains)
+ conf.add("MDExternalAccountBinding none")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_750_018(self, env):
+ # md with EAB file that does not exist
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding does-not-exist")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_fail() == 0
+ assert re.search(r'.*file not found:', env.apachectl_stderr), env.apachectl_stderr
+
+ def test_md_750_019(self, env):
+ # md with EAB file that is not valid JSON
+ domain = self.test_domain
+ domains = [domain]
+ eab_file = os.path.join(env.server_dir, 'eab.json')
+ with open(eab_file, 'w') as fd:
+ fd.write("something not JSON\n")
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding eab.json")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_fail() == 0
+ assert re.search(r'.*error reading JSON file.*', env.apachectl_stderr), env.apachectl_stderr
+
+ def test_md_750_020(self, env):
+ # md with EAB file that is JSON, but missing the 'kid' element
+ domain = self.test_domain
+ domains = [domain]
+ eab_file = os.path.join(env.server_dir, 'eab.json')
+ with open(eab_file, 'w') as fd:
+ eab = {'something': 1, 'other': 2}
+ fd.write(json.encoder.JSONEncoder().encode(eab))
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding eab.json")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_fail() == 0
+ assert re.search(r'.*JSON does not contain \'kid\' element.*', env.apachectl_stderr), env.apachectl_stderr
+
+ def test_md_750_021(self, env):
+ # md with EAB file that is JSON, but missing the 'hmac' element
+ domain = self.test_domain
+ domains = [domain]
+ eab_file = os.path.join(env.server_dir, 'eab.json')
+ with open(eab_file, 'w') as fd:
+ eab = {'kid': 'kid-1', 'other': 2}
+ fd.write(json.encoder.JSONEncoder().encode(eab))
+ conf = MDConf(env)
+ conf.add("MDExternalAccountBinding eab.json")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_fail() == 0
+ assert re.search(r'.*JSON does not contain \'hmac\' element.*', env.apachectl_stderr), env.apachectl_stderr
+
+ def test_md_750_022(self, env):
+ # md with EAB file that has correct values
+ domain = self.test_domain
+ domains = [domain]
+ eab_file = os.path.join(env.server_dir, 'eab.json')
+ with open(eab_file, 'w') as fd:
+ eab = {'kid': 'kid-1', 'hmac': 'zWNDZM6eQGHWpSRTPal5eIUYFTu7EajVIoguysqZ9wG44nMEtx3MUAsUDkMTQ12W'}
+ fd.write(json.encoder.JSONEncoder().encode(eab))
+ conf = MDConf(env)
+ # this is one of the values in conf/pebble-eab.json
+ conf.add("MDExternalAccountBinding eab.json")
+ conf.add_md(domains)
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
diff --git a/test/modules/md/test_751_sectigo.py b/test/modules/md/test_751_sectigo.py
new file mode 100644
index 0000000..5cbd642
--- /dev/null
+++ b/test/modules/md/test_751_sectigo.py
@@ -0,0 +1,181 @@
+import os
+import re
+import time
+
+import pytest
+
+from .md_conf import MDConf
+
+# set the environment variables
+# SECTIGO_EAB="$kid $hmac" and
+# SECTIGO_TLD="<your registered dns name>"
+# for these tests to become active
+#
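+# e.g. (hypothetical placeholder values, for illustration only):
+#   export SECTIGO_EAB="kid-0123 bXktc2VjcmV0LWhtYWMta2V5"
+#   export SECTIGO_TLD="example.net"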
+
+DEMO_ACME = "https://acme.demo.sectigo.com/"
+DEMO_TLD = None
+
+EABS = [
+ {'kid': '0123', 'hmac': 'abcdef'},
+]
+
+
+def missing_eab():
+ global EABS
+ if len(EABS) == 1 and 'SECTIGO_EAB' in os.environ:
+ m = re.match(r'^\s*(\S+)\s+(\S+)\s*$', os.environ['SECTIGO_EAB'])
+ if m:
+ EABS.append({'kid': m.group(1), 'hmac': m.group(2)})
+ return len(EABS) == 1
+
+
+def missing_tld():
+ global DEMO_TLD
+ if 'SECTIGO_TLD' in os.environ:
+ DEMO_TLD = os.environ['SECTIGO_TLD']
+ return DEMO_TLD is None
+
+
+@pytest.mark.skipif(condition=missing_tld(), reason="env var SECTIGO_TLD not set")
+@pytest.mark.skipif(condition=missing_eab(), reason="env var SECTIGO_EAB not set")
+class TestSectigo:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='eab')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def test_md_751_001(self, env):
+ # valid config, expect cert with correct chain
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateAuthority {DEMO_ACME}")
+ conf.add("MDCACertificateFile none")
+ conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+
+ def test_md_751_002(self, env):
+ # without EAB set
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateAuthority {DEMO_ACME}")
+ conf.add("MDCACertificateFile none")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.get_md_status(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_751_003(self, env):
+ # with wrong EAB set
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateAuthority {DEMO_ACME}")
+ conf.add("MDCACertificateFile none")
+ conf.add(f"MDExternalAccountBinding xxxxxx aaaaaaaaaaaaasdddddsdasdsadsadsadasdsadsa")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.get_md_status(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
+
+ def test_md_751_004(self, env):
+ # valid config, get cert, add dns name, renew cert
+ domain = f"test1.{DEMO_TLD}"
+ domain2 = f"test2.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateAuthority {DEMO_ACME}")
+ conf.add("MDCACertificateFile none")
+ conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.exit_code != 0
+ md1 = env.get_md_status(domain)
+ acct1 = md1['ca']['account']
+ # add the domain2 to the dns names
+ domains = [domain, domain2]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateAuthority {DEMO_ACME}")
+ conf.add("MDCACertificateFile none")
+ conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ md2 = env.get_md_status(domain)
+ acct2 = md2['ca']['account']
+ assert acct2 == acct1, f"ACME account was not reused: {acct1} became {acct2}"
+
+ def test_md_751_020(self, env):
+ # valid config, get cert, check OCSP status
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDStapling on")
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCACertificateFile none
+ MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ time.sleep(1)
+ for domain in domains:
+ stat = env.await_ocsp_status(domain,
+ ca_file=f"{env.test_dir}/data/sectigo-demo-root.pem")
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
diff --git a/test/modules/md/test_752_zerossl.py b/test/modules/md/test_752_zerossl.py
new file mode 100644
index 0000000..1884665
--- /dev/null
+++ b/test/modules/md/test_752_zerossl.py
@@ -0,0 +1,202 @@
+import os
+import time
+
+import pytest
+
+from .md_conf import MDConf
+
+# set the environment variable
+# ZEROSSL_TLD="<your registered dns name>"
+# for these tests to become active
+#
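+# e.g. (hypothetical placeholder value, for illustration only):
+#   export ZEROSSL_TLD="example.net"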
+
+DEMO_ACME = "https://acme.zerossl.com/v2/DV90"
+DEMO_EAB_URL = "http://api.zerossl.com/acme/eab-credentials-email"
+DEMO_TLD = None
+
+
+def missing_tld():
+ global DEMO_TLD
+ if 'ZEROSSL_TLD' in os.environ:
+ DEMO_TLD = os.environ['ZEROSSL_TLD']
+ return DEMO_TLD is None
+
+
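+# ZeroSSL hands out per-account EAB credentials via a plain REST call;
+# fetch a fresh kid/hmac pair for this test run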
+def get_new_eab(env):
+ r = env.curl_raw(DEMO_EAB_URL, options=[
+ "-d", f"email=admin@zerossl.{DEMO_TLD}"
+ ], force_resolve=False)
+ assert r.exit_code == 0
+ assert r.json
+ assert r.json['success'] is True
+ assert r.json['eab_kid']
+ assert r.json['eab_hmac_key']
+ return {'kid': r.json['eab_kid'], 'hmac': r.json['eab_hmac_key']}
+
+
+@pytest.mark.skipif(condition=missing_tld(), reason="env var ZEROSSL_TLD not set")
+class TestZeroSSL:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='eab')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ self.test_domain = env.get_request_domain(request)
+
+ def test_md_752_001(self, env):
+ # valid config, expect cert with correct chain
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ eab = get_new_eab(env)
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ MDExternalAccountBinding {eab['kid']} {eab['hmac']}
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+
+ def test_md_752_002(self, env):
+ # without EAB set
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.get_md_status(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
+
+ def test_md_752_003(self, env):
+ # with wrong EAB set
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ """)
+ conf.add(f"MDExternalAccountBinding YmxhYmxhYmxhCg YmxhYmxhYmxhCg")
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ md = env.get_md_status(domain)
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:malformed'
+
+ def test_md_752_004(self, env):
+ # valid config, get cert, add dns name, renew cert
+ domain = f"test1.{DEMO_TLD}"
+ domain2 = f"test2.{DEMO_TLD}"
+ domains = [domain]
+ eab = get_new_eab(env)
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ MDExternalAccountBinding {eab['kid']} {eab['hmac']}
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.exit_code != 0
+ md1 = env.get_md_status(domain)
+ acct1 = md1['ca']['account']
+ # add the domain2 to the dns names
+ domains = [domain, domain2]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ MDExternalAccountBinding {eab['kid']} {eab['hmac']}
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ md2 = env.get_md_status(domain)
+ acct2 = md2['ca']['account']
+ assert acct2 == acct1, f"ACME account was not reused: {acct1} became {acct2}"
+
+ def test_md_752_020(self, env):
+ # valid config, get cert, check OCSP status
+ domain = f"test1.{DEMO_TLD}"
+ domains = [domain]
+ eab = get_new_eab(env)
+ conf = MDConf(env)
+ conf.add("MDStapling on")
+ conf.start_md(domains)
+ conf.add(f"""
+ MDCertificateAuthority {DEMO_ACME}
+ MDCertificateAgreement accepted
+ MDContactEmail admin@zerossl.{DEMO_TLD}
+ MDCACertificateFile none
+ MDExternalAccountBinding {eab['kid']} {eab['hmac']}
+ """)
+ conf.end_md()
+ conf.add_vhost(domains=domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
+ "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
+ ])
+ assert r.response['status'] == 200
+ time.sleep(1)
+ for domain in domains:
+ stat = env.await_ocsp_status(domain,
+ ca_file=f"{env.test_dir}/data/sectigo-demo-root.pem")
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
diff --git a/test/modules/md/test_780_tailscale.py b/test/modules/md/test_780_tailscale.py
new file mode 100644
index 0000000..84a266b
--- /dev/null
+++ b/test/modules/md/test_780_tailscale.py
@@ -0,0 +1,186 @@
+import os
+import re
+import socket
+import sys
+from threading import Thread
+
+import pytest
+
+from .md_conf import MDConf
+
+
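+# Minimal stand-in for the tailscaled local API: it listens on a unix domain
+# socket and answers GET /localapi/v0/cert/<domain>?type=crt|key with the
+# test credentials generated for that domain.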
+class TailscaleFaker:
+
+ def __init__(self, env, path):
+ self.env = env
+ self._uds_path = path
+ self._done = False
+
+ def start(self):
+ def process(self):
+ self._socket.listen(1)
+ self._process()
+
+ try:
+ os.unlink(self._uds_path)
+ except OSError:
+ if os.path.exists(self._uds_path):
+ raise
+ self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ self._socket.bind(self._uds_path)
+ self._thread = Thread(target=process, daemon=True, args=[self])
+ self._thread.start()
+
+ def stop(self):
+ self._done = True
+ self._socket.close()
+
+ def send_error(self, c, status, reason):
+ c.sendall(f"""HTTP/1.1 {status} {reason}\r
+Server: TailscaleFaker\r
+Content-Length: 0\r
+Connection: close\r
+\r
+""".encode())
+
+ def send_data(self, c, ctype: str, data: bytes):
+ c.sendall(f"""HTTP/1.1 200 OK\r
+Server: TailscaleFaker\r
+Content-Type: {ctype}\r
+Content-Length: {len(data)}\r
+Connection: close\r
+\r
+""".encode() + data)
+
+ def _process(self):
+ # an http server written on a sunny afternoon
+ while self._done is False:
+ try:
+ c, client_address = self._socket.accept()
+ try:
+ data = c.recv(1024)
+ lines = data.decode().splitlines()
+ m = re.match(r'^(?P<method>\w+)\s+(?P<uri>\S+)\s+HTTP/1.1', lines[0])
+ if m is None:
+ self.send_error(c, 400, "Bad Request")
+ continue
+ uri = m.group('uri')
+ m = re.match(r'/localapi/v0/cert/(?P<domain>\S+)\?type=(?P<type>\w+)', uri)
+ if m is None:
+ self.send_error(c, 404, "Not Found")
+ continue
+ domain = m.group('domain')
+ cred_type = m.group('type')
+ creds = self.env.get_credentials_for_name(domain)
+ sys.stderr.write(f"lookup domain={domain}, type={cred_type} -> {creds}\n")
+ if creds is None or len(creds) == 0:
+ self.send_error(c, 404, "Not Found")
+ continue
+ if cred_type == 'crt':
+ self.send_data(c, "text/plain", creds[0].cert_pem)
+ pass
+ elif cred_type == 'key':
+ self.send_data(c, "text/plain", creds[0].pkey_pem)
+ else:
+ self.send_error(c, 404, "Not Found")
+ continue
+ finally:
+ c.close()
+
+ except ConnectionAbortedError:
+ self._done = True
+
+
+class TestTailscale:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ UDS_PATH = f"{env.gen_dir}/tailscale.sock"
+ TestTailscale.UDS_PATH = UDS_PATH
+ faker = TailscaleFaker(env=env, path=UDS_PATH)
+ faker.start()
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+ yield
+ faker.stop()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def _write_res_file(self, doc_root, name, content):
+ if not os.path.exists(doc_root):
+ os.makedirs(doc_root)
+ open(os.path.join(doc_root, name), "w").write(content)
+
+ # create a MD using `tailscale` as protocol, wrong path
+ def test_md_780_001(self, env):
+ domain = env.tailscale_domain
+ # generate config with one MD
+ domains = [domain]
+ socket_path = '/xxx'
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md(domains)
+ conf.add([
+ "MDCertificateProtocol tailscale",
+ f"MDCertificateAuthority file://{socket_path}",
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ # restart and watch it fail due to wrong tailscale unix socket path
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['status-description'] == 'No such file or directory'
+ assert md['renewal']['last']['detail'] == \
+ f"tailscale socket not available, may not be up: {socket_path}"
+
+ # create a MD using `tailscale` as protocol, path to faker, should succeed
+ def test_md_780_002(self, env):
+ domain = env.tailscale_domain
+ # generate config with one MD
+ domains = [domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md(domains)
+ conf.add([
+ "MDCertificateProtocol tailscale",
+ f"MDCertificateAuthority file://{self.UDS_PATH}",
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ # restart and watch it succeed via the faked tailscale socket
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ assert env.apache_restart() == 0
+ env.check_md_complete(domain)
+
+ # create a MD using `tailscale` as protocol, but domain name not assigned by tailscale
+ def test_md_780_003(self, env):
+ domain = "test.not-correct.ts.net"
+ # generate config with one MD
+ domains = [domain]
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.start_md(domains)
+ conf.add([
+ "MDCertificateProtocol tailscale",
+ f"MDCertificateAuthority file://{self.UDS_PATH}",
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ # restart and watch it fail: the faker has no credentials for this domain
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['status-description'] == 'No such file or directory'
+ assert md['renewal']['last']['detail'] == "retrieving certificate from tailscale"
diff --git a/test/modules/md/test_790_failover.py b/test/modules/md/test_790_failover.py
new file mode 100644
index 0000000..a939912
--- /dev/null
+++ b/test/modules/md/test_790_failover.py
@@ -0,0 +1,87 @@
+import pytest
+
+from .md_env import MDTestEnv
+from .md_conf import MDConf
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestFailover:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ conf = MDConf(env)
+ conf.install()
+
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ # set 2 ACME certificate authorities, valid + invalid
+ def test_md_790_001(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add([
+ "MDRetryDelay 200ms", # speed up failovers
+ ])
+ conf.start_md(domains)
+ conf.add([
+ f"MDCertificateAuthority {env.acme_url} https://does-not-exist/dir"
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+
+ # set 2 ACME certificate authorities, invalid + valid
+ def test_md_790_002(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add([
+ "MDRetryDelay 100ms", # speed up failovers
+ "MDRetryFailover 2",
+ ])
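+ # MDRetryFailover sets how many consecutive errors are tolerated against
+ # one CA before mod_md fails over to the next MDCertificateAuthority.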
+ conf.start_md(domains)
+ conf.add([
+ f"MDCertificateAuthority https://does-not-exist/dir {env.acme_url} "
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
+
+ # set 3 ACME certificate authorities, invalid + invalid + valid
+ def test_md_790_003(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add([
+ "MDRetryDelay 100ms", # speed up failovers
+ "MDRetryFailover 2",
+ ])
+ conf.start_md(domains)
+ conf.add([
+ f"MDCertificateAuthority https://does-not-exist/dir https://does-not-either/ "
+ f"{env.acme_url} "
+ ])
+ conf.end_md()
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.check_md_complete(domain)
diff --git a/test/modules/md/test_800_must_staple.py b/test/modules/md/test_800_must_staple.py
new file mode 100644
index 0000000..32edee3
--- /dev/null
+++ b/test/modules/md/test_800_must_staple.py
@@ -0,0 +1,84 @@
+# test mod_md must-staple support
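+# 'must staple' adds the TLS feature (status_request) extension to the
+# certificate, which tells clients to require a stapled OCSP response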
+import pytest
+
+from .md_conf import MDConf
+from .md_cert_util import MDCertUtil
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestMustStaple:
+ domain = None
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ self.domain = env.get_class_domain(self.__class__)
+
+ def configure_httpd(self, env, domain, add_lines=""):
+ conf = MDConf(env, admin="admin@" + domain)
+ conf.add(add_lines)
+ conf.add_md([domain])
+ conf.add_vhost(domain)
+ conf.install()
+
+ # MD with default, e.g. not staple
+ def test_md_800_001(self, env):
+ self.configure_httpd(env, self.domain)
+ assert env.apache_restart() == 0
+ assert env.await_completion([self.domain])
+ env.check_md_complete(self.domain)
+ cert1 = MDCertUtil(env.store_domain_file(self.domain, 'pubcert.pem'))
+ assert not cert1.get_must_staple()
+
+ # MD that should explicitly not staple
+ def test_md_800_002(self, env):
+ self.configure_httpd(env, self.domain, "MDMustStaple off")
+ assert env.apache_restart() == 0
+ env.check_md_complete(self.domain)
+ cert1 = MDCertUtil(env.store_domain_file(self.domain, 'pubcert.pem'))
+ assert not cert1.get_must_staple()
+ stat = env.get_ocsp_status(self.domain)
+ assert 'ocsp' not in stat or stat['ocsp'] == "no response sent"
+
+ # MD that must staple and toggle off again
+ @pytest.mark.skipif(MDTestEnv.lacks_ocsp(), reason="no OCSP responder")
+ def test_md_800_003(self, env):
+ self.configure_httpd(env, self.domain, "MDMustStaple on")
+ assert env.apache_restart() == 0
+ assert env.await_completion([self.domain])
+ env.check_md_complete(self.domain)
+ cert1 = MDCertUtil(env.store_domain_file(self.domain, 'pubcert.pem'))
+ assert cert1.get_must_staple()
+ self.configure_httpd(env, self.domain, "MDMustStaple off")
+ assert env.apache_restart() == 0
+ assert env.await_completion([self.domain])
+ env.check_md_complete(self.domain)
+ cert1 = MDCertUtil(env.store_domain_file(self.domain, 'pubcert.pem'))
+ assert not cert1.get_must_staple()
+
+ # MD that must staple
+ @pytest.mark.skipif(MDTestEnv.lacks_ocsp(), reason="no OCSP responder")
+ @pytest.mark.skipif(MDTestEnv.get_ssl_module() != "mod_ssl", reason="only for mod_ssl")
+ def test_md_800_004(self, env):
+ # mod_ssl stapling is off, expect no stapling
+ stat = env.get_ocsp_status(self.domain)
+ assert stat['ocsp'] == "no response sent"
+ # turn mod_ssl stapling on, expect an answer
+ self.configure_httpd(env, self.domain, """
+ LogLevel ssl:trace2
+ SSLUseStapling On
+ SSLStaplingCache shmcb:stapling_cache(128000)
+ """)
+ assert env.apache_restart() == 0
+ stat = env.get_ocsp_status(self.domain)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
diff --git a/test/modules/md/test_801_stapling.py b/test/modules/md/test_801_stapling.py
new file mode 100644
index 0000000..5c03602
--- /dev/null
+++ b/test/modules/md/test_801_stapling.py
@@ -0,0 +1,391 @@
+# test mod_md stapling support
+
+import os
+import time
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+@pytest.mark.skipif(MDTestEnv.lacks_ocsp(), reason="no OCSP responder")
+class TestStapling:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ domain = env.get_class_domain(self.__class__)
+ mdA = "a-" + domain
+ mdB = "b-" + domain
+ self.configure_httpd(env, [mdA, mdB]).install()
+ env.apache_stop()
+ assert env.apache_restart() == 0
+ assert env.await_completion([mdA, mdB])
+ env.check_md_complete(mdA)
+ env.check_md_complete(mdB)
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ self.domain = env.get_class_domain(self.__class__)
+ self.mdA = "a-" + self.domain
+ self.mdB = "b-" + self.domain
+ yield
+ env.apache_stop()
+
+ def configure_httpd(self, env, domains=None, add_lines="", ssl_stapling=False):
+ if not isinstance(domains, list):
+ domains = [domains] if domains else []
+ conf = MDConf(env)
+ conf.add("""
+ <IfModule tls_module>
+ LogLevel tls:trace4
+ </IfModule>
+ <IfModule ssl_module>
+ LogLevel ssl:trace4
+ </IfModule>
+ """)
+ if ssl_stapling:
+ conf.add("""
+ <IfModule ssl_module>
+ SSLUseStapling On
+ SSLStaplingCache shmcb:stapling_cache(128000)
+ </IfModule>
+ """)
+ conf.add(add_lines)
+ for domain in domains:
+ conf.add_md([domain])
+ conf.add_vhost(domain)
+ return conf
+
+ # MD with stapling on/off and mod_ssl stapling off
+ # expect to only see stapling response when MD stapling is on
+ def test_md_801_001(self, env):
+ md = self.mdA
+ self.configure_httpd(env, md).install()
+ assert env.apache_restart() == 0
+ stat = env.get_ocsp_status(md)
+ assert stat['ocsp'] == "no response sent"
+ stat = env.get_md_status(md)
+ assert not stat["stapling"]
+ #
+ # turn stapling on, wait for it to appear in connections
+ self.configure_httpd(env, md, """
+ MDStapling on
+ LogLevel md:trace5
+ """).install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+ stat = env.get_md_status(md)
+ assert stat["stapling"]
+ pkey = 'rsa'
+ assert stat["cert"][pkey]["ocsp"]["status"] == "good"
+ assert stat["cert"][pkey]["ocsp"]["valid"]
+ #
+ # turn stapling off (explicitly) again, should disappear
+ self.configure_httpd(env, md, "MDStapling off").install()
+ assert env.apache_restart() == 0
+ stat = env.get_ocsp_status(md)
+ assert stat['ocsp'] == "no response sent"
+ stat = env.get_md_status(md)
+ assert not stat["stapling"]
+
+ # MD with stapling on/off and mod_ssl stapling on
+ # expect to see stapling response in all cases
+ def test_md_801_002(self, env):
+ md = self.mdA
+ self.configure_httpd(env, md, ssl_stapling=True).install()
+ assert env.apache_restart() == 0
+ stat = env.get_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)" if \
+ env.ssl_module == "mod_ssl" else "no response sent"
+ stat = env.get_md_status(md)
+ assert not stat["stapling"]
+ #
+ # turn stapling on, wait for it to appear in connections
+ self.configure_httpd(env, md, "MDStapling on", ssl_stapling=True).install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+ stat = env.get_md_status(md)
+ assert stat["stapling"]
+ pkey = 'rsa'
+ assert stat["cert"][pkey]["ocsp"]["status"] == "good"
+ assert stat["cert"][pkey]["ocsp"]["valid"]
+ #
+ # turn stapling off (explicitly) again, should disappear
+ self.configure_httpd(env, md, "MDStapling off", ssl_stapling=True).install()
+ assert env.apache_restart() == 0
+ stat = env.get_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)" if \
+ env.ssl_module == "mod_ssl" else "no response sent"
+ stat = env.get_md_status(md)
+ assert not stat["stapling"]
+
+ # 2 MDs, one with md stapling on, one with default (off)
+ def test_md_801_003(self, env):
+ md_a = self.mdA
+ md_b = self.mdB
+ conf = self.configure_httpd(env)
+ conf.add("""
+ <MDomain %s>
+ MDStapling on
+ </MDomain>
+ <MDomain %s>
+ </MDomain>
+ """ % (md_a, md_b))
+ conf.add_vhost(md_a)
+ conf.add_vhost(md_b)
+ conf.install()
+ assert env.apache_restart() == 0
+ # mdA has stapling
+ stat = env.await_ocsp_status(md_a)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+ stat = env.get_md_status(md_a)
+ assert stat["stapling"]
+ pkey = 'rsa'
+ assert stat["cert"][pkey]["ocsp"]["status"] == "good"
+ assert stat["cert"][pkey]["ocsp"]["valid"]
+ # mdB has no stapling
+ stat = env.get_ocsp_status(md_b)
+ assert stat['ocsp'] == "no response sent"
+ stat = env.get_md_status(md_b)
+ assert not stat["stapling"]
+
+ # 2 MDs, md stapling on+off, ssl stapling on
+ def test_md_801_004(self, env):
+ md_a = self.mdA
+ md_b = self.mdB
+ conf = self.configure_httpd(env, ssl_stapling=True)
+ conf.add("""
+ <MDomain %s>
+ MDStapling on
+ </MDomain>
+ <MDomain %s>
+ </MDomain>
+ """ % (md_a, md_b))
+ conf.add_vhost(md_a)
+ conf.add_vhost(md_b)
+ conf.install()
+ assert env.apache_restart() == 0
+ # mdA has stapling
+ stat = env.await_ocsp_status(md_a)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+ stat = env.get_md_status(md_a)
+ assert stat["stapling"]
+ pkey = 'rsa'
+ assert stat["cert"][pkey]["ocsp"]["status"] == "good"
+ assert stat["cert"][pkey]["ocsp"]["valid"]
+ # mdB has no md stapling, but mod_ssl kicks in
+ stat = env.get_ocsp_status(md_b)
+ assert stat['ocsp'] == "successful (0x0)" if \
+ env.ssl_module == "mod_ssl" else "no response sent"
+ stat = env.get_md_status(md_b)
+ assert not stat["stapling"]
+
+    # MD, check that a restart leaves the response unchanged; reconfiguring the keep
+    # interval should remove the file on restart and a new one is fetched
+ def test_md_801_005(self, env):
+ # TODO: mod_watchdog seems to have problems sometimes with fast restarts
+ # turn stapling on, wait for it to appear in connections
+ md = self.mdA
+ self.configure_httpd(env, md, "MDStapling on").install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+        # find the file where the ocsp response is stored
+ dirpath = os.path.join(env.store_dir, 'ocsp', md)
+ files = os.listdir(dirpath)
+ ocsp_file = None
+ for name in files:
+ if name.startswith("ocsp-"):
+ ocsp_file = os.path.join(dirpath, name)
+ assert ocsp_file
+ mtime1 = os.path.getmtime(ocsp_file)
+ # wait a sec, restart and check that file does not change
+ time.sleep(1)
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ mtime2 = os.path.getmtime(ocsp_file)
+ assert mtime1 == mtime2
+        # configure a keep time of 1 second and restart; the file is gone
+        # (a side effect: we load the response before the cleanup removes it,
+        # and since it was still valid, no new one needed fetching)
+ self.configure_httpd(env, md, """
+ MDStapling on
+ MDStaplingKeepResponse 1s
+ """).install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert not os.path.exists(ocsp_file)
+ # if we restart again, a new file needs to appear
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ mtime3 = os.path.getmtime(ocsp_file)
+ assert mtime1 != mtime3
+
+ # MD, check that stapling renew window works. Set a large window
+ # that causes response to be retrieved all the time.
+ def test_md_801_006(self, env):
+ # turn stapling on, wait for it to appear in connections
+ md = self.mdA
+ self.configure_httpd(env, md, "MDStapling on").install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+        # find the file where the ocsp response is stored
+ dirpath = os.path.join(env.store_dir, 'ocsp', md)
+ files = os.listdir(dirpath)
+ ocsp_file = None
+ for name in files:
+ if name.startswith("ocsp-"):
+ ocsp_file = os.path.join(dirpath, name)
+ assert ocsp_file
+ mtime1 = os.path.getmtime(ocsp_file)
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ # wait a sec, restart and check that file does not change
+ time.sleep(1)
+ mtime2 = os.path.getmtime(ocsp_file)
+ assert mtime1 == mtime2
+        # configure a renew window of 10 days (larger than any response lifetime) and restart
+ self.configure_httpd(env, md, """
+ MDStapling on
+ MDStaplingRenewWindow 10d
+ """).install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ # wait a sec, restart and check that file does change
+ time.sleep(1)
+ mtime3 = os.path.getmtime(ocsp_file)
+ assert mtime1 != mtime3
+
+ # MD, make a MDomain with static files, check that stapling works
+ def test_md_801_007(self, env):
+ # turn stapling on, wait for it to appear in connections
+ md = self.mdA
+ conf = self.configure_httpd(env)
+ conf.add("""
+ <MDomain %s>
+ MDCertificateKeyFile %s
+ MDCertificateFile %s
+ MDStapling on
+ </MDomain>
+ """ % (md, env.store_domain_file(md, 'privkey.pem'),
+ env.store_domain_file(md, 'pubcert.pem')))
+ conf.add_vhost(md)
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+        # find the file where the ocsp response is stored
+ dirpath = os.path.join(env.store_dir, 'ocsp', md)
+ files = os.listdir(dirpath)
+ ocsp_file = None
+ for name in files:
+ if name.startswith("ocsp-"):
+ ocsp_file = os.path.join(dirpath, name)
+ assert ocsp_file
+
+ # Use certificate files in direct config, check that stapling works
+ def test_md_801_008(self, env):
+ # turn stapling on, wait for it to appear in connections
+ md = self.mdA
+ conf = self.configure_httpd(env)
+ conf.add("MDStapling on")
+ conf.start_vhost(md)
+ conf.add_certificate(env.store_domain_file(md, 'pubcert.pem'),
+ env.store_domain_file(md, 'privkey.pem'))
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.await_ocsp_status(md)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
+        # find the file where the ocsp response is stored
+ dirpath = os.path.join(env.store_dir, 'ocsp', 'other')
+ files = os.listdir(dirpath)
+ ocsp_file = None
+ for name in files:
+ if name.startswith("ocsp-"):
+ ocsp_file = os.path.join(dirpath, name)
+ assert ocsp_file
+
+ # Turn on stapling for a certificate without OCSP responder and issuer
+ # (certificates without issuer prevent mod_ssl asking around for stapling)
+ def test_md_801_009(self, env):
+ md = self.mdA
+ domains = [md]
+ testpath = os.path.join(env.gen_dir, 'test_801_009')
+        # cert that is valid for 30 more days
+ env.create_self_signed_cert(domains, {"notBefore": -60, "notAfter": 30},
+ serial=801009, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add("MDCertificateFile %s" % cert_file)
+ conf.add("MDCertificateKeyFile %s" % pkey_file)
+ conf.add("MDStapling on")
+ conf.end_md()
+ conf.add_vhost(md)
+ conf.install()
+ assert env.apache_restart() == 0
+ time.sleep(1)
+ stat = env.get_ocsp_status(md)
+ assert stat['ocsp'] == "no response sent"
+
+ # Turn on stapling for an MDomain not used in any virtualhost
+ # There was a crash in server-status in this case
+ def test_md_801_010(self, env):
+ env.clear_ocsp_store()
+ md = self.mdA
+ domains = [md]
+ conf = MDConf(env)
+ conf.start_md(domains)
+ conf.add("MDStapling on")
+ conf.end_md()
+ conf.install()
+ assert env.apache_restart() == 0
+ stat = env.get_server_status()
+ assert stat
+
+    # add 7 mdomains that need OCSP stapling; once activated,
+    # we use at most 6 connections against the same OCSP responder,
+    # which triggers our use of curl_multi_perform with iterative
+    # scheduling.
+ # This checks the mistaken assert() reported in
+ # <https://bz.apache.org/bugzilla/show_bug.cgi?id=65567>
+ def test_md_801_011(self, env):
+        domains = [f'test-801-011-{i}-{env.DOMAIN_SUFFIX}' for i in range(7)]
+ self.configure_httpd(env, domains, """
+ MDStapling on
+ LogLevel md:trace2 ssl:warn
+ """).install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains, restart=False, timeout=120)
+ assert env.apache_restart() == 0
+ # now the certs are installed and ocsp will be retrieved
+ time.sleep(1)
+ for domain in domains:
+ stat = env.await_ocsp_status(domain)
+ assert stat['ocsp'] == "successful (0x0)"
+ assert stat['verify'] == "0 (ok)"
diff --git a/test/modules/md/test_810_ec.py b/test/modules/md/test_810_ec.py
new file mode 100644
index 0000000..5c31018
--- /dev/null
+++ b/test/modules/md/test_810_ec.py
@@ -0,0 +1,153 @@
+# tests with elliptic curve keys and certificates
+import logging
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestAutov2:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def set_get_pkeys(self, env, domain, pkeys, conf=None):
+ domains = [domain]
+ if conf is None:
+ conf = MDConf(env)
+ conf.add("MDPrivateKeys {0}".format(" ".join([p['spec'] for p in pkeys])))
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+
+ def check_pkeys(self, env, domain, pkeys):
+ # check that files for all types have been created
+ for p in [p for p in pkeys if len(p['spec'])]:
+ env.check_md_complete(domain, p['spec'])
+ # check that openssl client sees the cert with given keylength for cipher
+ env.verify_cert_key_lenghts(domain, pkeys)
+
+ def set_get_check_pkeys(self, env, domain, pkeys, conf=None):
+ self.set_get_pkeys(env, domain, pkeys, conf=conf)
+ self.check_pkeys(env, domain, pkeys)
+
+ # one EC key, no RSA
+ def test_md_810_001(self, env):
+ domain = self.test_domain
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "secp256r1", 'ciphers': "ECDSA", 'keylen': 256},
+ {'spec': "", 'ciphers': "RSA", 'keylen': 0},
+ ])
+
+ # set EC key type override on MD and get certificate
+ def test_md_810_002(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDPrivateKeys secp256r1")
+ conf.start_md(domains)
+ conf.add(" MDPrivateKeys secp384r1")
+ conf.end_md()
+ conf.add_vhost(domains)
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "secp384r1", 'ciphers': "ECDSA", 'keylen': 384},
+ {'spec': "", 'ciphers': "RSA", 'keylen': 0},
+ ])
+
+ # set two key spec, ec before rsa
+ def test_md_810_003a(self, env):
+ domain = self.test_domain
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "P-256", 'ciphers': "ECDSA", 'keylen': 256},
+ {'spec': "RSA 3072", 'ciphers': "ECDHE-RSA-CHACHA20-POLY1305", 'keylen': 3072},
+ ])
+
+ # set two key spec, rsa before ec
+ def test_md_810_003b(self, env):
+ domain = self.test_domain
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "RSA 3072", 'ciphers': "ECDHE-RSA-CHACHA20-POLY1305", 'keylen': 3072},
+ {'spec': "secp384r1", 'ciphers': "ECDSA", 'keylen': 384},
+ ])
+
+ # use a curve unsupported by LE
+ # only works with mod_ssl as rustls refuses to load such a weak key
+ @pytest.mark.skipif(MDTestEnv.get_ssl_module() != "mod_ssl", reason="only for mod_ssl")
+ @pytest.mark.skipif(MDTestEnv.get_acme_server() != 'boulder', reason="only boulder rejects this")
+ def test_md_810_004(self, env):
+ domain = self.test_domain
+ # generate config with one MD
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDPrivateKeys secp192r1")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ md = env.await_error(domain)
+ assert md
+ assert md['renewal']['errors'] > 0
+ assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:malformed'
+
+ # set three key specs
+ def test_md_810_005(self, env):
+ domain = self.test_domain
+ # behaviour differences, mod_ssl selects the strongest suitable,
+ # mod_tls selects the first suitable
+ ec_key_len = 384 if env.ssl_module == "mod_ssl" else 256
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "secp256r1", 'ciphers': "ECDSA", 'keylen': ec_key_len},
+ {'spec': "RSA 4096", 'ciphers': "ECDHE-RSA-CHACHA20-POLY1305", 'keylen': 4096},
+ {'spec': "P-384", 'ciphers': "ECDSA", 'keylen': ec_key_len},
+ ])
+
+ # set three key specs
+ def test_md_810_006(self, env):
+ domain = self.test_domain
+ self.set_get_check_pkeys(env, domain, [
+ {'spec': "rsa2048", 'ciphers': "ECDHE-RSA-CHACHA20-POLY1305", 'keylen': 2048},
+ {'spec': "secp256r1", 'ciphers': "ECDSA", 'keylen': 256},
+ ])
+
+ # start with one pkey and add another one
+ def test_md_810_007(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDPrivateKeys rsa3072")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion(domains)
+ conf = MDConf(env)
+ conf.add("MDPrivateKeys rsa3072 secp384r1")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ mds = env.get_md_status(domain, via_domain=domain, use_https=True)
+ assert 'renew' in mds and mds['renew'] is True, f"{mds}"
+ assert env.await_completion(domains)
+ self.check_pkeys(env, domain, [
+ {'spec': "rsa3072", 'ciphers': "ECDHE-RSA-CHACHA20-POLY1305", 'keylen': 3072},
+ {'spec': "secp384r1", 'ciphers': "ECDSA", 'keylen': 384},
+ ])
+
diff --git a/test/modules/md/test_820_locks.py b/test/modules/md/test_820_locks.py
new file mode 100644
index 0000000..9436912
--- /dev/null
+++ b/test/modules/md/test_820_locks.py
@@ -0,0 +1,73 @@
+import os
+
+import pytest
+from filelock import Timeout, FileLock
+
+from .md_cert_util import MDCertUtil
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestLocks:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def configure_httpd(self, env, domains, add_lines=""):
+ conf = MDConf(env)
+ conf.add(add_lines)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+
+ # normal renewal with store locks activated
+ def test_md_820_001(self, env):
+ domain = self.test_domain
+ self.configure_httpd(env, [domain], add_lines=[
+ "MDStoreLocks 1s"
+ ])
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+
+    # renewal, with global lock held during restart
+ @pytest.mark.skip("does not work in our CI")
+ def test_md_820_002(self, env):
+ domain = self.test_domain
+ self.configure_httpd(env, [domain], add_lines=[
+ "MDStoreLocks 1s"
+ ])
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ # we have a cert now, add a dns name to force renewal
+ certa = MDCertUtil(env.store_domain_file(domain, 'pubcert.pem'))
+ self.configure_httpd(env, [domain, f"x.{domain}"], add_lines=[
+ "MDStoreLocks 1s"
+ ])
+ assert env.apache_restart() == 0
+ # await new cert, but do not restart, keeps the cert in staging
+ assert env.await_completion([domain], restart=False)
+ # obtain global lock and restart
+ lockfile = os.path.join(env.store_dir, "store.lock")
+ with FileLock(lockfile):
+ assert env.apache_restart() == 0
+ # lock should have prevented staging from being activated,
+ # meaning we will have the same cert
+ certb = MDCertUtil(env.store_domain_file(domain, 'pubcert.pem'))
+ assert certa.same_serial_as(certb)
+ # now restart without lock
+ assert env.apache_restart() == 0
+ certc = MDCertUtil(env.store_domain_file(domain, 'pubcert.pem'))
+ assert not certa.same_serial_as(certc)
+
+
diff --git a/test/modules/md/test_900_notify.py b/test/modules/md/test_900_notify.py
new file mode 100644
index 0000000..30e0742
--- /dev/null
+++ b/test/modules/md/test_900_notify.py
@@ -0,0 +1,122 @@
+# test mod_md notify support
+
+import os
+import time
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestNotify:
+ notify_cmd = None
+ notify_log = None
+ domain = None
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ self.domain = env.get_request_domain(request)
+ self.notify_cmd = os.path.join(env.test_dir, "../modules/md/notify.py")
+ self.notify_log = os.path.join(env.gen_dir, "notify.log")
+ if os.path.isfile(self.notify_log):
+ os.remove(self.notify_log)
+
+ def configure_httpd(self, env, domain, add_lines=""):
+ conf = MDConf(env)
+ conf.add(add_lines)
+ conf.add_md([domain])
+ conf.add_vhost(domain)
+ conf.install()
+ return domain
+
+ # test: invalid notify cmd, check error
+ def test_md_900_001(self, env):
+ command = "blablabla"
+ args = ""
+ self.configure_httpd(env, self.domain, f"""
+ MDNotifyCmd {command} {args}
+ """)
+ assert env.apache_restart() == 0
+ assert env.await_error(self.domain)
+ stat = env.get_md_status(self.domain)
+ assert stat["renewal"]["last"]["problem"] == "urn:org:apache:httpd:log:AH10108:"
+
+ # test: valid notify cmd that fails, check error
+ def test_md_900_002(self, env):
+ command = "%s/notifail.py" % env.test_dir
+ args = ""
+ self.configure_httpd(env, self.domain, f"""
+ MDNotifyCmd {command} {args}
+ """)
+ assert env.apache_restart() == 0
+ assert env.await_error(self.domain)
+ stat = env.get_md_status(self.domain)
+ assert stat["renewal"]["last"]["problem"] == "urn:org:apache:httpd:log:AH10108:"
+
+ # test: valid notify that logs to file
+ def test_md_900_010(self, env):
+ command = self.notify_cmd
+ args = self.notify_log
+ self.configure_httpd(env, self.domain, f"""
+ MDNotifyCmd {command} {args}
+ """)
+ assert env.apache_restart() == 0
+ assert env.await_completion([self.domain], restart=False)
+ time.sleep(1)
+ stat = env.get_md_status(self.domain)
+ assert stat["renewal"]["last"]["status"] == 0
+ time.sleep(1)
+ nlines = open(self.notify_log).readlines()
+ assert 1 == len(nlines)
+ assert ("['%s', '%s', '%s']" % (command, args, self.domain)) == nlines[0].strip()
+
+ # test: signup with working notify cmd and see that it is called with the
+ # configured extra arguments
+ def test_md_900_011(self, env):
+ command = self.notify_cmd
+ args = self.notify_log
+ extra_arg = "test_900_011_extra"
+ self.configure_httpd(env, self.domain, f"""
+ MDNotifyCmd {command} {args} {extra_arg}
+ """)
+ assert env.apache_restart() == 0
+ assert env.await_completion([self.domain], restart=False)
+ time.sleep(1)
+ stat = env.get_md_status(self.domain)
+ assert stat["renewal"]["last"]["status"] == 0
+ nlines = open(self.notify_log).readlines()
+ assert ("['%s', '%s', '%s', '%s']" % (command, args, extra_arg, self.domain)) == nlines[0].strip()
+
+ # test: signup with working notify cmd for 2 MD and expect it to be called twice
+ def test_md_900_012(self, env):
+ md1 = "a-" + self.domain
+ domains1 = [md1, "www." + md1]
+ md2 = "b-" + self.domain
+ domains2 = [md2, "www." + md2]
+ command = self.notify_cmd
+ args = self.notify_log
+ conf = MDConf(env)
+ conf.add(f"MDNotifyCmd {command} {args}")
+ conf.add_md(domains1)
+ conf.add_md(domains2)
+ conf.add_vhost(domains1)
+ conf.add_vhost(domains2)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([md1, md2], restart=False)
+ time.sleep(1)
+ stat = env.get_md_status(md1)
+ assert stat["renewal"]["last"]["status"] == 0
+ stat = env.get_md_status(md2)
+ assert stat["renewal"]["last"]["status"] == 0
+ nlines = open(args).readlines()
+ assert 2 == len(nlines)
diff --git a/test/modules/md/test_901_message.py b/test/modules/md/test_901_message.py
new file mode 100644
index 0000000..8d03bfd
--- /dev/null
+++ b/test/modules/md/test_901_message.py
@@ -0,0 +1,297 @@
+# test mod_md message support
+
+import json
+import os
+import time
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestMessage:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+ self.mcmd = os.path.join(env.test_dir, "../modules/md/message.py")
+ self.mlog = os.path.join(env.gen_dir, "message.log")
+ if os.path.isfile(self.mlog):
+ os.remove(self.mlog)
+
+ # test: signup with configured message cmd that is invalid
+ def test_md_901_001(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add("MDMessageCmd blablabla")
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_file(env.store_staged_file(domain, 'job.json'))
+ stat = env.get_md_status(domain)
+ # this command should have failed and logged an error
+ assert stat["renewal"]["last"]["problem"] == "urn:org:apache:httpd:log:AH10109:"
+
+ # test: signup with configured message cmd that is valid but returns != 0
+ def test_md_901_002(self, env):
+ mcmd = os.path.join(env.test_dir, "../modules/md/notifail.py")
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {mcmd} {self.mlog}")
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ stat = env.get_md_status(domain)
+ # this command should have failed and logged an error
+ assert stat["renewal"]["last"]["problem"] == "urn:org:apache:httpd:log:AH10109:"
+
+ # test: signup with working message cmd and see that it logs the right things
+ def test_md_901_003(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ time.sleep(1)
+ stat = env.get_md_status(domain)
+ # this command did not fail and logged itself the correct information
+ assert stat["renewal"]["last"]["status"] == 0
+ assert stat["renewal"]["log"]["entries"]
+ assert stat["renewal"]["log"]["entries"][0]["type"] == "message-renewed"
+ # shut down server to make sure that md has completed
+ assert env.apache_stop() == 0
+ nlines = open(self.mlog).readlines()
+ assert 3 == len(nlines)
+ nlines = [s.strip() for s in nlines]
+ assert "['{cmd}', '{logfile}', 'challenge-setup:http-01:{dns}', '{mdomain}']".format(
+ cmd=self.mcmd, logfile=self.mlog, mdomain=domain, dns=domains[0]) in nlines
+ assert "['{cmd}', '{logfile}', 'challenge-setup:http-01:{dns}', '{mdomain}']".format(
+ cmd=self.mcmd, logfile=self.mlog, mdomain=domain, dns=domains[1]) in nlines
+ assert nlines[2].strip() == "['{cmd}', '{logfile}', 'renewed', '{mdomain}']".format(
+ cmd=self.mcmd, logfile=self.mlog, mdomain=domain)
+
+ # test issue #145:
+ # - a server renews a valid certificate and is not restarted when recommended
+ # - the job did not clear its next_run and was run over and over again
+# - the job logged the re-verifications again and again, and that log was saved
+ # - this eventually flushed out the "message-renew" log entry
+ # - which caused the renew message handling to trigger again and again
+ # the fix does:
+ # - reset the next run
+ # - no longer adds the re-validations to the log
+ # - messages only once
+ @pytest.mark.skipif(MDTestEnv.is_pebble(), reason="ACME server certs valid too long")
+ def test_md_901_004(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ # force renew
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.add("MDRenewWindow 120d")
+ conf.add("MDActivationDelay -7d")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ env.get_md_status(domain)
+ assert env.await_file(self.mlog)
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 1
+ assert nlines[0].strip() == f"['{self.mcmd}', '{self.mlog}', 'renewed', '{domain}']"
+
+ def test_md_901_010(self, env):
+ # MD with static cert files, lifetime in renewal window, no message about renewal
+ domain = self.test_domain
+ domains = [domain, 'www.%s' % domain]
+ testpath = os.path.join(env.gen_dir, 'test_901_010')
+        # cert that is valid for 20 more days
+ env.create_self_signed_cert(domains, {"notBefore": -70, "notAfter": 20},
+ serial=901010, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.start_md(domains)
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert not os.path.isfile(self.mlog)
+
+ def test_md_901_011(self, env):
+ # MD with static cert files, lifetime in warn window, check message
+ domain = self.test_domain
+ domains = [domain, f'www.{domain}']
+ testpath = os.path.join(env.gen_dir, 'test_901_011')
+        # cert that is valid for only 5 more days
+ env.create_self_signed_cert(domains, {"notBefore": -85, "notAfter": 5},
+ serial=901011, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.start_md(domains)
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.end_md()
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_file(self.mlog)
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 1
+ assert nlines[0].strip() == f"['{self.mcmd}', '{self.mlog}', 'expiring', '{domain}']"
+        # check that we do not get it resent right away again
+ assert env.apache_restart() == 0
+ time.sleep(1)
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 1
+ assert nlines[0].strip() == f"['{self.mcmd}', '{self.mlog}', 'expiring', '{domain}']"
+
+ # MD, check messages from stapling
+ @pytest.mark.skipif(MDTestEnv.lacks_ocsp(), reason="no OCSP responder")
+ def test_md_901_020(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.add("MDStapling on")
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ env.await_ocsp_status(domain)
+ assert env.await_file(self.mlog)
+ time.sleep(1)
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 4
+ assert nlines[0].strip() == \
+ f"['{self.mcmd}', '{self.mlog}', 'challenge-setup:http-01:{domain}', '{domain}']"
+ assert nlines[1].strip() == \
+ f"['{self.mcmd}', '{self.mlog}', 'renewed', '{domain}']"
+ assert nlines[2].strip() == \
+ f"['{self.mcmd}', '{self.mlog}', 'installed', '{domain}']"
+ assert nlines[3].strip() == \
+ f"['{self.mcmd}', '{self.mlog}', 'ocsp-renewed', '{domain}']"
+
+ # test: while testing gh issue #146, it was noted that a failed renew notification never
+ # resets the MD activity.
+ @pytest.mark.skipif(MDTestEnv.is_pebble(), reason="ACME server certs valid too long")
+ def test_md_901_030(self, env):
+ domain = self.test_domain
+ domains = [domain, "www." + domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ # set the warn window that triggers right away and a failing message command
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {env.test_dir}../modules/md/notifail.py {self.mlog}")
+ conf.add_md(domains)
+ conf.add("""
+ MDWarnWindow 100d
+ """)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ env.get_md_status(domain)
+ # this command should have failed and logged an error
+        # wait for the staged job.json to record the failed notification
+ assert env.await_file(env.store_staged_file(domain, 'job.json'))
+ while True:
+ with open(env.store_staged_file(domain, 'job.json')) as f:
+ job = json.load(f)
+ if job["errors"] > 0:
+ assert job["errors"] > 0, "unexpected job result: {0}".format(job)
+ assert job["last"]["problem"] == "urn:org:apache:httpd:log:AH10109:"
+ break
+ time.sleep(0.1)
+ env.httpd_error_log.ignore_recent()
+
+ # reconfigure to a working notification command and restart
+ conf = MDConf(env)
+ conf.add(f"MDMessageCmd {self.mcmd} {self.mlog}")
+ conf.add_md(domains)
+ conf.add("""
+ MDWarnWindow 100d
+ """)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_file(self.mlog)
+ # we see the notification logged by the command
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 1
+ assert nlines[0].strip() == f"['{self.mcmd}', '{self.mlog}', 'expiring', '{domain}']"
+ # the error needs to be gone
+ assert env.await_file(env.store_staged_file(domain, 'job.json'))
+ with open(env.store_staged_file(domain, 'job.json')) as f:
+ job = json.load(f)
+ assert job["errors"] == 0
+
+ # MD, check a failed challenge setup
+ def test_md_901_040(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ mcmd = os.path.join(env.test_dir, "../modules/md/msg_fail_on.py")
+ conf.add(f"MDMessageCmd {mcmd} {self.mlog} challenge-setup")
+ conf.add_drive_mode("auto")
+ conf.add_md(domains)
+ conf.add_vhost(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_error(domain)
+ assert env.await_file(self.mlog)
+ time.sleep(1)
+ nlines = open(self.mlog).readlines()
+ assert len(nlines) == 2
+ assert nlines[0].strip() == \
+ f"['{mcmd}', '{self.mlog}', 'challenge-setup:http-01:{domain}', '{domain}']"
+ assert nlines[1].strip() == \
+ f"['{mcmd}', '{self.mlog}', 'errored', '{domain}']"
+ stat = env.get_md_status(domain)
+ # this command should have failed and logged an error
+ assert stat["renewal"]["last"]["problem"] == "challenge-setup-failure"
+
diff --git a/test/modules/md/test_910_cleanups.py b/test/modules/md/test_910_cleanups.py
new file mode 100644
index 0000000..1971fda
--- /dev/null
+++ b/test/modules/md/test_910_cleanups.py
@@ -0,0 +1,54 @@
+# test mod_md cleanups and sanitation
+
+import os
+
+import pytest
+
+from .md_conf import MDConf
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestCleanups:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ env.APACHE_CONF_SRC = "data/test_auto"
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+ MDConf(env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ def test_md_910_01(self, env):
+ # generate a simple MD
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_drive_mode("manual")
+ conf.add_md(domains)
+ conf.add_vhost(domain)
+ conf.install()
+
+ # create valid/invalid challenges subdirs
+ challenges_dir = env.store_challenges()
+ dirs_before = ["aaa", "bbb", domain, "zzz"]
+ for name in dirs_before:
+ os.makedirs(os.path.join(challenges_dir, name))
+
+ assert env.apache_restart() == 0
+ # the one we use is still there
+ assert os.path.isdir(os.path.join(challenges_dir, domain))
+ # and the others are gone
+ missing_after = ["aaa", "bbb", "zzz"]
+ for name in missing_after:
+ assert not os.path.exists(os.path.join(challenges_dir, name))
diff --git a/test/modules/md/test_920_status.py b/test/modules/md/test_920_status.py
new file mode 100644
index 0000000..c89ce6d
--- /dev/null
+++ b/test/modules/md/test_920_status.py
@@ -0,0 +1,245 @@
+# test mod_md status resources
+
+import os
+import re
+import time
+
+import pytest
+
+from .md_conf import MDConf
+from shutil import copyfile
+
+from .md_env import MDTestEnv
+
+
+@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
+ reason="no ACME test server configured")
+class TestStatus:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, acme):
+ acme.start(config='default')
+ env.check_acme()
+ env.clear_store()
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _method_scope(self, env, request):
+ env.clear_store()
+ self.test_domain = env.get_request_domain(request)
+
+ # simple MD, drive it, check status before activation
+ def test_md_920_001(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ # we started without a valid certificate, so we expect /.httpd/certificate-status
+ # to not give information about one and - since we waited for the ACME signup
+ # to complete - to give information in 'renewal' about the new cert.
+ status = env.get_certificate_status(domain)
+ assert 'sha256-fingerprint' not in status
+ assert 'valid' not in status
+ assert 'renewal' in status
+ assert 'valid' in status['renewal']['cert']
+ assert 'sha256-fingerprint' in status['renewal']['cert']['rsa']
+ # restart and activate
+ # once activated, the staging must be gone and attributes exist for the active cert
+ assert env.apache_restart() == 0
+ status = env.get_certificate_status(domain)
+ assert 'renewal' not in status
+ assert 'sha256-fingerprint' in status['rsa']
+ assert 'valid' in status['rsa']
+ assert 'from' in status['rsa']['valid']
+
+ # simple MD, drive it, manipulate staged credentials and check status
+ def test_md_920_002(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ # copy a real certificate from LE over to staging
+ staged_cert = os.path.join(env.store_dir, 'staging', domain, 'pubcert.pem')
+ real_cert = os.path.join(env.test_dir, '../modules/md/data', 'test_920', '002.pubcert')
+ assert copyfile(real_cert, staged_cert)
+ status = env.get_certificate_status(domain)
+ # status shows the copied cert's properties as staged
+ assert 'renewal' in status
+ assert 'Thu, 29 Aug 2019 16:06:35 GMT' == status['renewal']['cert']['rsa']['valid']['until']
+ assert 'Fri, 31 May 2019 16:06:35 GMT' == status['renewal']['cert']['rsa']['valid']['from']
+ assert '03039C464D454EDE79FCD2CAE859F668F269' == status['renewal']['cert']['rsa']['serial']
+ assert 'sha256-fingerprint' in status['renewal']['cert']['rsa']
+
+ # test if switching status off has effect
+ def test_md_920_003(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add("MDCertificateStatus off")
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ status = env.get_certificate_status(domain)
+ assert not status
+
+ def test_md_920_004(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add_md(domains)
+ conf.add("MDCertificateStatus off")
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain])
+ status = env.get_md_status("")
+ assert "version" in status
+ assert "managed-domains" in status
+ assert 1 == len(status["managed-domains"])
+
+ # get the status of a domain on base server
+ def test_md_920_010(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env, std_vhosts=False, std_ports=False, text=f"""
+MDBaseServer on
+MDPortMap http:- https:{env.https_port}
+
+ServerName {domain}
+<IfModule ssl_module>
+SSLEngine on
+</IfModule>
+<IfModule tls_module>
+TLSListen {env.https_port}
+TLSStrictSNI off
+</IfModule>
+Protocols h2 http/1.1 acme-tls/1
+
+<Location "/server-status">
+ SetHandler server-status
+</Location>
+<Location "/md-status">
+ SetHandler md-status
+</Location>
+<VirtualHost *:{env.http_port}>
+ SSLEngine off
+</VirtualHost>
+ """)
+ conf.add_md(domains)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False,
+ via_domain=env.http_addr, use_https=False)
+ status = env.get_md_status("", via_domain=env.http_addr, use_https=False)
+ assert "version" in status
+ assert "managed-domains" in status
+ assert 1 == len(status["managed-domains"])
+ # get the html page
+ status = env.get_server_status(via_domain=env.http_addr, use_https=False)
+ assert re.search(r'<h3>Managed Certificates</h3>', status, re.MULTILINE)
+ # get the ascii summary
+ status = env.get_server_status(query="?auto", via_domain=env.http_addr, use_https=False)
+ m = re.search(r'ManagedCertificatesTotal: (\d+)', status, re.MULTILINE)
+ assert m, status
+ assert int(m.group(1)) == 1
+ m = re.search(r'ManagedCertificatesOK: (\d+)', status, re.MULTILINE)
+ assert int(m.group(1)) == 0
+ m = re.search(r'ManagedCertificatesRenew: (\d+)', status, re.MULTILINE)
+ assert int(m.group(1)) == 1
+ m = re.search(r'ManagedCertificatesErrored: (\d+)', status, re.MULTILINE)
+ assert int(m.group(1)) == 0
+ m = re.search(r'ManagedCertificatesReady: (\d+)', status, re.MULTILINE)
+ assert int(m.group(1)) == 1
+
+ def test_md_920_011(self, env):
+ # MD with static cert files in base server, see issue #161
+ domain = self.test_domain
+ domains = [domain, 'www.%s' % domain]
+ testpath = os.path.join(env.gen_dir, 'test_920_011')
+        # cert that is valid for 20 more days
+ env.create_self_signed_cert(domains, {"notBefore": -70, "notAfter": 20},
+ serial=920011, path=testpath)
+ cert_file = os.path.join(testpath, 'pubcert.pem')
+ pkey_file = os.path.join(testpath, 'privkey.pem')
+ assert os.path.exists(cert_file)
+ assert os.path.exists(pkey_file)
+ conf = MDConf(env, std_vhosts=False, std_ports=False, text=f"""
+ MDBaseServer on
+ MDPortMap http:- https:{env.https_port}
+
+ ServerName {domain}
+ <IfModule ssl_module>
+ SSLEngine on
+ </IfModule>
+ <IfModule tls_module>
+ TLSListen {env.https_port}
+ TLSStrictSNI off
+ </IfModule>
+ Protocols h2 http/1.1 acme-tls/1
+
+ <Location "/server-status">
+ SetHandler server-status
+ </Location>
+ <Location "/md-status">
+ SetHandler md-status
+ </Location>
+ """)
+ conf.start_md(domains)
+ conf.add(f"MDCertificateFile {cert_file}")
+ conf.add(f"MDCertificateKeyFile {pkey_file}")
+ conf.end_md()
+ conf.start_vhost([env.http_addr], port=env.http_port)
+ conf.add("SSLEngine off")
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ status = env.get_md_status(domain, via_domain=env.http_addr, use_https=False)
+ assert status
+ assert 'renewal' not in status
+ print(status)
+ assert status['state'] == env.MD_S_COMPLETE
+ assert status['renew-mode'] == 1 # manual
+
+ # MD with 2 certificates
+ def test_md_920_020(self, env):
+ domain = self.test_domain
+ domains = [domain]
+ conf = MDConf(env)
+ conf.add("MDStapling on")
+ conf.add("MDPrivateKeys secp256r1 RSA")
+ conf.add_md(domains)
+ conf.add_vhost(domain)
+ conf.install()
+ assert env.apache_restart() == 0
+ assert env.await_completion([domain], restart=False)
+ # In the stats JSON, we expect 2 certificates under 'renewal'
+ stat = env.get_md_status(domain)
+ assert 'renewal' in stat
+ assert 'cert' in stat['renewal']
+ assert 'rsa' in stat['renewal']['cert']
+ assert 'secp256r1' in stat['renewal']['cert']
+ # In /.httpd/certificate-status 'renewal' we expect 2 certificates
+ status = env.get_certificate_status(domain)
+ assert 'renewal' in status
+ assert 'cert' in status['renewal']
+ assert 'secp256r1' in status['renewal']['cert']
+ assert 'rsa' in status['renewal']['cert']
+ # restart and activate
+ # once activated, certs are listed in status
+ assert env.apache_restart() == 0
+ stat = env.get_md_status(domain)
+ assert 'cert' in stat
+ assert 'valid' in stat['cert']
+ for ktype in ['rsa', 'secp256r1']:
+ assert ktype in stat['cert']
+ if env.acme_server == 'boulder':
+ assert 'ocsp' in stat['cert'][ktype]
diff --git a/test/modules/proxy/__init__.py b/test/modules/proxy/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/proxy/__init__.py
diff --git a/test/modules/proxy/conftest.py b/test/modules/proxy/conftest.py
new file mode 100644
index 0000000..23c5f14
--- /dev/null
+++ b/test/modules/proxy/conftest.py
@@ -0,0 +1,51 @@
+import logging
+import os
+import sys
+import pytest
+
+from .env import ProxyTestEnv
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+
+def pytest_report_header(config, startdir):
+ env = ProxyTestEnv()
+ return "mod_proxy: [apache: {aversion}({prefix})]".format(
+ prefix=env.prefix,
+ aversion=env.get_httpd_version(),
+ )
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> ProxyTestEnv:
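+    # route test log output to the console at INFO level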
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = ProxyTestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ # we'd like to check the httpd error logs after the test suite has
+ # run to catch anything unusual. For this, we setup the ignore list
+ # of errors and warnings that we do expect.
+ env.httpd_error_log.set_ignored_lognos([
+ 'AH01144', # No protocol handler was valid for the URL
+ ])
+
+ env.httpd_error_log.add_ignored_patterns([
+ #re.compile(r'.*urn:ietf:params:acme:error:.*'),
+ ])
+ yield
+ assert env.apache_stop() == 0
+ errors, warnings = env.httpd_error_log.get_missed()
+ assert (len(errors), len(warnings)) == (0, 0),\
+ f"apache logged {len(errors)} errors and {len(warnings)} warnings: \n"\
+ "{0}\n{1}\n".format("\n".join(errors), "\n".join(warnings))
diff --git a/test/modules/proxy/env.py b/test/modules/proxy/env.py
new file mode 100644
index 0000000..9ed635c
--- /dev/null
+++ b/test/modules/proxy/env.py
@@ -0,0 +1,54 @@
+import inspect
+import logging
+import os
+import re
+import subprocess
+from typing import Dict, Any
+
+from pyhttpd.certs import CertificateSpec
+from pyhttpd.conf import HttpdConf
+from pyhttpd.env import HttpdTestEnv, HttpdTestSetup
+
+log = logging.getLogger(__name__)
+
+
+class ProxyTestSetup(HttpdTestSetup):
+
+ def __init__(self, env: 'HttpdTestEnv'):
+ super().__init__(env=env)
+ self.add_source_dir(os.path.dirname(inspect.getfile(ProxyTestSetup)))
+ self.add_modules(["proxy", "proxy_http", "proxy_balancer", "lbmethod_byrequests"])
+
+
+class ProxyTestEnv(HttpdTestEnv):
+
+ def __init__(self, pytestconfig=None):
+ super().__init__(pytestconfig=pytestconfig)
+ self.add_httpd_conf([
+ ])
+ self._d_reverse = f"reverse.{self.http_tld}"
+ self._d_forward = f"forward.{self.http_tld}"
+ self._d_mixed = f"mixed.{self.http_tld}"
+
+ self.add_httpd_log_modules(["proxy", "proxy_http", "proxy_balancer", "lbmethod_byrequests", "ssl"])
+ self.add_cert_specs([
+ CertificateSpec(domains=[
+ self._d_forward, self._d_reverse, self._d_mixed
+ ]),
+ CertificateSpec(domains=[f"noh2.{self.http_tld}"], key_type='rsa2048'),
+ ])
+
+ def setup_httpd(self, setup: HttpdTestSetup = None):
+ super().setup_httpd(setup=ProxyTestSetup(env=self))
+
+ @property
+ def d_forward(self):
+ return self._d_forward
+
+ @property
+ def d_reverse(self):
+ return self._d_reverse
+
+ @property
+ def d_mixed(self):
+ return self._d_mixed
diff --git a/test/modules/proxy/test_01_http.py b/test/modules/proxy/test_01_http.py
new file mode 100644
index 0000000..ef71b16
--- /dev/null
+++ b/test/modules/proxy/test_01_http.py
@@ -0,0 +1,96 @@
+import os
+import time
+
+import pytest
+
+from pyhttpd.conf import HttpdConf
+
+
+class TestProxyHttp:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # setup 3 vhosts on https: for reverse, forward and mixed proxying
+ # setup 3 vhosts on http: with different document roots
+ conf = HttpdConf(env)
+ conf.add("ProxyPreserveHost on")
+ conf.start_vhost(domains=[env.d_reverse], port=env.https_port)
+ conf.add([
+ f"ProxyPass / http://127.0.0.1:{env.http_port}/"
+ ])
+ conf.end_vhost()
+ conf.add_vhost(domains=[env.d_reverse], port=env.http_port, doc_root='htdocs/test1')
+
+ conf.start_vhost(domains=[env.d_forward], port=env.https_port)
+ conf.add([
+ "ProxyRequests on"
+ ])
+ conf.end_vhost()
+ conf.add_vhost(domains=[env.d_forward], port=env.http_port, doc_root='htdocs/test2')
+
+ conf.start_vhost(domains=[env.d_mixed], port=env.https_port)
+ conf.add([
+ f"ProxyPass / http://127.0.0.1:{env.http_port}/",
+ "ProxyRequests on"
+ ])
+ conf.end_vhost()
+ conf.add_vhost(domains=[env.d_mixed], port=env.http_port, doc_root='htdocs')
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize(["via", "seen"], [
+ ["reverse", "test1"],
+ ["mixed", "generic"],
+ ])
+ def test_proxy_01_001(self, env, via, seen):
+ # make requests to a reverse proxy https: vhost to the http: vhost
+ # check that we see the document we expect there (host matching worked)
+ r = env.curl_get(f"https://{via}.{env.http_tld}:{env.https_port}/alive.json", 5)
+ assert r.response["status"] == 200
+ assert r.json['host'] == seen
+
+ @pytest.mark.parametrize(["via", "seen"], [
+ ["reverse", "test1"],
+ ["forward", "test2"],
+ ["mixed", "generic"],
+ ])
+ def test_proxy_01_002(self, env, via, seen):
+ # make requests to a forward proxy https: vhost to the http: vhost
+ # check that we see the document we expect there (host matching worked)
+ # we need to explicitly provide a Host: header since mod_proxy cannot
+ # resolve the name via DNS.
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ domain = f"{via}.{env.http_tld}"
+ r = env.curl_get(f"http://127.0.0.1:{env.http_port}/alive.json", 5, options=[
+ '-H', f"Host: {domain}",
+ '--proxy', f"https://{domain}:{env.https_port}/",
+ '--resolve', f"{domain}:{env.https_port}:127.0.0.1",
+ '--proxy-cacert', f"{env.get_ca_pem_file(domain)}",
+
+ ])
+ assert r.exit_code == 0, f"{r.stdout}{r.stderr}"
+ assert r.response["status"] == 200
+ assert r.json['host'] == seen
+
+ def test_proxy_01_003(self, env):
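+        # reverse proxy via a balancer:// pool with a single TLS backend member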
+ domain = f"test1.{env.http_tld}"
+ conf = HttpdConf(env)
+ conf.add([
+ "ProxyPreserveHost on",
+ "<Proxy balancer://backends>",
+ f" BalancerMember https://localhost:{env.https_port}",
+ " SSLProxyEngine on",
+ "</Proxy>",
+ ])
+ conf.start_vhost(domains=[domain], port=env.https_port, doc_root="htdocs/test1")
+ conf.add([
+ "ProxyPass /proxy balancer://backends",
+ "ProxyPassReverse /proxy balancer://backends",
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.curl_get(f"https://{domain}:{env.https_port}/proxy/alive.json", 5)
+ assert r.response["status"] == 200
+ assert r.json['host'] == "test1"
diff --git a/test/modules/proxy/test_02_unix.py b/test/modules/proxy/test_02_unix.py
new file mode 100644
index 0000000..7f3d4d5
--- /dev/null
+++ b/test/modules/proxy/test_02_unix.py
@@ -0,0 +1,187 @@
+import os
+import re
+import socket
+from threading import Thread
+
+import pytest
+
+from pyhttpd.conf import HttpdConf
+from pyhttpd.result import ExecResult
+
+
+class UDSFaker:
+
+ def __init__(self, path):
+ self._uds_path = path
+ self._done = False
+
+ def start(self):
+ def process(self):
+ self._socket.listen(1)
+ self._process()
+
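+        # remove any stale socket file left over from a previous run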
+ try:
+ os.unlink(self._uds_path)
+ except OSError:
+ if os.path.exists(self._uds_path):
+ raise
+ self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ self._socket.bind(self._uds_path)
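+        # serve accepted connections on a daemon thread so the test can continue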
+ self._thread = Thread(target=process, daemon=True, args=[self])
+ self._thread.start()
+
+ def stop(self):
+ self._done = True
+ self._socket.close()
+
+ def _process(self):
+        while not self._done:
+ try:
+ c, client_address = self._socket.accept()
+ try:
+ data = c.recv(16)
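+                    # read a bit of the request, then answer with a fixed, minimal HTTP/1.1 response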
+ c.sendall("""HTTP/1.1 200 Ok
+Server: UdsFaker
+Content-Type: application/json
+Content-Length: 19
+
+{ "host": "faked" }""".encode())
+ finally:
+ c.close()
+
+ except ConnectionAbortedError:
+ self._done = True
+
+
+class TestProxyUds:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # setup 3 vhosts on https: for reverse, forward and
+ # mixed proxying to a unix: domain socket
+        # We set up a UDSFaker that returns a fixed response
+ UDS_PATH = f"{env.gen_dir}/proxy_02.sock"
+ TestProxyUds.UDS_PATH = UDS_PATH
+ faker = UDSFaker(path=UDS_PATH)
+ faker.start()
+
+ conf = HttpdConf(env)
+ conf.add("ProxyPreserveHost on")
+ conf.start_vhost(domains=[env.d_reverse], port=env.https_port)
+ conf.add([
+ f"ProxyPass / unix:{UDS_PATH}|http://127.0.0.1:{env.http_port}/"
+ ])
+ conf.end_vhost()
+
+ conf.start_vhost(domains=[env.d_forward], port=env.https_port)
+ conf.add([
+ "ProxyRequests on"
+ ])
+ conf.end_vhost()
+
+ conf.start_vhost(domains=[env.d_mixed], port=env.https_port)
+ conf.add([
+ f"ProxyPass / unix:{UDS_PATH}|http://127.0.0.1:{env.http_port}/",
+ "ProxyRequests on"
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ yield
+ faker.stop()
+
+ @pytest.mark.parametrize(["via", "seen"], [
+ ["reverse", "faked"],
+ ["mixed", "faked"],
+ ])
+ def test_proxy_02_001(self, env, via, seen):
+ # make requests to a reverse proxy https: vhost to the http: vhost
+ # check that we see the document we expect there (host matching worked)
+ r = env.curl_get(f"https://{via}.{env.http_tld}:{env.https_port}/alive.json", 5)
+ assert r.response["status"] == 200
+ assert r.json['host'] == seen
+
+ @pytest.mark.parametrize(["via", "seen"], [
+ ["forward", "generic"],
+ ["mixed", "faked"],
+ ])
+ def test_proxy_02_002(self, env, via, seen):
+ # make requests to a forward proxy https: vhost to the http: vhost
+ # check that we see the document we expect there (host matching worked)
+ # we need to explicitly provide a Host: header since mod_proxy cannot
+ # resolve the name via DNS.
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip('need at least curl v8.0.0 for this')
+ domain = f"{via}.{env.http_tld}"
+ r = env.curl_get(f"http://127.0.0.1:{env.http_port}/alive.json", 5, options=[
+ '-H', f"Host: {domain}",
+ '--proxy', f"https://{domain}:{env.https_port}/",
+ '--resolve', f"{domain}:{env.https_port}:127.0.0.1",
+ '--proxy-cacert', f"{env.get_ca_pem_file(domain)}",
+ ])
+ assert r.exit_code == 0, f"{r.stdout}{r.stderr}"
+ assert r.response["status"] == 200
+ assert r.json['host'] == seen
+
+ @pytest.mark.parametrize(["via", "exp_status"], [
+ ["reverse", 400],
+ ["forward", 500],
+ ["mixed", 500],
+ ])
+ def test_proxy_02_003(self, env, via, exp_status):
+ # make requests to a forward proxy https: vhost and GET
+ # a URL which carries the unix: domain socket.
+ # This needs to fail.
+ domain = f"{via}.{env.http_tld}"
+ r = env.run(args=[
+ 'openssl', 's_client', '-connect', f"127.0.0.1:{env.https_port}",
+ '-servername', domain,
+ '-crlf', '-ign_eof',
+ '-CAfile', env.get_ca_pem_file(domain)
+ ], intext=f"""GET unix:{TestProxyUds.UDS_PATH}|http://127.0.0.1:{env.http_port}/alive.json HTTP/1.1
+Host: {domain}
+
+""")
+ assert r.exit_code == 0, f"{r.stdout}{r.stderr}"
+ lines = r.stdout.split('\n')
+ rlines = None
+ for idx, l in enumerate(lines):
+ if l.startswith('HTTP/'):
+ rlines = lines[idx:]
+ assert rlines, f"No response found in: {r.stdout}"
+ r2 = self.parse_response(rlines)
+ assert r2.response
+ assert r2.response['status'] == exp_status
+
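+ # minimal parser for the raw HTTP/1.1 response captured from openssl
+ # s_client output: status line, header lines up to the first empty
+ # line, then body lines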
+ def parse_response(self, lines) -> ExecResult:
+ exp_body = False
+ exp_stat = True
+ r = ExecResult(args=[], exit_code=0, stdout=b'', stderr=b'')
+ header = {}
+ body = []
+ for line in lines:
+ if exp_stat:
+ m = re.match(r'^(\S+) (\d+) (.*)$', line)
+ assert m, f"first line no HTTP status line: {line}"
+ r.add_response({
+ "protocol": m.group(1),
+ "status": int(m.group(2)),
+ "description": m.group(3),
+ "body": r.outraw
+ })
+ header = {}
+ exp_stat = False
+ exp_body = False
+ elif re.match(r'^\r?$', line):
+ exp_body = True
+ elif exp_body:
+ body.append(line)
+ else:
+ m = re.match(r'^([^:]+):\s*(.*)$', line)
+ assert m, f"not a header line: {line}"
+ header[m.group(1).lower()] = m.group(2)
+ if r.response:
+ r.response["header"] = header
+ r.response["body"] = body
+ return r
diff --git a/test/modules/tls/__init__.py b/test/modules/tls/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/tls/__init__.py
diff --git a/test/modules/tls/conf.py b/test/modules/tls/conf.py
new file mode 100644
index 0000000..ddeb91f
--- /dev/null
+++ b/test/modules/tls/conf.py
@@ -0,0 +1,61 @@
+import os
+from typing import List, Dict, Any
+
+from pyhttpd.conf import HttpdConf
+from pyhttpd.env import HttpdTestEnv
+
+
+class TlsTestConf(HttpdConf):
+
+ def __init__(self, env: HttpdTestEnv, extras: Dict[str, Any] = None):
+ extras = extras if extras is not None else {}
+ super().__init__(env=env, extras=extras)
+
+ def start_tls_vhost(self, domains: List[str], port=None, ssl_module=None):
+ if ssl_module is None:
+ ssl_module = 'mod_tls'
+ super().start_vhost(domains=domains, port=port, doc_root=f"htdocs/{domains[0]}", ssl_module=ssl_module)
+
+ def end_tls_vhost(self):
+ self.end_vhost()
+
+ def add_tls_vhosts(self, domains: List[str], port=None, ssl_module=None):
+ for domain in domains:
+ self.start_tls_vhost(domains=[domain], port=port, ssl_module=ssl_module)
+ self.end_tls_vhost()
+
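+ # like add_tls_vhosts(), but lets mod_md supply the certificates via
+ # <MDomain> blocks instead of configuring them on the vhost directly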
+ def add_md_vhosts(self, domains: List[str], port = None):
+ self.add([
+ f"LoadModule md_module {self.env.libexec_dir}/mod_md.so",
+ "LogLevel md:debug",
+ ])
+ for domain in domains:
+ self.add(f"<MDomain {domain}>")
+ for cred in self.env.ca.get_credentials_for_name(domain):
+ cert_file = os.path.relpath(cred.cert_file, self.env.server_dir)
+ pkey_file = os.path.relpath(cred.pkey_file, self.env.server_dir) if cred.pkey_file else cert_file
+ self.add([
+ f" MDCertificateFile {cert_file}",
+ f" MDCertificateKeyFile {pkey_file}",
+ ])
+ self.add("</MDomain>")
+ super().add_vhost(domains=[domain], port=port, doc_root=f"htdocs/{domain}",
+ with_ssl=True, with_certificates=False, ssl_module='mod_tls')
+
+ def add_md_base(self, domain: str):
+ self.add([
+ f"LoadModule md_module {self.env.libexec_dir}/mod_md.so",
+ "LogLevel md:debug",
+ f"ServerName {domain}",
+ "MDBaseServer on",
+ ])
+ self.add(f"TLSEngine {self.env.https_port}")
+ self.add(f"<MDomain {domain}>")
+ for cred in self.env.ca.get_credentials_for_name(domain):
+ cert_file = os.path.relpath(cred.cert_file, self.env.server_dir)
+ pkey_file = os.path.relpath(cred.pkey_file, self.env.server_dir) if cred.pkey_file else cert_file
+ self.add([
+ f"MDCertificateFile {cert_file}",
+ f"MDCertificateKeyFile {pkey_file}",
+ ])
+ self.add("</MDomain>")
diff --git a/test/modules/tls/conftest.py b/test/modules/tls/conftest.py
new file mode 100644
index 0000000..cde4be6
--- /dev/null
+++ b/test/modules/tls/conftest.py
@@ -0,0 +1,39 @@
+import logging
+import os
+import sys
+import pytest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+from .env import TlsTestEnv
+
+
+def pytest_report_header(config, startdir):
+ _x = config
+ _x = startdir
+ env = TlsTestEnv()
+ return "mod_tls [apache: {aversion}({prefix})]".format(
+ prefix=env.prefix,
+ aversion=env.get_httpd_version()
+ )
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> TlsTestEnv:
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = TlsTestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ yield
+ assert env.apache_stop() == 0
diff --git a/test/modules/tls/env.py b/test/modules/tls/env.py
new file mode 100644
index 0000000..0e457bf
--- /dev/null
+++ b/test/modules/tls/env.py
@@ -0,0 +1,190 @@
+import inspect
+import logging
+import os
+import re
+import subprocess
+
+from datetime import timedelta, datetime
+from typing import List, Optional, Dict, Tuple, Union
+
+from pyhttpd.certs import CertificateSpec
+from pyhttpd.env import HttpdTestEnv, HttpdTestSetup
+from pyhttpd.result import ExecResult
+
+log = logging.getLogger(__name__)
+
+
+class TlsTestSetup(HttpdTestSetup):
+
+ def __init__(self, env: 'HttpdTestEnv'):
+ super().__init__(env=env)
+ self.add_source_dir(os.path.dirname(inspect.getfile(TlsTestSetup)))
+ self.add_modules(["tls", "http2", "cgid", "watchdog", "proxy_http2"])
+
+
+class TlsCipher:
+
+ def __init__(self, id: int, name: str, flavour: str,
+ min_version: float, max_version: float = None,
+ openssl: str = None):
+ self.id = id
+ self.name = name
+ self.flavour = flavour
+ self.min_version = min_version
+ self.max_version = max_version if max_version is not None else self.min_version
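+ # derive the OpenSSL cipher name from the IANA name, e.g.
+ # TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 -> ECDHE-ECDSA-AES256-GCM-SHA384
+ # and TLS13_AES_128_GCM_SHA256 -> TLS_AES_128_GCM_SHA256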
+ if openssl is None:
+ if name.startswith('TLS13_'):
+ openssl = re.sub(r'^TLS13_', 'TLS_', name)
+ else:
+ openssl = re.sub(r'^TLS_', '', name)
+ openssl = re.sub(r'_WITH_([^_]+)_', r'_\1_', openssl)
+ openssl = re.sub(r'_AES_(\d+)', r'_AES\1', openssl)
+ openssl = re.sub(r'(_POLY1305)_\S+$', r'\1', openssl)
+ openssl = re.sub(r'_', '-', openssl)
+ self.openssl_name = openssl
+ self.id_name = "TLS_CIPHER_0x{0:04x}".format(self.id)
+
+ def __repr__(self):
+ return self.name
+
+ def __str__(self):
+ return self.name
+
+
+class TlsTestEnv(HttpdTestEnv):
+
+ CURL_SUPPORTS_TLS_1_3 = None
+
+ @classmethod
+ def curl_supports_tls_1_3(cls) -> bool:
+ if cls.CURL_SUPPORTS_TLS_1_3 is None:
+ # Unfortunately, there is no reliable, platform-independent
+ # way to verify that TLSv1.3 is properly supported by curl.
+ #
+ # p = subprocess.run(['curl', '--tlsv1.3', 'https://shouldneverexistreally'],
+ # stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ # return code 6 means the site could not be resolved, but the
+ # tls parameter was recognized
+ cls.CURL_SUPPORTS_TLS_1_3 = False
+ return cls.CURL_SUPPORTS_TLS_1_3
+
+
+ # current rustls supported ciphers in their order of preference
+ # used to test cipher selection, see test_06_ciphers.py
+ RUSTLS_CIPHERS = [
+ TlsCipher(0x1303, "TLS13_CHACHA20_POLY1305_SHA256", "CHACHA", 1.3),
+ TlsCipher(0x1302, "TLS13_AES_256_GCM_SHA384", "AES", 1.3),
+ TlsCipher(0x1301, "TLS13_AES_128_GCM_SHA256", "AES", 1.3),
+ TlsCipher(0xcca9, "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", "ECDSA", 1.2),
+ TlsCipher(0xcca8, "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", "RSA", 1.2),
+ TlsCipher(0xc02c, "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "ECDSA", 1.2),
+ TlsCipher(0xc02b, "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "ECDSA", 1.2),
+ TlsCipher(0xc030, "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "RSA", 1.2),
+ TlsCipher(0xc02f, "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", "RSA", 1.2),
+ ]
+
+ def __init__(self, pytestconfig=None):
+ super().__init__(pytestconfig=pytestconfig)
+ self._domain_a = "a.mod-tls.test"
+ self._domain_b = "b.mod-tls.test"
+ self.add_httpd_conf([
+ f'<Directory "{self.server_dir}/htdocs/{self.domain_a}">',
+ ' AllowOverride None',
+ ' Require all granted',
+ ' AddHandler cgi-script .py',
+ ' Options +ExecCGI',
+ '</Directory>',
+ f'<Directory "{self.server_dir}/htdocs/{self.domain_b}">',
+ ' AllowOverride None',
+ ' Require all granted',
+ ' AddHandler cgi-script .py',
+ ' Options +ExecCGI',
+ '</Directory>',
+ f'<VirtualHost *:{self.http_port}>',
+ ' ServerName localhost',
+ ' DocumentRoot "htdocs"',
+ '</VirtualHost>',
+ f'<VirtualHost *:{self.http_port}>',
+ f' ServerName {self.domain_a}',
+ ' DocumentRoot "htdocs/a.mod-tls.test"',
+ '</VirtualHost>',
+ f'<VirtualHost *:{self.http_port}>',
+ f' ServerName {self.domain_b}',
+ ' DocumentRoot "htdocs/b.mod-tls.test"',
+ '</VirtualHost>',
+ ])
+ self.add_cert_specs([
+ CertificateSpec(domains=[self.domain_a]),
+ CertificateSpec(domains=[self.domain_b], key_type='secp256r1', single_file=True),
+ CertificateSpec(domains=[self.domain_b], key_type='rsa4096'),
+ CertificateSpec(name="clientsX", sub_specs=[
+ CertificateSpec(name="user1", client=True, single_file=True),
+ CertificateSpec(name="user2", client=True, single_file=True),
+ CertificateSpec(name="user_expired", client=True,
+ single_file=True, valid_from=timedelta(days=-91),
+ valid_to=timedelta(days=-1)),
+ ]),
+ CertificateSpec(name="clientsY", sub_specs=[
+ CertificateSpec(name="user1", client=True, single_file=True),
+ ]),
+ CertificateSpec(name="user1", client=True, single_file=True),
+ ])
+ self.add_httpd_log_modules(['tls'])
+
+
+ def setup_httpd(self, setup: TlsTestSetup = None):
+ if setup is None:
+ setup = TlsTestSetup(env=self)
+ super().setup_httpd(setup=setup)
+
+ @property
+ def domain_a(self) -> str:
+ return self._domain_a
+
+ @property
+ def domain_b(self) -> str:
+ return self._domain_b
+
+ def tls_get(self, domain, paths: Union[str, List[str]], options: List[str] = None, no_stdout_list = False) -> ExecResult:
+ if isinstance(paths, str):
+ paths = [paths]
+ urls = [f"https://{domain}:{self.https_port}{path}" for path in paths]
+ return self.curl_raw(urls=urls, options=options, no_stdout_list=no_stdout_list)
+
+ def tls_get_json(self, domain: str, path: str, options=None):
+ r = self.tls_get(domain=domain, paths=path, options=options)
+ return r.json
+
+ def run_diff(self, fleft: str, fright: str) -> ExecResult:
+ return self.run(['diff', '-u', fleft, fright])
+
+ def openssl(self, args: List[str]) -> ExecResult:
+ return self.run(['openssl'] + args)
+
+ def openssl_client(self, domain, extra_args: List[str] = None) -> ExecResult:
+ args = ["s_client", "-CAfile", self.ca.cert_file, "-servername", domain,
+ "-connect", "localhost:{port}".format(
+ port=self.https_port
+ )]
+ if extra_args:
+ args.extend(extra_args)
+ return self.openssl(args)
+
+ OPENSSL_SUPPORTED_PROTOCOLS = None
+
+ @staticmethod
+ def openssl_supports_tls_1_3() -> bool:
+ if TlsTestEnv.OPENSSL_SUPPORTED_PROTOCOLS is None:
+ env = TlsTestEnv()
+ r = env.openssl(args=["ciphers", "-v"])
+ protos = set()
+ ciphers = set()
+ for line in r.stdout.splitlines():
+ m = re.match(r'^(\S+)\s+(\S+)\s+(.*)$', line)
+ if m:
+ ciphers.add(m.group(1))
+ protos.add(m.group(2))
+ TlsTestEnv.OPENSSL_SUPPORTED_PROTOCOLS = protos
+ TlsTestEnv.OPENSSL_SUPPORTED_CIPHERS = ciphers
+ return "TLSv1.3" in TlsTestEnv.OPENSSL_SUPPORTED_PROTOCOLS
diff --git a/test/modules/tls/htdocs/a.mod-tls.test/index.json b/test/modules/tls/htdocs/a.mod-tls.test/index.json
new file mode 100644
index 0000000..ffc32cb
--- /dev/null
+++ b/test/modules/tls/htdocs/a.mod-tls.test/index.json
@@ -0,0 +1,3 @@
+{
+ "domain": "a.mod-tls.test"
+} \ No newline at end of file
diff --git a/test/modules/tls/htdocs/a.mod-tls.test/vars.py b/test/modules/tls/htdocs/a.mod-tls.test/vars.py
new file mode 100755
index 0000000..bd520e2
--- /dev/null
+++ b/test/modules/tls/htdocs/a.mod-tls.test/vars.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+import json
+import os, sys
+from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
+
+
+def get_request_params():
+ oforms = {}
+ ofiles = {}
+ if "REQUEST_URI" in os.environ:
+ qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ if "HTTP_CONTENT_TYPE" in os.environ:
+ ctype = os.environ["HTTP_CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ qforms = parse.parse_qs(parse.urlsplit(sys.stdin.read()).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name] = field.value
+ def on_file(file):
+ ofiles[file.field_name] = file.value
+ multipart.parse_form(headers={"Content-Type": ctype}, input_stream=sys.stdin.buffer, on_field=on_field, on_file=on_file)
+ return oforms, ofiles
+
+
+forms, files = get_request_params()
+
+jenc = json.JSONEncoder()
+
+def get_var(name: str, def_val: str = ""):
+ if name in os.environ:
+ return os.environ[name]
+ return def_val
+
+def get_json_var(name: str, def_val: str = ""):
+ var = get_var(name, def_val=def_val)
+ return jenc.encode(var)
+
+
+name = forms['name'] if 'name' in forms else None
+
+print("Content-Type: application/json\n")
+if name:
+ print(f"""{{ "{name}" : {get_json_var(name, '')}}}""")
+else:
+ print(f"""{{ "https" : {get_json_var('HTTPS', '')},
+ "host" : {get_json_var('SERVER_NAME', '')},
+ "protocol" : {get_json_var('SERVER_PROTOCOL', '')},
+ "ssl_protocol" : {get_json_var('SSL_PROTOCOL', '')},
+ "ssl_cipher" : {get_json_var('SSL_CIPHER', '')}
+}}""")
+
diff --git a/test/modules/tls/htdocs/b.mod-tls.test/dir1/vars.py b/test/modules/tls/htdocs/b.mod-tls.test/dir1/vars.py
new file mode 100755
index 0000000..b86a968
--- /dev/null
+++ b/test/modules/tls/htdocs/b.mod-tls.test/dir1/vars.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+import os
+
+def get_var(name: str, def_val: str = ""):
+ if name in os.environ:
+ return os.environ[name]
+ return def_val
+
+print("Content-Type: application/json")
+print()
+print("""{{ "https" : "{https}",
+ "host" : "{server_name}",
+ "protocol" : "{protocol}",
+ "ssl_protocol" : "{ssl_protocol}",
+ "ssl_cipher" : "{ssl_cipher}"
+}}""".format(
+ https=get_var('HTTPS', ''),
+ server_name=get_var('SERVER_NAME', ''),
+ protocol=get_var('SERVER_PROTOCOL', ''),
+ ssl_protocol=get_var('SSL_PROTOCOL', ''),
+ ssl_cipher=get_var('SSL_CIPHER', ''),
+))
+
diff --git a/test/modules/tls/htdocs/b.mod-tls.test/index.json b/test/modules/tls/htdocs/b.mod-tls.test/index.json
new file mode 100644
index 0000000..e5d3ccf
--- /dev/null
+++ b/test/modules/tls/htdocs/b.mod-tls.test/index.json
@@ -0,0 +1,3 @@
+{
+ "domain": "b.mod-tls.test"
+} \ No newline at end of file
diff --git a/test/modules/tls/htdocs/b.mod-tls.test/resp-jitter.py b/test/modules/tls/htdocs/b.mod-tls.test/resp-jitter.py
new file mode 100755
index 0000000..f7b1349
--- /dev/null
+++ b/test/modules/tls/htdocs/b.mod-tls.test/resp-jitter.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+import random
+import sys
+import time
+from datetime import timedelta
+
+random.seed()
+to_write = total_len = random.randint(1, 10*1024*1024)
+
+sys.stdout.write("Content-Type: application/octet-stream\n")
+sys.stdout.write(f"Content-Length: {total_len}\n")
+sys.stdout.write("\n")
+sys.stdout.flush()
+
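+# stream the body in randomly sized chunks with short random pauses,
+# exercising how the server copes with slow, jittery CGI output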
+while to_write > 0:
+ chunk = random.randint(1, 1024*1024)
+ chunk = min(chunk, to_write)
+ sys.stdout.buffer.write(random.randbytes(chunk))
+ to_write -= chunk
+ delay = timedelta(seconds=random.uniform(0.0, 0.5))
+ time.sleep(delay.total_seconds())
+sys.stdout.flush()
+
diff --git a/test/modules/tls/htdocs/b.mod-tls.test/vars.py b/test/modules/tls/htdocs/b.mod-tls.test/vars.py
new file mode 100755
index 0000000..bd520e2
--- /dev/null
+++ b/test/modules/tls/htdocs/b.mod-tls.test/vars.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+import json
+import os, sys
+from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
+
+
+def get_request_params():
+ oforms = {}
+ ofiles = {}
+ if "REQUEST_URI" in os.environ:
+ qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ if "HTTP_CONTENT_TYPE" in os.environ:
+ ctype = os.environ["HTTP_CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ qforms = parse.parse_qs(parse.urlsplit(sys.stdin.read()).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name] = field.value
+ def on_file(file):
+ ofiles[file.field_name] = file.value
+ multipart.parse_form(headers={"Content-Type": ctype}, input_stream=sys.stdin.buffer, on_field=on_field, on_file=on_file)
+ return oforms, ofiles
+
+
+forms, files = get_request_params()
+
+jenc = json.JSONEncoder()
+
+def get_var(name: str, def_val: str = ""):
+ if name in os.environ:
+ return os.environ[name]
+ return def_val
+
+def get_json_var(name: str, def_val: str = ""):
+ var = get_var(name, def_val=def_val)
+ return jenc.encode(var)
+
+
+name = forms['name'] if 'name' in forms else None
+
+print("Content-Type: application/json\n")
+if name:
+ print(f"""{{ "{name}" : {get_json_var(name, '')}}}""")
+else:
+ print(f"""{{ "https" : {get_json_var('HTTPS', '')},
+ "host" : {get_json_var('SERVER_NAME', '')},
+ "protocol" : {get_json_var('SERVER_PROTOCOL', '')},
+ "ssl_protocol" : {get_json_var('SSL_PROTOCOL', '')},
+ "ssl_cipher" : {get_json_var('SSL_CIPHER', '')}
+}}""")
+
diff --git a/test/modules/tls/htdocs/index.html b/test/modules/tls/htdocs/index.html
new file mode 100644
index 0000000..3c07626
--- /dev/null
+++ b/test/modules/tls/htdocs/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>mod_h2 test site generic</title>
+ </head>
+ <body>
+ <h1>mod_h2 test site generic</h1>
+ </body>
+</html>
+
diff --git a/test/modules/tls/htdocs/index.json b/test/modules/tls/htdocs/index.json
new file mode 100644
index 0000000..6d456e0
--- /dev/null
+++ b/test/modules/tls/htdocs/index.json
@@ -0,0 +1,3 @@
+{
+ "domain": "localhost"
+} \ No newline at end of file
diff --git a/test/modules/tls/test_01_apache.py b/test/modules/tls/test_01_apache.py
new file mode 100644
index 0000000..cb6af6d
--- /dev/null
+++ b/test/modules/tls/test_01_apache.py
@@ -0,0 +1,14 @@
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestApache:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TlsTestConf(env=env).install()
+ assert env.apache_restart() == 0
+
+ def test_tls_01_apache_http(self, env):
+ assert env.is_live(env.http_base_url)
diff --git a/test/modules/tls/test_02_conf.py b/test/modules/tls/test_02_conf.py
new file mode 100644
index 0000000..4d6aa60
--- /dev/null
+++ b/test/modules/tls/test_02_conf.py
@@ -0,0 +1,138 @@
+import os
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestConf:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TlsTestConf(env=env).install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ if env.is_live(timeout=timedelta(milliseconds=100)):
+ assert env.apache_stop() == 0
+
+ def test_tls_02_conf_cert_args_missing(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCertificate")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ def test_tls_02_conf_cert_single_arg(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCertificate cert.pem")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ def test_tls_02_conf_cert_file_missing(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCertificate cert.pem key.pem")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ def test_tls_02_conf_cert_file_exist(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCertificate test-02-cert.pem test-02-key.pem")
+ conf.install()
+ for name in ["test-02-cert.pem", "test-02-key.pem"]:
+ with open(os.path.join(env.server_dir, name), "w") as fd:
+ fd.write("")
+ assert env.apache_fail() == 0
+
+ def test_tls_02_conf_cert_listen_missing(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSEngine")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ def test_tls_02_conf_cert_listen_wrong(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSEngine ^^^^^")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ @pytest.mark.parametrize("listen", [
+ "443",
+ "129.168.178.188:443",
+ "[::]:443",
+ ])
+ def test_tls_02_conf_cert_listen_valid(self, env, listen: str):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSEngine {listen}".format(listen=listen))
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_02_conf_cert_listen_cert(self, env):
+ domain = env.domain_a
+ conf = TlsTestConf(env=env)
+ conf.add_tls_vhosts(domains=[domain])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_02_conf_proto_wrong(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSProtocol wrong")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ @pytest.mark.parametrize("proto", [
+ "default",
+ "TLSv1.2+",
+ "TLSv1.3+",
+ "TLSv0x0303+",
+ ])
+ def test_tls_02_conf_proto_valid(self, env, proto):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSProtocol {proto}".format(proto=proto))
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_02_conf_honor_wrong(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSHonorClientOrder wrong")
+ conf.install()
+ assert env.apache_fail() == 0
+
+ @pytest.mark.parametrize("honor", [
+ "on",
+ "OfF",
+ ])
+ def test_tls_02_conf_honor_valid(self, env, honor: str):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSHonorClientOrder {honor}".format(honor=honor))
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("cipher", [
+ "default",
+ "TLS13_AES_128_GCM_SHA256:TLS13_AES_256_GCM_SHA384:TLS13_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256:"
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384:"
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256:TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ """TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 \\
+ TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384\\
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256:TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"""
+ ])
+ def test_tls_02_conf_cipher_valid(self, env, cipher):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCiphersPrefer {cipher}".format(cipher=cipher))
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("cipher", [
+ "wrong",
+ "YOLO",
+ "TLS_NULL_WITH_NULL_NULLX", # not supported
+ "TLS_DHE_RSA_WITH_AES128_GCM_SHA256", # not supported
+ ])
+ def test_tls_02_conf_cipher_wrong(self, env, cipher):
+ conf = TlsTestConf(env=env)
+ conf.add("TLSCiphersPrefer {cipher}".format(cipher=cipher))
+ conf.install()
+ assert env.apache_fail() == 0
diff --git a/test/modules/tls/test_03_sni.py b/test/modules/tls/test_03_sni.py
new file mode 100644
index 0000000..cf421c0
--- /dev/null
+++ b/test/modules/tls/test_03_sni.py
@@ -0,0 +1,71 @@
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+from .env import TlsTestEnv
+
+
+class TestSni:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ pass
+
+ def test_tls_03_sni_get_a(self, env):
+ # do we see the correct json for the domain_a?
+ data = env.tls_get_json(env.domain_a, "/index.json")
+ assert data == {'domain': env.domain_a}
+
+ def test_tls_03_sni_get_b(self, env):
+ # do we see the correct json for domain_b?
+ data = env.tls_get_json(env.domain_b, "/index.json")
+ assert data == {'domain': env.domain_b}
+
+ def test_tls_03_sni_unknown(self, env):
+ # connection will be denied as cert does not cover this domain
+ domain_unknown = "unknown.test"
+ r = env.tls_get(domain_unknown, "/index.json")
+ assert r.exit_code != 0
+
+ def test_tls_03_sni_request_other_same_config(self, env):
+ # do we see the first vhost response for another domain with different certs?
+ r = env.tls_get(env.domain_a, "/index.json", options=[
+ "-vvvv", "--header", "Host: {0}".format(env.domain_b)
+ ])
+ # request is marked as misdirected
+ assert r.exit_code == 0
+ assert r.json is None
+ assert r.response['status'] == 421
+
+ def test_tls_03_sni_request_other_other_honor(self, env):
+ # connect to domain_a limited to TLSv1.2, but ask for domain_b which
+ # requires TLSv1.3; the request must not be served
+ conf = TlsTestConf(env=env, extras={
+ env.domain_a: "TLSProtocol TLSv1.2+",
+ env.domain_b: "TLSProtocol TLSv1.3+"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.tls_get(env.domain_a, "/index.json", options=[
+ "-vvvv", "--tls-max", "1.2", "--header", "Host: {0}".format(env.domain_b)
+ ])
+ # request denied
+ assert r.exit_code == 0
+ assert r.json is None
+
+ @pytest.mark.skip('openssl behaviour changed on macOS Ventura, unreliable')
+ def test_tls_03_sni_bad_hostname(self, env):
+ # curl checks hostnames we give it, but the openssl client
+ # does not. Good for us, since we need to test it.
+ r = env.openssl(["s_client", "-connect",
+ "localhost:{0}".format(env.https_port),
+ "-servername", b'x\x2f.y'.decode()])
+ assert r.exit_code == 1, r.stderr
diff --git a/test/modules/tls/test_04_get.py b/test/modules/tls/test_04_get.py
new file mode 100644
index 0000000..6944381
--- /dev/null
+++ b/test/modules/tls/test_04_get.py
@@ -0,0 +1,67 @@
+import os
+import time
+from datetime import timedelta
+
+import pytest
+
+from .env import TlsTestEnv
+from .conf import TlsTestConf
+
+
+def mk_text_file(fpath: str, lines: int):
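+ # each line is "{0:015d}: " (17 bytes) + 110 chars + "\n" = 128 bytes,
+ # so 8 lines yield 1k, 80 lines 10k, and so on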
+ t110 = 11 * "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
+
+class TestGet:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ docs_a = os.path.join(env.server_docs_dir, env.domain_a)
+ mk_text_file(os.path.join(docs_a, "1k.txt"), 8)
+ mk_text_file(os.path.join(docs_a, "10k.txt"), 80)
+ mk_text_file(os.path.join(docs_a, "100k.txt"), 800)
+ mk_text_file(os.path.join(docs_a, "1m.txt"), 8000)
+ mk_text_file(os.path.join(docs_a, "10m.txt"), 80000)
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("fname, flen", [
+ ("1k.txt", 1024),
+ ("10k.txt", 10*1024),
+ ("100k.txt", 100 * 1024),
+ ("1m.txt", 1000 * 1024),
+ ("10m.txt", 10000 * 1024),
+ ])
+ def test_tls_04_get(self, env, fname, flen):
+ # do we see the correct json for the domain_a?
+ docs_a = os.path.join(env.server_docs_dir, env.domain_a)
+ r = env.tls_get(env.domain_a, "/{0}".format(fname))
+ assert r.exit_code == 0
+ assert len(r.stdout) == flen
+ pref = os.path.join(docs_a, fname)
+ pout = os.path.join(docs_a, "{0}.out".format(fname))
+ with open(pout, 'w') as fd:
+ fd.write(r.stdout)
+ dr = env.run_diff(pref, pout)
+ assert dr.exit_code == 0, "differences found:\n{0}".format(dr.stdout)
+
+ @pytest.mark.parametrize("fname, flen", [
+ ("1k.txt", 1024),
+ ])
+ def test_tls_04_double_get(self, env, fname, flen):
+ # we'd like to check that we can do more than one request on the same
+ # connection; however, curl hides that from us unless we analyze its verbose output
+ docs_a = os.path.join(env.server_docs_dir, env.domain_a)
+ r = env.tls_get(env.domain_a, no_stdout_list=True, paths=[
+ "/{0}".format(fname),
+ "/{0}".format(fname)
+ ])
+ assert r.exit_code == 0
+ assert len(r.stdout) == 2*flen
diff --git a/test/modules/tls/test_05_proto.py b/test/modules/tls/test_05_proto.py
new file mode 100644
index 0000000..d874a90
--- /dev/null
+++ b/test/modules/tls/test_05_proto.py
@@ -0,0 +1,64 @@
+import time
+from datetime import timedelta
+import socket
+from threading import Thread
+
+import pytest
+
+from .conf import TlsTestConf
+from .env import TlsTestEnv
+
+
+class TestProto:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_a: "TLSProtocol TLSv1.3+",
+ env.domain_b: [
+ "# the commonly used name",
+ "TLSProtocol TLSv1.2+",
+ "# the numeric one (yes, this is 1.2)",
+ "TLSProtocol TLSv0x0303+",
+ ],
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ pass
+
+ def test_tls_05_proto_1_2(self, env):
+ r = env.tls_get(env.domain_b, "/index.json", options=["--tlsv1.2"])
+ assert r.exit_code == 0, r.stderr
+
+ @pytest.mark.skip('curl does not have TLSv1.3 on all platforms')
+ def test_tls_05_proto_1_3(self, env):
+ r = env.tls_get(env.domain_a, "/index.json", options=["--tlsv1.3", '-v'])
+ if True: # TlsTestEnv.curl_supports_tls_1_3() is unreliable; current curl builds are expected to support TLSv1.3
+ assert r.exit_code == 0, f'{r}'
+ else:
+ assert r.exit_code == 4, f'{r}'
+
+ def test_tls_05_proto_close(self, env):
+ s = socket.create_connection(('localhost', env.https_port))
+ time.sleep(0.1)
+ s.close()
+
+ def test_tls_05_proto_ssl_close(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': "LogLevel ssl:debug",
+ env.domain_a: "SSLProtocol TLSv1.3",
+ env.domain_b: "SSLProtocol TLSv1.2",
+ })
+ for d in [env.domain_a, env.domain_b]:
+ conf.add_vhost(domains=[d], port=env.https_port)
+ conf.install()
+ assert env.apache_restart() == 0
+ s = socket.create_connection(('localhost', env.https_port))
+ time.sleep(0.1)
+ s.close()
+
+
diff --git a/test/modules/tls/test_06_ciphers.py b/test/modules/tls/test_06_ciphers.py
new file mode 100644
index 0000000..2e60bdd
--- /dev/null
+++ b/test/modules/tls/test_06_ciphers.py
@@ -0,0 +1,209 @@
+import re
+from datetime import timedelta
+
+import pytest
+
+from .env import TlsTestEnv
+from .conf import TlsTestConf
+
+
+class TestCiphers:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': "TLSHonorClientOrder off",
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ pass
+
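+ # scrape the negotiated protocol and cipher from openssl s_client output
+ # ("Protocol  : TLSv1.2" / "Cipher    : ..." lines)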
+ def _get_protocol_cipher(self, output: str):
+ protocol = None
+ cipher = None
+ for line in output.splitlines():
+ m = re.match(r'^\s+Protocol\s*:\s*(\S+)$', line)
+ if m:
+ protocol = m.group(1)
+ continue
+ m = re.match(r'^\s+Cipher\s*:\s*(\S+)$', line)
+ if m:
+ cipher = m.group(1)
+ return protocol, cipher
+
+ def test_tls_06_ciphers_ecdsa(self, env):
+ ecdsa_1_2 = [c for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'ECDSA'][0]
+ # client speaks only this cipher, see that it gets it
+ r = env.openssl_client(env.domain_b, extra_args=[
+ "-cipher", ecdsa_1_2.openssl_name, "-tls1_2"
+ ])
+ protocol, cipher = self._get_protocol_cipher(r.stdout)
+ assert protocol == "TLSv1.2", r.stdout
+ assert cipher == ecdsa_1_2.openssl_name, r.stdout
+
+ def test_tls_06_ciphers_rsa(self, env):
+ rsa_1_2 = [c for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'RSA'][0]
+ # client speaks only this cipher, see that it gets it
+ r = env.openssl_client(env.domain_b, extra_args=[
+ "-cipher", rsa_1_2.openssl_name, "-tls1_2"
+ ])
+ protocol, cipher = self._get_protocol_cipher(r.stdout)
+ assert protocol == "TLSv1.2", r.stdout
+ assert cipher == rsa_1_2.openssl_name, r.stdout
+
+ @pytest.mark.parametrize("cipher", [
+ c for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'ECDSA'
+ ], ids=[
+ c.name for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'ECDSA'
+ ])
+ def test_tls_06_ciphers_server_prefer_ecdsa(self, env, cipher):
+ # Select an ECDSA cipher as preference and suppress all RSA ciphers.
+ # The latter is not strictly necessary since rustls prefers ECDSA anyway
+ suppress_names = [c.name for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'RSA']
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSHonorClientOrder off",
+ f"TLSCiphersPrefer {cipher.name}",
+ f"TLSCiphersSuppress {':'.join(suppress_names)}",
+ ]
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.openssl_client(env.domain_b, extra_args=["-tls1_2"])
+ client_proto, client_cipher = self._get_protocol_cipher(r.stdout)
+ assert client_proto == "TLSv1.2", r.stdout
+ assert client_cipher == cipher.openssl_name, r.stdout
+
+ @pytest.mark.skip(reason="Wrong certified key selected by rustls")
+ # see <https://github.com/rustls/rustls-ffi/issues/236>
+ @pytest.mark.parametrize("cipher", [
+ c for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ], ids=[
+ c.name for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ])
+ def test_tls_06_ciphers_server_prefer_rsa(self, env, cipher):
+ # Select an RSA cipher as preference and suppress all ECDSA ciphers.
+ # The latter is necessary since rustls prefers ECDSA and openssl reveals that it supports it.
+ suppress_names = [c.name for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'ECDSA']
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSHonorClientOrder off",
+ f"TLSCiphersPrefer {cipher.name}",
+ f"TLSCiphersSuppress {':'.join(suppress_names)}",
+ ]
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.openssl_client(env.domain_b, extra_args=["-tls1_2"])
+ client_proto, client_cipher = self._get_protocol_cipher(r.stdout)
+ assert client_proto == "TLSv1.2", r.stdout
+ assert client_cipher == cipher.openssl_name, r.stdout
+
+ @pytest.mark.skip(reason="Wrong certified key selected by rustls")
+ # see <https://github.com/rustls/rustls-ffi/issues/236>
+ @pytest.mark.parametrize("cipher", [
+ c for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ], ids=[
+ c.openssl_name for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ])
+ def test_tls_06_ciphers_server_prefer_rsa_alias(self, env, cipher):
+ # same as above, but using openssl names for ciphers
+ suppress_names = [c.openssl_name for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'ECDSA']
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSHonorClientOrder off",
+ f"TLSCiphersPrefer {cipher.openssl_name}",
+ f"TLSCiphersSuppress {':'.join(suppress_names)}",
+ ]
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.openssl_client(env.domain_b, extra_args=["-tls1_2"])
+ client_proto, client_cipher = self._get_protocol_cipher(r.stdout)
+ assert client_proto == "TLSv1.2", r.stdout
+ assert client_cipher == cipher.openssl_name, r.stdout
+
+ @pytest.mark.skip(reason="Wrong certified key selected by rustls")
+ # see <https://github.com/rustls/rustls-ffi/issues/236>
+ @pytest.mark.parametrize("cipher", [
+ c for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ], ids=[
+ c.id_name for c in TlsTestEnv.RUSTLS_CIPHERS if c.max_version == 1.2 and c.flavour == 'RSA'
+ ])
+ def test_tls_06_ciphers_server_prefer_rsa_id(self, env, cipher):
+ # same as above, but using the numeric id names (TLS_CIPHER_0xNNNN) for ciphers
+ suppress_names = [c.id_name for c in env.RUSTLS_CIPHERS
+ if c.max_version == 1.2 and c.flavour == 'ECDSA']
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSHonorClientOrder off",
+ f"TLSCiphersPrefer {cipher.id_name}",
+ f"TLSCiphersSuppress {':'.join(suppress_names)}",
+ ]
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ r = env.openssl_client(env.domain_b, extra_args=["-tls1_2"])
+ client_proto, client_cipher = self._get_protocol_cipher(r.stdout)
+ assert client_proto == "TLSv1.2", r.stdout
+ assert client_cipher == cipher.openssl_name, r.stdout
+
+ def test_tls_06_ciphers_pref_unknown(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: "TLSCiphersPrefer TLS_MY_SUPER_CIPHER:SSL_WHAT_NOT"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() != 0
+ # get a working config again, so that subsequent test cases do not stumble
+ conf = TlsTestConf(env=env)
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ env.apache_restart()
+
+ def test_tls_06_ciphers_pref_unsupported(self, env):
+ # a warning on preferring a known, but not supported cipher
+ env.httpd_error_log.ignore_recent()
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: "TLSCiphersPrefer TLS_NULL_WITH_NULL_NULL"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ (errors, warnings) = env.httpd_error_log.get_recent_count()
+ assert errors == 0
+ assert warnings == 2 # once on dry run, once on start
+
+ def test_tls_06_ciphers_supp_unknown(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: "TLSCiphersSuppress TLS_MY_SUPER_CIPHER:SSL_WHAT_NOT"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() != 0
+
+ def test_tls_06_ciphers_supp_unsupported(self, env):
+ # no warnings on suppressing known, but not supported ciphers
+ env.httpd_error_log.ignore_recent()
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: "TLSCiphersSuppress TLS_NULL_WITH_NULL_NULL"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ (errors, warnings) = env.httpd_error_log.get_recent_count()
+ assert errors == 0
+ assert warnings == 0
diff --git a/test/modules/tls/test_07_alpn.py b/test/modules/tls/test_07_alpn.py
new file mode 100644
index 0000000..06aff3c
--- /dev/null
+++ b/test/modules/tls/test_07_alpn.py
@@ -0,0 +1,43 @@
+import re
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestAlpn:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: "Protocols h2 http/1.1"
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ pass
+
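+ # extract the ALPN protocol curl reports in verbose mode; the pattern
+ # covers both the older "ALPN, server accepted to use h2" and the
+ # newer "ALPN: server accepted h2" wording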
+ def _get_protocol(self, output: str):
+ for line in output.splitlines():
+ m = re.match(r'^\*\s+ALPN[:,] server accepted (to use\s+)?(.*)$', line)
+ if m:
+ return m.group(2)
+ return None
+
+ def test_tls_07_alpn_get_a(self, env):
+ # with --http1.1 forced, ALPN should settle on http/1.1 for domain_a
+ r = env.tls_get(env.domain_a, "/index.json", options=["-vvvvvv", "--http1.1"])
+ assert r.exit_code == 0, r.stderr
+ protocol = self._get_protocol(r.stderr)
+ assert protocol == "http/1.1", r.stderr
+
+ def test_tls_07_alpn_get_b(self, env):
+ # domain_b has 'Protocols h2 http/1.1', so ALPN should select h2
+ r = env.tls_get(env.domain_b, "/index.json", options=["-vvvvvv"])
+ assert r.exit_code == 0, r.stderr
+ protocol = self._get_protocol(r.stderr)
+ assert protocol == "h2", r.stderr
diff --git a/test/modules/tls/test_08_vars.py b/test/modules/tls/test_08_vars.py
new file mode 100644
index 0000000..f1bd9b4
--- /dev/null
+++ b/test/modules/tls/test_08_vars.py
@@ -0,0 +1,60 @@
+import re
+
+import pytest
+
+from .conf import TlsTestConf
+from .env import TlsTestEnv
+
+
+class TestVars:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': [
+ "TLSHonorClientOrder off",
+ "TLSOptions +StdEnvVars",
+ ]
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_08_vars_root(self, env):
+ # StdEnvVars is switched on globally; check the SSL vars reported for domain_b
+ exp_proto = "TLSv1.2"
+ exp_cipher = "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384"
+ options = [ '--tls-max', '1.2']
+ r = env.tls_get(env.domain_b, "/vars.py", options=options)
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {
+ 'https': 'on',
+ 'host': 'b.mod-tls.test',
+ 'protocol': 'HTTP/1.1',
+ 'ssl_protocol': exp_proto,
+ # this will vary by client potentially
+ 'ssl_cipher': exp_cipher,
+ }
+
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "b.mod-tls.test"),
+ ("SSL_SESSION_RESUMED", "Initial"),
+ ("SSL_SECURE_RENEG", "false"),
+ ("SSL_COMPRESS_METHOD", "NULL"),
+ ("SSL_CIPHER_EXPORT", "false"),
+ ("SSL_CLIENT_VERIFY", "NONE"),
+ ])
+ def test_tls_08_vars_const(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_b, f"/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
+
+ @pytest.mark.parametrize("name, pattern", [
+ ("SSL_VERSION_INTERFACE", r'mod_tls/\d+\.\d+\.\d+'),
+ ("SSL_VERSION_LIBRARY", r'rustls-ffi/\d+\.\d+\.\d+/rustls/\d+\.\d+\.\d+'),
+ ])
+ def test_tls_08_vars_match(self, env, name: str, pattern: str):
+ r = env.tls_get(env.domain_b, f"/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert name in r.json
+ assert re.match(pattern, r.json[name]), r.json
diff --git a/test/modules/tls/test_09_timeout.py b/test/modules/tls/test_09_timeout.py
new file mode 100644
index 0000000..70cc894
--- /dev/null
+++ b/test/modules/tls/test_09_timeout.py
@@ -0,0 +1,43 @@
+import socket
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestTimeout:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': "RequestReadTimeout handshake=1",
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ pass
+
+ def test_tls_09_timeout_handshake(self, env):
+ # connect and send data that is not a TLS ClientHello; the handshake
+ # read timeout should close the connection
+ s = socket.create_connection(('localhost', env.https_port))
+ s.send(b'1234')
+ s.settimeout(0.0)
+ try:
+ s.recv(1024)
+ assert False, "able to recv() on a TLS connection before we sent a hello"
+ except BlockingIOError:
+ pass
+ s.settimeout(3.0)
+ try:
+ while True:
+ buf = s.recv(1024)
+ if not buf:
+ break
+ print("recv() -> {0}".format(buf))
+ except (socket.timeout, BlockingIOError):
+ assert False, "socket not closed as handshake timeout should trigger"
+ s.close()
diff --git a/test/modules/tls/test_10_session_id.py b/test/modules/tls/test_10_session_id.py
new file mode 100644
index 0000000..848bc1a
--- /dev/null
+++ b/test/modules/tls/test_10_session_id.py
@@ -0,0 +1,50 @@
+import re
+from typing import List
+
+import pytest
+
+from pyhttpd.result import ExecResult
+from .env import TlsTestEnv
+from .conf import TlsTestConf
+
+
+class TestSessionID:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env)
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
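+ # collect every "Session-ID:" line that openssl s_client -reconnect prints;
+ # with TLSv1.2 session resumption all reconnects should reuse the same id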
+ def find_openssl_session_ids(self, r: ExecResult) -> List[str]:
+ ids = []
+ for line in r.stdout.splitlines():
+ m = re.match(r'^\s*Session-ID: (\S+)$', line)
+ if m:
+ ids.append(m.group(1))
+ return ids
+
+ def test_tls_10_session_id_12(self, env):
+ r = env.openssl_client(env.domain_b, extra_args=[
+ "-reconnect", "-tls1_2"
+ ])
+ session_ids = self.find_openssl_session_ids(r)
+ assert 1 < len(session_ids), "expected several session-ids: {0}, stderr={1}".format(
+ session_ids, r.stderr
+ )
+ assert 1 == len(set(session_ids)), "session-ids should all be the same: {0}".format(session_ids)
+
+ @pytest.mark.skipif(True or not TlsTestEnv.openssl_supports_tls_1_3(),
+ reason="openssl TLSv1.3 session storage test incomplete")
+ def test_tls_10_session_id_13(self, env):
+ r = env.openssl_client(env.domain_b, extra_args=[
+ "-reconnect", "-tls1_3"
+ ])
+ # openssl -reconnect closes the connection immediately after the handshake, so
+ # the session data in TLSv1.3 is not seen and not found in its output.
+ # FIXME: how to check session data with TLSv1.3?
+ session_ids = self.find_openssl_session_ids(r)
+ assert 0 == len(session_ids), "expected no session-ids: {0}, stderr={1}".format(
+ session_ids, r.stdout
+ )
diff --git a/test/modules/tls/test_11_md.py b/test/modules/tls/test_11_md.py
new file mode 100644
index 0000000..9d733db
--- /dev/null
+++ b/test/modules/tls/test_11_md.py
@@ -0,0 +1,37 @@
+import time
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestMD:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': "LogLevel md:trace4"
+ })
+ conf.add_md_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_11_get_a(self, env):
+ # do we see the correct json for the domain_a?
+ data = env.tls_get_json(env.domain_a, "/index.json")
+ assert data == {'domain': env.domain_a}
+
+ def test_tls_11_get_b(self, env):
+ # do we see the correct json for domain_b?
+ data = env.tls_get_json(env.domain_b, "/index.json")
+ assert data == {'domain': env.domain_b}
+
+ def test_tls_11_get_base(self, env):
+ # give the base server domain_a and lookup its index.json
+ conf = TlsTestConf(env=env)
+ conf.add_md_base(domain=env.domain_a)
+ conf.install()
+ assert env.apache_restart() == 0
+ data = env.tls_get_json(env.domain_a, "/index.json")
+ assert data == {'domain': 'localhost'}
diff --git a/test/modules/tls/test_12_cauth.py b/test/modules/tls/test_12_cauth.py
new file mode 100644
index 0000000..1411609
--- /dev/null
+++ b/test/modules/tls/test_12_cauth.py
@@ -0,0 +1,235 @@
+import os
+from datetime import timedelta
+from typing import Optional
+
+import pytest
+
+from pyhttpd.certs import Credentials
+from .conf import TlsTestConf
+
+
+@pytest.fixture
+def clients_x(env):
+ return env.ca.get_first("clientsX")
+
+
+@pytest.fixture
+def clients_y(env):
+ return env.ca.get_first("clientsY")
+
+
+@pytest.fixture
+def cax_file(clients_x):
+ return os.path.join(os.path.dirname(clients_x.cert_file), "clientX-ca.pem")
+
+
+@pytest.mark.skip(reason="client certs disabled")
+class TestTLS:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, clients_x, cax_file):
+ with open(cax_file, 'w') as fd:
+ fd.write("".join(open(clients_x.cert_file).readlines()))
+ fd.write("".join(open(env.ca.cert_file).readlines()))
+
+ @pytest.fixture(autouse=True, scope='function')
+ def _function_scope(self, env):
+ if env.is_live(timeout=timedelta(milliseconds=100)):
+ assert env.apache_stop() == 0
+
+ def get_ssl_var(self, env, domain: str, cert: Optional[Credentials], name: str):
+ r = env.tls_get(domain, f"/vars.py?name={name}", options=[
+ "--cert", cert.cert_file
+ ] if cert else [])
+ assert r.exit_code == 0, r.stderr
+ assert r.json, r.stderr + r.stdout
+ return r.json[name] if name in r.json else None
+
+ def test_tls_12_set_ca_non_existing(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_a: "TLSClientCA xxx"
+ })
+ conf.add_md_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 1
+
+ def test_tls_12_set_ca_existing(self, env, cax_file):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_a: f"TLSClientCA {cax_file}"
+ })
+ conf.add_md_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_12_set_auth_no_ca(self, env):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_a: "TLSClientCertificate required"
+ })
+ conf.add_md_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ # will fail bc lacking clien CA
+ assert env.apache_restart() == 1
+
+ def test_tls_12_auth_option_std(self, env, cax_file, clients_x):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ f"TLSClientCertificate required",
+ f"TLSClientCA {cax_file}",
+ "# TODO: TLSUserName SSL_CLIENT_S_DN_CN",
+ "TLSOptions +StdEnvVars",
+ ]
+ })
+ conf.add_md_vhosts(domains=[env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ # should be denied
+ r = env.tls_get(domain=env.domain_b, paths="/index.json")
+ assert r.exit_code != 0, r.stdout
+ # should work
+ ccert = clients_x.get_first("user1")
+ data = env.tls_get_json(env.domain_b, "/index.json", options=[
+ "--cert", ccert.cert_file
+ ])
+ assert data == {'domain': env.domain_b}
+ r = env.tls_get(env.domain_b, "/vars.py?name=SSL_CLIENT_S_DN_CN")
+ assert r.exit_code != 0, "should have been prevented"
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_S_DN_CN")
+ assert val == 'Not Implemented'
+ # TODO
+ # val = self.get_ssl_var(env, env.domain_b, ccert, "REMOTE_USER")
+ # assert val == 'Not Implemented'
+ # not set on StdEnvVars, needs option ExportCertData
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_CERT")
+ assert val == ""
+
+ def test_tls_12_auth_option_cert(self, env, test_ca, cax_file, clients_x):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSClientCertificate required",
+ f"TLSClientCA {cax_file}",
+ "TLSOptions Defaults +ExportCertData",
+ ]
+ })
+ conf.add_md_vhosts(domains=[env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ ccert = clients_x.get_first("user1")
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_CERT")
+ assert val == ccert.cert_pem.decode()
+ # no chain should be present
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_CHAIN_0")
+ assert val == ''
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_SERVER_CERT")
+ assert val
+ server_certs = test_ca.get_credentials_for_name(env.domain_b)
+ assert val in [c.cert_pem.decode() for c in server_certs]
+
+ def test_tls_12_auth_ssl_optional(self, env, cax_file, clients_x):
+ domain = env.domain_b
+ conf = TlsTestConf(env=env, extras={
+ domain: [
+ "SSLVerifyClient optional",
+ "SSLVerifyDepth 2",
+ "SSLOptions +StdEnvVars +ExportCertData",
+ f"SSLCACertificateFile {cax_file}",
+ "SSLUserName SSL_CLIENT_S_DN",
+ ]
+ })
+ conf.add_ssl_vhosts(domains=[domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ # should work either way
+ data = env.tls_get_json(domain, "/index.json")
+ assert data == {'domain': domain}
+ # no client cert given, we expect the server variable to be empty
+ val = self.get_ssl_var(env, env.domain_b, None, "SSL_CLIENT_S_DN_CN")
+ assert val == ''
+ ccert = clients_x.get_first("user1")
+ data = env.tls_get_json(domain, "/index.json", options=[
+ "--cert", ccert.cert_file
+ ])
+ assert data == {'domain': domain}
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_S_DN_CN")
+ assert val == 'user1'
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_S_DN")
+ assert val == 'O=abetterinternet-mod_tls,OU=clientsX,CN=user1'
+ val = self.get_ssl_var(env, env.domain_b, ccert, "REMOTE_USER")
+ assert val == 'O=abetterinternet-mod_tls,OU=clientsX,CN=user1'
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_I_DN")
+ assert val == 'O=abetterinternet-mod_tls,OU=clientsX'
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_I_DN_CN")
+ assert val == ''
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_I_DN_OU")
+ assert val == 'clientsX'
+ val = self.get_ssl_var(env, env.domain_b, ccert, "SSL_CLIENT_CERT")
+ assert val == ccert.cert_pem.decode()
+
+ def test_tls_12_auth_optional(self, env, cax_file, clients_x):
+ domain = env.domain_b
+ conf = TlsTestConf(env=env, extras={
+ domain: [
+ "TLSClientCertificate optional",
+ f"TLSClientCA {cax_file}",
+ ]
+ })
+ conf.add_md_vhosts(domains=[domain])
+ conf.install()
+ assert env.apache_restart() == 0
+ # should work either way
+ data = env.tls_get_json(domain, "/index.json")
+ assert data == {'domain': domain}
+ # no client cert given, we expect the server variable to be empty
+ r = env.tls_get(domain, "/vars.py?name=SSL_CLIENT_S_DN_CN")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {
+ 'SSL_CLIENT_S_DN_CN': '',
+ }, r.stdout
+ data = env.tls_get_json(domain, "/index.json", options=[
+ "--cert", clients_x.get_first("user1").cert_file
+ ])
+ assert data == {'domain': domain}
+ r = env.tls_get(domain, "/vars.py?name=SSL_CLIENT_S_DN_CN", options=[
+ "--cert", clients_x.get_first("user1").cert_file
+ ])
+ # with a client cert given, the server variable should be set; currently it reports 'Not Implemented'
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {
+ 'SSL_CLIENT_S_DN_CN': 'Not Implemented',
+ }, r.stdout
+
+ def test_tls_12_auth_expired(self, env, cax_file, clients_x):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSClientCertificate required",
+ f"TLSClientCA {cax_file}",
+ ]
+ })
+ conf.add_md_vhosts(domains=[env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ # should not work
+ r = env.tls_get(domain=env.domain_b, paths="/index.json", options=[
+ "--cert", clients_x.get_first("user_expired").cert_file
+ ])
+ assert r.exit_code != 0
+
+ def test_tls_12_auth_other_ca(self, env, cax_file, clients_y):
+ conf = TlsTestConf(env=env, extras={
+ env.domain_b: [
+ "TLSClientCertificate required",
+ f"TLSClientCA {cax_file}",
+ ]
+ })
+ conf.add_md_vhosts(domains=[env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+ # should not work
+ r = env.tls_get(domain=env.domain_b, paths="/index.json", options=[
+ "--cert", clients_y.get_first("user1").cert_file
+ ])
+ assert r.exit_code != 0
+ # This will work, as the CA root is present in the CA file
+ r = env.tls_get(domain=env.domain_b, paths="/index.json", options=[
+ "--cert", env.ca.get_first("user1").cert_file
+ ])
+ assert r.exit_code == 0
diff --git a/test/modules/tls/test_13_proxy.py b/test/modules/tls/test_13_proxy.py
new file mode 100644
index 0000000..8bd305f
--- /dev/null
+++ b/test/modules/tls/test_13_proxy.py
@@ -0,0 +1,40 @@
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestProxy:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': "LogLevel proxy:trace1 proxy_http:trace1 ssl:trace1",
+ env.domain_b: [
+ "ProxyPreserveHost on",
+ f'ProxyPass "/proxy/" "http://127.0.0.1:{env.http_port}/"',
+ f'ProxyPassReverse "/proxy/" "http://{env.domain_b}:{env.http_port}"',
+ ]
+ })
+ # add tls vhosts a+b and a plain http proxy on b to the local http port
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_13_proxy_http_get(self, env):
+ data = env.tls_get_json(env.domain_b, "/proxy/index.json")
+ assert data == {'domain': env.domain_b}
+
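+ # the backend is reached over plain http, so the proxied request exposes
+ # no SSL variables to the CGI script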
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "b.mod-tls.test"),
+ ("SSL_SESSION_RESUMED", ""),
+ ("SSL_SECURE_RENEG", ""),
+ ("SSL_COMPRESS_METHOD", ""),
+ ("SSL_CIPHER_EXPORT", ""),
+ ("SSL_CLIENT_VERIFY", ""),
+ ])
+ def test_tls_13_proxy_http_vars(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_b, f"/proxy/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
diff --git a/test/modules/tls/test_14_proxy_ssl.py b/test/modules/tls/test_14_proxy_ssl.py
new file mode 100644
index 0000000..79b2fb4
--- /dev/null
+++ b/test/modules/tls/test_14_proxy_ssl.py
@@ -0,0 +1,78 @@
+import re
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestProxySSL:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # add vhosts a+b and a ssl proxy from a to b
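+ # /proxy-ssl/ and /proxy-h2-ssl/ use mod_ssl proxy sections that verify the backend
+ # against the test CA; /proxy-local/ has no SSLProxyEngine and is expected to fail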
+ conf = TlsTestConf(env=env, extras={
+ 'base': [
+ "LogLevel proxy:trace1 proxy_http:trace1 ssl:trace1 proxy_http2:trace1",
+ f"<Proxy https://127.0.0.1:{env.https_port}/>",
+ " SSLProxyEngine on",
+ " SSLProxyVerify require",
+ f" SSLProxyCACertificateFile {env.ca.cert_file}",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ f"<Proxy https://localhost:{env.https_port}/>",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ f"<Proxy h2://127.0.0.1:{env.https_port}/>",
+ " SSLProxyEngine on",
+ " SSLProxyVerify require",
+ f" SSLProxyCACertificateFile {env.ca.cert_file}",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ ],
+ env.domain_b: [
+ "Protocols h2 http/1.1",
+ f'ProxyPass /proxy-ssl/ https://127.0.0.1:{env.https_port}/',
+ f'ProxyPass /proxy-local/ https://localhost:{env.https_port}/',
+ f'ProxyPass /proxy-h2-ssl/ h2://127.0.0.1:{env.https_port}/',
+ "TLSOptions +StdEnvVars",
+ ],
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_14_proxy_ssl_get(self, env):
+ data = env.tls_get_json(env.domain_b, "/proxy-ssl/index.json")
+ assert data == {'domain': env.domain_b}
+
+ def test_tls_14_proxy_ssl_get_local(self, env):
+ # does not work, since SSLProxy* not configured
+ data = env.tls_get_json(env.domain_b, "/proxy-local/index.json")
+ assert data is None
+
+ def test_tls_14_proxy_ssl_h2_get(self, env):
+ r = env.tls_get(env.domain_b, "/proxy-h2-ssl/index.json")
+ assert r.exit_code == 0
+ assert r.json == {'domain': env.domain_b}
+
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "b.mod-tls.test"),
+ ("SSL_SESSION_RESUMED", "Initial"),
+ ("SSL_SECURE_RENEG", "false"),
+ ("SSL_COMPRESS_METHOD", "NULL"),
+ ("SSL_CIPHER_EXPORT", "false"),
+ ("SSL_CLIENT_VERIFY", "NONE"),
+ ])
+ def test_tls_14_proxy_ssl_vars_const(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_b, f"/proxy-ssl/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
+
+ @pytest.mark.parametrize("name, pattern", [
+ ("SSL_VERSION_INTERFACE", r'mod_tls/\d+\.\d+\.\d+'),
+ ("SSL_VERSION_LIBRARY", r'rustls-ffi/\d+\.\d+\.\d+/rustls/\d+\.\d+\.\d+'),
+ ])
+ def test_tls_14_proxy_ssl_vars_match(self, env, name: str, pattern: str):
+ r = env.tls_get(env.domain_b, f"/proxy-ssl/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert name in r.json
+ assert re.match(pattern, r.json[name]), r.json
diff --git a/test/modules/tls/test_15_proxy_tls.py b/test/modules/tls/test_15_proxy_tls.py
new file mode 100644
index 0000000..f2f670d
--- /dev/null
+++ b/test/modules/tls/test_15_proxy_tls.py
@@ -0,0 +1,86 @@
+import re
+from datetime import timedelta
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestProxyTLS:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # add vhosts a+b and a ssl proxy from a to b
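+ # /proxy-tls/ and /proxy-h2-tls/ use mod_tls proxy sections (TLSProxyEngine) that verify
+ # against the test CA, with cipher settings the vars tests below depend on;
+ # /proxy-local/ has no TLSProxyEngine and is expected to fail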
+ conf = TlsTestConf(env=env, extras={
+ 'base': [
+ "LogLevel proxy:trace1 proxy_http:trace1 proxy_http2:trace2 http2:trace2 cgid:trace4",
+ "TLSProxyProtocol TLSv1.3+",
+ f"<Proxy https://127.0.0.1:{env.https_port}/>",
+ " TLSProxyEngine on",
+ f" TLSProxyCA {env.ca.cert_file}",
+ " TLSProxyProtocol TLSv1.2+",
+ " TLSProxyCiphersPrefer TLS13_AES_256_GCM_SHA384",
+ " TLSProxyCiphersSuppress TLS13_AES_128_GCM_SHA256",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ f"<Proxy https://localhost:{env.https_port}/>",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ f"<Proxy h2://127.0.0.1:{env.https_port}/>",
+ " TLSProxyEngine on",
+ f" TLSProxyCA {env.ca.cert_file}",
+ " TLSProxyCiphersSuppress TLS_AES_256_GCM_SHA384:TLS_AES_128_GCM_SHA256",
+ " ProxyPreserveHost on",
+ "</Proxy>",
+ ],
+ env.domain_b: [
+ "Protocols h2 http/1.1",
+ f"ProxyPass /proxy-tls/ https://127.0.0.1:{env.https_port}/",
+ f"ProxyPass /proxy-local/ https://localhost:{env.https_port}/",
+ f"ProxyPass /proxy-h2-tls/ h2://127.0.0.1:{env.https_port}/",
+ "TLSOptions +StdEnvVars",
+ ],
+ })
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_15_proxy_tls_get(self, env):
+ data = env.tls_get_json(env.domain_b, "/proxy-tls/index.json")
+ assert data == {'domain': env.domain_b}
+
+ def test_tls_15_proxy_tls_get_local(self, env):
+ # does not work, since TLSProxy* is not configured for the localhost backend
+ data = env.tls_get_json(env.domain_b, "/proxy-local/index.json")
+ assert data is None
+
+ def test_tls_15_proxy_tls_h2_get(self, env):
+ r = env.tls_get(env.domain_b, "/proxy-h2-tls/index.json")
+ assert r.exit_code == 0
+ assert r.json == {'domain': env.domain_b}, f"{r.stdout}"
+
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "b.mod-tls.test"),
+ ("SSL_PROTOCOL", "TLSv1.3"),
+ ("SSL_CIPHER", "TLS_AES_256_GCM_SHA384"),
+ ("SSL_SESSION_RESUMED", "Initial"),
+ ("SSL_SECURE_RENEG", "false"),
+ ("SSL_COMPRESS_METHOD", "NULL"),
+ ("SSL_CIPHER_EXPORT", "false"),
+ ("SSL_CLIENT_VERIFY", "NONE"),
+ ])
+ def test_tls_15_proxy_tls_h1_vars(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_b, f"/proxy-tls/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
+
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "b.mod-tls.test"),
+ ("SSL_PROTOCOL", "TLSv1.3"),
+ ("SSL_CIPHER", "TLS_CHACHA20_POLY1305_SHA256"),
+ ("SSL_SESSION_RESUMED", "Initial"),
+ ])
+ def test_tls_15_proxy_tls_h2_vars(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_b, f"/proxy-h2-tls/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
diff --git a/test/modules/tls/test_16_proxy_mixed.py b/test/modules/tls/test_16_proxy_mixed.py
new file mode 100644
index 0000000..ca08236
--- /dev/null
+++ b/test/modules/tls/test_16_proxy_mixed.py
@@ -0,0 +1,47 @@
+import time
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestProxyMixed:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = TlsTestConf(env=env, extras={
+ 'base': [
+ "LogLevel proxy:trace1 proxy_http:trace1 ssl:trace1 proxy_http2:trace1 http2:debug",
+ "ProxyPreserveHost on",
+ ],
+ env.domain_a: [
+ "Protocols h2 http/1.1",
+ "TLSProxyEngine on",
+ f"TLSProxyCA {env.ca.cert_file}",
+ "<Location /proxy-tls/>",
+ f" ProxyPass h2://127.0.0.1:{env.https_port}/",
+ "</Location>",
+ ],
+ env.domain_b: [
+ "SSLProxyEngine on",
+ "SSLProxyVerify require",
+ f"SSLProxyCACertificateFile {env.ca.cert_file}",
+ "<Location /proxy-ssl/>",
+ f" ProxyPass https://127.0.0.1:{env.https_port}/",
+ "</Location>",
+ ],
+ })
+ # add vhosts a+b and a ssl proxy from a to b
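+ # domain a proxies via mod_tls (TLSProxyEngine) to an h2 backend,
+ # domain b proxies via mod_ssl (SSLProxyEngine) to an https backend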
+ conf.add_tls_vhosts(domains=[env.domain_a, env.domain_b])
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_16_proxy_mixed_ssl_get(self, env, repeat):
+ data = env.tls_get_json(env.domain_b, "/proxy-ssl/index.json")
+ assert data == {'domain': env.domain_b}
+
+ def test_tls_16_proxy_mixed_tls_get(self, env, repeat):
+ data = env.tls_get_json(env.domain_a, "/proxy-tls/index.json")
+ if data is None:
+ time.sleep(300)
+ assert data == {'domain': env.domain_a}
diff --git a/test/modules/tls/test_17_proxy_machine_cert.py b/test/modules/tls/test_17_proxy_machine_cert.py
new file mode 100644
index 0000000..7b5ef44
--- /dev/null
+++ b/test/modules/tls/test_17_proxy_machine_cert.py
@@ -0,0 +1,69 @@
+import os
+
+import pytest
+
+from .conf import TlsTestConf
+
+
+class TestProxyMachineCert:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def clients_x(cls, env):
+ return env.ca.get_first("clientsX")
+
+ @pytest.fixture(autouse=True, scope='class')
+ def clients_y(cls, env):
+ return env.ca.get_first("clientsY")
+
+ @pytest.fixture(autouse=True, scope='class')
+ def cax_file(cls, clients_x):
+ return os.path.join(os.path.dirname(clients_x.cert_file), "clientsX-ca.pem")
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(cls, env, cax_file, clients_x):
+ # add a mod_tls vhost for a on the https port and a mod_ssl vhost for a on the extra
+ # proxy port, with a tls proxy from the former to the latter using a machine certificate;
+ # the backend vhost requires a client certificate
+ conf = TlsTestConf(env=env, extras={
+ 'base': [
+ "LogLevel proxy:trace1 proxy_http:trace1 ssl:trace4 proxy_http2:trace1",
+ "ProxyPreserveHost on",
+ f"Listen {env.proxy_port}",
+ ],
+ })
+ conf.start_tls_vhost(domains=[env.domain_a], port=env.https_port)
+ conf.add([
+ "Protocols h2 http/1.1",
+ "TLSProxyEngine on",
+ f"TLSProxyCA {env.ca.cert_file}",
+ f"TLSProxyMachineCertificate {clients_x.get_first('user1').cert_file}",
+ "<Location /proxy-tls/>",
+ f" ProxyPass https://127.0.0.1:{env.proxy_port}/",
+ "</Location>",
+ ])
+ conf.end_tls_vhost()
+ conf.start_vhost(domains=[env.domain_a], port=env.proxy_port,
+ doc_root=f"htdocs/{env.domain_a}", with_ssl=True)
+ conf.add([
+ "SSLVerifyClient require",
+ "SSLVerifyDepth 2",
+ "SSLOptions +StdEnvVars +ExportCertData",
+ f"SSLCACertificateFile {cax_file}",
+ "SSLUserName SSL_CLIENT_S_DN_CN"
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_tls_17_proxy_machine_cert_get_a(self, env):
+ data = env.tls_get_json(env.domain_a, "/proxy-tls/index.json")
+ assert data == {'domain': env.domain_a}
+
+ @pytest.mark.parametrize("name, value", [
+ ("SERVER_NAME", "a.mod-tls.test"),
+ ("SSL_CLIENT_VERIFY", "SUCCESS"),
+ ("REMOTE_USER", "user1"),
+ ])
+ def test_tls_17_proxy_machine_cert_vars(self, env, name: str, value: str):
+ r = env.tls_get(env.domain_a, f"/proxy-tls/vars.py?name={name}")
+ assert r.exit_code == 0, r.stderr
+ assert r.json == {name: value}, r.stdout
diff --git a/test/pyhttpd/__init__.py b/test/pyhttpd/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/pyhttpd/__init__.py
diff --git a/test/pyhttpd/certs.py b/test/pyhttpd/certs.py
new file mode 100644
index 0000000..5519f16
--- /dev/null
+++ b/test/pyhttpd/certs.py
@@ -0,0 +1,476 @@
+import os
+import re
+from datetime import timedelta, datetime
+from typing import List, Any, Optional
+
+from cryptography import x509
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key
+from cryptography.x509 import ExtendedKeyUsageOID, NameOID
+
+
+EC_SUPPORTED = {}
+EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [
+ ec.SECP192R1,
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+]])
+
+
+def _private_key(key_type):
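+ # key_type may be an RSA key size (int or str such as "2048"/"rsa2048") or the
+ # name of a supported EC curve (case-insensitive, e.g. "secp256r1")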
+ if isinstance(key_type, str):
+ key_type = key_type.upper()
+ m = re.match(r'^(RSA)?(\d+)$', key_type)
+ if m:
+ key_type = int(m.group(2))
+
+ if isinstance(key_type, int):
+ return rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=key_type,
+ backend=default_backend()
+ )
+ if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED:
+ key_type = EC_SUPPORTED[key_type]
+ return ec.generate_private_key(
+ curve=key_type,
+ backend=default_backend()
+ )
+
+
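+# Declarative description of a certificate to issue: name and/or domains,
+# validity window, key type, client-vs-server use, and optional sub_specs
+# that the resulting credentials will issue in turn (intermediate CA).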
+class CertificateSpec:
+
+ def __init__(self, name: str = None, domains: List[str] = None,
+ email: str = None,
+ key_type: str = None, single_file: bool = False,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ client: bool = False,
+ sub_specs: List['CertificateSpec'] = None):
+ self._name = name
+ self.domains = domains
+ self.client = client
+ self.email = email
+ self.key_type = key_type
+ self.single_file = single_file
+ self.valid_from = valid_from
+ self.valid_to = valid_to
+ self.sub_specs = sub_specs
+
+ @property
+ def name(self) -> Optional[str]:
+ if self._name:
+ return self._name
+ elif self.domains:
+ return self.domains[0]
+ return None
+
+ @property
+ def type(self) -> Optional[str]:
+ if self.domains and len(self.domains):
+ return "server"
+ elif self.client:
+ return "client"
+ elif self.name:
+ return "ca"
+ return None
+
+
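+# A certificate plus its private key, optionally linked to the issuing
+# Credentials and to a CertStore for persisting files and for issuing or
+# looking up further credentials by name.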
+class Credentials:
+
+ def __init__(self, name: str, cert: Any, pkey: Any, issuer: 'Credentials' = None):
+ self._name = name
+ self._cert = cert
+ self._pkey = pkey
+ self._issuer = issuer
+ self._cert_file = None
+ self._pkey_file = None
+ self._store = None
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ @property
+ def subject(self) -> x509.Name:
+ return self._cert.subject
+
+ @property
+ def key_type(self):
+ if isinstance(self._pkey, RSAPrivateKey):
+ return f"rsa{self._pkey.key_size}"
+ elif isinstance(self._pkey, EllipticCurvePrivateKey):
+ return f"{self._pkey.curve.name}"
+ else:
+ raise Exception(f"unknown key type: {self._pkey}")
+
+ @property
+ def private_key(self) -> Any:
+ return self._pkey
+
+ @property
+ def certificate(self) -> Any:
+ return self._cert
+
+ @property
+ def cert_pem(self) -> bytes:
+ return self._cert.public_bytes(Encoding.PEM)
+
+ @property
+ def pkey_pem(self) -> bytes:
+ return self._pkey.private_bytes(
+ Encoding.PEM,
+ PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8,
+ NoEncryption())
+
+ @property
+ def issuer(self) -> Optional['Credentials']:
+ return self._issuer
+
+ def set_store(self, store: 'CertStore'):
+ self._store = store
+
+ def set_files(self, cert_file: str, pkey_file: str = None):
+ self._cert_file = cert_file
+ self._pkey_file = pkey_file
+
+ @property
+ def cert_file(self) -> str:
+ return self._cert_file
+
+ @property
+ def pkey_file(self) -> Optional[str]:
+ return self._pkey_file
+
+ def get_first(self, name) -> Optional['Credentials']:
+ creds = self._store.get_credentials_for_name(name) if self._store else []
+ return creds[0] if len(creds) else None
+
+ def get_credentials_for_name(self, name) -> List['Credentials']:
+ return self._store.get_credentials_for_name(name) if self._store else []
+
+ def issue_certs(self, specs: List[CertificateSpec],
+ chain: List['Credentials'] = None) -> List['Credentials']:
+ return [self.issue_cert(spec=spec, chain=chain) for spec in specs]
+
+ def issue_cert(self, spec: CertificateSpec, chain: List['Credentials'] = None) -> 'Credentials':
+ key_type = spec.key_type if spec.key_type else self.key_type
+ creds = None
+ if self._store:
+ creds = self._store.load_credentials(
+ name=spec.name, key_type=key_type, single_file=spec.single_file, issuer=self)
+ if creds is None:
+ creds = HttpdTestCA.create_credentials(spec=spec, issuer=self, key_type=key_type,
+ valid_from=spec.valid_from, valid_to=spec.valid_to)
+ if self._store:
+ self._store.save(creds, single_file=spec.single_file)
+ if spec.type == "ca":
+ self._store.save_chain(creds, "ca", with_root=True)
+
+ if spec.sub_specs:
+ if self._store:
+ sub_store = CertStore(fpath=os.path.join(self._store.path, creds.name))
+ creds.set_store(sub_store)
+ subchain = chain.copy() if chain else []
+ subchain.append(self)
+ creds.issue_certs(spec.sub_specs, chain=subchain)
+ return creds
+
+
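+# Writes credentials as PEM files below a directory and keeps an in-memory
+# index by name, so previously issued credentials can be reloaded and reused.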
+class CertStore:
+
+ def __init__(self, fpath: str):
+ self._store_dir = fpath
+ if not os.path.exists(self._store_dir):
+ os.makedirs(self._store_dir)
+ self._creds_by_name = {}
+
+ @property
+ def path(self) -> str:
+ return self._store_dir
+
+ def save(self, creds: Credentials, name: str = None,
+ chain: List[Credentials] = None,
+ single_file: bool = False) -> None:
+ name = name if name is not None else creds.name
+ cert_file = self.get_cert_file(name=name, key_type=creds.key_type)
+ pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type)
+ if single_file:
+ pkey_file = None
+ with open(cert_file, "wb") as fd:
+ fd.write(creds.cert_pem)
+ if chain:
+ for c in chain:
+ fd.write(c.cert_pem)
+ if pkey_file is None:
+ fd.write(creds.pkey_pem)
+ if pkey_file is not None:
+ with open(pkey_file, "wb") as fd:
+ fd.write(creds.pkey_pem)
+ creds.set_files(cert_file, pkey_file)
+ self._add_credentials(name, creds)
+
+ def save_chain(self, creds: Credentials, infix: str, with_root=False):
+ name = creds.name
+ chain = [creds]
+ while creds.issuer is not None:
+ creds = creds.issuer
+ chain.append(creds)
+ if not with_root and len(chain) > 1:
+ chain = chain[:-1]
+ chain_file = os.path.join(self._store_dir, f'{name}-{infix}.pem')
+ with open(chain_file, "wb") as fd:
+ for c in chain:
+ fd.write(c.cert_pem)
+
+ def _add_credentials(self, name: str, creds: Credentials):
+ if name not in self._creds_by_name:
+ self._creds_by_name[name] = []
+ self._creds_by_name[name].append(creds)
+
+ def get_credentials_for_name(self, name) -> List[Credentials]:
+ return self._creds_by_name[name] if name in self._creds_by_name else []
+
+ def get_cert_file(self, name: str, key_type=None) -> str:
+ key_infix = ".{0}".format(key_type) if key_type is not None else ""
+ return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem')
+
+ def get_pkey_file(self, name: str, key_type=None) -> str:
+ key_infix = ".{0}".format(key_type) if key_type is not None else ""
+ return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem')
+
+ def load_pem_cert(self, fpath: str) -> x509.Certificate:
+ with open(fpath) as fd:
+ return x509.load_pem_x509_certificate("".join(fd.readlines()).encode())
+
+ def load_pem_pkey(self, fpath: str):
+ with open(fpath) as fd:
+ return load_pem_private_key("".join(fd.readlines()).encode(), password=None)
+
+ def load_credentials(self, name: str, key_type=None, single_file: bool = False, issuer: Credentials = None):
+ cert_file = self.get_cert_file(name=name, key_type=key_type)
+ pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type)
+ if os.path.isfile(cert_file) and os.path.isfile(pkey_file):
+ cert = self.load_pem_cert(cert_file)
+ pkey = self.load_pem_pkey(pkey_file)
+ creds = Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
+ creds.set_store(self)
+ creds.set_files(cert_file, pkey_file)
+ self._add_credentials(name, creds)
+ return creds
+ return None
+
+
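+# Factory for the test PKI: creates a self-signed root CA and issues server,
+# client and intermediate-CA certificates according to CertificateSpec entries.
+# A minimal usage sketch (names and paths below are hypothetical):
+#   ca = HttpdTestCA.create_root(name="test-ca", store_dir="/tmp/test-ca", key_type="rsa4096")
+#   creds = ca.issue_cert(CertificateSpec(domains=["example.org"]))
+#   # creds.cert_file / creds.pkey_file now point at the PEM files written to the store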
+class HttpdTestCA:
+
+ @classmethod
+ def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials:
+ store = CertStore(fpath=store_dir)
+ creds = store.load_credentials(name="ca", key_type=key_type, issuer=None)
+ if creds is None:
+ creds = HttpdTestCA._make_ca_credentials(name=name, key_type=key_type)
+ store.save(creds, name="ca")
+ creds.set_store(store)
+ return creds
+
+ @staticmethod
+ def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ """Create a certificate signed by this CA for the given domains.
+ :returns: the certificate and private key PEM file paths
+ """
+ if spec.domains and len(spec.domains):
+ creds = HttpdTestCA._make_server_credentials(name=spec.name, domains=spec.domains,
+ issuer=issuer, valid_from=valid_from,
+ valid_to=valid_to, key_type=key_type)
+ elif spec.client:
+ creds = HttpdTestCA._make_client_credentials(name=spec.name, issuer=issuer,
+ email=spec.email, valid_from=valid_from,
+ valid_to=valid_to, key_type=key_type)
+ elif spec.name:
+ creds = HttpdTestCA._make_ca_credentials(name=spec.name, issuer=issuer,
+ valid_from=valid_from, valid_to=valid_to,
+ key_type=key_type)
+ else:
+ raise Exception(f"unrecognized certificate specification: {spec}")
+ return creds
+
+ @staticmethod
+ def _make_x509_name(org_name: str = None, common_name: str = None, parent: x509.Name = None) -> x509.Name:
+ name_pieces = []
+ if org_name:
+ oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
+ name_pieces.append(x509.NameAttribute(oid, org_name))
+ elif common_name:
+ name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
+ if parent:
+ name_pieces.extend([rdn for rdn in parent])
+ return x509.Name(name_pieces)
+
+ @staticmethod
+ def _make_csr(
+ subject: x509.Name,
+ pkey: Any,
+ issuer_subject: Optional[Credentials],
+ valid_from_delta: timedelta = None,
+ valid_until_delta: timedelta = None
+ ):
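+ # note: despite its name, this returns an x509.CertificateBuilder; callers add
+ # usage extensions to it and finally sign() it with the issuer's key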
+ pubkey = pkey.public_key()
+ issuer_subject = issuer_subject if issuer_subject is not None else subject
+
+ valid_from = datetime.now()
+ if valid_from_delta is not None:
+ valid_from += valid_from_delta
+ valid_until = datetime.now()
+ if valid_until_delta is not None:
+ valid_until += valid_until_delta
+
+ return (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(issuer_subject)
+ .public_key(pubkey)
+ .not_valid_before(valid_from)
+ .not_valid_after(valid_until)
+ .serial_number(x509.random_serial_number())
+ .add_extension(
+ x509.SubjectKeyIdentifier.from_public_key(pubkey),
+ critical=False,
+ )
+ )
+
+ @staticmethod
+ def _add_ca_usages(csr: Any) -> Any:
+ return csr.add_extension(
+ x509.BasicConstraints(ca=True, path_length=9),
+ critical=True,
+ ).add_extension(
+ x509.KeyUsage(
+ digital_signature=True,
+ content_commitment=False,
+ key_encipherment=False,
+ data_encipherment=False,
+ key_agreement=False,
+ key_cert_sign=True,
+ crl_sign=True,
+ encipher_only=False,
+ decipher_only=False),
+ critical=True
+ ).add_extension(
+ x509.ExtendedKeyUsage([
+ ExtendedKeyUsageOID.CLIENT_AUTH,
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ExtendedKeyUsageOID.CODE_SIGNING,
+ ]),
+ critical=True
+ )
+
+ @staticmethod
+ def _add_leaf_usages(csr: Any, domains: List[str], issuer: Credentials) -> Any:
+ return csr.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None),
+ critical=True,
+ ).add_extension(
+ x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
+ issuer.certificate.extensions.get_extension_for_class(
+ x509.SubjectKeyIdentifier).value),
+ critical=False
+ ).add_extension(
+ x509.SubjectAlternativeName([x509.DNSName(domain) for domain in domains]),
+ critical=True,
+ ).add_extension(
+ x509.ExtendedKeyUsage([
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ]),
+ critical=True
+ )
+
+ @staticmethod
+ def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: str = None) -> Any:
+ cert = csr.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None),
+ critical=True,
+ ).add_extension(
+ x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
+ issuer.certificate.extensions.get_extension_for_class(
+ x509.SubjectKeyIdentifier).value),
+ critical=False
+ )
+ if rfc82name:
+ # CertificateBuilder is immutable: keep the builder returned by add_extension
+ cert = cert.add_extension(
+ x509.SubjectAlternativeName([x509.RFC822Name(rfc82name)]),
+ critical=True,
+ )
+ cert = cert.add_extension(
+ x509.ExtendedKeyUsage([
+ ExtendedKeyUsageOID.CLIENT_AUTH,
+ ]),
+ critical=True
+ )
+ return cert
+
+ @staticmethod
+ def _make_ca_credentials(name, key_type: Any,
+ issuer: Credentials = None,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ if issuer is not None:
+ issuer_subject = issuer.certificate.subject
+ issuer_key = issuer.private_key
+ else:
+ issuer_subject = None
+ issuer_key = pkey
+ subject = HttpdTestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None)
+ csr = HttpdTestCA._make_csr(subject=subject,
+ issuer_subject=issuer_subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = HttpdTestCA._add_ca_usages(csr)
+ cert = csr.sign(private_key=issuer_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
+
+ @staticmethod
+ def _make_server_credentials(name: str, domains: List[str], issuer: Credentials,
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ subject = HttpdTestCA._make_x509_name(common_name=name, parent=issuer.subject)
+ csr = HttpdTestCA._make_csr(subject=subject,
+ issuer_subject=issuer.certificate.subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = HttpdTestCA._add_leaf_usages(csr, domains=domains, issuer=issuer)
+ cert = csr.sign(private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
+
+ @staticmethod
+ def _make_client_credentials(name: str,
+ issuer: Credentials, email: Optional[str],
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
+ pkey = _private_key(key_type=key_type)
+ subject = HttpdTestCA._make_x509_name(common_name=name, parent=issuer.subject)
+ csr = HttpdTestCA._make_csr(subject=subject,
+ issuer_subject=issuer.certificate.subject, pkey=pkey,
+ valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = HttpdTestCA._add_client_usages(csr, issuer=issuer, rfc82name=email)
+ cert = csr.sign(private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend())
+ return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
diff --git a/test/pyhttpd/conf.py b/test/pyhttpd/conf.py
new file mode 100644
index 0000000..cd3363f
--- /dev/null
+++ b/test/pyhttpd/conf.py
@@ -0,0 +1,188 @@
+from typing import Dict, Any
+
+from pyhttpd.env import HttpdTestEnv
+
+
+class HttpdConf(object):
+
+ def __init__(self, env: HttpdTestEnv, extras: Dict[str, Any] = None):
+ """ Create a new httpd configuration.
+ :param env: the environment this operates in
+ :param extras: extra configuration directives, keyed by ServerName, with
+ 'base' as a special key for global configuration additions.
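+
+ A minimal usage sketch (the server name below is hypothetical):
+
+ conf = HttpdConf(env, extras={
+ 'base': "LogLevel proxy:trace1",
+ "test1.example.org": ["Protocols h2 http/1.1"],
+ })
+ conf.add_vhost(domains=["test1.example.org"])
+ conf.install()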
+ """
+ self.env = env
+ self._indents = 0
+ self._lines = []
+ self._extras = extras.copy() if extras else {}
+ if 'base' in self._extras:
+ self.add(self._extras['base'])
+ self._tls_engine_ports = set()
+
+ def __repr__(self):
+ s = '\n'.join(self._lines)
+ return f"HttpdConf[{s}]"
+
+ def install(self):
+ self.env.install_test_conf(self._lines)
+
+ def add(self, line: Any):
+ if isinstance(line, str):
+ if self._indents > 0:
+ line = f"{' ' * self._indents}{line}"
+ self._lines.append(line)
+ else:
+ if self._indents > 0:
+ line = [f"{' ' * self._indents}{l}" for l in line]
+ self._lines.extend(line)
+ return self
+
+ def add_certificate(self, cert_file, key_file, ssl_module=None):
+ if ssl_module is None:
+ ssl_module = self.env.ssl_module
+ if ssl_module == 'mod_ssl':
+ self.add([
+ f"SSLCertificateFile {cert_file}",
+ f"SSLCertificateKeyFile {key_file if key_file else cert_file}",
+ ])
+ elif ssl_module == 'mod_tls':
+ self.add(f"TLSCertificate {cert_file} {key_file if key_file else ''}")
+ elif ssl_module == 'mod_gnutls':
+ self.add([
+ f"GnuTLSCertificateFile {cert_file}",
+ f"GnuTLSKeyFile {key_file if key_file else cert_file}",
+ ])
+ else:
+ raise Exception(f"unsupported ssl module: {ssl_module}")
+
+ def add_vhost(self, domains, port=None, doc_root="htdocs", with_ssl=None,
+ with_certificates=None, ssl_module=None):
+ self.start_vhost(domains=domains, port=port, doc_root=doc_root,
+ with_ssl=with_ssl, with_certificates=with_certificates,
+ ssl_module=ssl_module)
+ self.end_vhost()
+ return self
+
+ def start_vhost(self, domains, port=None, doc_root="htdocs", with_ssl=None,
+ ssl_module=None, with_certificates=None):
+ if not isinstance(domains, list):
+ domains = [domains]
+ if port is None:
+ port = self.env.https_port
+ if ssl_module is None:
+ ssl_module = self.env.ssl_module
+ if with_ssl is None:
+ with_ssl = self.env.https_port == port
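+ # mod_tls enables TLS per listening port via TLSEngine (not per vhost),
+ # so emit the directive only once for each port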
+ if with_ssl and ssl_module == 'mod_tls' and port not in self._tls_engine_ports:
+ self.add(f"TLSEngine {port}")
+ self._tls_engine_ports.add(port)
+ self.add("")
+ self.add(f"<VirtualHost *:{port}>")
+ self._indents += 1
+ self.add(f"ServerName {domains[0]}")
+ for alias in domains[1:]:
+ self.add(f"ServerAlias {alias}")
+ self.add(f"DocumentRoot {doc_root}")
+ if with_ssl:
+ if ssl_module == 'mod_ssl':
+ self.add("SSLEngine on")
+ elif ssl_module == 'mod_gnutls':
+ self.add("GnuTLSEnable on")
+ if with_certificates is not False:
+ for cred in self.env.get_credentials_for_name(domains[0]):
+ self.add_certificate(cred.cert_file, cred.pkey_file, ssl_module=ssl_module)
+ if domains[0] in self._extras:
+ self.add(self._extras[domains[0]])
+ return self
+
+ def end_vhost(self):
+ self._indents -= 1
+ self.add("</VirtualHost>")
+ self.add("")
+ return self
+
+ def add_proxies(self, host, proxy_self=False, h2proxy_self=False):
+ if proxy_self or h2proxy_self:
+ self.add("ProxyPreserveHost on")
+ if proxy_self:
+ self.add([
+ f"ProxyPass /proxy/ http://127.0.0.1:{self.env.http_port}/",
+ f"ProxyPassReverse /proxy/ http://{host}.{self.env.http_tld}:{self.env.http_port}/",
+ ])
+ if h2proxy_self:
+ self.add([
+ f"ProxyPass /h2proxy/ h2://127.0.0.1:{self.env.https_port}/",
+ f"ProxyPassReverse /h2proxy/ https://{host}.{self.env.http_tld}:self.env.https_port/",
+ ])
+ return self
+
+ def add_vhost_test1(self, proxy_self=False, h2proxy_self=False):
+ domain = f"test1.{self.env.http_tld}"
+ self.start_vhost(domains=[domain, f"www1.{self.env.http_tld}"],
+ port=self.env.http_port, doc_root="htdocs/test1")
+ self.end_vhost()
+ self.start_vhost(domains=[domain, f"www1.{self.env.http_tld}"],
+ port=self.env.https_port, doc_root="htdocs/test1")
+ self.add([
+ "<Location /006>",
+ " Options +Indexes",
+ "</Location>",
+ ])
+ self.add_proxies("test1", proxy_self, h2proxy_self)
+ self.end_vhost()
+ return self
+
+ def add_vhost_test2(self):
+ domain = f"test2.{self.env.http_tld}"
+ self.start_vhost(domains=[domain, f"www2.{self.env.http_tld}"],
+ port=self.env.http_port, doc_root="htdocs/test2")
+ self.end_vhost()
+ self.start_vhost(domains=[domain, f"www2.{self.env.http_tld}"],
+ port=self.env.https_port, doc_root="htdocs/test2")
+ self.add([
+ "<Location /006>",
+ " Options +Indexes",
+ "</Location>",
+ ])
+ self.end_vhost()
+ return self
+
+ def add_vhost_cgi(self, proxy_self=False, h2proxy_self=False):
+ domain = f"cgi.{self.env.http_tld}"
+ if proxy_self:
+ self.add(["ProxyStatus on", "ProxyTimeout 5",
+ "SSLProxyEngine on", "SSLProxyVerify none"])
+ if h2proxy_self:
+ self.add(["SSLProxyEngine on", "SSLProxyCheckPeerName off"])
+ self.start_vhost(domains=[domain, f"cgi-alias.{self.env.http_tld}"],
+ port=self.env.https_port, doc_root="htdocs/cgi")
+ self.add_proxies("cgi", proxy_self=proxy_self, h2proxy_self=h2proxy_self)
+ self.end_vhost()
+ self.start_vhost(domains=[domain, f"cgi-alias.{self.env.http_tld}"],
+ port=self.env.http_port, doc_root="htdocs/cgi")
+ self.add("AddHandler cgi-script .py")
+ self.add_proxies("cgi", proxy_self=proxy_self, h2proxy_self=h2proxy_self)
+ self.end_vhost()
+ return self
+
+ @staticmethod
+ def merge_extras(e1: Dict[str, Any], e2: Dict[str, Any]) -> Dict[str, Any]:
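+ # merge two 'extras' dicts; values for keys present in both are concatenated,
+ # with plain strings promoted to single-element lists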
+ def _concat(v1, v2):
+ if isinstance(v1, str):
+ v1 = [v1]
+ if isinstance(v2, str):
+ v2 = [v2]
+ v1.extend(v2)
+ return v1
+
+ if e1 is None:
+ return e2.copy() if e2 else None
+ if e2 is None:
+ return e1.copy()
+ e3 = e1.copy()
+ for name, val in e2.items():
+ if name in e3:
+ e3[name] = _concat(e3[name], val)
+ else:
+ e3[name] = val
+ return e3
diff --git a/test/pyhttpd/conf/httpd.conf.template b/test/pyhttpd/conf/httpd.conf.template
new file mode 100644
index 0000000..255b88a
--- /dev/null
+++ b/test/pyhttpd/conf/httpd.conf.template
@@ -0,0 +1,60 @@
+ServerName localhost
+ServerRoot "${server_dir}"
+
+Include "conf/modules.conf"
+
+DocumentRoot "${server_dir}/htdocs"
+
+<IfModule log_config_module>
+ LogFormat "{ \"request\": \"%r\", \"status\": %>s, \"bytes_resp_B\": %B, \"bytes_tx_O\": %O, \"bytes_rx_I\": %I, \"bytes_rx_tx_S\": %S, \"time_taken\": %D }" combined
+ LogFormat "%h %l %u %t \"%r\" %>s %b" common
+ CustomLog "logs/access_log" combined
+
+</IfModule>
+
+TypesConfig "${gen_dir}/apache/conf/mime.types"
+
+Listen ${http_port}
+Listen ${https_port}
+
+<IfModule mod_ssl.c>
+ # provide some default
+ SSLSessionCache "shmcb:ssl_gcache_data(32000)"
+</IfModule>
+
+# Insert our test-specific configuration before the first vhost,
+# so that its vhosts can become the default ones. This matters for
+# certain behaviours, such as protocol selection during SSL ALPN
+# negotiation.
+#
+Include "conf/test.conf"
+
+RequestReadTimeout header=10 body=10
+
+<IfModule deflate_module>
+ AddOutputFilterByType DEFLATE text/html text/plain text/xml text/css
+</IfModule>
+<IfModule brotli_module>
+ AddOutputFilterByType BROTLI_COMPRESS text/html text/plain text/xml text/css
+</IfModule>
+
+<VirtualHost *:${http_port}>
+ ServerName ${http_tld}
+ ServerAlias www.${http_tld}
+ <IfModule ssl_module>
+ SSLEngine off
+ </IfModule>
+ DocumentRoot "${server_dir}/htdocs"
+</VirtualHost>
+
+<Directory "${server_dir}/htdocs/cgi">
+ Options Indexes FollowSymLinks
+ AllowOverride None
+ Require all granted
+
+ AddHandler cgi-script .py
+ AddHandler cgi-script .cgi
+ Options +ExecCGI
+</Directory>
+
+
diff --git a/test/pyhttpd/conf/mime.types b/test/pyhttpd/conf/mime.types
new file mode 100644
index 0000000..b90b165
--- /dev/null
+++ b/test/pyhttpd/conf/mime.types
@@ -0,0 +1,1588 @@
+# This file maps Internet media types to unique file extension(s).
+# Although created for httpd, this file is used by many software systems
+# and has been placed in the public domain for unlimited redistribution.
+#
+# The table below contains both registered and (common) unregistered types.
+# A type that has no unique extension can be ignored -- they are listed
+# here to guide configurations toward known types and to make it easier to
+# identify "new" types. File extensions are also commonly used to indicate
+# content languages and encodings, so choose them carefully.
+#
+# Internet media types should be registered as described in RFC 4288.
+# The registry is at <http://www.iana.org/assignments/media-types/>.
+#
+# MIME type (lowercased) Extensions
+# ============================================ ==========
+# application/1d-interleaved-parityfec
+# application/3gpp-ims+xml
+# application/activemessage
+application/andrew-inset ez
+# application/applefile
+application/applixware aw
+application/atom+xml atom
+application/atomcat+xml atomcat
+# application/atomicmail
+application/atomsvc+xml atomsvc
+# application/auth-policy+xml
+# application/batch-smtp
+# application/beep+xml
+# application/calendar+xml
+# application/cals-1840
+# application/ccmp+xml
+application/ccxml+xml ccxml
+application/cdmi-capability cdmia
+application/cdmi-container cdmic
+application/cdmi-domain cdmid
+application/cdmi-object cdmio
+application/cdmi-queue cdmiq
+# application/cea-2018+xml
+# application/cellml+xml
+# application/cfw
+# application/cnrp+xml
+# application/commonground
+# application/conference-info+xml
+# application/cpl+xml
+# application/csta+xml
+# application/cstadata+xml
+application/cu-seeme cu
+# application/cybercash
+application/davmount+xml davmount
+# application/dca-rft
+# application/dec-dx
+# application/dialog-info+xml
+# application/dicom
+# application/dns
+application/docbook+xml dbk
+# application/dskpp+xml
+application/dssc+der dssc
+application/dssc+xml xdssc
+# application/dvcs
+application/ecmascript ecma
+# application/edi-consent
+# application/edi-x12
+# application/edifact
+application/emma+xml emma
+# application/epp+xml
+application/epub+zip epub
+# application/eshop
+# application/example
+application/exi exi
+# application/fastinfoset
+# application/fastsoap
+# application/fits
+application/font-tdpfr pfr
+# application/framework-attributes+xml
+application/gml+xml gml
+application/gpx+xml gpx
+application/gxf gxf
+# application/h224
+# application/held+xml
+# application/http
+application/hyperstudio stk
+# application/ibe-key-request+xml
+# application/ibe-pkg-reply+xml
+# application/ibe-pp-data
+# application/iges
+# application/im-iscomposing+xml
+# application/index
+# application/index.cmd
+# application/index.obj
+# application/index.response
+# application/index.vnd
+application/inkml+xml ink inkml
+# application/iotp
+application/ipfix ipfix
+# application/ipp
+# application/isup
+application/java-archive jar
+application/java-serialized-object ser
+application/java-vm class
+application/javascript js
+application/json json
+application/jsonml+json jsonml
+# application/kpml-request+xml
+# application/kpml-response+xml
+application/lost+xml lostxml
+application/mac-binhex40 hqx
+application/mac-compactpro cpt
+# application/macwriteii
+application/mads+xml mads
+application/marc mrc
+application/marcxml+xml mrcx
+application/mathematica ma nb mb
+# application/mathml-content+xml
+# application/mathml-presentation+xml
+application/mathml+xml mathml
+# application/mbms-associated-procedure-description+xml
+# application/mbms-deregister+xml
+# application/mbms-envelope+xml
+# application/mbms-msk+xml
+# application/mbms-msk-response+xml
+# application/mbms-protection-description+xml
+# application/mbms-reception-report+xml
+# application/mbms-register+xml
+# application/mbms-register-response+xml
+# application/mbms-user-service-description+xml
+application/mbox mbox
+# application/media_control+xml
+application/mediaservercontrol+xml mscml
+application/metalink+xml metalink
+application/metalink4+xml meta4
+application/mets+xml mets
+# application/mikey
+application/mods+xml mods
+# application/moss-keys
+# application/moss-signature
+# application/mosskey-data
+# application/mosskey-request
+application/mp21 m21 mp21
+application/mp4 mp4s
+# application/mpeg4-generic
+# application/mpeg4-iod
+# application/mpeg4-iod-xmt
+# application/msc-ivr+xml
+# application/msc-mixer+xml
+application/msword doc dot
+application/mxf mxf
+# application/nasdata
+# application/news-checkgroups
+# application/news-groupinfo
+# application/news-transmission
+# application/nss
+# application/ocsp-request
+# application/ocsp-response
+application/octet-stream bin dms lrf mar so dist distz pkg bpk dump elc deploy
+application/oda oda
+application/oebps-package+xml opf
+application/ogg ogx
+application/omdoc+xml omdoc
+application/onenote onetoc onetoc2 onetmp onepkg
+application/oxps oxps
+# application/parityfec
+application/patch-ops-error+xml xer
+application/pdf pdf
+application/pgp-encrypted pgp
+# application/pgp-keys
+application/pgp-signature asc sig
+application/pics-rules prf
+# application/pidf+xml
+# application/pidf-diff+xml
+application/pkcs10 p10
+application/pkcs7-mime p7m p7c
+application/pkcs7-signature p7s
+application/pkcs8 p8
+application/pkix-attr-cert ac
+application/pkix-cert cer
+application/pkix-crl crl
+application/pkix-pkipath pkipath
+application/pkixcmp pki
+application/pls+xml pls
+# application/poc-settings+xml
+application/postscript ai eps ps
+# application/prs.alvestrand.titrax-sheet
+application/prs.cww cww
+# application/prs.nprend
+# application/prs.plucker
+# application/prs.rdf-xml-crypt
+# application/prs.xsf+xml
+application/pskc+xml pskcxml
+# application/qsig
+application/rdf+xml rdf
+application/reginfo+xml rif
+application/relax-ng-compact-syntax rnc
+# application/remote-printing
+application/resource-lists+xml rl
+application/resource-lists-diff+xml rld
+# application/riscos
+# application/rlmi+xml
+application/rls-services+xml rs
+application/rpki-ghostbusters gbr
+application/rpki-manifest mft
+application/rpki-roa roa
+# application/rpki-updown
+application/rsd+xml rsd
+application/rss+xml rss
+application/rtf rtf
+# application/rtx
+# application/samlassertion+xml
+# application/samlmetadata+xml
+application/sbml+xml sbml
+application/scvp-cv-request scq
+application/scvp-cv-response scs
+application/scvp-vp-request spq
+application/scvp-vp-response spp
+application/sdp sdp
+# application/set-payment
+application/set-payment-initiation setpay
+# application/set-registration
+application/set-registration-initiation setreg
+# application/sgml
+# application/sgml-open-catalog
+application/shf+xml shf
+# application/sieve
+# application/simple-filter+xml
+# application/simple-message-summary
+# application/simplesymbolcontainer
+# application/slate
+# application/smil
+application/smil+xml smi smil
+# application/soap+fastinfoset
+# application/soap+xml
+application/sparql-query rq
+application/sparql-results+xml srx
+# application/spirits-event+xml
+application/srgs gram
+application/srgs+xml grxml
+application/sru+xml sru
+application/ssdl+xml ssdl
+application/ssml+xml ssml
+# application/tamp-apex-update
+# application/tamp-apex-update-confirm
+# application/tamp-community-update
+# application/tamp-community-update-confirm
+# application/tamp-error
+# application/tamp-sequence-adjust
+# application/tamp-sequence-adjust-confirm
+# application/tamp-status-query
+# application/tamp-status-response
+# application/tamp-update
+# application/tamp-update-confirm
+application/tei+xml tei teicorpus
+application/thraud+xml tfi
+# application/timestamp-query
+# application/timestamp-reply
+application/timestamped-data tsd
+# application/tve-trigger
+# application/ulpfec
+# application/vcard+xml
+# application/vemmi
+# application/vividence.scriptfile
+# application/vnd.3gpp.bsf+xml
+application/vnd.3gpp.pic-bw-large plb
+application/vnd.3gpp.pic-bw-small psb
+application/vnd.3gpp.pic-bw-var pvb
+# application/vnd.3gpp.sms
+# application/vnd.3gpp2.bcmcsinfo+xml
+# application/vnd.3gpp2.sms
+application/vnd.3gpp2.tcap tcap
+application/vnd.3m.post-it-notes pwn
+application/vnd.accpac.simply.aso aso
+application/vnd.accpac.simply.imp imp
+application/vnd.acucobol acu
+application/vnd.acucorp atc acutc
+application/vnd.adobe.air-application-installer-package+zip air
+application/vnd.adobe.formscentral.fcdt fcdt
+application/vnd.adobe.fxp fxp fxpl
+# application/vnd.adobe.partial-upload
+application/vnd.adobe.xdp+xml xdp
+application/vnd.adobe.xfdf xfdf
+# application/vnd.aether.imp
+# application/vnd.ah-barcode
+application/vnd.ahead.space ahead
+application/vnd.airzip.filesecure.azf azf
+application/vnd.airzip.filesecure.azs azs
+application/vnd.amazon.ebook azw
+application/vnd.americandynamics.acc acc
+application/vnd.amiga.ami ami
+# application/vnd.amundsen.maze+xml
+application/vnd.android.package-archive apk
+application/vnd.anser-web-certificate-issue-initiation cii
+application/vnd.anser-web-funds-transfer-initiation fti
+application/vnd.antix.game-component atx
+application/vnd.apple.installer+xml mpkg
+application/vnd.apple.mpegurl m3u8
+# application/vnd.arastra.swi
+application/vnd.aristanetworks.swi swi
+application/vnd.astraea-software.iota iota
+application/vnd.audiograph aep
+# application/vnd.autopackage
+# application/vnd.avistar+xml
+application/vnd.blueice.multipass mpm
+# application/vnd.bluetooth.ep.oob
+application/vnd.bmi bmi
+application/vnd.businessobjects rep
+# application/vnd.cab-jscript
+# application/vnd.canon-cpdl
+# application/vnd.canon-lips
+# application/vnd.cendio.thinlinc.clientconf
+application/vnd.chemdraw+xml cdxml
+application/vnd.chipnuts.karaoke-mmd mmd
+application/vnd.cinderella cdy
+# application/vnd.cirpack.isdn-ext
+application/vnd.claymore cla
+application/vnd.cloanto.rp9 rp9
+application/vnd.clonk.c4group c4g c4d c4f c4p c4u
+application/vnd.cluetrust.cartomobile-config c11amc
+application/vnd.cluetrust.cartomobile-config-pkg c11amz
+# application/vnd.collection+json
+# application/vnd.commerce-battelle
+application/vnd.commonspace csp
+application/vnd.contact.cmsg cdbcmsg
+application/vnd.cosmocaller cmc
+application/vnd.crick.clicker clkx
+application/vnd.crick.clicker.keyboard clkk
+application/vnd.crick.clicker.palette clkp
+application/vnd.crick.clicker.template clkt
+application/vnd.crick.clicker.wordbank clkw
+application/vnd.criticaltools.wbs+xml wbs
+application/vnd.ctc-posml pml
+# application/vnd.ctct.ws+xml
+# application/vnd.cups-pdf
+# application/vnd.cups-postscript
+application/vnd.cups-ppd ppd
+# application/vnd.cups-raster
+# application/vnd.cups-raw
+# application/vnd.curl
+application/vnd.curl.car car
+application/vnd.curl.pcurl pcurl
+# application/vnd.cybank
+application/vnd.dart dart
+application/vnd.data-vision.rdz rdz
+application/vnd.dece.data uvf uvvf uvd uvvd
+application/vnd.dece.ttml+xml uvt uvvt
+application/vnd.dece.unspecified uvx uvvx
+application/vnd.dece.zip uvz uvvz
+application/vnd.denovo.fcselayout-link fe_launch
+# application/vnd.dir-bi.plate-dl-nosuffix
+application/vnd.dna dna
+application/vnd.dolby.mlp mlp
+# application/vnd.dolby.mobile.1
+# application/vnd.dolby.mobile.2
+application/vnd.dpgraph dpg
+application/vnd.dreamfactory dfac
+application/vnd.ds-keypoint kpxx
+application/vnd.dvb.ait ait
+# application/vnd.dvb.dvbj
+# application/vnd.dvb.esgcontainer
+# application/vnd.dvb.ipdcdftnotifaccess
+# application/vnd.dvb.ipdcesgaccess
+# application/vnd.dvb.ipdcesgaccess2
+# application/vnd.dvb.ipdcesgpdd
+# application/vnd.dvb.ipdcroaming
+# application/vnd.dvb.iptv.alfec-base
+# application/vnd.dvb.iptv.alfec-enhancement
+# application/vnd.dvb.notif-aggregate-root+xml
+# application/vnd.dvb.notif-container+xml
+# application/vnd.dvb.notif-generic+xml
+# application/vnd.dvb.notif-ia-msglist+xml
+# application/vnd.dvb.notif-ia-registration-request+xml
+# application/vnd.dvb.notif-ia-registration-response+xml
+# application/vnd.dvb.notif-init+xml
+# application/vnd.dvb.pfr
+application/vnd.dvb.service svc
+# application/vnd.dxr
+application/vnd.dynageo geo
+# application/vnd.easykaraoke.cdgdownload
+# application/vnd.ecdis-update
+application/vnd.ecowin.chart mag
+# application/vnd.ecowin.filerequest
+# application/vnd.ecowin.fileupdate
+# application/vnd.ecowin.series
+# application/vnd.ecowin.seriesrequest
+# application/vnd.ecowin.seriesupdate
+# application/vnd.emclient.accessrequest+xml
+application/vnd.enliven nml
+# application/vnd.eprints.data+xml
+application/vnd.epson.esf esf
+application/vnd.epson.msf msf
+application/vnd.epson.quickanime qam
+application/vnd.epson.salt slt
+application/vnd.epson.ssf ssf
+# application/vnd.ericsson.quickcall
+application/vnd.eszigno3+xml es3 et3
+# application/vnd.etsi.aoc+xml
+# application/vnd.etsi.cug+xml
+# application/vnd.etsi.iptvcommand+xml
+# application/vnd.etsi.iptvdiscovery+xml
+# application/vnd.etsi.iptvprofile+xml
+# application/vnd.etsi.iptvsad-bc+xml
+# application/vnd.etsi.iptvsad-cod+xml
+# application/vnd.etsi.iptvsad-npvr+xml
+# application/vnd.etsi.iptvservice+xml
+# application/vnd.etsi.iptvsync+xml
+# application/vnd.etsi.iptvueprofile+xml
+# application/vnd.etsi.mcid+xml
+# application/vnd.etsi.overload-control-policy-dataset+xml
+# application/vnd.etsi.sci+xml
+# application/vnd.etsi.simservs+xml
+# application/vnd.etsi.tsl+xml
+# application/vnd.etsi.tsl.der
+# application/vnd.eudora.data
+application/vnd.ezpix-album ez2
+application/vnd.ezpix-package ez3
+# application/vnd.f-secure.mobile
+application/vnd.fdf fdf
+application/vnd.fdsn.mseed mseed
+application/vnd.fdsn.seed seed dataless
+# application/vnd.ffsns
+# application/vnd.fints
+application/vnd.flographit gph
+application/vnd.fluxtime.clip ftc
+# application/vnd.font-fontforge-sfd
+application/vnd.framemaker fm frame maker book
+application/vnd.frogans.fnc fnc
+application/vnd.frogans.ltf ltf
+application/vnd.fsc.weblaunch fsc
+application/vnd.fujitsu.oasys oas
+application/vnd.fujitsu.oasys2 oa2
+application/vnd.fujitsu.oasys3 oa3
+application/vnd.fujitsu.oasysgp fg5
+application/vnd.fujitsu.oasysprs bh2
+# application/vnd.fujixerox.art-ex
+# application/vnd.fujixerox.art4
+# application/vnd.fujixerox.hbpl
+application/vnd.fujixerox.ddd ddd
+application/vnd.fujixerox.docuworks xdw
+application/vnd.fujixerox.docuworks.binder xbd
+# application/vnd.fut-misnet
+application/vnd.fuzzysheet fzs
+application/vnd.genomatix.tuxedo txd
+# application/vnd.geocube+xml
+application/vnd.geogebra.file ggb
+application/vnd.geogebra.tool ggt
+application/vnd.geometry-explorer gex gre
+application/vnd.geonext gxt
+application/vnd.geoplan g2w
+application/vnd.geospace g3w
+# application/vnd.globalplatform.card-content-mgt
+# application/vnd.globalplatform.card-content-mgt-response
+application/vnd.gmx gmx
+application/vnd.google-earth.kml+xml kml
+application/vnd.google-earth.kmz kmz
+application/vnd.grafeq gqf gqs
+# application/vnd.gridmp
+application/vnd.groove-account gac
+application/vnd.groove-help ghf
+application/vnd.groove-identity-message gim
+application/vnd.groove-injector grv
+application/vnd.groove-tool-message gtm
+application/vnd.groove-tool-template tpl
+application/vnd.groove-vcard vcg
+# application/vnd.hal+json
+application/vnd.hal+xml hal
+application/vnd.handheld-entertainment+xml zmm
+application/vnd.hbci hbci
+# application/vnd.hcl-bireports
+application/vnd.hhe.lesson-player les
+application/vnd.hp-hpgl hpgl
+application/vnd.hp-hpid hpid
+application/vnd.hp-hps hps
+application/vnd.hp-jlyt jlt
+application/vnd.hp-pcl pcl
+application/vnd.hp-pclxl pclxl
+# application/vnd.httphone
+application/vnd.hydrostatix.sof-data sfd-hdstx
+# application/vnd.hzn-3d-crossword
+# application/vnd.ibm.afplinedata
+# application/vnd.ibm.electronic-media
+application/vnd.ibm.minipay mpy
+application/vnd.ibm.modcap afp listafp list3820
+application/vnd.ibm.rights-management irm
+application/vnd.ibm.secure-container sc
+application/vnd.iccprofile icc icm
+application/vnd.igloader igl
+application/vnd.immervision-ivp ivp
+application/vnd.immervision-ivu ivu
+# application/vnd.informedcontrol.rms+xml
+# application/vnd.informix-visionary
+# application/vnd.infotech.project
+# application/vnd.infotech.project+xml
+# application/vnd.innopath.wamp.notification
+application/vnd.insors.igm igm
+application/vnd.intercon.formnet xpw xpx
+application/vnd.intergeo i2g
+# application/vnd.intertrust.digibox
+# application/vnd.intertrust.nncp
+application/vnd.intu.qbo qbo
+application/vnd.intu.qfx qfx
+# application/vnd.iptc.g2.conceptitem+xml
+# application/vnd.iptc.g2.knowledgeitem+xml
+# application/vnd.iptc.g2.newsitem+xml
+# application/vnd.iptc.g2.newsmessage+xml
+# application/vnd.iptc.g2.packageitem+xml
+# application/vnd.iptc.g2.planningitem+xml
+application/vnd.ipunplugged.rcprofile rcprofile
+application/vnd.irepository.package+xml irp
+application/vnd.is-xpr xpr
+application/vnd.isac.fcs fcs
+application/vnd.jam jam
+# application/vnd.japannet-directory-service
+# application/vnd.japannet-jpnstore-wakeup
+# application/vnd.japannet-payment-wakeup
+# application/vnd.japannet-registration
+# application/vnd.japannet-registration-wakeup
+# application/vnd.japannet-setstore-wakeup
+# application/vnd.japannet-verification
+# application/vnd.japannet-verification-wakeup
+application/vnd.jcp.javame.midlet-rms rms
+application/vnd.jisp jisp
+application/vnd.joost.joda-archive joda
+application/vnd.kahootz ktz ktr
+application/vnd.kde.karbon karbon
+application/vnd.kde.kchart chrt
+application/vnd.kde.kformula kfo
+application/vnd.kde.kivio flw
+application/vnd.kde.kontour kon
+application/vnd.kde.kpresenter kpr kpt
+application/vnd.kde.kspread ksp
+application/vnd.kde.kword kwd kwt
+application/vnd.kenameaapp htke
+application/vnd.kidspiration kia
+application/vnd.kinar kne knp
+application/vnd.koan skp skd skt skm
+application/vnd.kodak-descriptor sse
+application/vnd.las.las+xml lasxml
+# application/vnd.liberty-request+xml
+application/vnd.llamagraphics.life-balance.desktop lbd
+application/vnd.llamagraphics.life-balance.exchange+xml lbe
+application/vnd.lotus-1-2-3 123
+application/vnd.lotus-approach apr
+application/vnd.lotus-freelance pre
+application/vnd.lotus-notes nsf
+application/vnd.lotus-organizer org
+application/vnd.lotus-screencam scm
+application/vnd.lotus-wordpro lwp
+application/vnd.macports.portpkg portpkg
+# application/vnd.marlin.drm.actiontoken+xml
+# application/vnd.marlin.drm.conftoken+xml
+# application/vnd.marlin.drm.license+xml
+# application/vnd.marlin.drm.mdcf
+application/vnd.mcd mcd
+application/vnd.medcalcdata mc1
+application/vnd.mediastation.cdkey cdkey
+# application/vnd.meridian-slingshot
+application/vnd.mfer mwf
+application/vnd.mfmp mfm
+application/vnd.micrografx.flo flo
+application/vnd.micrografx.igx igx
+application/vnd.mif mif
+# application/vnd.minisoft-hp3000-save
+# application/vnd.mitsubishi.misty-guard.trustweb
+application/vnd.mobius.daf daf
+application/vnd.mobius.dis dis
+application/vnd.mobius.mbk mbk
+application/vnd.mobius.mqy mqy
+application/vnd.mobius.msl msl
+application/vnd.mobius.plc plc
+application/vnd.mobius.txf txf
+application/vnd.mophun.application mpn
+application/vnd.mophun.certificate mpc
+# application/vnd.motorola.flexsuite
+# application/vnd.motorola.flexsuite.adsi
+# application/vnd.motorola.flexsuite.fis
+# application/vnd.motorola.flexsuite.gotap
+# application/vnd.motorola.flexsuite.kmr
+# application/vnd.motorola.flexsuite.ttc
+# application/vnd.motorola.flexsuite.wem
+# application/vnd.motorola.iprm
+application/vnd.mozilla.xul+xml xul
+application/vnd.ms-artgalry cil
+# application/vnd.ms-asf
+application/vnd.ms-cab-compressed cab
+# application/vnd.ms-color.iccprofile
+application/vnd.ms-excel xls xlm xla xlc xlt xlw
+application/vnd.ms-excel.addin.macroenabled.12 xlam
+application/vnd.ms-excel.sheet.binary.macroenabled.12 xlsb
+application/vnd.ms-excel.sheet.macroenabled.12 xlsm
+application/vnd.ms-excel.template.macroenabled.12 xltm
+application/vnd.ms-fontobject eot
+application/vnd.ms-htmlhelp chm
+application/vnd.ms-ims ims
+application/vnd.ms-lrm lrm
+# application/vnd.ms-office.activex+xml
+application/vnd.ms-officetheme thmx
+# application/vnd.ms-opentype
+# application/vnd.ms-package.obfuscated-opentype
+application/vnd.ms-pki.seccat cat
+application/vnd.ms-pki.stl stl
+# application/vnd.ms-playready.initiator+xml
+application/vnd.ms-powerpoint ppt pps pot
+application/vnd.ms-powerpoint.addin.macroenabled.12 ppam
+application/vnd.ms-powerpoint.presentation.macroenabled.12 pptm
+application/vnd.ms-powerpoint.slide.macroenabled.12 sldm
+application/vnd.ms-powerpoint.slideshow.macroenabled.12 ppsm
+application/vnd.ms-powerpoint.template.macroenabled.12 potm
+# application/vnd.ms-printing.printticket+xml
+application/vnd.ms-project mpp mpt
+# application/vnd.ms-tnef
+# application/vnd.ms-wmdrm.lic-chlg-req
+# application/vnd.ms-wmdrm.lic-resp
+# application/vnd.ms-wmdrm.meter-chlg-req
+# application/vnd.ms-wmdrm.meter-resp
+application/vnd.ms-word.document.macroenabled.12 docm
+application/vnd.ms-word.template.macroenabled.12 dotm
+application/vnd.ms-works wps wks wcm wdb
+application/vnd.ms-wpl wpl
+application/vnd.ms-xpsdocument xps
+application/vnd.mseq mseq
+# application/vnd.msign
+# application/vnd.multiad.creator
+# application/vnd.multiad.creator.cif
+# application/vnd.music-niff
+application/vnd.musician mus
+application/vnd.muvee.style msty
+application/vnd.mynfc taglet
+# application/vnd.ncd.control
+# application/vnd.ncd.reference
+# application/vnd.nervana
+# application/vnd.netfpx
+application/vnd.neurolanguage.nlu nlu
+application/vnd.nitf ntf nitf
+application/vnd.noblenet-directory nnd
+application/vnd.noblenet-sealer nns
+application/vnd.noblenet-web nnw
+# application/vnd.nokia.catalogs
+# application/vnd.nokia.conml+wbxml
+# application/vnd.nokia.conml+xml
+# application/vnd.nokia.isds-radio-presets
+# application/vnd.nokia.iptv.config+xml
+# application/vnd.nokia.landmark+wbxml
+# application/vnd.nokia.landmark+xml
+# application/vnd.nokia.landmarkcollection+xml
+# application/vnd.nokia.n-gage.ac+xml
+application/vnd.nokia.n-gage.data ngdat
+application/vnd.nokia.n-gage.symbian.install n-gage
+# application/vnd.nokia.ncd
+# application/vnd.nokia.pcd+wbxml
+# application/vnd.nokia.pcd+xml
+application/vnd.nokia.radio-preset rpst
+application/vnd.nokia.radio-presets rpss
+application/vnd.novadigm.edm edm
+application/vnd.novadigm.edx edx
+application/vnd.novadigm.ext ext
+# application/vnd.ntt-local.file-transfer
+# application/vnd.ntt-local.sip-ta_remote
+# application/vnd.ntt-local.sip-ta_tcp_stream
+application/vnd.oasis.opendocument.chart odc
+application/vnd.oasis.opendocument.chart-template otc
+application/vnd.oasis.opendocument.database odb
+application/vnd.oasis.opendocument.formula odf
+application/vnd.oasis.opendocument.formula-template odft
+application/vnd.oasis.opendocument.graphics odg
+application/vnd.oasis.opendocument.graphics-template otg
+application/vnd.oasis.opendocument.image odi
+application/vnd.oasis.opendocument.image-template oti
+application/vnd.oasis.opendocument.presentation odp
+application/vnd.oasis.opendocument.presentation-template otp
+application/vnd.oasis.opendocument.spreadsheet ods
+application/vnd.oasis.opendocument.spreadsheet-template ots
+application/vnd.oasis.opendocument.text odt
+application/vnd.oasis.opendocument.text-master odm
+application/vnd.oasis.opendocument.text-template ott
+application/vnd.oasis.opendocument.text-web oth
+# application/vnd.obn
+# application/vnd.oftn.l10n+json
+# application/vnd.oipf.contentaccessdownload+xml
+# application/vnd.oipf.contentaccessstreaming+xml
+# application/vnd.oipf.cspg-hexbinary
+# application/vnd.oipf.dae.svg+xml
+# application/vnd.oipf.dae.xhtml+xml
+# application/vnd.oipf.mippvcontrolmessage+xml
+# application/vnd.oipf.pae.gem
+# application/vnd.oipf.spdiscovery+xml
+# application/vnd.oipf.spdlist+xml
+# application/vnd.oipf.ueprofile+xml
+# application/vnd.oipf.userprofile+xml
+application/vnd.olpc-sugar xo
+# application/vnd.oma-scws-config
+# application/vnd.oma-scws-http-request
+# application/vnd.oma-scws-http-response
+# application/vnd.oma.bcast.associated-procedure-parameter+xml
+# application/vnd.oma.bcast.drm-trigger+xml
+# application/vnd.oma.bcast.imd+xml
+# application/vnd.oma.bcast.ltkm
+# application/vnd.oma.bcast.notification+xml
+# application/vnd.oma.bcast.provisioningtrigger
+# application/vnd.oma.bcast.sgboot
+# application/vnd.oma.bcast.sgdd+xml
+# application/vnd.oma.bcast.sgdu
+# application/vnd.oma.bcast.simple-symbol-container
+# application/vnd.oma.bcast.smartcard-trigger+xml
+# application/vnd.oma.bcast.sprov+xml
+# application/vnd.oma.bcast.stkm
+# application/vnd.oma.cab-address-book+xml
+# application/vnd.oma.cab-feature-handler+xml
+# application/vnd.oma.cab-pcc+xml
+# application/vnd.oma.cab-user-prefs+xml
+# application/vnd.oma.dcd
+# application/vnd.oma.dcdc
+application/vnd.oma.dd2+xml dd2
+# application/vnd.oma.drm.risd+xml
+# application/vnd.oma.group-usage-list+xml
+# application/vnd.oma.pal+xml
+# application/vnd.oma.poc.detailed-progress-report+xml
+# application/vnd.oma.poc.final-report+xml
+# application/vnd.oma.poc.groups+xml
+# application/vnd.oma.poc.invocation-descriptor+xml
+# application/vnd.oma.poc.optimized-progress-report+xml
+# application/vnd.oma.push
+# application/vnd.oma.scidm.messages+xml
+# application/vnd.oma.xcap-directory+xml
+# application/vnd.omads-email+xml
+# application/vnd.omads-file+xml
+# application/vnd.omads-folder+xml
+# application/vnd.omaloc-supl-init
+application/vnd.openofficeorg.extension oxt
+# application/vnd.openxmlformats-officedocument.custom-properties+xml
+# application/vnd.openxmlformats-officedocument.customxmlproperties+xml
+# application/vnd.openxmlformats-officedocument.drawing+xml
+# application/vnd.openxmlformats-officedocument.drawingml.chart+xml
+# application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml
+# application/vnd.openxmlformats-officedocument.extended-properties+xml
+# application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml
+# application/vnd.openxmlformats-officedocument.presentationml.comments+xml
+# application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml
+# application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml
+# application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml
+application/vnd.openxmlformats-officedocument.presentationml.presentation pptx
+# application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.presprops+xml
+application/vnd.openxmlformats-officedocument.presentationml.slide sldx
+# application/vnd.openxmlformats-officedocument.presentationml.slide+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml
+application/vnd.openxmlformats-officedocument.presentationml.slideshow ppsx
+# application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml
+# application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml
+# application/vnd.openxmlformats-officedocument.presentationml.tags+xml
+application/vnd.openxmlformats-officedocument.presentationml.template potx
+# application/vnd.openxmlformats-officedocument.presentationml.template.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml
+application/vnd.openxmlformats-officedocument.spreadsheetml.sheet xlsx
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml
+application/vnd.openxmlformats-officedocument.spreadsheetml.template xltx
+# application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml
+# application/vnd.openxmlformats-officedocument.theme+xml
+# application/vnd.openxmlformats-officedocument.themeoverride+xml
+# application/vnd.openxmlformats-officedocument.vmldrawing
+# application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml
+application/vnd.openxmlformats-officedocument.wordprocessingml.document docx
+# application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml
+application/vnd.openxmlformats-officedocument.wordprocessingml.template dotx
+# application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml
+# application/vnd.openxmlformats-package.core-properties+xml
+# application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml
+# application/vnd.openxmlformats-package.relationships+xml
+# application/vnd.quobject-quoxdocument
+# application/vnd.osa.netdeploy
+application/vnd.osgeo.mapguide.package mgp
+# application/vnd.osgi.bundle
+application/vnd.osgi.dp dp
+application/vnd.osgi.subsystem esa
+# application/vnd.otps.ct-kip+xml
+application/vnd.palm pdb pqa oprc
+# application/vnd.paos.xml
+application/vnd.pawaafile paw
+application/vnd.pg.format str
+application/vnd.pg.osasli ei6
+# application/vnd.piaccess.application-licence
+application/vnd.picsel efif
+application/vnd.pmi.widget wg
+# application/vnd.poc.group-advertisement+xml
+application/vnd.pocketlearn plf
+application/vnd.powerbuilder6 pbd
+# application/vnd.powerbuilder6-s
+# application/vnd.powerbuilder7
+# application/vnd.powerbuilder7-s
+# application/vnd.powerbuilder75
+# application/vnd.powerbuilder75-s
+# application/vnd.preminet
+application/vnd.previewsystems.box box
+application/vnd.proteus.magazine mgz
+application/vnd.publishare-delta-tree qps
+application/vnd.pvi.ptid1 ptid
+# application/vnd.pwg-multiplexed
+# application/vnd.pwg-xhtml-print+xml
+# application/vnd.qualcomm.brew-app-res
+application/vnd.quark.quarkxpress qxd qxt qwd qwt qxl qxb
+# application/vnd.radisys.moml+xml
+# application/vnd.radisys.msml+xml
+# application/vnd.radisys.msml-audit+xml
+# application/vnd.radisys.msml-audit-conf+xml
+# application/vnd.radisys.msml-audit-conn+xml
+# application/vnd.radisys.msml-audit-dialog+xml
+# application/vnd.radisys.msml-audit-stream+xml
+# application/vnd.radisys.msml-conf+xml
+# application/vnd.radisys.msml-dialog+xml
+# application/vnd.radisys.msml-dialog-base+xml
+# application/vnd.radisys.msml-dialog-fax-detect+xml
+# application/vnd.radisys.msml-dialog-fax-sendrecv+xml
+# application/vnd.radisys.msml-dialog-group+xml
+# application/vnd.radisys.msml-dialog-speech+xml
+# application/vnd.radisys.msml-dialog-transform+xml
+# application/vnd.rainstor.data
+# application/vnd.rapid
+application/vnd.realvnc.bed bed
+application/vnd.recordare.musicxml mxl
+application/vnd.recordare.musicxml+xml musicxml
+# application/vnd.renlearn.rlprint
+application/vnd.rig.cryptonote cryptonote
+application/vnd.rim.cod cod
+application/vnd.rn-realmedia rm
+application/vnd.rn-realmedia-vbr rmvb
+application/vnd.route66.link66+xml link66
+# application/vnd.rs-274x
+# application/vnd.ruckus.download
+# application/vnd.s3sms
+application/vnd.sailingtracker.track st
+# application/vnd.sbm.cid
+# application/vnd.sbm.mid2
+# application/vnd.scribus
+# application/vnd.sealed.3df
+# application/vnd.sealed.csf
+# application/vnd.sealed.doc
+# application/vnd.sealed.eml
+# application/vnd.sealed.mht
+# application/vnd.sealed.net
+# application/vnd.sealed.ppt
+# application/vnd.sealed.tiff
+# application/vnd.sealed.xls
+# application/vnd.sealedmedia.softseal.html
+# application/vnd.sealedmedia.softseal.pdf
+application/vnd.seemail see
+application/vnd.sema sema
+application/vnd.semd semd
+application/vnd.semf semf
+application/vnd.shana.informed.formdata ifm
+application/vnd.shana.informed.formtemplate itp
+application/vnd.shana.informed.interchange iif
+application/vnd.shana.informed.package ipk
+application/vnd.simtech-mindmapper twd twds
+application/vnd.smaf mmf
+# application/vnd.smart.notebook
+application/vnd.smart.teacher teacher
+# application/vnd.software602.filler.form+xml
+# application/vnd.software602.filler.form-xml-zip
+application/vnd.solent.sdkm+xml sdkm sdkd
+application/vnd.spotfire.dxp dxp
+application/vnd.spotfire.sfs sfs
+# application/vnd.sss-cod
+# application/vnd.sss-dtf
+# application/vnd.sss-ntf
+application/vnd.stardivision.calc sdc
+application/vnd.stardivision.draw sda
+application/vnd.stardivision.impress sdd
+application/vnd.stardivision.math smf
+application/vnd.stardivision.writer sdw vor
+application/vnd.stardivision.writer-global sgl
+application/vnd.stepmania.package smzip
+application/vnd.stepmania.stepchart sm
+# application/vnd.street-stream
+application/vnd.sun.xml.calc sxc
+application/vnd.sun.xml.calc.template stc
+application/vnd.sun.xml.draw sxd
+application/vnd.sun.xml.draw.template std
+application/vnd.sun.xml.impress sxi
+application/vnd.sun.xml.impress.template sti
+application/vnd.sun.xml.math sxm
+application/vnd.sun.xml.writer sxw
+application/vnd.sun.xml.writer.global sxg
+application/vnd.sun.xml.writer.template stw
+# application/vnd.sun.wadl+xml
+application/vnd.sus-calendar sus susp
+application/vnd.svd svd
+# application/vnd.swiftview-ics
+application/vnd.symbian.install sis sisx
+application/vnd.syncml+xml xsm
+application/vnd.syncml.dm+wbxml bdm
+application/vnd.syncml.dm+xml xdm
+# application/vnd.syncml.dm.notification
+# application/vnd.syncml.ds.notification
+application/vnd.tao.intent-module-archive tao
+application/vnd.tcpdump.pcap pcap cap dmp
+application/vnd.tmobile-livetv tmo
+application/vnd.trid.tpt tpt
+application/vnd.triscape.mxs mxs
+application/vnd.trueapp tra
+# application/vnd.truedoc
+# application/vnd.ubisoft.webplayer
+application/vnd.ufdl ufd ufdl
+application/vnd.uiq.theme utz
+application/vnd.umajin umj
+application/vnd.unity unityweb
+application/vnd.uoml+xml uoml
+# application/vnd.uplanet.alert
+# application/vnd.uplanet.alert-wbxml
+# application/vnd.uplanet.bearer-choice
+# application/vnd.uplanet.bearer-choice-wbxml
+# application/vnd.uplanet.cacheop
+# application/vnd.uplanet.cacheop-wbxml
+# application/vnd.uplanet.channel
+# application/vnd.uplanet.channel-wbxml
+# application/vnd.uplanet.list
+# application/vnd.uplanet.list-wbxml
+# application/vnd.uplanet.listcmd
+# application/vnd.uplanet.listcmd-wbxml
+# application/vnd.uplanet.signal
+application/vnd.vcx vcx
+# application/vnd.vd-study
+# application/vnd.vectorworks
+# application/vnd.verimatrix.vcas
+# application/vnd.vidsoft.vidconference
+application/vnd.visio vsd vst vss vsw
+application/vnd.visionary vis
+# application/vnd.vividence.scriptfile
+application/vnd.vsf vsf
+# application/vnd.wap.sic
+# application/vnd.wap.slc
+application/vnd.wap.wbxml wbxml
+application/vnd.wap.wmlc wmlc
+application/vnd.wap.wmlscriptc wmlsc
+application/vnd.webturbo wtb
+# application/vnd.wfa.wsc
+# application/vnd.wmc
+# application/vnd.wmf.bootstrap
+# application/vnd.wolfram.mathematica
+# application/vnd.wolfram.mathematica.package
+application/vnd.wolfram.player nbp
+application/vnd.wordperfect wpd
+application/vnd.wqd wqd
+# application/vnd.wrq-hp3000-labelled
+application/vnd.wt.stf stf
+# application/vnd.wv.csp+wbxml
+# application/vnd.wv.csp+xml
+# application/vnd.wv.ssp+xml
+application/vnd.xara xar
+application/vnd.xfdl xfdl
+# application/vnd.xfdl.webform
+# application/vnd.xmi+xml
+# application/vnd.xmpie.cpkg
+# application/vnd.xmpie.dpkg
+# application/vnd.xmpie.plan
+# application/vnd.xmpie.ppkg
+# application/vnd.xmpie.xlim
+application/vnd.yamaha.hv-dic hvd
+application/vnd.yamaha.hv-script hvs
+application/vnd.yamaha.hv-voice hvp
+application/vnd.yamaha.openscoreformat osf
+application/vnd.yamaha.openscoreformat.osfpvg+xml osfpvg
+# application/vnd.yamaha.remote-setup
+application/vnd.yamaha.smaf-audio saf
+application/vnd.yamaha.smaf-phrase spf
+# application/vnd.yamaha.through-ngn
+# application/vnd.yamaha.tunnel-udpencap
+application/vnd.yellowriver-custom-menu cmp
+application/vnd.zul zir zirz
+application/vnd.zzazz.deck+xml zaz
+application/voicexml+xml vxml
+# application/vq-rtcpxr
+# application/watcherinfo+xml
+# application/whoispp-query
+# application/whoispp-response
+application/widget wgt
+application/winhlp hlp
+# application/wita
+# application/wordperfect5.1
+application/wsdl+xml wsdl
+application/wspolicy+xml wspolicy
+application/x-7z-compressed 7z
+application/x-abiword abw
+application/x-ace-compressed ace
+# application/x-amf
+application/x-apple-diskimage dmg
+application/x-authorware-bin aab x32 u32 vox
+application/x-authorware-map aam
+application/x-authorware-seg aas
+application/x-bcpio bcpio
+application/x-bittorrent torrent
+application/x-blorb blb blorb
+application/x-bzip bz
+application/x-bzip2 bz2 boz
+application/x-cbr cbr cba cbt cbz cb7
+application/x-cdlink vcd
+application/x-cfs-compressed cfs
+application/x-chat chat
+application/x-chess-pgn pgn
+application/x-conference nsc
+# application/x-compress
+application/x-cpio cpio
+application/x-csh csh
+application/x-debian-package deb udeb
+application/x-dgc-compressed dgc
+application/x-director dir dcr dxr cst cct cxt w3d fgd swa
+application/x-doom wad
+application/x-dtbncx+xml ncx
+application/x-dtbook+xml dtb
+application/x-dtbresource+xml res
+application/x-dvi dvi
+application/x-envoy evy
+application/x-eva eva
+application/x-font-bdf bdf
+# application/x-font-dos
+# application/x-font-framemaker
+application/x-font-ghostscript gsf
+# application/x-font-libgrx
+application/x-font-linux-psf psf
+application/x-font-otf otf
+application/x-font-pcf pcf
+application/x-font-snf snf
+# application/x-font-speedo
+# application/x-font-sunos-news
+application/x-font-ttf ttf ttc
+application/x-font-type1 pfa pfb pfm afm
+application/x-font-woff woff
+# application/x-font-vfont
+application/x-freearc arc
+application/x-futuresplash spl
+application/x-gca-compressed gca
+application/x-glulx ulx
+application/x-gnumeric gnumeric
+application/x-gramps-xml gramps
+application/x-gtar gtar
+# application/x-gzip
+application/x-hdf hdf
+application/x-install-instructions install
+application/x-iso9660-image iso
+application/x-java-jnlp-file jnlp
+application/x-latex latex
+application/x-lzh-compressed lzh lha
+application/x-mie mie
+application/x-mobipocket-ebook prc mobi
+application/x-ms-application application
+application/x-ms-shortcut lnk
+application/x-ms-wmd wmd
+application/x-ms-wmz wmz
+application/x-ms-xbap xbap
+application/x-msaccess mdb
+application/x-msbinder obd
+application/x-mscardfile crd
+application/x-msclip clp
+application/x-msdownload exe dll com bat msi
+application/x-msmediaview mvb m13 m14
+application/x-msmetafile wmf wmz emf emz
+application/x-msmoney mny
+application/x-mspublisher pub
+application/x-msschedule scd
+application/x-msterminal trm
+application/x-mswrite wri
+application/x-netcdf nc cdf
+application/x-nzb nzb
+application/x-pkcs12 p12 pfx
+application/x-pkcs7-certificates p7b spc
+application/x-pkcs7-certreqresp p7r
+application/x-rar-compressed rar
+application/x-research-info-systems ris
+application/x-sh sh
+application/x-shar shar
+application/x-shockwave-flash swf
+application/x-silverlight-app xap
+application/x-sql sql
+application/x-stuffit sit
+application/x-stuffitx sitx
+application/x-subrip srt
+application/x-sv4cpio sv4cpio
+application/x-sv4crc sv4crc
+application/x-t3vm-image t3
+application/x-tads gam
+application/x-tar tar
+application/x-tcl tcl
+application/x-tex tex
+application/x-tex-tfm tfm
+application/x-texinfo texinfo texi
+application/x-tgif obj
+application/x-ustar ustar
+application/x-wais-source src
+application/x-x509-ca-cert der crt
+application/x-xfig fig
+application/x-xliff+xml xlf
+application/x-xpinstall xpi
+application/x-xz xz
+application/x-zmachine z1 z2 z3 z4 z5 z6 z7 z8
+# application/x400-bp
+application/xaml+xml xaml
+# application/xcap-att+xml
+# application/xcap-caps+xml
+application/xcap-diff+xml xdf
+# application/xcap-el+xml
+# application/xcap-error+xml
+# application/xcap-ns+xml
+# application/xcon-conference-info-diff+xml
+# application/xcon-conference-info+xml
+application/xenc+xml xenc
+application/xhtml+xml xhtml xht
+# application/xhtml-voice+xml
+application/xml xml xsl
+application/xml-dtd dtd
+# application/xml-external-parsed-entity
+# application/xmpp+xml
+application/xop+xml xop
+application/xproc+xml xpl
+application/xslt+xml xslt
+application/xspf+xml xspf
+application/xv+xml mxml xhvml xvml xvm
+application/yang yang
+application/yin+xml yin
+application/zip zip
+# audio/1d-interleaved-parityfec
+# audio/32kadpcm
+# audio/3gpp
+# audio/3gpp2
+# audio/ac3
+audio/adpcm adp
+# audio/amr
+# audio/amr-wb
+# audio/amr-wb+
+# audio/asc
+# audio/atrac-advanced-lossless
+# audio/atrac-x
+# audio/atrac3
+audio/basic au snd
+# audio/bv16
+# audio/bv32
+# audio/clearmode
+# audio/cn
+# audio/dat12
+# audio/dls
+# audio/dsr-es201108
+# audio/dsr-es202050
+# audio/dsr-es202211
+# audio/dsr-es202212
+# audio/dv
+# audio/dvi4
+# audio/eac3
+# audio/evrc
+# audio/evrc-qcp
+# audio/evrc0
+# audio/evrc1
+# audio/evrcb
+# audio/evrcb0
+# audio/evrcb1
+# audio/evrcwb
+# audio/evrcwb0
+# audio/evrcwb1
+# audio/example
+# audio/fwdred
+# audio/g719
+# audio/g722
+# audio/g7221
+# audio/g723
+# audio/g726-16
+# audio/g726-24
+# audio/g726-32
+# audio/g726-40
+# audio/g728
+# audio/g729
+# audio/g7291
+# audio/g729d
+# audio/g729e
+# audio/gsm
+# audio/gsm-efr
+# audio/gsm-hr-08
+# audio/ilbc
+# audio/ip-mr_v2.5
+# audio/isac
+# audio/l16
+# audio/l20
+# audio/l24
+# audio/l8
+# audio/lpc
+audio/midi mid midi kar rmi
+# audio/mobile-xmf
+audio/mp4 mp4a
+# audio/mp4a-latm
+# audio/mpa
+# audio/mpa-robust
+audio/mpeg mpga mp2 mp2a mp3 m2a m3a
+# audio/mpeg4-generic
+# audio/musepack
+audio/ogg oga ogg spx
+# audio/opus
+# audio/parityfec
+# audio/pcma
+# audio/pcma-wb
+# audio/pcmu-wb
+# audio/pcmu
+# audio/prs.sid
+# audio/qcelp
+# audio/red
+# audio/rtp-enc-aescm128
+# audio/rtp-midi
+# audio/rtx
+audio/s3m s3m
+audio/silk sil
+# audio/smv
+# audio/smv0
+# audio/smv-qcp
+# audio/sp-midi
+# audio/speex
+# audio/t140c
+# audio/t38
+# audio/telephone-event
+# audio/tone
+# audio/uemclip
+# audio/ulpfec
+# audio/vdvi
+# audio/vmr-wb
+# audio/vnd.3gpp.iufp
+# audio/vnd.4sb
+# audio/vnd.audiokoz
+# audio/vnd.celp
+# audio/vnd.cisco.nse
+# audio/vnd.cmles.radio-events
+# audio/vnd.cns.anp1
+# audio/vnd.cns.inf1
+audio/vnd.dece.audio uva uvva
+audio/vnd.digital-winds eol
+# audio/vnd.dlna.adts
+# audio/vnd.dolby.heaac.1
+# audio/vnd.dolby.heaac.2
+# audio/vnd.dolby.mlp
+# audio/vnd.dolby.mps
+# audio/vnd.dolby.pl2
+# audio/vnd.dolby.pl2x
+# audio/vnd.dolby.pl2z
+# audio/vnd.dolby.pulse.1
+audio/vnd.dra dra
+audio/vnd.dts dts
+audio/vnd.dts.hd dtshd
+# audio/vnd.dvb.file
+# audio/vnd.everad.plj
+# audio/vnd.hns.audio
+audio/vnd.lucent.voice lvp
+audio/vnd.ms-playready.media.pya pya
+# audio/vnd.nokia.mobile-xmf
+# audio/vnd.nortel.vbk
+audio/vnd.nuera.ecelp4800 ecelp4800
+audio/vnd.nuera.ecelp7470 ecelp7470
+audio/vnd.nuera.ecelp9600 ecelp9600
+# audio/vnd.octel.sbc
+# audio/vnd.qcelp
+# audio/vnd.rhetorex.32kadpcm
+audio/vnd.rip rip
+# audio/vnd.sealedmedia.softseal.mpeg
+# audio/vnd.vmx.cvsd
+# audio/vorbis
+# audio/vorbis-config
+audio/webm weba
+audio/x-aac aac
+audio/x-aiff aif aiff aifc
+audio/x-caf caf
+audio/x-flac flac
+audio/x-matroska mka
+audio/x-mpegurl m3u
+audio/x-ms-wax wax
+audio/x-ms-wma wma
+audio/x-pn-realaudio ram ra
+audio/x-pn-realaudio-plugin rmp
+# audio/x-tta
+audio/x-wav wav
+audio/xm xm
+chemical/x-cdx cdx
+chemical/x-cif cif
+chemical/x-cmdf cmdf
+chemical/x-cml cml
+chemical/x-csml csml
+# chemical/x-pdb
+chemical/x-xyz xyz
+image/bmp bmp
+image/cgm cgm
+# image/example
+# image/fits
+image/g3fax g3
+image/gif gif
+image/ief ief
+# image/jp2
+image/jpeg jpeg jpg jpe
+# image/jpm
+# image/jpx
+image/ktx ktx
+# image/naplps
+image/png png
+image/prs.btif btif
+# image/prs.pti
+image/sgi sgi
+image/svg+xml svg svgz
+# image/t38
+image/tiff tiff tif
+# image/tiff-fx
+image/vnd.adobe.photoshop psd
+# image/vnd.cns.inf2
+image/vnd.dece.graphic uvi uvvi uvg uvvg
+image/vnd.dvb.subtitle sub
+image/vnd.djvu djvu djv
+image/vnd.dwg dwg
+image/vnd.dxf dxf
+image/vnd.fastbidsheet fbs
+image/vnd.fpx fpx
+image/vnd.fst fst
+image/vnd.fujixerox.edmics-mmr mmr
+image/vnd.fujixerox.edmics-rlc rlc
+# image/vnd.globalgraphics.pgb
+# image/vnd.microsoft.icon
+# image/vnd.mix
+image/vnd.ms-modi mdi
+image/vnd.ms-photo wdp
+image/vnd.net-fpx npx
+# image/vnd.radiance
+# image/vnd.sealed.png
+# image/vnd.sealedmedia.softseal.gif
+# image/vnd.sealedmedia.softseal.jpg
+# image/vnd.svf
+image/vnd.wap.wbmp wbmp
+image/vnd.xiff xif
+image/webp webp
+image/x-3ds 3ds
+image/x-cmu-raster ras
+image/x-cmx cmx
+image/x-freehand fh fhc fh4 fh5 fh7
+image/x-icon ico
+image/x-mrsid-image sid
+image/x-pcx pcx
+image/x-pict pic pct
+image/x-portable-anymap pnm
+image/x-portable-bitmap pbm
+image/x-portable-graymap pgm
+image/x-portable-pixmap ppm
+image/x-rgb rgb
+image/x-tga tga
+image/x-xbitmap xbm
+image/x-xpixmap xpm
+image/x-xwindowdump xwd
+# message/cpim
+# message/delivery-status
+# message/disposition-notification
+# message/example
+# message/external-body
+# message/feedback-report
+# message/global
+# message/global-delivery-status
+# message/global-disposition-notification
+# message/global-headers
+# message/http
+# message/imdn+xml
+# message/news
+# message/partial
+message/rfc822 eml mime
+# message/s-http
+# message/sip
+# message/sipfrag
+# message/tracking-status
+# message/vnd.si.simp
+# model/example
+model/iges igs iges
+model/mesh msh mesh silo
+model/vnd.collada+xml dae
+model/vnd.dwf dwf
+# model/vnd.flatland.3dml
+model/vnd.gdl gdl
+# model/vnd.gs-gdl
+# model/vnd.gs.gdl
+model/vnd.gtw gtw
+# model/vnd.moml+xml
+model/vnd.mts mts
+# model/vnd.parasolid.transmit.binary
+# model/vnd.parasolid.transmit.text
+model/vnd.vtu vtu
+model/vrml wrl vrml
+model/x3d+binary x3db x3dbz
+model/x3d+vrml x3dv x3dvz
+model/x3d+xml x3d x3dz
+# multipart/alternative
+# multipart/appledouble
+# multipart/byteranges
+# multipart/digest
+# multipart/encrypted
+# multipart/example
+# multipart/form-data
+# multipart/header-set
+# multipart/mixed
+# multipart/parallel
+# multipart/related
+# multipart/report
+# multipart/signed
+# multipart/voice-message
+# text/1d-interleaved-parityfec
+text/cache-manifest appcache
+text/calendar ics ifb
+text/css css
+text/csv csv
+# text/directory
+# text/dns
+# text/ecmascript
+# text/enriched
+# text/example
+# text/fwdred
+text/html html htm
+# text/javascript
+text/n3 n3
+# text/parityfec
+text/plain txt text conf def list log in
+# text/prs.fallenstein.rst
+text/prs.lines.tag dsc
+# text/vnd.radisys.msml-basic-layout
+# text/red
+# text/rfc822-headers
+text/richtext rtx
+# text/rtf
+# text/rtp-enc-aescm128
+# text/rtx
+text/sgml sgml sgm
+# text/t140
+text/tab-separated-values tsv
+text/troff t tr roff man me ms
+text/turtle ttl
+# text/ulpfec
+text/uri-list uri uris urls
+text/vcard vcard
+# text/vnd.abc
+text/vnd.curl curl
+text/vnd.curl.dcurl dcurl
+text/vnd.curl.scurl scurl
+text/vnd.curl.mcurl mcurl
+# text/vnd.dmclientscript
+text/vnd.dvb.subtitle sub
+# text/vnd.esmertec.theme-descriptor
+text/vnd.fly fly
+text/vnd.fmi.flexstor flx
+text/vnd.graphviz gv
+text/vnd.in3d.3dml 3dml
+text/vnd.in3d.spot spot
+# text/vnd.iptc.newsml
+# text/vnd.iptc.nitf
+# text/vnd.latex-z
+# text/vnd.motorola.reflex
+# text/vnd.ms-mediapackage
+# text/vnd.net2phone.commcenter.command
+# text/vnd.si.uricatalogue
+text/vnd.sun.j2me.app-descriptor jad
+# text/vnd.trolltech.linguist
+# text/vnd.wap.si
+# text/vnd.wap.sl
+text/vnd.wap.wml wml
+text/vnd.wap.wmlscript wmls
+text/x-asm s asm
+text/x-c c cc cxx cpp h hh dic
+text/x-fortran f for f77 f90
+text/x-java-source java
+text/x-opml opml
+text/x-pascal p pas
+text/x-nfo nfo
+text/x-setext etx
+text/x-sfv sfv
+text/x-uuencode uu
+text/x-vcalendar vcs
+text/x-vcard vcf
+# text/xml
+# text/xml-external-parsed-entity
+# video/1d-interleaved-parityfec
+video/3gpp 3gp
+# video/3gpp-tt
+video/3gpp2 3g2
+# video/bmpeg
+# video/bt656
+# video/celb
+# video/dv
+# video/example
+video/h261 h261
+video/h263 h263
+# video/h263-1998
+# video/h263-2000
+video/h264 h264
+# video/h264-rcdo
+# video/h264-svc
+video/jpeg jpgv
+# video/jpeg2000
+video/jpm jpm jpgm
+video/mj2 mj2 mjp2
+# video/mp1s
+# video/mp2p
+# video/mp2t
+video/mp4 mp4 mp4v mpg4
+# video/mp4v-es
+video/mpeg mpeg mpg mpe m1v m2v
+# video/mpeg4-generic
+# video/mpv
+# video/nv
+video/ogg ogv
+# video/parityfec
+# video/pointer
+video/quicktime qt mov
+# video/raw
+# video/rtp-enc-aescm128
+# video/rtx
+# video/smpte292m
+# video/ulpfec
+# video/vc1
+# video/vnd.cctv
+video/vnd.dece.hd uvh uvvh
+video/vnd.dece.mobile uvm uvvm
+# video/vnd.dece.mp4
+video/vnd.dece.pd uvp uvvp
+video/vnd.dece.sd uvs uvvs
+video/vnd.dece.video uvv uvvv
+# video/vnd.directv.mpeg
+# video/vnd.directv.mpeg-tts
+# video/vnd.dlna.mpeg-tts
+video/vnd.dvb.file dvb
+video/vnd.fvt fvt
+# video/vnd.hns.video
+# video/vnd.iptvforum.1dparityfec-1010
+# video/vnd.iptvforum.1dparityfec-2005
+# video/vnd.iptvforum.2dparityfec-1010
+# video/vnd.iptvforum.2dparityfec-2005
+# video/vnd.iptvforum.ttsavc
+# video/vnd.iptvforum.ttsmpeg2
+# video/vnd.motorola.video
+# video/vnd.motorola.videop
+video/vnd.mpegurl mxu m4u
+video/vnd.ms-playready.media.pyv pyv
+# video/vnd.nokia.interleaved-multimedia
+# video/vnd.nokia.videovoip
+# video/vnd.objectvideo
+# video/vnd.sealed.mpeg1
+# video/vnd.sealed.mpeg4
+# video/vnd.sealed.swf
+# video/vnd.sealedmedia.softseal.mov
+video/vnd.uvvu.mp4 uvu uvvu
+video/vnd.vivo viv
+video/webm webm
+video/x-f4v f4v
+video/x-fli fli
+video/x-flv flv
+video/x-m4v m4v
+video/x-matroska mkv mk3d mks
+video/x-mng mng
+video/x-ms-asf asf asx
+video/x-ms-vob vob
+video/x-ms-wm wm
+video/x-ms-wmv wmv
+video/x-ms-wmx wmx
+video/x-ms-wvx wvx
+video/x-msvideo avi
+video/x-sgi-movie movie
+video/x-smv smv
+x-conference/x-cooltalk ice
diff --git a/test/pyhttpd/conf/stop.conf.template b/test/pyhttpd/conf/stop.conf.template
new file mode 100644
index 0000000..21bae84
--- /dev/null
+++ b/test/pyhttpd/conf/stop.conf.template
@@ -0,0 +1,46 @@
+# a config safe to use for stopping the server
+# this allows us to stop the server even when
+# the config in the file is borked (as test cases may try to do that)
+#
+ServerName localhost
+ServerRoot "${server_dir}"
+
+Include "conf/modules.conf"
+
+DocumentRoot "${server_dir}/htdocs"
+
+<IfModule log_config_module>
+ LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\" %k" combined
+ LogFormat "%h %l %u %t \"%r\" %>s %b" common
+ CustomLog "logs/access_log" combined
+
+</IfModule>
+
+TypesConfig "${gen_dir}/apache/conf/mime.types"
+
+Listen ${http_port}
+Listen ${https_port}
+
+<IfModule mod_ssl.c>
+ # provide some default
+ SSLSessionCache "shmcb:ssl_gcache_data(32000)"
+</IfModule>
+
+<VirtualHost *:${http_port}>
+ ServerName ${http_tld}
+ ServerAlias www.${http_tld}
+ <IfModule ssl_module>
+ SSLEngine off
+ </IfModule>
+ DocumentRoot "${server_dir}/htdocs"
+</VirtualHost>
+
+<Directory "${server_dir}/htdocs/cgi">
+ Options Indexes FollowSymLinks
+ AllowOverride None
+ Require all granted
+
+ AddHandler cgi-script .py
+ AddHandler cgi-script .cgi
+ Options +ExecCGI
+</Directory>
diff --git a/test/pyhttpd/conf/test.conf b/test/pyhttpd/conf/test.conf
new file mode 100644
index 0000000..7534af6
--- /dev/null
+++ b/test/pyhttpd/conf/test.conf
@@ -0,0 +1 @@
+# empty placeholder for test specific configurations
diff --git a/test/pyhttpd/config.ini.in b/test/pyhttpd/config.ini.in
new file mode 100644
index 0000000..3f42248
--- /dev/null
+++ b/test/pyhttpd/config.ini.in
@@ -0,0 +1,32 @@
+[global]
+curl_bin = curl
+nghttp = nghttp
+h2load = h2load
+
+prefix = @prefix@
+exec_prefix = @exec_prefix@
+bindir = @bindir@
+sbindir = @sbindir@
+libdir = @libdir@
+libexecdir = @libexecdir@
+
+apr_bindir = @APR_BINDIR@
+apxs = @bindir@/apxs
+apachectl = @sbindir@/apachectl
+
+[httpd]
+version = @HTTPD_VERSION@
+name = @progname@
+dso_modules = @DSO_MODULES@
+mpm_modules = @MPM_MODULES@
+
+[test]
+gen_dir = @abs_srcdir@/../gen
+http_port = 5002
+https_port = 5001
+proxy_port = 5003
+http_port2 = 5004
+ws_port = 5100
+http_tld = tests.httpd.apache.org
+test_dir = @abs_srcdir@
+test_src_dir = @abs_srcdir@
diff --git a/test/pyhttpd/curl.py b/test/pyhttpd/curl.py
new file mode 100644
index 0000000..5a215cd
--- /dev/null
+++ b/test/pyhttpd/curl.py
@@ -0,0 +1,138 @@
+import datetime
+import re
+import subprocess
+import sys
+import time
+from threading import Thread
+
+from .env import HttpdTestEnv
+
+
+class CurlPiper:
+
+ def __init__(self, env: HttpdTestEnv, url: str):
+ self.env = env
+ self.url = url
+ self.proc = None
+ self.args = None
+ self.headerfile = None
+ self._stderr = []
+ self._stdout = []
+ self.stdout_thread = None
+ self.stderr_thread = None
+ self._exitcode = -1
+ self._r = None
+
+ @property
+ def exitcode(self):
+ return self._exitcode
+
+ @property
+ def response(self):
+ return self._r.response if self._r else None
+
+ def __repr__(self):
+ return f'CurlPiper[exitcode={self._exitcode}, stderr={self._stderr}, stdout={self._stdout}]'
+
+ def start(self):
+ self.args, self.headerfile = self.env.curl_complete_args([self.url], timeout=5, options=[
+ "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"
+ ])
+ self.args.append(self.url)
+ sys.stderr.write("starting: {0}\n".format(self.args))
+ self.proc = subprocess.Popen(self.args, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0)
+
+ def read_output(fh, buffer):
+ while True:
+ chunk = fh.read()
+ if not chunk:
+ break
+ buffer.append(chunk.decode())
+
+ # collect all stdout and stderr until we are done
+        # use separate threads so we do not block ourselves
+ self._stderr = []
+ self._stdout = []
+ if self.proc.stderr:
+ self.stderr_thread = Thread(target=read_output, args=(self.proc.stderr, self._stderr))
+ self.stderr_thread.start()
+ if self.proc.stdout:
+ self.stdout_thread = Thread(target=read_output, args=(self.proc.stdout, self._stdout))
+ self.stdout_thread.start()
+ return self.proc
+
+ def send(self, data: str):
+ self.proc.stdin.write(data.encode())
+ self.proc.stdin.flush()
+
+ def close(self) -> ([str], [str]):
+ self.proc.stdin.close()
+ self.stdout_thread.join()
+ self.stderr_thread.join()
+ self._end()
+ return self._stdout, self._stderr
+
+ def _end(self):
+ if self.proc:
+ # noinspection PyBroadException
+ try:
+ if self.proc.stdin:
+ # noinspection PyBroadException
+ try:
+ self.proc.stdin.close()
+ except Exception:
+ pass
+ if self.proc.stdout:
+ self.proc.stdout.close()
+ if self.proc.stderr:
+ self.proc.stderr.close()
+ except Exception:
+ self.proc.terminate()
+ finally:
+ self.proc.wait()
+ self.stdout_thread = None
+ self.stderr_thread = None
+ self._exitcode = self.proc.returncode
+ self.proc = None
+ self._r = self.env.curl_parse_headerfile(self.headerfile)
+
+ def stutter_check(self, chunks: [str], stutter: datetime.timedelta):
+ if not self.proc:
+ self.start()
+ for chunk in chunks:
+ self.send(chunk)
+ time.sleep(stutter.total_seconds())
+ recv_out, recv_err = self.close()
+ # assert we got everything back
+ assert "".join(chunks) == "".join(recv_out)
+ # now the tricky part: check *when* we got everything back
+ recv_times = []
+ for line in "".join(recv_err).split('\n'):
+ m = re.match(r'^\s*(\d+:\d+:\d+(\.\d+)?) <= Recv data, (\d+) bytes.*', line)
+ if m:
+ recv_times.append(datetime.time.fromisoformat(m.group(1)))
+ # received as many chunks as we sent
+ assert len(chunks) == len(recv_times), "received response not in {0} chunks, but {1}".format(
+ len(chunks), len(recv_times))
+
+ def microsecs(tdelta):
+ return ((tdelta.hour * 60 + tdelta.minute) * 60 + tdelta.second) * 1000000 + tdelta.microsecond
+
+ recv_deltas = []
+ last_mics = microsecs(recv_times[0])
+ for ts in recv_times[1:]:
+ mics = microsecs(ts)
+ delta_mics = mics - last_mics
+ if delta_mics < 0:
+                delta_mics += 24 * 60 * 60 * 1000000  # one day in microseconds, to wrap past midnight
+ recv_deltas.append(datetime.timedelta(microseconds=delta_mics))
+ last_mics = mics
+ stutter_td = datetime.timedelta(seconds=stutter.total_seconds() * 0.75) # 25% leeway
+ # TODO: the first two chunks are often close together, it seems
+ # there still is a little buffering delay going on
+ for idx, td in enumerate(recv_deltas[1:]):
+ assert stutter_td < td, \
+ f"chunk {idx} arrived too early \n{recv_deltas}\nafter {td}\n{recv_err}"
diff --git a/test/pyhttpd/env.py b/test/pyhttpd/env.py
new file mode 100644
index 0000000..d79ff6f
--- /dev/null
+++ b/test/pyhttpd/env.py
@@ -0,0 +1,893 @@
+import importlib
+import inspect
+import logging
+import re
+import os
+import shutil
+import stat
+import subprocess
+import sys
+import time
+from datetime import datetime, timedelta
+from string import Template
+from typing import List, Optional
+
+from configparser import ConfigParser, ExtendedInterpolation
+from urllib.parse import urlparse
+
+from .certs import Credentials, HttpdTestCA, CertificateSpec
+from .log import HttpdErrorLog
+from .nghttp import Nghttp
+from .result import ExecResult
+
+
+log = logging.getLogger(__name__)
+
+
+class Dummy:
+ pass
+
+
+class HttpdTestSetup:
+
+ # the modules we want to load
+ MODULES = [
+ "log_config",
+ "logio",
+ "unixd",
+ "version",
+ "watchdog",
+ "authn_core",
+ "authz_host",
+ "authz_groupfile",
+ "authz_user",
+ "authz_core",
+ "access_compat",
+ "auth_basic",
+ "cache",
+ "cache_disk",
+ "cache_socache",
+ "socache_shmcb",
+ "dumpio",
+ "reqtimeout",
+ "filter",
+ "mime",
+ "env",
+ "headers",
+ "setenvif",
+ "slotmem_shm",
+ "status",
+ "dir",
+ "alias",
+ "rewrite",
+ "deflate",
+ "proxy",
+ "proxy_http",
+ ]
+
+ CURL_STDOUT_SEPARATOR = "===CURL_STDOUT_SEPARATOR==="
+
+ def __init__(self, env: 'HttpdTestEnv'):
+ self.env = env
+ self._source_dirs = [os.path.dirname(inspect.getfile(HttpdTestSetup))]
+ self._modules = HttpdTestSetup.MODULES.copy()
+ self._optional_modules = []
+
+ def add_source_dir(self, source_dir):
+ self._source_dirs.append(source_dir)
+
+ def add_modules(self, modules: List[str]):
+ self._modules.extend(modules)
+
+ def add_optional_modules(self, modules: List[str]):
+ self._optional_modules.extend(modules)
+
+ def make(self):
+ self._make_dirs()
+ self._make_conf()
+ if self.env.mpm_module is not None \
+ and self.env.mpm_module in self.env.mpm_modules:
+ self.add_modules([self.env.mpm_module])
+ if self.env.ssl_module is not None:
+ self.add_modules([self.env.ssl_module])
+ self._make_modules_conf()
+ self._make_htdocs()
+ self._add_aptest()
+ self.env.clear_curl_headerfiles()
+
+ def _make_dirs(self):
+ if not os.path.exists(self.env.gen_dir):
+ os.makedirs(self.env.gen_dir)
+ if not os.path.exists(self.env.server_logs_dir):
+ os.makedirs(self.env.server_logs_dir)
+
+ def _make_conf(self):
+ # remove anything from another run/test suite
+ conf_dest_dir = os.path.join(self.env.server_dir, 'conf')
+ if os.path.isdir(conf_dest_dir):
+ shutil.rmtree(conf_dest_dir)
+ for d in self._source_dirs:
+ conf_src_dir = os.path.join(d, 'conf')
+ if os.path.isdir(conf_src_dir):
+ if not os.path.exists(conf_dest_dir):
+ os.makedirs(conf_dest_dir)
+ for name in os.listdir(conf_src_dir):
+ src_path = os.path.join(conf_src_dir, name)
+ m = re.match(r'(.+).template', name)
+ if m:
+ self._make_template(src_path, os.path.join(conf_dest_dir, m.group(1)))
+ elif os.path.isfile(src_path):
+ shutil.copy(src_path, os.path.join(conf_dest_dir, name))
+
+ def _make_template(self, src, dest):
+ var_map = dict()
+ for name, value in HttpdTestEnv.__dict__.items():
+ if isinstance(value, property):
+ var_map[name] = value.fget(self.env)
+ t = Template(''.join(open(src).readlines()))
+ with open(dest, 'w') as fd:
+ fd.write(t.substitute(var_map))
+
+ def _make_modules_conf(self):
+ loaded = set()
+ modules_conf = os.path.join(self.env.server_dir, 'conf/modules.conf')
+ with open(modules_conf, 'w') as fd:
+ # issue load directives for all modules we want that are shared
+ missing_mods = list()
+ for m in self._modules:
+ match = re.match(r'^mod_(.+)$', m)
+ if match:
+ m = match.group(1)
+ if m in loaded:
+ continue
+ mod_path = os.path.join(self.env.libexec_dir, f"mod_{m}.so")
+ if os.path.isfile(mod_path):
+ fd.write(f"LoadModule {m}_module \"{mod_path}\"\n")
+ elif m in self.env.dso_modules:
+ missing_mods.append(m)
+ else:
+ fd.write(f"#built static: LoadModule {m}_module \"{mod_path}\"\n")
+ loaded.add(m)
+ for m in self._optional_modules:
+ match = re.match(r'^mod_(.+)$', m)
+ if match:
+ m = match.group(1)
+ if m in loaded:
+ continue
+ mod_path = os.path.join(self.env.libexec_dir, f"mod_{m}.so")
+ if os.path.isfile(mod_path):
+ fd.write(f"LoadModule {m}_module \"{mod_path}\"\n")
+ loaded.add(m)
+ if len(missing_mods) > 0:
+ raise Exception(f"Unable to find modules: {missing_mods} "
+ f"DSOs: {self.env.dso_modules}")
+
+ def _make_htdocs(self):
+ if not os.path.exists(self.env.server_docs_dir):
+ os.makedirs(self.env.server_docs_dir)
+ dest_dir = os.path.join(self.env.server_dir, 'htdocs')
+ # remove anything from another run/test suite
+ if os.path.isdir(dest_dir):
+ shutil.rmtree(dest_dir)
+ for d in self._source_dirs:
+ srcdocs = os.path.join(d, 'htdocs')
+ if os.path.isdir(srcdocs):
+ shutil.copytree(srcdocs, dest_dir, dirs_exist_ok=True)
+ # make all contained .py scripts executable
+ for dirpath, _dirnames, filenames in os.walk(dest_dir):
+ for fname in filenames:
+ if re.match(r'.+\.py', fname):
+ py_file = os.path.join(dirpath, fname)
+ st = os.stat(py_file)
+ os.chmod(py_file, st.st_mode | stat.S_IEXEC)
+
+ def _add_aptest(self):
+ local_dir = os.path.dirname(inspect.getfile(HttpdTestSetup))
+ p = subprocess.run([self.env.apxs, '-c', 'mod_aptest.c'],
+ capture_output=True,
+ cwd=os.path.join(local_dir, 'mod_aptest'))
+ rv = p.returncode
+ if rv != 0:
+ log.error(f"compiling mod_aptest failed: {p.stderr}")
+ raise Exception(f"compiling mod_aptest failed: {p.stderr}")
+
+ modules_conf = os.path.join(self.env.server_dir, 'conf/modules.conf')
+ with open(modules_conf, 'a') as fd:
+ # load our test module which is not installed
+ fd.write(f"LoadModule aptest_module \"{local_dir}/mod_aptest/.libs/mod_aptest.so\"\n")
+
+
+class HttpdTestEnv:
+
+ LIBEXEC_DIR = None
+
+ @classmethod
+ def has_python_package(cls, name: str) -> bool:
+ if name in sys.modules:
+ # already loaded
+ return True
+ elif (spec := importlib.util.find_spec(name)) is not None:
+ module = importlib.util.module_from_spec(spec)
+ sys.modules[name] = module
+ spec.loader.exec_module(module)
+ return True
+ else:
+ return False
+
+ @classmethod
+ def get_ssl_module(cls):
+ return os.environ['SSL'] if 'SSL' in os.environ else 'mod_ssl'
+
+ @classmethod
+ def has_shared_module(cls, name):
+ if cls.LIBEXEC_DIR is None:
+            env = HttpdTestEnv()  # will initialize it
+ path = os.path.join(cls.LIBEXEC_DIR, f"mod_{name}.so")
+ return os.path.isfile(path)
+
+ def __init__(self, pytestconfig=None):
+ self._our_dir = os.path.dirname(inspect.getfile(Dummy))
+ self.config = ConfigParser(interpolation=ExtendedInterpolation())
+ self.config.read(os.path.join(self._our_dir, 'config.ini'))
+
+ self._bin_dir = self.config.get('global', 'bindir')
+ self._apxs = self.config.get('global', 'apxs')
+ self._prefix = self.config.get('global', 'prefix')
+ self._apachectl = self.config.get('global', 'apachectl')
+ if HttpdTestEnv.LIBEXEC_DIR is None:
+ HttpdTestEnv.LIBEXEC_DIR = self._libexec_dir = self.get_apxs_var('LIBEXECDIR')
+ self._curl = self.config.get('global', 'curl_bin')
+ if 'CURL' in os.environ:
+ self._curl = os.environ['CURL']
+ self._nghttp = self.config.get('global', 'nghttp')
+ if self._nghttp is None:
+ self._nghttp = 'nghttp'
+ self._h2load = self.config.get('global', 'h2load')
+ if self._h2load is None:
+ self._h2load = 'h2load'
+
+ self._http_port = int(self.config.get('test', 'http_port'))
+ self._http_port2 = int(self.config.get('test', 'http_port2'))
+ self._https_port = int(self.config.get('test', 'https_port'))
+ self._proxy_port = int(self.config.get('test', 'proxy_port'))
+ self._ws_port = int(self.config.get('test', 'ws_port'))
+ self._http_tld = self.config.get('test', 'http_tld')
+ self._test_dir = self.config.get('test', 'test_dir')
+ self._clients_dir = os.path.join(os.path.dirname(self._test_dir), 'clients')
+ self._gen_dir = self.config.get('test', 'gen_dir')
+ self._server_dir = os.path.join(self._gen_dir, 'apache')
+ self._server_conf_dir = os.path.join(self._server_dir, "conf")
+ self._server_docs_dir = os.path.join(self._server_dir, "htdocs")
+ self._server_logs_dir = os.path.join(self.server_dir, "logs")
+ self._server_access_log = os.path.join(self._server_logs_dir, "access_log")
+ self._error_log = HttpdErrorLog(os.path.join(self._server_logs_dir, "error_log"))
+ self._apachectl_stderr = None
+
+ self._dso_modules = self.config.get('httpd', 'dso_modules').split(' ')
+ self._mpm_modules = self.config.get('httpd', 'mpm_modules').split(' ')
+ self._mpm_module = f"mpm_{os.environ['MPM']}" if 'MPM' in os.environ else 'mpm_event'
+ self._ssl_module = self.get_ssl_module()
+ if len(self._ssl_module.strip()) == 0:
+ self._ssl_module = None
+
+ self._httpd_addr = "127.0.0.1"
+ self._http_base = f"http://{self._httpd_addr}:{self.http_port}"
+ self._https_base = f"https://{self._httpd_addr}:{self.https_port}"
+
+ self._verbosity = pytestconfig.option.verbose if pytestconfig is not None else 0
+ self._test_conf = os.path.join(self._server_conf_dir, "test.conf")
+ self._httpd_base_conf = []
+ self._httpd_log_modules = ['aptest']
+ self._log_interesting = None
+ self._setup = None
+
+ self._ca = None
+ self._cert_specs = [CertificateSpec(domains=[
+ f"test1.{self._http_tld}",
+ f"test2.{self._http_tld}",
+ f"test3.{self._http_tld}",
+ f"cgi.{self._http_tld}",
+ ], key_type='rsa4096')]
+
+ self._verify_certs = False
+ self._curl_headerfiles_n = 0
+ self._curl_version = None
+ self._h2load_version = None
+ self._current_test = None
+
+ def add_httpd_conf(self, lines: List[str]):
+ self._httpd_base_conf.extend(lines)
+
+ def add_httpd_log_modules(self, modules: List[str]):
+ self._httpd_log_modules.extend(modules)
+
+ def issue_certs(self):
+ if self._ca is None:
+ self._ca = HttpdTestCA.create_root(name=self.http_tld,
+ store_dir=os.path.join(self.server_dir, 'ca'),
+ key_type="rsa4096")
+ self._ca.issue_certs(self._cert_specs)
+
+ def setup_httpd(self, setup: HttpdTestSetup = None):
+ """Create the server environment with config, htdocs and certificates"""
+ self._setup = setup if setup is not None else HttpdTestSetup(env=self)
+ self._setup.make()
+ self.issue_certs()
+ if self._httpd_log_modules:
+ if self._verbosity >= 2:
+ log_level = "trace2"
+ elif self._verbosity >= 1:
+ log_level = "debug"
+ else:
+ log_level = "info"
+ self._log_interesting = "LogLevel"
+ for name in self._httpd_log_modules:
+ self._log_interesting += f" {name}:{log_level}"
+
+ @property
+ def curl(self) -> str:
+ return self._curl
+
+ @property
+ def apxs(self) -> str:
+ return self._apxs
+
+ @property
+ def verbosity(self) -> int:
+ return self._verbosity
+
+ @property
+ def prefix(self) -> str:
+ return self._prefix
+
+ @property
+ def mpm_module(self) -> str:
+ return self._mpm_module
+
+ @property
+ def ssl_module(self) -> str:
+ return self._ssl_module
+
+ @property
+ def http_addr(self) -> str:
+ return self._httpd_addr
+
+ @property
+ def http_port(self) -> int:
+ return self._http_port
+
+ @property
+ def http_port2(self) -> int:
+ return self._http_port2
+
+ @property
+ def https_port(self) -> int:
+ return self._https_port
+
+ @property
+ def proxy_port(self) -> int:
+ return self._proxy_port
+
+ @property
+ def ws_port(self) -> int:
+ return self._ws_port
+
+ @property
+ def http_tld(self) -> str:
+ return self._http_tld
+
+ @property
+ def http_base_url(self) -> str:
+ return self._http_base
+
+ @property
+ def https_base_url(self) -> str:
+ return self._https_base
+
+ @property
+ def bin_dir(self) -> str:
+ return self._bin_dir
+
+ @property
+ def gen_dir(self) -> str:
+ return self._gen_dir
+
+ @property
+ def test_dir(self) -> str:
+ return self._test_dir
+
+ @property
+ def clients_dir(self) -> str:
+ return self._clients_dir
+
+ @property
+ def server_dir(self) -> str:
+ return self._server_dir
+
+ @property
+ def server_logs_dir(self) -> str:
+ return self._server_logs_dir
+
+ @property
+ def libexec_dir(self) -> str:
+ return HttpdTestEnv.LIBEXEC_DIR
+
+ @property
+ def dso_modules(self) -> List[str]:
+ return self._dso_modules
+
+ @property
+ def mpm_modules(self) -> List[str]:
+ return self._mpm_modules
+
+ @property
+ def server_conf_dir(self) -> str:
+ return self._server_conf_dir
+
+ @property
+ def server_docs_dir(self) -> str:
+ return self._server_docs_dir
+
+ @property
+ def httpd_error_log(self) -> HttpdErrorLog:
+ return self._error_log
+
+ def htdocs_src(self, path):
+ return os.path.join(self._our_dir, 'htdocs', path)
+
+ @property
+ def h2load(self) -> str:
+ return self._h2load
+
+ @property
+ def ca(self) -> Credentials:
+ return self._ca
+
+ @property
+ def current_test_name(self) -> str:
+ return self._current_test
+
+ def set_current_test_name(self, val) -> None:
+ self._current_test = val
+
+ @property
+ def apachectl_stderr(self):
+ return self._apachectl_stderr
+
+ def add_cert_specs(self, specs: List[CertificateSpec]):
+ self._cert_specs.extend(specs)
+
+ def get_credentials_for_name(self, dns_name) -> List['Credentials']:
+ for spec in [s for s in self._cert_specs if s.domains is not None]:
+ if dns_name in spec.domains:
+ return self.ca.get_credentials_for_name(spec.domains[0])
+ return []
+
+ def _versiontuple(self, v):
+ v = re.sub(r'(\d+\.\d+(\.\d+)?)(-\S+)?', r'\1', v)
+ return tuple(map(int, v.split('.')))
+
+ def httpd_is_at_least(self, minv):
+ hv = self._versiontuple(self.get_httpd_version())
+ return hv >= self._versiontuple(minv)
+
+ def has_h2load(self):
+ return self._h2load != ""
+
+ def h2load_is_at_least(self, minv):
+ if not self.has_h2load():
+ return False
+ if self._h2load_version is None:
+ p = subprocess.run([self._h2load, '--version'], capture_output=True, text=True)
+ if p.returncode != 0:
+ return False
+ s = p.stdout.strip()
+ m = re.match(r'h2load nghttp2/(\S+)', s)
+ if m:
+ self._h2load_version = self._versiontuple(m.group(1))
+ if self._h2load_version is not None:
+ return self._h2load_version >= self._versiontuple(minv)
+ return False
+
+ def curl_is_at_least(self, minv):
+ if self._curl_version is None:
+ p = subprocess.run([self._curl, '-V'], capture_output=True, text=True)
+ if p.returncode != 0:
+ return False
+ for l in p.stdout.splitlines():
+ m = re.match(r'curl ([0-9.]+)[- ].*', l)
+ if m:
+ self._curl_version = self._versiontuple(m.group(1))
+ break
+ if self._curl_version is not None:
+ return self._curl_version >= self._versiontuple(minv)
+ return False
+
+ def curl_is_less_than(self, version):
+ if self._curl_version is None:
+ p = subprocess.run([self._curl, '-V'], capture_output=True, text=True)
+ if p.returncode != 0:
+ return False
+ for l in p.stdout.splitlines():
+ m = re.match(r'curl ([0-9.]+)[- ].*', l)
+ if m:
+ self._curl_version = self._versiontuple(m.group(1))
+ break
+ if self._curl_version is not None:
+ return self._curl_version < self._versiontuple(version)
+ return False
+
+ def has_nghttp(self):
+ return self._nghttp != ""
+
+ def has_nghttp_get_assets(self):
+ if not self.has_nghttp():
+ return False
+ args = [self._nghttp, "-a"]
+ p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ rv = p.returncode
+ if rv != 0:
+ return False
+        return p.stderr == b""
+
+ def get_apxs_var(self, name: str) -> str:
+ p = subprocess.run([self._apxs, "-q", name], capture_output=True, text=True)
+ if p.returncode != 0:
+ return ""
+ return p.stdout.strip()
+
+ def get_httpd_version(self) -> str:
+ return self.get_apxs_var("HTTPD_VERSION")
+
+ def mkpath(self, path):
+ if not os.path.exists(path):
+ return os.makedirs(path)
+
+ def run(self, args, stdout_list=False, intext=None, inbytes=None, debug_log=True):
+ if debug_log:
+ log.debug(f"run: {args}")
+ start = datetime.now()
+ if intext is not None:
+ inbytes = intext.encode()
+ p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
+ input=inbytes)
+ stdout_as_list = None
+ if stdout_list:
+ try:
+ out = p.stdout.decode()
+ if HttpdTestSetup.CURL_STDOUT_SEPARATOR in out:
+ stdout_as_list = out.split(HttpdTestSetup.CURL_STDOUT_SEPARATOR)
+ if not stdout_as_list[len(stdout_as_list) - 1]:
+ stdout_as_list.pop()
+                    p.stdout = p.stdout.replace(HttpdTestSetup.CURL_STDOUT_SEPARATOR.encode(), b'')
+ except:
+ pass
+ return ExecResult(args=args, exit_code=p.returncode,
+ stdout=p.stdout, stderr=p.stderr,
+ stdout_as_list=stdout_as_list,
+ duration=datetime.now() - start)
+
+ def mkurl(self, scheme, hostname, path='/'):
+ port = self.https_port if scheme == 'https' else self.http_port
+ return f"{scheme}://{hostname}.{self.http_tld}:{port}{path}"
+
+ def install_test_conf(self, lines: List[str]):
+ with open(self._test_conf, 'w') as fd:
+ fd.write('\n'.join(self._httpd_base_conf))
+ fd.write('\n')
+ if self._verbosity >= 2:
+ fd.write(f"LogLevel core:trace5 {self.mpm_module}:trace5 http:trace5\n")
+ if self._log_interesting:
+ fd.write(self._log_interesting)
+ fd.write('\n\n')
+ fd.write('\n'.join(lines))
+ fd.write('\n')
+
+ def is_live(self, url: str = None, timeout: timedelta = None):
+ if url is None:
+ url = self._http_base
+ if timeout is None:
+ timeout = timedelta(seconds=5)
+ try_until = datetime.now() + timeout
+ last_err = ""
+ while datetime.now() < try_until:
+ # noinspection PyBroadException
+ try:
+ r = self.curl_get(url, insecure=True)
+ if r.exit_code == 0:
+ return True
+ time.sleep(.1)
+ except ConnectionRefusedError:
+ log.debug("connection refused")
+ time.sleep(.1)
+ except:
+ if last_err != str(sys.exc_info()[0]):
+ last_err = str(sys.exc_info()[0])
+ log.debug("Unexpected error: %s", last_err)
+ time.sleep(.1)
+ log.debug(f"Unable to contact server after {timeout}")
+ return False
+
+ def is_dead(self, url: str = None, timeout: timedelta = None):
+ if url is None:
+ url = self._http_base
+ if timeout is None:
+ timeout = timedelta(seconds=5)
+ try_until = datetime.now() + timeout
+ last_err = None
+ while datetime.now() < try_until:
+ # noinspection PyBroadException
+ try:
+ r = self.curl_get(url)
+ if r.exit_code != 0:
+ return True
+ time.sleep(.1)
+ except ConnectionRefusedError:
+ log.debug("connection refused")
+ return True
+ except:
+ if last_err != str(sys.exc_info()[0]):
+ last_err = str(sys.exc_info()[0])
+ log.debug("Unexpected error: %s", last_err)
+ time.sleep(.1)
+ log.debug(f"Server still responding after {timeout}")
+ return False
+
+ def _run_apachectl(self, cmd) -> ExecResult:
+ conf_file = 'stop.conf' if cmd == 'stop' else 'httpd.conf'
+ args = [self._apachectl,
+ "-d", self.server_dir,
+ "-f", os.path.join(self._server_dir, f'conf/{conf_file}'),
+ "-k", cmd]
+ r = self.run(args)
+ self._apachectl_stderr = r.stderr
+ if r.exit_code != 0:
+ log.warning(f"failed: {r}")
+ return r
+
+ def apache_reload(self):
+ r = self._run_apachectl("graceful")
+ if r.exit_code == 0:
+ timeout = timedelta(seconds=10)
+ return 0 if self.is_live(self._http_base, timeout=timeout) else -1
+ return r.exit_code
+
+ def apache_restart(self):
+ self.apache_stop()
+ r = self._run_apachectl("start")
+ if r.exit_code == 0:
+ timeout = timedelta(seconds=10)
+ return 0 if self.is_live(self._http_base, timeout=timeout) else -1
+ return r.exit_code
+
+ def apache_stop(self):
+ r = self._run_apachectl("stop")
+ if r.exit_code == 0:
+ timeout = timedelta(seconds=10)
+ return 0 if self.is_dead(self._http_base, timeout=timeout) else -1
+ return r
+
+ def apache_graceful_stop(self):
+ log.debug("stop apache")
+ self._run_apachectl("graceful-stop")
+ return 0 if self.is_dead() else -1
+
+ def apache_fail(self):
+ log.debug("expect apache fail")
+ self._run_apachectl("stop")
+        rv = self._run_apachectl("start").exit_code
+ if rv == 0:
+ rv = 0 if self.is_dead() else -1
+ else:
+ rv = 0
+ return rv
+
+ def apache_access_log_clear(self):
+ if os.path.isfile(self._server_access_log):
+ os.remove(self._server_access_log)
+
+ def get_ca_pem_file(self, hostname: str) -> Optional[str]:
+ if len(self.get_credentials_for_name(hostname)) > 0:
+ return self.ca.cert_file
+ return None
+
+ def clear_curl_headerfiles(self):
+ for fname in os.listdir(path=self.gen_dir):
+ if re.match(r'curl\.headers\.\d+', fname):
+ os.remove(os.path.join(self.gen_dir, fname))
+ self._curl_headerfiles_n = 0
+
+ def curl_resolve_args(self, url, insecure=False, force_resolve=True, options=None):
+ u = urlparse(url)
+
+ args = [
+ ]
+ if u.scheme == 'http':
+ pass
+ elif insecure:
+ args.append('--insecure')
+ elif options and "--cacert" in options:
+ pass
+ elif u.hostname:
+ ca_pem = self.get_ca_pem_file(u.hostname)
+ if ca_pem:
+ args.extend(["--cacert", ca_pem])
+
+ if force_resolve and u.hostname and u.hostname != 'localhost' \
+ and u.hostname != self._httpd_addr \
+ and not re.match(r'^(\d+|\[|:).*', u.hostname):
+ assert u.port, f"port not in url: {url}"
+ args.extend(["--resolve", f"{u.hostname}:{u.port}:{self._httpd_addr}"])
+ return args
+
+ def curl_complete_args(self, urls, stdout_list=False,
+ timeout=None, options=None,
+ insecure=False, force_resolve=True):
+ headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
+ self._curl_headerfiles_n += 1
+
+ args = [
+ self._curl, "-s", "--path-as-is", "-D", headerfile,
+ ]
+ args.extend(self.curl_resolve_args(urls[0], insecure=insecure,
+ force_resolve=force_resolve,
+ options=options))
+ if stdout_list:
+ args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
+ if self._current_test is not None:
+ args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
+ if timeout is not None and int(timeout) > 0:
+ args.extend(["--connect-timeout", str(int(timeout))])
+ if options:
+ args.extend(options)
+ return args, headerfile
+
+ def curl_parse_headerfile(self, headerfile: str, r: ExecResult = None) -> ExecResult:
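+ # parse a curl '-D' header dump: each status line starts a new response,
+ # followed by header lines, a blank line and possibly trailers; multiple
+ # responses (e.g. interim 1xx) are chained via add_response()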
+ lines = open(headerfile).readlines()
+ if r is None:
+ r = ExecResult(args=[], exit_code=0, stdout=b'', stderr=b'')
+
+ response = None
+ def fin_response(response):
+ if response:
+ r.add_response(response)
+
+ expected = ['status']
+ for line in lines:
+ if re.match(r'^$', line):
+ if 'trailer' in expected:
+ # end of trailers
+ fin_response(response)
+ response = None
+ expected = ['status']
+ elif 'header' in expected:
+ # end of header, another status or trailers might follow
+ expected = ['status', 'trailer']
+ else:
+ assert False, f"unexpected line: {line}"
+ continue
+ if 'status' in expected:
+ log.debug("reading 1st response line: %s", line)
+ m = re.match(r'^(\S+) (\d+) (.*)$', line)
+ if m:
+ fin_response(response)
+ response = {
+ "protocol": m.group(1),
+ "status": int(m.group(2)),
+ "description": m.group(3),
+ "header": {},
+ "trailer": {},
+ "body": r.outraw
+ }
+ expected = ['header']
+ continue
+ if 'trailer' in expected:
+ m = re.match(r'^([^:]+):\s*(.*)$', line)
+ if m:
+ response['trailer'][m.group(1).lower()] = m.group(2)
+ continue
+ if 'header' in expected:
+ m = re.match(r'^([^:]+):\s*(.*)$', line)
+ if m:
+ response['header'][m.group(1).lower()] = m.group(2)
+ continue
+ assert False, f"unexpected line: {line}"
+
+ fin_response(response)
+ return r
+
+ def curl_raw(self, urls, timeout=10, options=None, insecure=False,
+ force_resolve=True, no_stdout_list=False):
+ if not isinstance(urls, list):
+ urls = [urls]
+ stdout_list = False
+ if len(urls) > 1 and not no_stdout_list:
+ stdout_list = True
+ args, headerfile = self.curl_complete_args(
+ urls=urls, stdout_list=stdout_list,
+ timeout=timeout, options=options, insecure=insecure,
+ force_resolve=force_resolve)
+ args += urls
+ r = self.run(args, stdout_list=stdout_list)
+ if r.exit_code == 0:
+ self.curl_parse_headerfile(headerfile, r=r)
+ if r.json:
+ r.response["json"] = r.json
+ if os.path.isfile(headerfile):
+ os.remove(headerfile)
+ return r
+
+ def curl_get(self, url, insecure=False, options=None):
+ return self.curl_raw([url], insecure=insecure, options=options)
+
+ def curl_upload(self, url, fpath, timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend([
+ "--form", ("file=@%s" % fpath)
+ ])
+ return self.curl_raw(urls=[url], timeout=timeout, options=options)
+
+ def curl_post_data(self, url, data="", timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend(["--data", "%s" % data])
+ return self.curl_raw(url, timeout, options)
+
+ def curl_post_value(self, url, key, value, timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend(["--form", "{0}={1}".format(key, value)])
+ return self.curl_raw(url, timeout, options)
+
+ def curl_protocol_version(self, url, timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend(["-w", "%{http_version}\n", "-o", "/dev/null"])
+ r = self.curl_raw(url, timeout=timeout, options=options)
+ if r.exit_code == 0 and r.response:
+ return r.response["body"].decode('utf-8').rstrip()
+ return -1
+
+ def nghttp(self):
+ return Nghttp(self._nghttp, connect_addr=self._httpd_addr,
+ tmp_dir=self.gen_dir, test_name=self._current_test)
+
+ def h2load_status(self, run: ExecResult):
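+ # collect the 'requests: ...' and 'status codes: ...' summary lines
+ # of h2load output into a stats dict on the result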
+ stats = {}
+ m = re.search(
+ r'requests: (\d+) total, (\d+) started, (\d+) done, (\d+) succeeded'
+ r', (\d+) failed, (\d+) errored, (\d+) timeout', run.stdout)
+ if m:
+ stats["requests"] = {
+ "total": int(m.group(1)),
+ "started": int(m.group(2)),
+ "done": int(m.group(3)),
+ "succeeded": int(m.group(4))
+ }
+ m = re.search(r'status codes: (\d+) 2xx, (\d+) 3xx, (\d+) 4xx, (\d+) 5xx',
+ run.stdout)
+ if m:
+ stats["status"] = {
+ "2xx": int(m.group(1)),
+ "3xx": int(m.group(2)),
+ "4xx": int(m.group(3)),
+ "5xx": int(m.group(4))
+ }
+ run.add_results({"h2load": stats})
+ return run
+
+ def make_data_file(self, indir: str, fname: str, fsize: int) -> str:
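+ # write a file of exactly `fsize` bytes, composed of 1024-byte lines of
+ # the form '<9-digit index>-<digits>\n', with the last line truncated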
+ fpath = os.path.join(indir, fname)
+ s10 = "0123456789"
+ s = (101 * s10) + s10[0:3]
+ with open(fpath, 'w') as fd:
+ for i in range(int(fsize / 1024)):
+ fd.write(f"{i:09d}-{s}\n")
+ remain = int(fsize % 1024)
+ if remain != 0:
+ i = int(fsize / 1024) + 1
+ s = f"{i:09d}-{s}\n"
+ fd.write(s[0:remain])
+ return fpath
+
diff --git a/test/pyhttpd/htdocs/alive.json b/test/pyhttpd/htdocs/alive.json
new file mode 100644
index 0000000..2239ee2
--- /dev/null
+++ b/test/pyhttpd/htdocs/alive.json
@@ -0,0 +1,4 @@
+{
+ "host" : "generic",
+ "alive" : true
+}
diff --git a/test/pyhttpd/htdocs/forbidden.html b/test/pyhttpd/htdocs/forbidden.html
new file mode 100644
index 0000000..e186310
--- /dev/null
+++ b/test/pyhttpd/htdocs/forbidden.html
@@ -0,0 +1,11 @@
+<html>
+ <head>
+ <title>403 - Forbidden</title>
+ </head>
+ <body>
+ <h1>403 - Forbidden</h1>
+ <p>
+ An example of an error document.
+ </p>
+ </body>
+</html>
diff --git a/test/pyhttpd/htdocs/index.html b/test/pyhttpd/htdocs/index.html
new file mode 100644
index 0000000..3c07626
--- /dev/null
+++ b/test/pyhttpd/htdocs/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>mod_h2 test site generic</title>
+ </head>
+ <body>
+ <h1>mod_h2 test site generic</h1>
+ </body>
+</html>
+
diff --git a/test/pyhttpd/htdocs/test1/001.html b/test/pyhttpd/htdocs/test1/001.html
new file mode 100644
index 0000000..184952d
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/001.html
@@ -0,0 +1,10 @@
+<!DOCTYPE HTML>
+ <html>
+ <head>
+ <title>HTML/2.0 Test File: 001</title>
+ </head>
+ <body>
+ <p><h1>HTML/2.0 Test File: 001</h1></p>
+ <p>This file only contains a simple HTML structure with plain text.</p>
+ </body>
+</html>
diff --git a/test/pyhttpd/htdocs/test1/002.jpg b/test/pyhttpd/htdocs/test1/002.jpg
new file mode 100644
index 0000000..3feefb0
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/002.jpg
Binary files differ
diff --git a/test/pyhttpd/htdocs/test1/003.html b/test/pyhttpd/htdocs/test1/003.html
new file mode 100644
index 0000000..d5b08c5
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/003.html
@@ -0,0 +1,11 @@
+<!DOCTYPE HTML>
+ <html>
+ <head>
+ <title>HTML/2.0 Test File: 003</title>
+ </head>
+ <body>
+ <p><h1>HTML/2.0 Test File: 003</h1></p>
+ <p>This is a text HTML file with a big image:</p>
+ <p><img src="003/003_img.jpg" alt="GSMA Logo" style="width:269px;height:249px"></p>
+ </body>
+</html>
diff --git a/test/pyhttpd/htdocs/test1/003/003_img.jpg b/test/pyhttpd/htdocs/test1/003/003_img.jpg
new file mode 100644
index 0000000..3feefb0
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/003/003_img.jpg
Binary files differ
diff --git a/test/pyhttpd/htdocs/test1/004.html b/test/pyhttpd/htdocs/test1/004.html
new file mode 100644
index 0000000..768cb82
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/004.html
@@ -0,0 +1,23 @@
+<html>
+ <head>
+ <title>HTML/2.0 Test File: 004</title>
+ </head>
+ <body>
+ <p><h1>HTML/2.0 Test File: 004</h1>
+ This file contains plain text with a bunch of images.<br>
+ <img src="004/gophertiles_142.jpg" height="32" width="32"><img src="004/gophertiles_084.jpg" height="32" width="32"><img src="004/gophertiles_052.jpg" height="32" width="32"><img src="004/gophertiles_077.jpg" height="32" width="32"><img src="004/gophertiles_030.jpg" height="32" width="32"><img src="004/gophertiles_027.jpg" height="32" width="32"><img src="004/gophertiles_039.jpg" height="32" width="32"><img src="004/gophertiles_025.jpg" height="32" width="32"><img src="004/gophertiles_017.jpg" height="32" width="32"><img src="004/gophertiles_179.jpg" height="32" width="32"><img src="004/gophertiles_032.jpg" height="32" width="32"><img src="004/gophertiles_161.jpg" height="32" width="32"><img src="004/gophertiles_088.jpg" height="32" width="32"><img src="004/gophertiles_022.jpg" height="32" width="32"><img src="004/gophertiles_146.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_102.jpg" height="32" width="32"><img src="004/gophertiles_009.jpg" height="32" width="32"><img src="004/gophertiles_132.jpg" height="32" width="32"><img src="004/gophertiles_137.jpg" height="32" width="32"><img src="004/gophertiles_055.jpg" height="32" width="32"><img src="004/gophertiles_036.jpg" height="32" width="32"><img src="004/gophertiles_127.jpg" height="32" width="32"><img src="004/gophertiles_145.jpg" height="32" width="32"><img src="004/gophertiles_147.jpg" height="32" width="32"><img src="004/gophertiles_153.jpg" height="32" width="32"><img src="004/gophertiles_105.jpg" height="32" width="32"><img src="004/gophertiles_103.jpg" height="32" width="32"><img src="004/gophertiles_033.jpg" height="32" width="32"><img src="004/gophertiles_054.jpg" height="32" width="32"><img src="004/gophertiles_015.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_016.jpg" height="32" width="32"><img src="004/gophertiles_072.jpg" height="32" width="32"><img src="004/gophertiles_115.jpg" height="32" width="32"><img src="004/gophertiles_108.jpg" height="32" width="32"><img src="004/gophertiles_148.jpg" height="32" width="32"><img src="004/gophertiles_070.jpg" height="32" width="32"><img src="004/gophertiles_083.jpg" height="32" width="32"><img src="004/gophertiles_118.jpg" height="32" width="32"><img src="004/gophertiles_053.jpg" height="32" width="32"><img src="004/gophertiles_021.jpg" height="32" width="32"><img src="004/gophertiles_059.jpg" height="32" width="32"><img src="004/gophertiles_130.jpg" height="32" width="32"><img src="004/gophertiles_163.jpg" height="32" width="32"><img src="004/gophertiles_098.jpg" height="32" width="32"><img src="004/gophertiles_064.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_018.jpg" height="32" width="32"><img src="004/gophertiles_058.jpg" height="32" width="32"><img src="004/gophertiles_167.jpg" height="32" width="32"><img src="004/gophertiles_082.jpg" height="32" width="32"><img src="004/gophertiles_056.jpg" height="32" width="32"><img src="004/gophertiles_180.jpg" height="32" width="32"><img src="004/gophertiles_046.jpg" height="32" width="32"><img src="004/gophertiles_093.jpg" height="32" width="32"><img src="004/gophertiles_106.jpg" height="32" width="32"><img src="004/gophertiles_065.jpg" height="32" width="32"><img src="004/gophertiles_175.jpg" height="32" width="32"><img src="004/gophertiles_139.jpg" height="32" width="32"><img src="004/gophertiles_101.jpg" height="32" width="32"><img src="004/gophertiles_099.jpg" height="32" width="32"><img src="004/gophertiles_051.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_140.jpg" height="32" width="32"><img src="004/gophertiles_134.jpg" height="32" width="32"><img src="004/gophertiles_149.jpg" height="32" width="32"><img src="004/gophertiles_049.jpg" height="32" width="32"><img src="004/gophertiles_095.jpg" height="32" width="32"><img src="004/gophertiles_075.jpg" height="32" width="32"><img src="004/gophertiles_066.jpg" height="32" width="32"><img src="004/gophertiles_090.jpg" height="32" width="32"><img src="004/gophertiles_035.jpg" height="32" width="32"><img src="004/gophertiles_114.jpg" height="32" width="32"><img src="004/gophertiles_160.jpg" height="32" width="32"><img src="004/gophertiles_079.jpg" height="32" width="32"><img src="004/gophertiles_062.jpg" height="32" width="32"><img src="004/gophertiles_096.jpg" height="32" width="32"><img src="004/gophertiles_100.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_104.jpg" height="32" width="32"><img src="004/gophertiles_057.jpg" height="32" width="32"><img src="004/gophertiles_037.jpg" height="32" width="32"><img src="004/gophertiles_086.jpg" height="32" width="32"><img src="004/gophertiles_168.jpg" height="32" width="32"><img src="004/gophertiles_138.jpg" height="32" width="32"><img src="004/gophertiles_045.jpg" height="32" width="32"><img src="004/gophertiles_141.jpg" height="32" width="32"><img src="004/gophertiles_029.jpg" height="32" width="32"><img src="004/gophertiles_165.jpg" height="32" width="32"><img src="004/gophertiles_110.jpg" height="32" width="32"><img src="004/gophertiles_063.jpg" height="32" width="32"><img src="004/gophertiles_158.jpg" height="32" width="32"><img src="004/gophertiles_122.jpg" height="32" width="32"><img src="004/gophertiles_068.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_170.jpg" height="32" width="32"><img src="004/gophertiles_120.jpg" height="32" width="32"><img src="004/gophertiles_117.jpg" height="32" width="32"><img src="004/gophertiles_031.jpg" height="32" width="32"><img src="004/gophertiles_113.jpg" height="32" width="32"><img src="004/gophertiles_074.jpg" height="32" width="32"><img src="004/gophertiles_129.jpg" height="32" width="32"><img src="004/gophertiles_019.jpg" height="32" width="32"><img src="004/gophertiles_060.jpg" height="32" width="32"><img src="004/gophertiles_109.jpg" height="32" width="32"><img src="004/gophertiles_080.jpg" height="32" width="32"><img src="004/gophertiles_097.jpg" height="32" width="32"><img src="004/gophertiles_116.jpg" height="32" width="32"><img src="004/gophertiles_085.jpg" height="32" width="32"><img src="004/gophertiles_050.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_151.jpg" height="32" width="32"><img src="004/gophertiles_094.jpg" height="32" width="32"><img src="004/gophertiles_067.jpg" height="32" width="32"><img src="004/gophertiles_128.jpg" height="32" width="32"><img src="004/gophertiles_034.jpg" height="32" width="32"><img src="004/gophertiles_135.jpg" height="32" width="32"><img src="004/gophertiles_012.jpg" height="32" width="32"><img src="004/gophertiles_010.jpg" height="32" width="32"><img src="004/gophertiles_152.jpg" height="32" width="32"><img src="004/gophertiles_171.jpg" height="32" width="32"><img src="004/gophertiles_087.jpg" height="32" width="32"><img src="004/gophertiles_126.jpg" height="32" width="32"><img src="004/gophertiles_048.jpg" height="32" width="32"><img src="004/gophertiles_023.jpg" height="32" width="32"><img src="004/gophertiles_078.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_071.jpg" height="32" width="32"><img src="004/gophertiles_131.jpg" height="32" width="32"><img src="004/gophertiles_073.jpg" height="32" width="32"><img src="004/gophertiles_143.jpg" height="32" width="32"><img src="004/gophertiles_173.jpg" height="32" width="32"><img src="004/gophertiles_154.jpg" height="32" width="32"><img src="004/gophertiles_061.jpg" height="32" width="32"><img src="004/gophertiles_178.jpg" height="32" width="32"><img src="004/gophertiles_013.jpg" height="32" width="32"><img src="004/gophertiles_028.jpg" height="32" width="32"><img src="004/gophertiles_157.jpg" height="32" width="32"><img src="004/gophertiles_038.jpg" height="32" width="32"><img src="004/gophertiles_069.jpg" height="32" width="32"><img src="004/gophertiles_174.jpg" height="32" width="32"><img src="004/gophertiles_076.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_155.jpg" height="32" width="32"><img src="004/gophertiles_107.jpg" height="32" width="32"><img src="004/gophertiles_136.jpg" height="32" width="32"><img src="004/gophertiles_144.jpg" height="32" width="32"><img src="004/gophertiles_091.jpg" height="32" width="32"><img src="004/gophertiles_024.jpg" height="32" width="32"><img src="004/gophertiles_014.jpg" height="32" width="32"><img src="004/gophertiles_159.jpg" height="32" width="32"><img src="004/gophertiles_011.jpg" height="32" width="32"><img src="004/gophertiles_176.jpg" height="32" width="32"><img src="004/gophertiles_162.jpg" height="32" width="32"><img src="004/gophertiles_156.jpg" height="32" width="32"><img src="004/gophertiles_081.jpg" height="32" width="32"><img src="004/gophertiles_119.jpg" height="32" width="32"><img src="004/gophertiles_026.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_133.jpg" height="32" width="32"><img src="004/gophertiles_020.jpg" height="32" width="32"><img src="004/gophertiles_044.jpg" height="32" width="32"><img src="004/gophertiles_125.jpg" height="32" width="32"><img src="004/gophertiles_150.jpg" height="32" width="32"><img src="004/gophertiles_172.jpg" height="32" width="32"><img src="004/gophertiles_002.jpg" height="32" width="32"><img src="004/gophertiles_169.jpg" height="32" width="32"><img src="004/gophertiles_007.jpg" height="32" width="32"><img src="004/gophertiles_008.jpg" height="32" width="32"><img src="004/gophertiles_042.jpg" height="32" width="32"><img src="004/gophertiles_041.jpg" height="32" width="32"><img src="004/gophertiles_166.jpg" height="32" width="32"><img src="004/gophertiles_005.jpg" height="32" width="32"><img src="004/gophertiles_089.jpg" height="32" width="32"><br>
+ <img src="004/gophertiles_177.jpg" height="32" width="32"><img src="004/gophertiles_092.jpg" height="32" width="32"><img src="004/gophertiles_043.jpg" height="32" width="32"><img src="004/gophertiles_111.jpg" height="32" width="32"><img src="004/gophertiles_047.jpg" height="32" width="32"><img src="004/gophertiles.jpg" height="32" width="32"><img src="004/gophertiles_006.jpg" height="32" width="32"><img src="004/gophertiles_121.jpg" height="32" width="32"><img src="004/gophertiles_004.jpg" height="32" width="32"><img src="004/gophertiles_124.jpg" height="32" width="32"><img src="004/gophertiles_123.jpg" height="32" width="32"><img src="004/gophertiles_112.jpg" height="32" width="32"><img src="004/gophertiles_040.jpg" height="32" width="32"><img src="004/gophertiles_164.jpg" height="32" width="32"><img src="004/gophertiles_003.jpg" height="32" width="32"><br>
+ <hr>This page is developed using this template:<a href="https://http2.golang.org/">HTTP/2 demo server</a>
+ </p>
+ </body>
+</html>
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/004/gophertiles.jpg b/test/pyhttpd/htdocs/test1/004/gophertiles.jpg
new file mode 100644
index 0000000..e45ac3b
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/004/gophertiles.jpg
Binary files differ
diff --git a/test/pyhttpd/htdocs/test1/006.html b/test/pyhttpd/htdocs/test1/006.html
new file mode 100644
index 0000000..6b73025
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/006.html
@@ -0,0 +1,23 @@
+<!DOCTYPE HTML>
+ <html>
+ <head>
+ <title>HTML/2.0 Test File: 006</title>
+ <link rel="stylesheet" type="text/css" href="006/006.css">
+ <script type="text/javascript" src="006/006.js"></script>
+ </head>
+ <body>
+ <h1>HTML/2.0 Test File: 006</h1>
+ <div class="listTitle">This page contains:
+ <ul class="listElements">
+ <li>HTML
+ <li>CSS
+ <li>JavaScript
+ </ul>
+ </div>
+ <div class="listTitle">
+ <script type="text/javascript">
+ mainJavascript();
+ </script>
+ </div>
+ </body>
+</html>
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/006/006.css b/test/pyhttpd/htdocs/test1/006/006.css
new file mode 100644
index 0000000..de6aa5f
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/006/006.css
@@ -0,0 +1,21 @@
+@CHARSET "ISO-8859-1";
+body{
+ background:HoneyDew;
+}
+p{
+color:#0000FF;
+text-align:left;
+}
+
+h1{
+color:#FF0000;
+text-align:center;
+}
+
+.listTitle{
+ font-size:large;
+}
+
+.listElements{
+ color:#3366FF
+}
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/006/006.js b/test/pyhttpd/htdocs/test1/006/006.js
new file mode 100644
index 0000000..b450067
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/006/006.js
@@ -0,0 +1,31 @@
+/**
+ * JavaScript Functions File
+ */
+function returnDate()
+{
+ var currentDate;
+ currentDate=new Date();
+ var dateString=(currentDate.getMonth()+1)+'/'+currentDate.getDate()+'/'+currentDate.getFullYear();
+ return dateString;
+}
+
+function returnHour()
+{
+ var currentDate;
+ currentDate=new Date();
+ var hourString=currentDate.getHours()+':'+currentDate.getMinutes()+':'+currentDate.getSeconds();
+ return hourString;
+}
+
+function javaScriptMessage(){
+ return 'This section is generated under JavaScript:<br>';
+}
+
+function mainJavascript(){
+ document.write(javaScriptMessage())
+ document.write('<ul class="listElements">');
+ document.write('<li>Current date (dd/mm/yyyy): ' + returnDate());
+ document.write('<br>');
+ document.write('<li>Current time (hh:mm:ss): '+returnHour());
+ document.write('</ul>');
+}
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/006/header.html b/test/pyhttpd/htdocs/test1/006/header.html
new file mode 100644
index 0000000..bace20e
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/006/header.html
@@ -0,0 +1 @@
+<title>My Header Title</title>
diff --git a/test/pyhttpd/htdocs/test1/007.html b/test/pyhttpd/htdocs/test1/007.html
new file mode 100644
index 0000000..4db93e4
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/007.html
@@ -0,0 +1,21 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="ISO-8859-1">
+<title>HTML/2.0 Test File: 007</title>
+</head>
+<body>
+ <h1>HTML/2.0 Test File: 007</h1>
+ <div><p>This page is used to send data from the client to the server:</p>
+ <FORM ACTION="007/007.py" METHOD="post" ENCTYPE="multipart/form-data">
+ <input type="hidden" name="pageName" value="007.html">
+ Name:<input type="text" name="pName" value="Write your name here." size="30" maxlength="30"><br>
+ Age:<input type="text" name="pAge" value="00" size="2" maxlength="2"><br>
+ Gender: Male<input type="radio" name="pGender" VALUE="Male">
+ Female<input type="radio" name="pGender" VALUE="Female"><br>
+ <input type="submit" name="userForm" value="Send">
+ <input type="reset" value="Clear">
+ </FORM>
+ </div>
+</body>
+</html>
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/007/007.py b/test/pyhttpd/htdocs/test1/007/007.py
new file mode 100644
index 0000000..02b5466
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/007/007.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import cgi, sys
+import cgitb; cgitb.enable()
+
+print "Content-Type: text/html;charset=UTF-8"
+print
+
+print """\
+ <!DOCTYPE html><html><head>
+ <title>HTML/2.0 Test File: 007 (received data)</title></head>
+ <body><h1>HTML/2.0 Test File: 007</h1>"""
+
+# alternative output: parsed form params <-> plain POST body
+parseContent = True # <-> False
+
+if parseContent:
+ print '<h2>Data processed:</h2><ul>'
+ form = cgi.FieldStorage()
+ for name in form:
+ print '<li>', name, ': ', form[name].value, '</li>'
+ print '</ul>'
+else:
+ print '<h2>POST data output:</h2><div><pre>'
+ data = sys.stdin.read()
+ print data
+ print '</pre></div>'
+
+print '</body></html>'
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/009.py b/test/pyhttpd/htdocs/test1/009.py
new file mode 100644
index 0000000..8fd9095
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/009.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import cgi, sys, time
+import cgitb; cgitb.enable()
+
+print "Content-Type: text/html;charset=UTF-8"
+print
+
+print """\
+ <!DOCTYPE html><html><head>
+ <title>HTML/2.0 Test File: 009 (server time)</title></head>
+ <body><h1>HTML/2.0 Test File: 009</h1>
+ <p>60 seconds of server time, one by one.</p>"""
+
+for i in range(60):
+ s = time.strftime("%Y-%m-%d %H:%M:%S")
+ print "<div>", s, "</div>"
+ sys.stdout.flush()
+ time.sleep(1)
+
+print "<p>done.</p></body></html>" \ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test1/alive.json b/test/pyhttpd/htdocs/test1/alive.json
new file mode 100644
index 0000000..93e7f95
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/alive.json
@@ -0,0 +1,5 @@
+{
+ "host" : "test1",
+ "alive" : true
+}
+
diff --git a/test/pyhttpd/htdocs/test1/index.html b/test/pyhttpd/htdocs/test1/index.html
new file mode 100644
index 0000000..9f752b5
--- /dev/null
+++ b/test/pyhttpd/htdocs/test1/index.html
@@ -0,0 +1,46 @@
+<html>
+ <head>
+ <title>mod_h2 test site</title>
+ </head>
+ <body>
+ <h1>mod_h2 test site</h1>
+ <p></p>
+ <h2>served directly</h2>
+ <ul>
+ <li><a href="001.html">01: html</a></li>
+ <li><a href="002.jpg">02: image</a></li>
+ <li><a href="003.html">03: html+image</a></li>
+ <li><a href="004.html">04: tiled image</a></li>
+ <li><a href="005.txt">05: large text</a></li>
+ <li><a href="006.html">06: html/js/css</a></li>
+ <li><a href="007.html">07: form submit</a></li>
+ <li><a href="upload.py">08: upload</a></li>
+ <li><a href="009.py">09: small chunks</a></li>
+ </ul>
+ <h2>mod_proxied</h2>
+ <ul>
+ <li><a href="proxy/001.html">01: html</a></li>
+ <li><a href="proxy/002.jpg">02: image</a></li>
+ <li><a href="proxy/003.html">03: html+image</a></li>
+ <li><a href="proxy/004.html">04: tiled image</a></li>
+ <li><a href="proxy/005.txt">05: large text</a></li>
+ <li><a href="proxy/006.html">06: html/js/css</a></li>
+ <li><a href="proxy/007.html">07: form submit</a></li>
+ <li><a href="proxy/upload.py">08: upload</a></li>
+ <li><a href="proxy/009.py">09: small chunks</a></li>
+ </ul>
+ <h2>mod_rewritten</h2>
+ <ul>
+ <li><a href="rewrite/001.html">01: html</a></li>
+ <li><a href="rewrite/002.jpg">02: image</a></li>
+ <li><a href="rewrite/003.html">03: html+image</a></li>
+ <li><a href="rewrite/004.html">04: tiled image</a></li>
+ <li><a href="rewrite/005.txt">05: large text</a></li>
+ <li><a href="rewrite/006.html">06: html/js/css</a></li>
+ <li><a href="rewrite/007.html">07: form submit</a></li>
+ <li><a href="rewrite/upload.py">08: upload</a></li>
+ <li><a href="rewrite/009.py">09: small chunks</a></li>
+ </ul>
+ </body>
+</html>
+
diff --git a/test/pyhttpd/htdocs/test2/006/006.css b/test/pyhttpd/htdocs/test2/006/006.css
new file mode 100755
index 0000000..de6aa5f
--- /dev/null
+++ b/test/pyhttpd/htdocs/test2/006/006.css
@@ -0,0 +1,21 @@
+@CHARSET "ISO-8859-1";
+body{
+ background:HoneyDew;
+}
+p{
+color:#0000FF;
+text-align:left;
+}
+
+h1{
+color:#FF0000;
+text-align:center;
+}
+
+.listTitle{
+ font-size:large;
+}
+
+.listElements{
+ color:#3366FF
+}
\ No newline at end of file
diff --git a/test/pyhttpd/htdocs/test2/10%abnormal.txt b/test/pyhttpd/htdocs/test2/10%abnormal.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/pyhttpd/htdocs/test2/10%abnormal.txt
diff --git a/test/pyhttpd/htdocs/test2/alive.json b/test/pyhttpd/htdocs/test2/alive.json
new file mode 100644
index 0000000..6a74223
--- /dev/null
+++ b/test/pyhttpd/htdocs/test2/alive.json
@@ -0,0 +1,4 @@
+{
+ "host" : "test2",
+ "alive" : true
+}
diff --git a/test/pyhttpd/htdocs/test2/x%2f.test b/test/pyhttpd/htdocs/test2/x%2f.test
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/pyhttpd/htdocs/test2/x%2f.test
diff --git a/test/pyhttpd/log.py b/test/pyhttpd/log.py
new file mode 100644
index 0000000..dff7623
--- /dev/null
+++ b/test/pyhttpd/log.py
@@ -0,0 +1,163 @@
+import os
+import re
+import time
+from datetime import datetime, timedelta
+from io import SEEK_END
+from typing import List, Tuple, Any
+
+
+class HttpdErrorLog:
+ """Checking the httpd error log for errors and warnings, including
+ limiting checks from a last known position forward.
+ """
+
+ RE_ERRLOG_ERROR = re.compile(r'.*\[(?P<module>[^:]+):error].*')
+ RE_ERRLOG_WARN = re.compile(r'.*\[(?P<module>[^:]+):warn].*')
+ RE_APLOGNO = re.compile(r'.*\[(?P<module>[^:]+):(error|warn)].* (?P<aplogno>AH\d+): .+')
+ RE_SSL_LIB_ERR = re.compile(r'.*\[ssl:error].* SSL Library Error: error:(?P<errno>\S+):.+')
+
+ def __init__(self, path: str):
+ self._path = path
+ self._ignored_modules = []
+ self._ignored_lognos = set()
+ self._ignored_patterns = []
+ # remember the file position we started with
+ self._start_pos = 0
+ if os.path.isfile(self._path):
+ with open(self._path) as fd:
+ self._start_pos = fd.seek(0, SEEK_END)
+ self._last_pos = self._start_pos
+ self._last_errors = []
+ self._last_warnings = []
+ self._observed_errors = set()
+ self._observed_warnings = set()
+
+ def __repr__(self):
+ return f"HttpdErrorLog[{self._path}, errors: {' '.join(self._last_errors)}, " \
+ f"warnings: {' '.join(self._last_warnings)}]"
+
+ @property
+ def path(self) -> str:
+ return self._path
+
+ def clear_log(self):
+ if os.path.isfile(self.path):
+ os.remove(self.path)
+ self._start_pos = 0
+ self._last_pos = self._start_pos
+ self._last_errors = []
+ self._last_warnings = []
+ self._observed_errors = set()
+ self._observed_warnings = set()
+
+ def set_ignored_modules(self, modules: List[str]):
+ self._ignored_modules = modules.copy() if modules else []
+
+ def set_ignored_lognos(self, lognos: List[str]):
+ if lognos:
+ for l in lognos:
+ self._ignored_lognos.add(l)
+
+ def add_ignored_patterns(self, patterns: List[Any]):
+ self._ignored_patterns.extend(patterns)
+
+ def _is_ignored(self, line: str) -> bool:
+ for p in self._ignored_patterns:
+ if p.match(line):
+ return True
+ m = self.RE_APLOGNO.match(line)
+ if m and m.group('aplogno') in self._ignored_lognos:
+ return True
+ return False
+
+ def get_recent(self, advance=True) -> Tuple[List[str], List[str]]:
+ """Collect error and warning from the log since the last remembered position
+ :param advance: advance the position to the end of the log afterwards
+ :return: list of error and list of warnings as tuple
+ """
+ self._last_errors = []
+ self._last_warnings = []
+ if os.path.isfile(self._path):
+ with open(self._path) as fd:
+ fd.seek(self._last_pos, os.SEEK_SET)
+ for line in fd:
+ if self._is_ignored(line):
+ continue
+ m = self.RE_ERRLOG_ERROR.match(line)
+ if m and m.group('module') not in self._ignored_modules:
+ self._last_errors.append(line)
+ continue
+ m = self.RE_ERRLOG_WARN.match(line)
+ if m:
+ if m and m.group('module') not in self._ignored_modules:
+ self._last_warnings.append(line)
+ continue
+ if advance:
+ self._last_pos = fd.tell()
+ self._observed_errors.update(set(self._last_errors))
+ self._observed_warnings.update(set(self._last_warnings))
+ return self._last_errors, self._last_warnings
+
+ def get_recent_count(self, advance=True):
+ errors, warnings = self.get_recent(advance=advance)
+ return len(errors), len(warnings)
+
+ def ignore_recent(self):
+ """After a test case triggered errors/warnings on purpose, add
+ those to our 'observed' list so the do not get reported as 'missed'.
+ """
+ self._last_errors = []
+ self._last_warnings = []
+ if os.path.isfile(self._path):
+ with open(self._path) as fd:
+ fd.seek(self._last_pos, os.SEEK_SET)
+ for line in fd:
+ if self._is_ignored(line):
+ continue
+ m = self.RE_ERRLOG_ERROR.match(line)
+ if m and m.group('module') not in self._ignored_modules:
+ self._observed_errors.add(line)
+ continue
+ m = self.RE_ERRLOG_WARN.match(line)
+ if m:
+ if m and m.group('module') not in self._ignored_modules:
+ self._observed_warnings.add(line)
+ continue
+ self._last_pos = fd.tell()
+
+ def get_missed(self) -> Tuple[List[str], List[str]]:
+ errors = []
+ warnings = []
+ if os.path.isfile(self._path):
+ with open(self._path) as fd:
+ fd.seek(self._start_pos, os.SEEK_SET)
+ for line in fd:
+ if self._is_ignored(line):
+ continue
+ m = self.RE_ERRLOG_ERROR.match(line)
+ if m and m.group('module') not in self._ignored_modules \
+ and line not in self._observed_errors:
+ errors.append(line)
+ continue
+ m = self.RE_ERRLOG_WARN.match(line)
+ if m:
+ if m and m.group('module') not in self._ignored_modules \
+ and line not in self._observed_warnings:
+ warnings.append(line)
+ continue
+ return errors, warnings
+
+ def scan_recent(self, pattern: re, timeout=10):
+ if not os.path.isfile(self.path):
+ return False
+ with open(self.path) as fd:
+ end = datetime.now() + timedelta(seconds=timeout)
+ while True:
+ fd.seek(self._last_pos, os.SEEK_SET)
+ for line in fd:
+ if pattern.match(line):
+ return True
+ if datetime.now() > end:
+ raise TimeoutError(f"pattern not found in error log after {timeout} seconds")
+ time.sleep(.1)
+ return False
diff --git a/test/pyhttpd/mod_aptest/mod_aptest.c b/test/pyhttpd/mod_aptest/mod_aptest.c
new file mode 100644
index 0000000..d1a8e05
--- /dev/null
+++ b/test/pyhttpd/mod_aptest/mod_aptest.c
@@ -0,0 +1,66 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <apr_optional.h>
+#include <apr_optional_hooks.h>
+#include <apr_strings.h>
+#include <apr_cstr.h>
+#include <apr_want.h>
+
+#include <httpd.h>
+#include <http_protocol.h>
+#include <http_request.h>
+#include <http_log.h>
+
+static void aptest_hooks(apr_pool_t *pool);
+
+AP_DECLARE_MODULE(aptest) = {
+ STANDARD20_MODULE_STUFF,
+ NULL, /* func to create per dir config */
+ NULL, /* func to merge per dir config */
+ NULL, /* func to create per server config */
+ NULL, /* func to merge per server config */
+ NULL, /* command handlers */
+ aptest_hooks,
+#if defined(AP_MODULE_FLAG_NONE)
+ AP_MODULE_FLAG_ALWAYS_MERGE
+#endif
+};
+
+
+static int aptest_post_read_request(request_rec *r)
+{
+ const char *test_name = apr_table_get(r->headers_in, "AP-Test-Name");
+ if (test_name) {
+ ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r, "test[%s]: %s",
+ test_name, r->the_request);
+ }
+ return DECLINED;
+}
+
+/* Install this module into the apache2 infrastructure.
+ */
+static void aptest_hooks(apr_pool_t *pool)
+{
+ ap_log_perror(APLOG_MARK, APLOG_TRACE1, 0, pool,
+ "installing hooks and handlers");
+
+ /* test case monitoring */
+ ap_hook_post_read_request(aptest_post_read_request, NULL,
+ NULL, APR_HOOK_MIDDLE);
+
+}
+
diff --git a/test/pyhttpd/nghttp.py b/test/pyhttpd/nghttp.py
new file mode 100644
index 0000000..43721f5
--- /dev/null
+++ b/test/pyhttpd/nghttp.py
@@ -0,0 +1,304 @@
+import re
+import os
+import subprocess
+from datetime import datetime
+from typing import Dict
+
+from urllib.parse import urlparse
+
+from .result import ExecResult
+
+
+def _get_path(x):
+ return x["path"]
+
+
+class Nghttp:
+
+ def __init__(self, path, connect_addr=None, tmp_dir="/tmp",
+ test_name: str = None):
+ self.NGHTTP = path
+ self.CONNECT_ADDR = connect_addr
+ self.TMP_DIR = tmp_dir
+ self._test_name = test_name
+
+ @staticmethod
+ def get_stream(streams, sid):
+ sid = int(sid)
+ if sid not in streams:
+ streams[sid] = {
+ "id": sid,
+ "header": {},
+ "request": {
+ "id": sid,
+ "body": b''
+ },
+ "response": {
+ "id": sid,
+ "body": b''
+ },
+ "data_lengths": [],
+ "paddings": [],
+ "promises": []
+ }
+ return streams[sid] if sid in streams else None
+
+ def run(self, urls, timeout, options):
+ return self._baserun(urls, timeout, options)
+
+ def complete_args(self, url, _timeout, options: [str]) -> [str]:
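+ # build the nghttp command line; when a connect address is configured,
+ # connect there and carry the original authority in a 'host:' header so
+ # name-based virtual hosts still match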
+ if not isinstance(url, list):
+ url = [url]
+ u = urlparse(url[0])
+ args = [self.NGHTTP]
+ if self.CONNECT_ADDR:
+ connect_host = self.CONNECT_ADDR
+ args.append("--header=host: %s:%s" % (u.hostname, u.port))
+ else:
+ connect_host = u.hostname
+ if options:
+ args.extend(options)
+ for xurl in url:
+ xu = urlparse(xurl)
+ nurl = "%s://%s:%s/%s" % (u.scheme, connect_host, xu.port, xu.path)
+ if xu.query:
+ nurl = "%s?%s" % (nurl, xu.query)
+ args.append(nurl)
+ return args
+
+ def _baserun(self, url, timeout, options):
+ return self._run(self.complete_args(url, timeout, options))
+
+ def parse_output(self, btext) -> Dict:
+ # getting meta data and response body out of nghttp's output
+ # is a bit tricky. Without '-v' we just get the body. With '-v' meta
+ # data and timings in both directions are listed.
+ # We rely on response :status: to be unique and on
+ # response body not starting with space.
+ # This is not robust enough for general use, but sufficient for these tests.
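+ # An illustrative (not verbatim) '-v' line this parser matches:
+ # [ 0.123] recv (stream_id=13) :status: 200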
+ output = {}
+ body = ''
+ streams = {}
+ skip_indents = True
+ # chunk output into lines. nghttp mixes text
+ # meta output with bytes from the response body.
+ lines = [l.decode() for l in btext.split(b'\n')]
+
+ for lidx, l in enumerate(lines):
+ if len(l) == 0:
+ body += '\n'
+ continue
+ m = re.match(r'(.*)\[.*] recv \(stream_id=(\d+)\) (\S+): (\S*)', l)
+ if m:
+ body += m.group(1)
+ s = self.get_stream(streams, m.group(2))
+ hname = m.group(3)
+ hval = m.group(4)
+ print("stream %d header %s: %s" % (s["id"], hname, hval))
+ header = s["header"]
+ if hname in header:
+ header[hname] += ", %s" % hval
+ else:
+ header[hname] = hval
+ continue
+
+ m = re.match(r'(.*)\[.*] recv HEADERS frame <.* stream_id=(\d+)>', l)
+ if m:
+ body += m.group(1)
+ s = self.get_stream(streams, m.group(2))
+ if s:
+ print("stream %d: recv %d header" % (s["id"], len(s["header"])))
+ response = s["response"]
+ hkey = "header"
+ if "header" in response:
+ h = response["header"]
+ if ":status" in h and int(h[":status"]) >= 200:
+ hkey = "trailer"
+ else:
+ prev = {
+ "header": h
+ }
+ if "previous" in response:
+ prev["previous"] = response["previous"]
+ response["previous"] = prev
+ response[hkey] = s["header"]
+ s["header"] = {}
+ body = ''
+ continue
+
+ m = re.match(r'(.*)\[.*] recv DATA frame <length=(\d+), .*stream_id=(\d+)>', l)
+ if m:
+ body += m.group(1)
+ s = self.get_stream(streams, m.group(3))
+ blen = int(m.group(2))
+ if s:
+ print(f'stream {s["id"]}: {blen} DATA bytes added via "{l}"')
+ padlen = 0
+ if len(lines) > lidx + 2:
+ mpad = re.match(r' +\(padlen=(\d+)\)', lines[lidx+2])
+ if mpad:
+ padlen = int(mpad.group(1))
+ s["data_lengths"].append(blen)
+ s["paddings"].append(padlen)
+ blen -= padlen
+ s["response"]["body"] += body[-blen:].encode()
+ body = ''
+ skip_indents = True
+ continue
+
+ m = re.match(r'(.*)\[.*] recv PUSH_PROMISE frame <.* stream_id=(\d+)>', l)
+ if m:
+ body += m.group(1)
+ s = self.get_stream(streams, m.group(2))
+ if s:
+ # the headers we have are request headers for the PUSHed stream;
+ # they were received on the originating stream, the promised
+ # stream id is mentioned in the following lines
+ print("stream %d: %d PUSH_PROMISE header" % (s["id"], len(s["header"])))
+ if len(lines) > lidx+2:
+ m2 = re.match(r'\s+\(.*promised_stream_id=(\d+)\)', lines[lidx+2])
+ if m2:
+ s2 = self.get_stream(streams, m2.group(1))
+ s2["request"]["header"] = s["header"]
+ s["promises"].append(s2)
+ s["header"] = {}
+ continue
+
+ m = re.match(r'(.*)\[.*] recv (\S+) frame <length=(\d+), .*stream_id=(\d+)>', l)
+ if m:
+ print("recv frame %s on stream %s" % (m.group(2), m.group(4)))
+ body += m.group(1)
+ skip_indents = True
+ continue
+
+ m = re.match(r'(.*)\[.*] send (\S+) frame <length=(\d+), .*stream_id=(\d+)>', l)
+ if m:
+ print("send frame %s on stream %s" % (m.group(2), m.group(4)))
+ body += m.group(1)
+ skip_indents = True
+ continue
+
+ if skip_indents and l.startswith(' '):
+ continue
+
+ if '[' != l[0]:
+ skip_indents = None
+ body += l + '\n'
+
+ # the main request is done on the lowest odd numbered id
+ main_stream = 99999999999
+ for sid in streams:
+ s = streams[sid]
+ if "header" in s["response"] and ":status" in s["response"]["header"]:
+ s["response"]["status"] = int(s["response"]["header"][":status"])
+ if (sid % 2) == 1 and sid < main_stream:
+ main_stream = sid
+
+ output["streams"] = streams
+ if main_stream in streams:
+ output["response"] = streams[main_stream]["response"]
+ output["paddings"] = streams[main_stream]["paddings"]
+ output["data_lengths"] = streams[main_stream]["data_lengths"]
+ return output
+
+ def _raw(self, url, timeout, options):
+ args = ["-v"]
+ if self._test_name is not None:
+ args.append(f'--header=AP-Test-Name: {self._test_name}')
+ if options:
+ args.extend(options)
+ r = self._baserun(url, timeout, args)
+ if 0 == r.exit_code:
+ r.add_results(self.parse_output(r.outraw))
+ return r
+
+ def get(self, url, timeout=5, options=None):
+ return self._raw(url, timeout, options)
+
+ def assets(self, url, timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend(["-ans"])
+ r = self._baserun(url, timeout, options)
+ assets = []
+ if 0 == r.exit_code:
+ lines = re.findall(r'[^\n]*\n', r.stdout, re.MULTILINE)
+ for lidx, l in enumerate(lines):
+ m = re.match(r'\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+/(.*)', l)
+ if m:
+ assets.append({
+ "path": m.group(7),
+ "status": int(m.group(5)),
+ "size": m.group(6)
+ })
+ assets.sort(key=_get_path)
+ r.add_assets(assets)
+ return r
+
+ def post_data(self, url, data, timeout=5, options=None):
+ reqbody = ("%s/nghttp.req.body" % self.TMP_DIR)
+ with open(reqbody, 'wb') as f:
+ f.write(data.encode('utf-8'))
+ if not options:
+ options = []
+ options.extend(["--data=%s" % reqbody])
+ return self._raw(url, timeout, options)
+
+ def post_name(self, url, name, timeout=5, options=None):
+ reqbody = ("%s/nghttp.req.body" % self.TMP_DIR)
+ with open(reqbody, 'w') as f:
+ f.write("--DSAJKcd9876\r\n")
+ f.write("Content-Disposition: form-data; name=\"value\"; filename=\"xxxxx\"\r\n")
+ f.write("Content-Type: text/plain\r\n")
+ f.write(f"\r\n{name}")
+ f.write("\r\n--DSAJKcd9876\r\n")
+ if not options:
+ options = []
+ options.extend([
+ "--data=%s" % reqbody,
+ "-HContent-Type: multipart/form-data; boundary=DSAJKcd9876"])
+ return self._raw(url, timeout, options)
+
+ def upload(self, url, fpath, timeout=5, options=None):
+ if not options:
+ options = []
+ options.extend(["--data=%s" % fpath])
+ return self._raw(url, timeout, options)
+
+ def upload_file(self, url, fpath, timeout=5, options=None):
+ fname = os.path.basename(fpath)
+ reqbody = ("%s/nghttp.req.body" % self.TMP_DIR)
+ with open(fpath, 'rb') as fin:
+ with open(reqbody, 'wb') as f:
+ preamble = [
+ '--DSAJKcd9876',
+ 'Content-Disposition: form-data; name="xxx"; filename="xxxxx"',
+ 'Content-Type: text/plain',
+ '',
+ 'testing mod_h2',
+ '\r\n--DSAJKcd9876',
+ f'Content-Disposition: form-data; name="file"; filename="{fname}"',
+ 'Content-Type: application/octet-stream',
+ 'Content-Transfer-Encoding: binary',
+ '', ''
+ ]
+ f.write('\r\n'.join(preamble).encode('utf-8'))
+ f.write(fin.read())
+ f.write('\r\n'.join([
+ '\r\n--DSAJKcd9876', ''
+ ]).encode('utf-8'))
+ if not options:
+ options = []
+ options.extend([
+ "--data=%s" % reqbody,
+ "--expect-continue",
+ "-HContent-Type: multipart/form-data; boundary=DSAJKcd9876"])
+ return self._raw(url, timeout, options)
+
+ def _run(self, args) -> ExecResult:
+ print(("execute: %s" % " ".join(args)))
+ start = datetime.now()
+ p = subprocess.run(args, capture_output=True, text=False)
+ return ExecResult(args=args, exit_code=p.returncode,
+ stdout=p.stdout, stderr=p.stderr,
+ duration=datetime.now() - start)
diff --git a/test/pyhttpd/result.py b/test/pyhttpd/result.py
new file mode 100644
index 0000000..4bf9ff2
--- /dev/null
+++ b/test/pyhttpd/result.py
@@ -0,0 +1,92 @@
+import json
+from datetime import timedelta
+from typing import Optional, Dict, List
+
+
+class ExecResult:
+
+ def __init__(self, args: List[str], exit_code: int,
+ stdout: bytes, stderr: bytes = None,
+ stdout_as_list: List[bytes] = None,
+ duration: timedelta = None):
+ self._args = args
+ self._exit_code = exit_code
+ self._stdout = stdout if stdout is not None else b''
+ self._stderr = stderr if stderr is not None else b''
+ self._duration = duration if duration is not None else timedelta()
+ self._response = None
+ self._results = {}
+ self._assets = []
+ # noinspection PyBroadException
+ try:
+ if stdout_as_list is None:
+ out = self._stdout.decode()
+ else:
+ out = "[" + ','.join(stdout_as_list) + "]"
+ self._json_out = json.loads(out)
+ except:
+ self._json_out = None
+
+ def __repr__(self):
+ out = [
+ f"ExecResult[code={self.exit_code}, args={self._args}\n",
+ "----stdout---------------------------------------\n",
+ self._stdout.decode(),
+ "----stderr---------------------------------------\n",
+ self._stderr.decode()
+ ]
+ return ''.join(out)
+
+ @property
+ def exit_code(self) -> int:
+ return self._exit_code
+
+ @property
+ def args(self) -> List[str]:
+ return self._args
+
+ @property
+ def outraw(self) -> bytes:
+ return self._stdout
+
+ @property
+ def stdout(self) -> str:
+ return self._stdout.decode()
+
+ @property
+ def json(self) -> Optional[Dict]:
+ """Output as JSON dictionary or None if not parseable."""
+ return self._json_out
+
+ @property
+ def stderr(self) -> str:
+ return self._stderr.decode()
+
+ @property
+ def duration(self) -> timedelta:
+ return self._duration
+
+ @property
+ def response(self) -> Optional[Dict]:
+ return self._response
+
+ @property
+ def results(self) -> Dict:
+ return self._results
+
+ @property
+ def assets(self) -> List:
+ return self._assets
+
+ def add_response(self, resp: Dict):
+ if self._response:
+ resp['previous'] = self._response
+ self._response = resp
+
+ def add_results(self, results: Dict):
+ self._results.update(results)
+ if 'response' in results:
+ self.add_response(results['response'])
+
+ def add_assets(self, assets: List):
+ self._assets.extend(assets)
diff --git a/test/pyhttpd/ws_util.py b/test/pyhttpd/ws_util.py
new file mode 100644
index 0000000..38a3cf7
--- /dev/null
+++ b/test/pyhttpd/ws_util.py
@@ -0,0 +1,137 @@
+import logging
+import struct
+
+
+log = logging.getLogger(__name__)
+
+
+class WsFrame:
+
+ CONT = 0
+ TEXT = 1
+ BINARY = 2
+ RSVD3 = 3
+ RSVD4 = 4
+ RSVD5 = 5
+ RSVD6 = 6
+ RSVD7 = 7
+ CLOSE = 8
+ PING = 9
+ PONG = 10
+ RSVD11 = 11
+ RSVD12 = 12
+ RSVD13 = 13
+ RSVD14 = 14
+ RSVD15 = 15
+
+ OP_NAMES = [
+ "CONT",
+ "TEXT",
+ "BINARY",
+ "RSVD3",
+ "RSVD4",
+ "RSVD5",
+ "RSVD6",
+ "RSVD7",
+ "CLOSE",
+ "PING",
+ "PONG",
+ "RSVD11",
+ "RSVD12",
+ "RSVD13",
+ "RSVD14",
+ "RSVD15",
+ ]
+
+ def __init__(self, opcode: int, fin: bool, mask: bytes, data: bytes):
+ self.opcode = opcode
+ self.fin = fin
+ self.mask = mask
+ self.data = data
+ self.length = len(data)
+
+ def __repr__(self):
+ return f'WsFrame[{self.OP_NAMES[self.opcode]} fin={self.fin}, mask={self.mask}, len={len(self.data)}]'
+
+ @property
+ def data_len(self) -> int:
+ return len(self.data) if self.data else 0
+
+ def to_network(self) -> bytes:
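+ # serialize to RFC 6455 wire format: FIN flag and opcode in the first
+ # byte, MASK flag and 7-bit length in the second (126/127 select a
+ # 16-/64-bit extended length), then the optional 4-byte mask and payload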
+ nd = bytearray()
+ h1 = self.opcode
+ if self.fin:
+ h1 |= 0x80
+ nd.extend(struct.pack("!B", h1))
+ mask_bit = 0x80 if self.mask is not None else 0x0
+ h2 = self.data_len
+ if h2 > 65535:
+ nd.extend(struct.pack("!BQ", 127|mask_bit, h2))
+ elif h2 >= 126:
+ nd.extend(struct.pack("!BH", 126|mask_bit, h2))
+ else:
+ nd.extend(struct.pack("!B", h2|mask_bit))
+ if self.mask is not None:
+ nd.extend(self.mask)
+ if self.data is not None:
+ nd.extend(self.data)
+ return nd
+
+ @classmethod
+ def client_ping(cls, data: bytes, mask: bytes = None) -> 'WsFrame':
+ if mask is None:
+ mask = bytes.fromhex('00 00 00 00')
+ return WsFrame(opcode=WsFrame.PING, fin=True, mask=mask, data=data)
+
+ @classmethod
+ def client_close(cls, code: int, reason: str = None,
+ mask: bytes = None) -> 'WsFrame':
+ data = bytearray(struct.pack("!H", code))
+ if reason is not None:
+ data.extend(reason.encode())
+ if mask is None:
+ mask = bytes.fromhex('00 00 00 00')
+ return WsFrame(opcode=WsFrame.CLOSE, fin=True, mask=mask, data=data)
+
+
+class WsFrameReader:
+
+ def __init__(self, data: bytes):
+ self.data = data
+
+ def _read(self, n: int):
+ if len(self.data) < n:
+ raise EOFError(f'have {len(self.data)} bytes left, but {n} requested')
+ elif n == 0:
+ return b''
+ chunk = self.data[:n]
+ del self.data[:n]
+ return chunk
+
+ def next_frame(self):
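+ # parse one frame: 2-byte base header, optional 16-/64-bit extended
+ # length, the 4-byte mask if present, then the payload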
+ data = self._read(2)
+ h1, h2 = struct.unpack("!BB", data)
+ log.debug(f'parsed h1={h1} h2={h2} from {data}')
+ fin = True if h1 & 0x80 else False
+ opcode = h1 & 0xf
+ has_mask = True if h2 & 0x80 else False
+ mask = None
+ dlen = h2 & 0x7f
+ if dlen == 126:
+ (dlen,) = struct.unpack("!H", self._read(2))
+ elif dlen == 127:
+ (dlen,) = struct.unpack("!Q", self._read(8))
+ if has_mask:
+ mask = self._read(4)
+ return WsFrame(opcode=opcode, fin=fin, mask=mask, data=self._read(dlen))
+
+ def eof(self):
+ return len(self.data) == 0
+
+ @classmethod
+ def parse(cls, data: bytes):
+ frames = []
+ reader = WsFrameReader(data=data)
+ while not reader.eof():
+ frames.append(reader.next_frame())
+ return frames
diff --git a/test/tcpdumpscii.txt b/test/tcpdumpscii.txt
new file mode 100644
index 0000000..9c1060e
--- /dev/null
+++ b/test/tcpdumpscii.txt
@@ -0,0 +1,50 @@
+
+From marcs@znep.com Fri Apr 17 15:16:16 1998
+Date: Sat, 22 Nov 1997 20:44:10 -0700 (MST)
+From: Marc Slemko <marcs@znep.com>
+To: TLOSAP <new-httpd@apache.org>
+Subject: Re: Getting ethernet packets content under FreeBSD? (fwd)
+Reply-To: new-httpd@apache.org
+
+Anyone too lazy to hack tcpdump (eg. my tcpdump has a -X option to display
+the data in ASCII) can use something like the below to grab HTTP headers
+when debugging broken clients.
+
+Nothing complicated, but handy.
+
+---------- Forwarded message ----------
+Date: Sat, 22 Nov 1997 14:35:23 PST
+From: Bill Fenner <fenner@parc.xerox.com>
+To: Nate Williams <nate@mt.sri.com>
+Cc: bmah@ca.sandia.gov, hackers@FreeBSD.ORG
+Subject: Re: Getting ethernet packets content under FreeBSD?
+
+I usually just use this perl script, which I call "tcpdumpscii".
+Then run "tcpdumpscii -s 1500 -x [other tcpdump args]".
+
+ Bill
+
+#!/import/misc/bin/perl
+#
+#
+open(TCPDUMP,"tcpdump -l @ARGV|");
+while (<TCPDUMP>) {
+ if (/^\s+(\S\S)+/) {
+ $sav = $_;
+ $asc = "";
+ while (s/\s*(\S\S)\s*//) {
+ $i = hex($1);
+ if ($i < 32 || $i > 126) {
+ $asc .= ".";
+ } else {
+ $asc .= pack(C,hex($1));
+ }
+ }
+ $foo = "." x length($asc);
+ $_ = $sav;
+ s/\t/ /g;
+ s/^$foo/$asc/;
+ }
+ print;
+}
+
diff --git a/test/test-writev.c b/test/test-writev.c
new file mode 100644
index 0000000..83b6503
--- /dev/null
+++ b/test/test-writev.c
@@ -0,0 +1,101 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ test-writev: use this to figure out if your writev() does intelligent
+ things on the network. Some writev()s when given multiple buffers
+ will break them up into multiple packets, which is a waste.
+
+ Linux prior to 2.0.31 has this problem.
+
+ Solaris 2.5, 2.5.1 doesn't appear to, 2.6 hasn't been tested.
+
+ IRIX 5.3 doesn't have this problem.
+
+ To use this you want to snoop the wire with tcpdump, and then run
+ "test-writev a.b.c.d port#" ... against some TCP service on another
+ box. For example you can run it against port 80 on another server.
+ You want to look to see how many data packets are sent, you're hoping
+ only one of size 300 is sent.
+*/
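+
+/* An example session (host and port are placeholders):
+ *   tcpdump -i eth0 host 192.0.2.10 and port 80 &
+ *   ./test-writev 192.0.2.10 80
+ * A single data packet of about 300 bytes means writev() coalesced the
+ * three 100-byte buffers into one send.
+ */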
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <netinet/tcp.h>    /* for TCP_NODELAY */
+#include <stdlib.h>
+#include <unistd.h>
+#include <arpa/inet.h>
+#include <sys/uio.h>
+#include <errno.h>
+
+#ifndef INADDR_NONE
+#define INADDR_NONE (-1ul)
+#endif
+
+int main( int argc, char **argv )
+{
+ struct sockaddr_in server_addr;
+ int s;
+ struct iovec vector[3];
+ char buf[100];
+ int i;
+ const int just_say_no = 1;
+
+ if( argc != 3 ) {
+usage:
+ fprintf( stderr, "usage: test-writev a.b.c.d port#\n" );
+ exit( 1 );
+ }
+ server_addr.sin_family = AF_INET;
+ server_addr.sin_addr.s_addr = inet_addr( argv[1] );
+ if( server_addr.sin_addr.s_addr == INADDR_NONE ) {
+ fprintf( stderr, "bogus address\n" );
+ goto usage;
+ }
+ server_addr.sin_port = htons( atoi( argv[2] ) );
+
+ s = socket( AF_INET, SOCK_STREAM, 0 );
+ if( s < 0 ) {
+ perror("socket");
+ exit(1);
+ }
+ if( connect( s, (struct sockaddr *)&server_addr, sizeof( server_addr ) )
+ != 0 ) {
+ perror("connect");
+ exit(1);
+ }
+
+ if( setsockopt(s, IPPROTO_TCP, TCP_NODELAY, (char*)&just_say_no,
+ sizeof(just_say_no)) != 0 ) {
+ perror( "TCP_NODELAY" );
+ exit(1);
+ }
+ /* now build up a two part writev and write it out */
+ for( i = 0; i < sizeof( buf ); ++i ) {
+ buf[i] = 'x';
+ }
+ vector[0].iov_base = buf;
+ vector[0].iov_len = sizeof(buf);
+ vector[1].iov_base = buf;
+ vector[1].iov_len = sizeof(buf);
+ vector[2].iov_base = buf;
+ vector[2].iov_len = sizeof(buf);
+
+ i = writev( s, &vector[0], 3 );
+ fprintf( stdout, "i=%d, errno=%d\n", i, errno );
+ exit(0);
+}
diff --git a/test/test_find.c b/test/test_find.c
new file mode 100644
index 0000000..5019331
--- /dev/null
+++ b/test/test_find.c
@@ -0,0 +1,78 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This program tests the ap_find_list_item routine in ../main/util.c.
+ *
+ * The defines in this sample compile line are specific to Roy's system.
+ * They should match whatever was used to compile Apache first.
+ *
+ gcc -g -O2 -I../os/unix -I../include -o test_find \
+ -DSOLARIS2=250 -Wall -DALLOC_DEBUG -DPOOL_DEBUG \
+ ../main/alloc.o ../main/buff.o ../main/util.o \
+ ../ap/libap.a -lsocket -lnsl test_find.c
+ *
+ * Roy Fielding, 1999
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include "httpd.h"
+#include "apr_general.h"
+
+/*
+ * Dummy a bunch of stuff just to get a compile
+ */
+uid_t ap_user_id;
+gid_t ap_group_id;
+void *ap_dummy_mutex = &ap_dummy_mutex;
+char *ap_server_argv0;
+
+AP_DECLARE(void) ap_block_alarms(void)
+{
+ ;
+}
+
+AP_DECLARE(void) ap_unblock_alarms(void)
+{
+ ;
+}
+
+AP_DECLARE(void) ap_log_error(const char *file, int line, int level,
+ const request_rec *r, const char *fmt, ...)
+{
+ ;
+}
+
+int main (void)
+{
+ apr_pool_t *p;
+ char line[512];
+ char tok[512];
+
+ p = apr_pool_alloc_init();
+
+ printf("Enter field value to find items within:\n");
+ if (!gets(line))
+ exit(0);
+
+ printf("Enter search item:\n");
+ while (gets(tok)) {
+ printf(" [%s] == %s\n", tok, ap_find_list_item(p, line, tok)
+ ? "Yes" : "No");
+ printf("Enter search item:\n");
+ }
+
+ exit(0);
+}
diff --git a/test/test_limits.c b/test/test_limits.c
new file mode 100644
index 0000000..0bbf876
--- /dev/null
+++ b/test/test_limits.c
@@ -0,0 +1,200 @@
+/**************************************************************
+ * test_limits.c
+ *
+ * A simple program for sending abusive requests to a server, based
+ * on the sioux.c exploit code that this nimrod posted (see below).
+ * Roy added options for testing long header fieldsize (-t h), long
+ * request-lines (-t r), and a long request body (-t b).
+ *
+ * FreeBSD 2.2.x, FreeBSD 3.0, IRIX 5.3, IRIX 6.2:
+ * gcc -o test_limits test_limits.c
+ *
+ * Solaris 2.5.1:
+ * gcc -o test_limits test_limits.c -lsocket -lnsl
+ *
+ *
+ * Message-ID: <861zqspvtw.fsf@niobe.ewox.org>
+ * Date: Fri, 7 Aug 1998 19:04:27 +0200
+ * Sender: Bugtraq List <BUGTRAQ@netspace.org>
+ * From: Dag-Erling Coidan =?ISO-8859-1?Q?Sm=F8rgrav?= <finrod@EWOX.ORG>
+ * Subject: YA Apache DoS attack
+ *
+ * Copyright (c) 1998 Dag-Erling Codan Smrgrav
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer
+ * in this position and unchanged.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+/*
+ * Kudos to Mark Huizer who originally suggested this on freebsd-current
+ */
+
+#include <sys/types.h>
+#include <sys/uio.h>
+
+#include <sys/socket.h>
+#include <netinet/in.h>
+
+#include <netdb.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#define TEST_LONG_REQUEST_LINE 1
+#define TEST_LONG_REQUEST_FIELDS 2
+#define TEST_LONG_REQUEST_FIELDSIZE 3
+#define TEST_LONG_REQUEST_BODY 4
+
+void
+usage(void)
+{
+ fprintf(stderr,
+ "usage: test_limits [-t (r|n|h|b)] [-a address] [-p port] [-n num]\n");
+ exit(1);
+}
+
+int
+main(int argc, char *argv[])
+{
+ struct sockaddr_in sin;
+ struct hostent *he;
+ FILE *f;
+ int o, sd;
+
+ /* default parameters */
+ char *addr = "localhost";
+ int port = 80;
+ int num = 1000;
+ int testtype = TEST_LONG_REQUEST_FIELDS;
+
+ /* get options */
+ while ((o = getopt(argc, argv, "t:a:p:n:")) != EOF)
+ switch (o) {
+ case 't':
+ if (*optarg == 'r')
+ testtype = TEST_LONG_REQUEST_LINE;
+ else if (*optarg == 'n')
+ testtype = TEST_LONG_REQUEST_FIELDS;
+ else if (*optarg == 'h')
+ testtype = TEST_LONG_REQUEST_FIELDSIZE;
+ else if (*optarg == 'b')
+ testtype = TEST_LONG_REQUEST_BODY;
+ break;
+ case 'a':
+ addr = optarg;
+ break;
+ case 'p':
+ port = atoi(optarg);
+ break;
+ case 'n':
+ num = atoi(optarg);
+ break;
+ default:
+ usage();
+ }
+
+ if (argc != optind)
+ usage();
+
+ /* connect */
+ if ((he = gethostbyname(addr)) == NULL) {
+ perror("gethostbyname");
+ exit(1);
+ }
+    memset(&sin, 0, sizeof(sin));
+ memcpy((char *)&sin.sin_addr, he->h_addr, he->h_length);
+ sin.sin_family = he->h_addrtype;
+ sin.sin_port = htons(port);
+
+ if ((sd = socket(sin.sin_family, SOCK_STREAM, IPPROTO_TCP)) == -1) {
+ perror("socket");
+ exit(1);
+ }
+
+ if (connect(sd, (struct sockaddr *)&sin, sizeof(sin)) == -1) {
+ perror("connect");
+ exit(1);
+ }
+
+ if ((f = fdopen(sd, "r+")) == NULL) {
+ perror("fdopen");
+ exit(1);
+ }
+
+ /* attack! */
+ fprintf(stderr, "Testing like a plague of locusts on %s\n", addr);
+
+ if (testtype == TEST_LONG_REQUEST_LINE) {
+ fprintf(f, "GET ");
+ while (num-- && !ferror(f)) {
+ fprintf(f, "/123456789");
+ fflush(f);
+ }
+ fprintf(f, " HTTP/1.0\r\n\r\n");
+ }
+ else {
+ fprintf(f, "GET /fred/foo HTTP/1.0\r\n");
+
+ if (testtype == TEST_LONG_REQUEST_FIELDSIZE) {
+ while (num-- && !ferror(f)) {
+ fprintf(f, "User-Agent: sioux");
+ fflush(f);
+ }
+ fprintf(f, "\r\n");
+ }
+ else if (testtype == TEST_LONG_REQUEST_FIELDS) {
+ while (num-- && !ferror(f))
+ fprintf(f, "User-Agent: sioux\r\n");
+ fprintf(f, "\r\n");
+ }
+ else if (testtype == TEST_LONG_REQUEST_BODY) {
+ fprintf(f, "User-Agent: sioux\r\n");
+ fprintf(f, "Content-Length: 33554433\r\n");
+ fprintf(f, "\r\n");
+ while (num-- && !ferror(f))
+ fprintf(f, "User-Agent: sioux\r\n");
+ }
+ else {
+ fprintf(f, "\r\n");
+ }
+ }
+ fflush(f);
+
+ {
+        ssize_t len;
+ char buff[512];
+
+ while ((len = read(sd, buff, 512)) > 0)
+ len = write(1, buff, len);
+ }
+ if (ferror(f)) {
+ perror("fprintf");
+ exit(1);
+ }
+
+ fclose(f);
+ exit(0);
+}
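+
+/*
+ * Example invocations (hypothetical host/port; -n controls the repetition
+ * count for the selected test type):
+ *
+ *   ./test_limits -t r -a localhost -p 8080 -n 100000   long request-line
+ *   ./test_limits -t h -a localhost -p 8080 -n 100000   long header field value
+ *   ./test_limits -t n -a localhost -p 8080 -n 100000   many header fields
+ *   ./test_limits -t b -a localhost -p 8080 -n 100000   long request body
+ */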
diff --git a/test/test_parser.c b/test/test_parser.c
new file mode 100644
index 0000000..bc5207d
--- /dev/null
+++ b/test/test_parser.c
@@ -0,0 +1,75 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This program tests the ap_get_list_item routine in ../main/util.c.
+ *
+ * The defines in this sample compile line are specific to Roy's system.
+ * They should match whatever was used to compile Apache first.
+ *
+ gcc -g -O2 -I../os/unix -I../include -o test_parser \
+ -DSOLARIS2=250 -Wall -DALLOC_DEBUG -DPOOL_DEBUG \
+ ../main/alloc.o ../main/buff.o ../main/util.o \
+ ../ap/libap.a -lsocket -lnsl test_parser.c
+ *
+ * Roy Fielding, 1999
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "httpd.h"
+#include "apr_general.h"
+
+/*
+ * Dummy a bunch of stuff just to get a compile
+ */
+uid_t ap_user_id;
+gid_t ap_group_id;
+void *ap_dummy_mutex = &ap_dummy_mutex;
+char *ap_server_argv0;
+
+AP_DECLARE(void) ap_block_alarms(void)
+{
+ ;
+}
+
+AP_DECLARE(void) ap_unblock_alarms(void)
+{
+ ;
+}
+
+AP_DECLARE(void) ap_log_error(const char *file, int line, int level,
+ const request_rec *r, const char *fmt, ...)
+{
+ ;
+}
+
+int main (void)
+{
+ apr_pool_t *p;
+ const char *field;
+ char *newstr;
+ char instr[512];
+
+    apr_initialize();
+    apr_pool_create(&p, NULL);
+
+    while (fgets(instr, sizeof(instr), stdin)) {
+        instr[strcspn(instr, "\n")] = '\0';
+ printf(" [%s] ==\n", instr);
+ field = instr;
+ while ((newstr = ap_get_list_item(p, &field)) != NULL)
+ printf(" <%s> ..\n", newstr);
+ }
+
+ exit(0);
+}
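+
+/*
+ * A sketch of a session (hypothetical input; ap_get_list_item() is assumed
+ * to return the individual items of a comma-separated field value):
+ *
+ *   text/html;q=0.8, text/plain
+ *    [text/html;q=0.8, text/plain] ==
+ *     <text/html;q=0.8> ..
+ *     <text/plain> ..
+ */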
diff --git a/test/test_select.c b/test/test_select.c
new file mode 100644
index 0000000..af11035
--- /dev/null
+++ b/test/test_select.c
@@ -0,0 +1,46 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This is just a quick test program to see how long a wait is
+ * produced by a select loop with an exponential backoff.
+ *
+ * gcc -g -O2 -o test_select test_select.c
+ * test_select
+ *
+ * Roy Fielding, 1996
+ */
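+
+/*
+ * With the constants below, the loop sleeps for 4096, 8192, ... up to
+ * 2097152 microseconds before the doubled value exceeds 3000000 and the
+ * loop exits, so a full run should take roughly 4.2 seconds in total.
+ */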
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/time.h>
+
+int main (void)
+{
+ int srv;
+ long waittime = 4096;
+ struct timeval tv;
+
+ printf("Start\n");
+ while ((waittime > 0) && (waittime < 3000000)) {
+        printf("%ld\n", waittime);
+ tv.tv_sec = waittime/1000000;
+ tv.tv_usec = waittime%1000000;
+ waittime <<= 1;
+ srv = select(0, NULL, NULL, NULL, &tv);
+ }
+ printf("End\n");
+ exit(0);
+}
diff --git a/test/test_travis_conditions.sh b/test/test_travis_conditions.sh
new file mode 100755
index 0000000..82794eb
--- /dev/null
+++ b/test/test_travis_conditions.sh
@@ -0,0 +1,42 @@
+#!/bin/sh -e
+# Script to test whether the Travis CI conditions match correctly.
+# Requires "gem install travis-conditions". Tests the .travis.yml in the cwd.
+
+cond_24x="`sed -n '/&condition_24x_only/{s/.*condition_24x_only//;p;q;}' .travis.yml`"
+cond_not_24x="`sed -n '/&condition_not_24x/{s/.*condition_not_24x//;p;q;}' .travis.yml`"
+
+echo Condition 2.4.x: ${cond_24x}
+echo Condition not 2.4.x: ${cond_not_24x}
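+
+# The two conditions live in .travis.yml as YAML-anchored values; a
+# hypothetical sketch of what such a line might look like:
+#   if: &condition_24x_only branch =~ /^2\.4/ OR tag =~ /^2\.4/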
+
+testit()
+{
+ local data=$1
+ local expect=$2
+
+ is_24x=`travis-conditions eval "$cond_24x" --data "${data}"`
+ not_24x=`travis-conditions eval "$cond_not_24x" --data "${data}"`
+
+ if [ $is_24x = $not_24x ]; then
+ echo FAIL: Tests as both 2.4.x and not 2.4.x for "$data"
+ return 1
+    elif [ $expect = 2.4.x -a $is_24x = true ]; then
+        echo PASS
+    elif [ $expect = trunk -a $not_24x = true ]; then
+        echo PASS
+ else
+ echo FAIL for "$data"
+ return 1
+ fi
+ return 0
+}
+
+testit '{"tag": "2.4.49"}' 2.4.x
+testit '{"tag": "2.5.59"}' trunk
+testit '{"branch": "2.4.x"}' 2.4.x
+testit '{"branch": "candidate-2.4.49"}' 2.4.x
+testit '{"branch": "2.4.55-candidate"}' 2.4.x
+testit '{"branch": "2.4-backport-branch"}' 2.4.x
+testit '{"branch": "2.4.x-something"}' 2.4.x
+testit '{"branch": "2.5.0"}' trunk
+testit '{"branch": "2.5.x"}' trunk
+testit '{"branch": "trunk"}' trunk
diff --git a/test/time-sem.c b/test/time-sem.c
new file mode 100644
index 0000000..60e5982
--- /dev/null
+++ b/test/time-sem.c
@@ -0,0 +1,593 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+time-sem.c has the basics of the semaphores we use in http_main.c. It's
+intended for timing differences between various methods on an
+architecture. In practice we've found that many things affect which semaphore
+method should be used:
+
+ - NFS filesystems absolutely suck for fcntl() and flock()
+
+ - uslock absolutely sucks on single-processor IRIX boxes, but
+ absolutely rocks on multi-processor boxes. The converse
+ is true for fcntl. sysvsem seems a moderate balance.
+
+  - Under Solaris you can't have too many processes use SEM_UNDO; there
+    might be a tunable somewhere that increases the limit from 29.
+ We're not sure what the tunable is, so there's a define
+ NO_SEM_UNDO which can be used to simulate us trapping/blocking
+ signals to be able to properly release the semaphore on a clean
+ child death. You'll also need to define NEED_UNION_SEMUN
+ under solaris.
+
+You'll need to define USE_SHMGET_SCOREBOARD if anonymous shared mmap()
+doesn't work on your system (i.e. linux).
+
+argv[1] is the #children, argv[2] is the #iterations per child
+
+You should run each over many different #children inputs, and choose
+#iter such that the program runs for at least a second or so... or even
+longer depending on your patience.
+
+compile with:
+
+gcc -o time-FCNTL -Wall -O time-sem.c -DUSE_FCNTL_SERIALIZED_ACCEPT
+gcc -o time-FLOCK -Wall -O time-sem.c -DUSE_FLOCK_SERIALIZED_ACCEPT
+gcc -o time-SYSVSEM -Wall -O time-sem.c -DUSE_SYSVSEM_SERIALIZED_ACCEPT
+gcc -o time-SYSVSEM2 -Wall -O time-sem.c -DUSE_SYSVSEM_SERIALIZED_ACCEPT -DNO_SEM_UNDO
+gcc -o time-PTHREAD -Wall -O time-sem.c -DUSE_PTHREAD_SERIALIZED_ACCEPT -lpthread
+gcc -o time-USLOCK -Wall -O time-sem.c -DUSE_USLOCK_SERIALIZED_ACCEPT
+
+not all versions work on all systems.
+*/
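+
+/*
+ * Example runs (hypothetical counts; pick #iter so each run lasts at least
+ * a second or so):
+ *
+ *   ./time-FCNTL 10 10000
+ *   ./time-SYSVSEM 10 10000
+ *
+ * The single line printed at the end is the elapsed wall-clock time, in
+ * seconds, for all children to complete their iterations.
+ */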
+
+#include <errno.h>
+#include <sys/time.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <sys/wait.h>
+#include <sys/mman.h>
+#include <signal.h>
+
+#if defined(USE_FCNTL_SERIALIZED_ACCEPT)
+
+static struct flock lock_it;
+static struct flock unlock_it;
+
+static int fcntl_fd=-1;
+
+#define accept_mutex_child_init()
+#define accept_mutex_cleanup()
+
+/*
+ * Initialize mutex lock.
+ * Must be safe to call this on a restart.
+ */
+void
+accept_mutex_init(void)
+{
+
+    lock_it.l_whence = SEEK_SET;   /* from start of file */
+    lock_it.l_start = 0;           /* -"- */
+    lock_it.l_len = 0;             /* until end of file */
+    lock_it.l_type = F_WRLCK;      /* set exclusive/write lock */
+    lock_it.l_pid = 0;             /* pid not actually interesting */
+    unlock_it.l_whence = SEEK_SET; /* from start of file */
+    unlock_it.l_start = 0;         /* -"- */
+    unlock_it.l_len = 0;           /* until end of file */
+    unlock_it.l_type = F_UNLCK;    /* remove the lock */
+    unlock_it.l_pid = 0;           /* pid not actually interesting */
+
+ printf("opening test-lock-thing in current directory\n");
+ fcntl_fd = open("test-lock-thing", O_CREAT | O_WRONLY | O_EXCL, 0644);
+ if (fcntl_fd == -1)
+ {
+ perror ("open");
+ fprintf (stderr, "Cannot open lock file: %s\n", "test-lock-thing");
+ exit (1);
+ }
+ unlink("test-lock-thing");
+}
+
+void accept_mutex_on(void)
+{
+ int ret;
+
+ while ((ret = fcntl(fcntl_fd, F_SETLKW, &lock_it)) < 0 && errno == EINTR)
+ continue;
+
+ if (ret < 0) {
+ perror ("fcntl lock_it");
+ exit(1);
+ }
+}
+
+void accept_mutex_off(void)
+{
+ if (fcntl (fcntl_fd, F_SETLKW, &unlock_it) < 0)
+ {
+ perror ("fcntl unlock_it");
+ exit(1);
+ }
+}
+
+#elif defined(USE_FLOCK_SERIALIZED_ACCEPT)
+
+#include <sys/file.h>
+
+static int flock_fd=-1;
+
+#define FNAME "test-lock-thing"
+
+/*
+ * Initialize mutex lock.
+ * Must be safe to call this on a restart.
+ */
+void accept_mutex_init(void)
+{
+
+ printf("opening " FNAME " in current directory\n");
+ flock_fd = open(FNAME, O_CREAT | O_WRONLY | O_EXCL, 0644);
+ if (flock_fd == -1)
+ {
+ perror ("open");
+ fprintf (stderr, "Cannot open lock file: %s\n", "test-lock-thing");
+ exit (1);
+ }
+}
+
+void accept_mutex_child_init(void)
+{
+ flock_fd = open(FNAME, O_WRONLY, 0600);
+ if (flock_fd == -1) {
+ perror("open");
+ exit(1);
+ }
+}
+
+void accept_mutex_cleanup(void)
+{
+ unlink(FNAME);
+}
+
+void accept_mutex_on(void)
+{
+ int ret;
+
+ while ((ret = flock(flock_fd, LOCK_EX)) < 0 && errno == EINTR)
+ continue;
+
+ if (ret < 0) {
+ perror ("flock(LOCK_EX)");
+ exit(1);
+ }
+}
+
+void accept_mutex_off(void)
+{
+ if (flock (flock_fd, LOCK_UN) < 0)
+ {
+ perror ("flock(LOCK_UN)");
+ exit(1);
+ }
+}
+
+#elif defined (USE_SYSVSEM_SERIALIZED_ACCEPT)
+
+#include <sys/types.h>
+#include <sys/ipc.h>
+#include <sys/sem.h>
+
+static int sem_id = -1;
+#ifdef NO_SEM_UNDO
+static sigset_t accept_block_mask;
+static sigset_t accept_previous_mask;
+#endif
+
+#define accept_mutex_child_init()
+#define accept_mutex_cleanup()
+
+void accept_mutex_init(void)
+{
+#ifdef NEED_UNION_SEMUN
+ /* believe it or not, you need to define this under solaris */
+ union semun {
+ int val;
+ struct semid_ds *buf;
+ ushort *array;
+ };
+#endif
+
+ union semun ick;
+
+ sem_id = semget(999, 1, IPC_CREAT | 0666);
+ if (sem_id < 0) {
+ perror ("semget");
+ exit (1);
+ }
+ ick.val = 1;
+ if (semctl(sem_id, 0, SETVAL, ick) < 0) {
+ perror ("semctl");
+ exit(1);
+ }
+#ifdef NO_SEM_UNDO
+ sigfillset(&accept_block_mask);
+ sigdelset(&accept_block_mask, SIGHUP);
+ sigdelset(&accept_block_mask, SIGTERM);
+ sigdelset(&accept_block_mask, SIGUSR1);
+#endif
+}
+
+void accept_mutex_on()
+{
+ struct sembuf op;
+
+#ifdef NO_SEM_UNDO
+ if (sigprocmask(SIG_BLOCK, &accept_block_mask, &accept_previous_mask)) {
+ perror("sigprocmask(SIG_BLOCK)");
+ exit (1);
+ }
+ op.sem_flg = 0;
+#else
+ op.sem_flg = SEM_UNDO;
+#endif
+ op.sem_num = 0;
+ op.sem_op = -1;
+ if (semop(sem_id, &op, 1) < 0) {
+ perror ("accept_mutex_on");
+ exit (1);
+ }
+}
+
+void accept_mutex_off()
+{
+ struct sembuf op;
+
+ op.sem_num = 0;
+ op.sem_op = 1;
+#ifdef NO_SEM_UNDO
+ op.sem_flg = 0;
+#else
+ op.sem_flg = SEM_UNDO;
+#endif
+ if (semop(sem_id, &op, 1) < 0) {
+ perror ("accept_mutex_off");
+ exit (1);
+ }
+#ifdef NO_SEM_UNDO
+ if (sigprocmask(SIG_SETMASK, &accept_previous_mask, NULL)) {
+ perror("sigprocmask(SIG_SETMASK)");
+ exit (1);
+ }
+#endif
+}
+
+#elif defined (USE_PTHREAD_SERIALIZED_ACCEPT)
+
+/* note: pthread mutexes aren't released on child death, hence the
+ * signal goop ... in a real implementation we'd do special things
+ * during hup, term, usr1.
+ */
+
+#include <pthread.h>
+
+static pthread_mutex_t *mutex;
+static sigset_t accept_block_mask;
+static sigset_t accept_previous_mask;
+
+#define accept_mutex_child_init()
+#define accept_mutex_cleanup()
+
+void accept_mutex_init(void)
+{
+ pthread_mutexattr_t mattr;
+ int fd;
+
+ fd = open ("/dev/zero", O_RDWR);
+ if (fd == -1) {
+ perror ("open(/dev/zero)");
+ exit (1);
+ }
+ mutex = (pthread_mutex_t *)mmap ((caddr_t)0, sizeof (*mutex),
+ PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0);
+ if (mutex == (void *)(caddr_t)-1) {
+ perror ("mmap");
+ exit (1);
+ }
+ close (fd);
+ if (pthread_mutexattr_init(&mattr)) {
+ perror ("pthread_mutexattr_init");
+ exit (1);
+ }
+ if (pthread_mutexattr_setpshared(&mattr, PTHREAD_PROCESS_SHARED)) {
+ perror ("pthread_mutexattr_setpshared");
+ exit (1);
+ }
+ if (pthread_mutex_init(mutex, &mattr)) {
+ perror ("pthread_mutex_init");
+ exit (1);
+ }
+ sigfillset(&accept_block_mask);
+ sigdelset(&accept_block_mask, SIGHUP);
+ sigdelset(&accept_block_mask, SIGTERM);
+ sigdelset(&accept_block_mask, SIGUSR1);
+}
+
+void accept_mutex_on()
+{
+ if (sigprocmask(SIG_BLOCK, &accept_block_mask, &accept_previous_mask)) {
+ perror("sigprocmask(SIG_BLOCK)");
+ exit (1);
+ }
+ if (pthread_mutex_lock (mutex)) {
+ perror ("pthread_mutex_lock");
+ exit (1);
+ }
+}
+
+void accept_mutex_off()
+{
+ if (pthread_mutex_unlock (mutex)) {
+ perror ("pthread_mutex_unlock");
+ exit (1);
+ }
+ if (sigprocmask(SIG_SETMASK, &accept_previous_mask, NULL)) {
+ perror("sigprocmask(SIG_SETMASK)");
+ exit (1);
+ }
+}
+
+#elif defined (USE_USLOCK_SERIALIZED_ACCEPT)
+
+#include <ulocks.h>
+
+static usptr_t *us = NULL;
+static ulock_t uslock = NULL;
+
+#define accept_mutex_child_init()
+#define accept_mutex_cleanup()
+
+void accept_mutex_init(void)
+{
+ ptrdiff_t old;
+ /* default is 8 */
+#define CONF_INITUSERS_MAX 15
+ if ((old = usconfig(CONF_INITUSERS, CONF_INITUSERS_MAX)) == -1) {
+ perror("usconfig");
+ exit(-1);
+ }
+ if ((old = usconfig(CONF_LOCKTYPE, US_NODEBUG)) == -1) {
+ perror("usconfig");
+ exit(-1);
+ }
+ if ((old = usconfig(CONF_ARENATYPE, US_SHAREDONLY)) == -1) {
+ perror("usconfig");
+ exit(-1);
+ }
+ if ((us = usinit("/dev/zero")) == NULL) {
+ perror("usinit");
+ exit(-1);
+ }
+ if ((uslock = usnewlock(us)) == NULL) {
+ perror("usnewlock");
+ exit(-1);
+ }
+}
+void accept_mutex_on()
+{
+ switch(ussetlock(uslock)) {
+ case 1:
+ /* got lock */
+ break;
+ case 0:
+ fprintf(stderr, "didn't get lock\n");
+ exit(-1);
+ case -1:
+ perror("ussetlock");
+ exit(-1);
+ }
+}
+void accept_mutex_off()
+{
+ if (usunsetlock(uslock) == -1) {
+ perror("usunsetlock");
+ exit(-1);
+ }
+}
+#endif
+
+
+#ifndef USE_SHMGET_SCOREBOARD
+static void *get_shared_mem(apr_size_t size)
+{
+ void *result;
+
+ /* allocate shared memory for the shared_counter */
+ result = (unsigned long *)mmap ((caddr_t)0, size,
+ PROT_READ|PROT_WRITE, MAP_ANON|MAP_SHARED, -1, 0);
+ if (result == (void *)(caddr_t)-1) {
+ perror ("mmap");
+ exit (1);
+ }
+ return result;
+}
+#else
+#include <sys/types.h>
+#include <sys/ipc.h>
+#ifdef HAVE_SYS_MUTEX_H
+#include <sys/mutex.h>
+#endif
+#include <sys/shm.h>
+
+static void *get_shared_mem(apr_size_t size)
+{
+ key_t shmkey = IPC_PRIVATE;
+ int shmid = -1;
+ void *result;
+#ifdef MOVEBREAK
+ char *obrk;
+#endif
+
+ if ((shmid = shmget(shmkey, size, IPC_CREAT | SHM_R | SHM_W)) == -1) {
+ perror("shmget");
+ exit(1);
+ }
+
+#ifdef MOVEBREAK
+ /*
+ * Some SysV systems place the shared segment WAY too close
+ * to the dynamic memory break point (sbrk(0)). This severely
+ * limits the use of malloc/sbrk in the program since sbrk will
+ * refuse to move past that point.
+ *
+ * To get around this, we move the break point "way up there",
+ * attach the segment and then move break back down. Ugly
+ */
+ if ((obrk = sbrk(MOVEBREAK)) == (char *) -1) {
+ perror("sbrk");
+ }
+#endif
+
+#define BADSHMAT ((void *)(-1))
+ if ((result = shmat(shmid, 0, 0)) == BADSHMAT) {
+ perror("shmat");
+ }
+ /*
+ * We must avoid leaving segments in the kernel's
+ * (small) tables.
+ */
+ if (shmctl(shmid, IPC_RMID, NULL) != 0) {
+ perror("shmctl(IPC_RMID)");
+ }
+ if (result == BADSHMAT) /* now bailout */
+ exit(1);
+
+#ifdef MOVEBREAK
+ if (obrk == (char *) -1)
+        return result;          /* nothing else to do */
+ if (sbrk(-(MOVEBREAK)) == (char *) -1) {
+ perror("sbrk 2");
+ }
+#endif
+ return result;
+}
+#endif
+
+#ifdef _POSIX_PRIORITY_SCHEDULING
+/* don't ask */
+#define _P __P
+#include <sched.h>
+#define YIELD sched_yield()
+#else
+#define YIELD do { struct timeval zero; zero.tv_sec = zero.tv_usec = 0; select(0,0,0,0,&zero); } while(0)
+#endif
+
+int main (int argc, char **argv)
+{
+ int num_iter;
+ int num_child;
+ int i;
+ struct timeval first;
+ struct timeval last;
+ long ms;
+ int pid;
+ unsigned long *shared_counter;
+
+ if (argc != 3) {
+        fprintf (stderr, "Usage: time-sem num-child num-iter\n");
+ exit (1);
+ }
+
+ num_child = atoi (argv[1]);
+ num_iter = atoi (argv[2]);
+
+ /* allocate shared memory for the shared_counter */
+ shared_counter = get_shared_mem(sizeof(*shared_counter));
+
+ /* initialize counter to 0 */
+ *shared_counter = 0;
+
+ accept_mutex_init ();
+
+ /* parent grabs mutex until done spawning children */
+ accept_mutex_on ();
+
+ for (i = 0; i < num_child; ++i) {
+ pid = fork();
+ if (pid == 0) {
+ /* child, do our thing */
+ accept_mutex_child_init();
+ for (i = 0; i < num_iter; ++i) {
+ unsigned long tmp;
+
+ accept_mutex_on ();
+ tmp = *shared_counter;
+ YIELD;
+ *shared_counter = tmp + 1;
+ accept_mutex_off ();
+ }
+ exit (0);
+ } else if (pid == -1) {
+ perror ("fork");
+ accept_mutex_off ();
+ exit (1);
+ }
+ }
+
+ /* a quick test to see that nothing is screwed up */
+ if (*shared_counter != 0) {
+ puts ("WTF! shared_counter != 0 before the children have been started!");
+ accept_mutex_off ();
+ exit (1);
+ }
+
+ gettimeofday (&first, NULL);
+ /* launch children into action */
+ accept_mutex_off ();
+ for (i = 0; i < num_child; ++i) {
+ if (wait(NULL) == -1) {
+ perror ("wait");
+ }
+ }
+ gettimeofday (&last, NULL);
+
+ if (*shared_counter != num_child * num_iter) {
+ printf ("WTF! shared_counter != num_child * num_iter!\n"
+ "shared_counter = %lu\nnum_child = %d\nnum_iter=%d\n",
+ *shared_counter,
+ num_child, num_iter);
+ }
+
+ last.tv_sec -= first.tv_sec;
+ ms = last.tv_usec - first.tv_usec;
+ if (ms < 0) {
+ --last.tv_sec;
+ ms += 1000000;
+ }
+ last.tv_usec = ms;
+ printf ("%8lu.%06lu\n", last.tv_sec, last.tv_usec);
+
+ accept_mutex_cleanup();
+
+ exit(0);
+}
+
diff --git a/test/travis_Dockerfile_slapd b/test/travis_Dockerfile_slapd
new file mode 100644
index 0000000..e0729e3
--- /dev/null
+++ b/test/travis_Dockerfile_slapd
@@ -0,0 +1,9 @@
+FROM ubuntu:bionic
+RUN echo slapd slapd/password1 password travis | debconf-set-selections
+RUN echo slapd slapd/password2 password travis | debconf-set-selections
+RUN echo slapd slapd/internal/adminpw password travis | debconf-set-selections
+RUN echo slapd slapd/internal/generated_adminpw password travis | debconf-set-selections
+RUN echo slapd slapd/domain string example.com | debconf-set-selections
+RUN apt-get update && apt-get -y install slapd ldap-utils
+# With -d passed, slapd stays in the foreground
+CMD /usr/sbin/slapd -d1 '-h ldap:// ldapi:///'
diff --git a/test/travis_Dockerfile_slapd.centos7 b/test/travis_Dockerfile_slapd.centos7
new file mode 100644
index 0000000..85bcf0a
--- /dev/null
+++ b/test/travis_Dockerfile_slapd.centos7
@@ -0,0 +1,5 @@
+FROM quay.io/centos/centos:7
+RUN yum install -y yum-utils && \
+ yum install -y openldap openldap-clients openldap-servers openldap-devel && \
+ yum -y clean all --enablerepo='*'
+CMD /usr/sbin/slapd -u ldap -d1 '-h ldap:// ldapi:///'
diff --git a/test/travis_before_linux.sh b/test/travis_before_linux.sh
new file mode 100755
index 0000000..23214f5
--- /dev/null
+++ b/test/travis_before_linux.sh
@@ -0,0 +1,178 @@
+#!/bin/bash -xe
+
+if test -v CLEAR_CACHE; then
+ rm -rf $HOME/root
+fi
+
+: Travis tag = ${TRAVIS_TAG}
+: Travis branch = ${TRAVIS_BRANCH}
+
+: /etc/hosts --
+cat /etc/hosts
+: -- ends
+
+# ### FIXME: This is a workaround: non-x86 builds have an IPv6
+# configuration which somehow breaks the test suite runs. It appears
+# that Apache::Test only configures the server to Listen on 0.0.0.0
+# (that is hard-coded), but then Apache::TestServer::wait_till_is_up()
+# tries to connect via ::1, which fails/times out.
+if grep ip6-localhost /etc/hosts; then
+ sudo sed -i "/ip6-/d" /etc/hosts
+ cat /etc/hosts
+fi
+
+# Use a rudimentary retry workflow as a workaround for svn export hanging for minutes.
+# Travis automatically kills a build if one step takes more than 10 minutes without
+# reporting any progress.
+function run_svn_export() {
+ local url=$1
+ local revision=$2
+ local dest_dir=$3
+ local max_tries=$4
+
+ # Disable -e to allow fail/retry
+ set +e
+
+ for i in $(seq 1 $max_tries)
+ do
+ timeout 60 svn export -r ${revision} --force -q $url $dest_dir
+ if [ $? -eq 0 ]; then
+ break
+ else
+ if [ $i -eq $max_tries ]; then
+ exit 1
+ else
+ sleep $((100 * i))
+ fi
+ fi
+ done
+
+ # Restore -e behavior after fail/retry
+ set -e
+}
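+
+# Hypothetical usage of the helper above, retrying the export up to 3 times:
+#   run_svn_export https://svn.apache.org/repos/asf/apr/apr/trunk HEAD $HOME/build/apr 3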
+
+function install_apx() {
+ local name=$1
+ local version=$2
+ local root=https://svn.apache.org/repos/asf/apr/${name}
+ local prefix=${HOME}/root/${name}-${version}
+ local build=${HOME}/build/${name}-${version}
+ local config=$3
+ local buildconf=$4
+
+ case $version in
+ trunk) url=${root}/trunk ;;
+ *.x) url=${root}/branches/${version} ;;
+ *) url=${root}/tags/${version} ;;
+ esac
+
+ local revision=`svn info --show-item last-changed-revision ${url}`
+
+ # Blow away the cached install root if the cached install is stale
+ # or doesn't match the expected configuration.
+ grep -q "${version} ${revision} ${config} CC=$CC" ${HOME}/root/.key-${name} || rm -rf ${prefix}
+ # TEST_H2 APR cache seems to be broken, do not use.
+ # Unknown why this happens on this CI job only and how to fix it
+ if test -v TEST_H2; then
+ rm -rf ${prefix}
+ fi
+
+ if test -d ${prefix}; then
+ return 0
+ fi
+
+ svn export -q -r ${revision} ${url} ${build}
+ pushd $build
+ ./buildconf ${buildconf}
+ ./configure --prefix=${prefix} ${config}
+ make -j2
+ make install
+ popd
+
+ echo ${version} ${revision} "${config}" "CC=${CC}" > ${HOME}/root/.key-${name}
+}
+
+# Allow gdb to auto-load $HOME/build/apache/httpd/.gdbinit
+echo "add-auto-load-safe-path $HOME/build/apache/httpd/.gdbinit" >> $HOME/.gdbinit
+
+# Prepare perl-framework test environment
+if ! test -v SKIP_TESTING; then
+ # Clear CPAN cache if necessary
+ if [ -v CLEAR_CACHE ]; then rm -rf ~/perl5; fi
+
+ cpanm --local-lib=~/perl5 local::lib && eval $(perl -I ~/perl5/lib/perl5/ -Mlocal::lib)
+
+ pkgs="Net::SSL LWP::Protocol::https \
+ LWP::Protocol::AnyEvent::http ExtUtils::Embed Test::More \
+ AnyEvent DateTime HTTP::DAV FCGI \
+ AnyEvent::WebSocket::Client Apache::Test"
+
+ # CPAN modules are to be used with the system Perl and always with
+ # CC=gcc, e.g. for the CC="gcc -m32" case the builds are not correct
+ # otherwise.
+ CC=gcc cpanm --notest $pkgs
+
+ # Set cache key.
+ echo $pkgs > ~/perl5/.key
+ unset pkgs
+
+ # Make a shallow clone of httpd-tests git repo.
+ git clone --depth=1 https://github.com/apache/httpd-tests.git test/perl-framework
+fi
+
+# For LDAP testing, run slapd listening on port 8389 and populate the
+# directory as described in t/modules/ldap.t in the test framework:
+if test -v TEST_LDAP -a -x test/perl-framework/scripts/ldap-init.sh; then
+ docker build -t httpd_ldap -f test/travis_Dockerfile_slapd.centos7 test/
+ pushd test/perl-framework
+ ./scripts/ldap-init.sh
+ popd
+fi
+
+if test -v TEST_SSL; then
+ pushd test/perl-framework
+ ./scripts/memcached-init.sh
+ ./scripts/redis-init.sh
+ popd
+fi
+
+if test -v TEST_OPENSSL3; then
+ # Build the requested version of OpenSSL if it's not already
+ # installed in the cached ~/root
+ if ! test -f $HOME/root/openssl-is-${TEST_OPENSSL3}; then
+ # Remove any previous install.
+ rm -rf $HOME/root/openssl3
+
+ mkdir -p build/openssl
+ pushd build/openssl
+ curl "https://www.openssl.org/source/openssl-${TEST_OPENSSL3}.tar.gz" |
+ tar -xzf -
+ cd openssl-${TEST_OPENSSL3}
+ ./Configure --prefix=$HOME/root/openssl3 shared no-tests
+ make $MFLAGS
+ make install_sw
+ touch $HOME/root/openssl-is-${TEST_OPENSSL3}
+ popd
+ fi
+
+ # Point APR/APR-util at the installed version of OpenSSL.
+ if test -v APU_VERSION; then
+ APU_CONFIG="${APU_CONFIG} --with-openssl=$HOME/root/openssl3"
+ elif test -v APR_VERSION; then
+ APR_CONFIG="${APR_CONFIG} --with-openssl=$HOME/root/openssl3"
+ else
+ : Non-system APR/APR-util must be used to build with OpenSSL 3 to avoid mismatch with system libraries
+ exit 1
+ fi
+fi
+
+if test -v APR_VERSION; then
+ install_apx apr ${APR_VERSION} "${APR_CONFIG}"
+ ldd $HOME/root/apr-${APR_VERSION}/lib/libapr-?.so || true
+ APU_CONFIG="$APU_CONFIG --with-apr=$HOME/root/apr-${APR_VERSION}"
+fi
+
+if test -v APU_VERSION; then
+ install_apx apr-util ${APU_VERSION} "${APU_CONFIG}" --with-apr=$HOME/build/apr-${APR_VERSION}
+ ldd $HOME/root/apr-util-${APU_VERSION}/lib/libaprutil-?.so || true
+fi
diff --git a/test/travis_run_linux.sh b/test/travis_run_linux.sh
new file mode 100755
index 0000000..e953fbd
--- /dev/null
+++ b/test/travis_run_linux.sh
@@ -0,0 +1,268 @@
+#!/bin/bash -ex
+
+# Test for APLOGNO() macro errors (duplicates, empty args) etc. For
+# trunk, run the updater script to see if it fails. If it succeeds
+# and changes any files (because there was a missing argument), the
+# git diff will be non-empty, so fail for that case too. For
+# non-trunk use a grep and only catch the empty argument case.
+if test -v TEST_LOGNO; then
+ if test -f docs/log-message-tags/update-log-msg-tags; then
+ find server modules os -name \*.c | \
+ xargs perl docs/log-message-tags/update-log-msg-tags
+ git diff --exit-code .
+ : PASSED
+ exit 0
+ else
+ set -o pipefail
+ if find server modules os -name \*.c | \
+ xargs grep -C1 --color=always 'APLOGNO()'; then
+ : FAILED
+ exit 1
+ else
+ : PASSED
+ exit 0
+ fi
+ fi
+fi
+
+### Installed apr/apr-util don't include the *.m4 files but the
+### Debian packages helpfully install them, so use the system APR to buildconf
+./buildconf --with-apr=/usr/bin/apr-1-config ${BUILDCONFIG}
+
+PREFIX=${PREFIX:-$HOME/build/httpd-root}
+
+# For trunk, "make check" is sufficient to run the test suite.
+# For 2.4.x, the test suite must be run manually
+if test ! -v SKIP_TESTING; then
+ CONFIG="$CONFIG --enable-load-all-modules"
+ if grep -q ^check: Makefile.in; then
+ CONFIG="--with-test-suite=test/perl-framework $CONFIG"
+ WITH_TEST_SUITE=1
+ fi
+
+ # Use the CPAN environment.
+ eval $(perl -I ~/perl5/lib/perl5/ -Mlocal::lib)
+fi
+if test -v APR_VERSION; then
+ CONFIG="$CONFIG --with-apr=$HOME/root/apr-${APR_VERSION}"
+else
+ CONFIG="$CONFIG --with-apr=/usr"
+fi
+if test -v APU_VERSION; then
+ CONFIG="$CONFIG --with-apr-util=$HOME/root/apr-util-${APU_VERSION}"
+else
+ CONFIG="$CONFIG --with-apr-util=/usr"
+fi
+
+# Since librustls is not a package (yet) on any platform, we
+# build the version we want from source
+if test -v TEST_MOD_TLS; then
+ RUSTLS_HOME="$HOME/build/rustls-ffi"
+ RUSTLS_VERSION="v0.10.0"
+ git clone -b "$RUSTLS_VERSION" https://github.com/rustls/rustls-ffi.git "$RUSTLS_HOME"
+ pushd "$RUSTLS_HOME"
+ make install DESTDIR="$PREFIX"
+ popd
+ CONFIG="$CONFIG --with-tls --with-rustls=$PREFIX"
+fi
+
+if test -v TEST_OPENSSL3; then
+ CONFIG="$CONFIG --with-ssl=$HOME/root/openssl3"
+ export LD_LIBRARY_PATH=$HOME/root/openssl3/lib:$HOME/root/openssl3/lib64
+fi
+
+srcdir=$PWD
+
+if test -v TEST_VPATH; then
+ mkdir ../vpath
+ cd ../vpath
+fi
+
+$srcdir/configure --prefix=$PREFIX $CONFIG
+make $MFLAGS
+
+if test -v TEST_INSTALL; then
+ make install
+ pushd $PREFIX
+ test `./bin/apxs -q PREFIX` = $PREFIX
+ test `$PWD/bin/apxs -q PREFIX` = $PREFIX
+ ./bin/apxs -g -n foobar
+ cd foobar; make
+ popd
+fi
+
+if ! test -v SKIP_TESTING; then
+ set +e
+ RV=0
+
+ if test -v TEST_MALLOC; then
+ # Enable enhanced glibc malloc debugging, see mallopt(3)
+ export MALLOC_PERTURB_=65 MALLOC_CHECK_=3
+ export LIBC_FATAL_STDERR_=1
+ fi
+
+ if test -v TEST_UBSAN; then
+ export UBSAN_OPTIONS="log_path=$PWD/ubsan.log"
+ fi
+
+ if test -v TEST_ASAN; then
+ export ASAN_OPTIONS="log_path=$PWD/asan.log"
+ fi
+
+ # Try to keep all potential coredumps from all processes
+ sudo sysctl -w kernel.core_uses_pid=1 2>/dev/null || true
+
+ if test -v WITH_TEST_SUITE; then
+ make check TESTS="${TESTS}" TEST_CONFIG="${TEST_ARGS}"
+ RV=$?
+ else
+ test -v TEST_INSTALL || make install
+ pushd test/perl-framework
+ perl Makefile.PL -apxs $PREFIX/bin/apxs
+ make test APACHE_TEST_EXTRA_ARGS="${TEST_ARGS} ${TESTS}" | tee test.log
+ RV=${PIPESTATUS[0]}
+ # re-run failing tests with -v, avoiding set -e
+ if [ $RV -ne 0 ]; then
+ #mv t/logs/error_log t/logs/error_log_save
+ FAILERS=""
+ while read FAILER; do
+ FAILERS="$FAILERS $FAILER"
+ done < <(awk '/Failed:/{print $1}' test.log)
+ if [ -n "$FAILERS" ]; then
+ t/TEST -v $FAILERS || true
+ fi
+ # set -e would have killed us after the original t/TEST
+ rm -f test.log
+ #mv t/logs/error_log_save t/logs/error_log
+ false
+ fi
+ popd
+ fi
+
+ # Skip further testing if a core dump was created during the test
+ # suite run above.
+ if test $RV -eq 0 && test -n "`ls test/perl-framework/t/core{,.*} 2>/dev/null`"; then
+ RV=4
+ fi
+
+ if test -v TEST_SSL -a $RV -eq 0; then
+ pushd test/perl-framework
+ # Test loading encrypted private keys
+ ./t/TEST -defines "TEST_SSL_DES3_KEY TEST_SSL_PASSPHRASE_EXEC" t/ssl
+ RV=$?
+
+ # Log the OpenSSL version.
+ grep 'mod_ssl.*compiled against' t/logs/error_log | tail -n 1
+
+ # Test various session cache backends
+ for cache in shmcb redis:localhost:6379 memcache:localhost:11211; do
+ test $RV -eq 0 || break
+
+ SSL_SESSCACHE=$cache ./t/TEST -sslproto TLSv1.2 -defines TEST_SSL_SESSCACHE -start
+ ./t/TEST t/ssl
+ RV=$?
+ ./t/TEST -stop
+ SRV=$?
+ if test $RV -eq 0 -a $SRV -ne 0; then
+ RV=$SRV
+ fi
+ done
+ popd
+ fi
+
+ if test -v LITMUS -a $RV -eq 0; then
+ pushd test/perl-framework
+ mkdir -p t/htdocs/modules/dav
+ ./t/TEST -start
+ # litmus uses $TESTS, so unset it.
+ unset TESTS
+ litmus http://localhost:8529/modules/dav/
+ RV=$?
+ ./t/TEST -stop
+ popd
+ fi
+
+ if test $RV -ne 0 && test -f test/perl-framework/t/logs/error_log; then
+ grep -v ':\(debug\|trace[12345678]\)\]' test/perl-framework/t/logs/error_log
+ fi
+
+ if test -v TEST_CORE -a $RV -eq 0; then
+        # Run the core module tests.
+ MPM=event py.test-3 test/modules/core
+ RV=$?
+ fi
+
+ if test -v TEST_H2 -a $RV -eq 0; then
+ # Build the test clients
+ (cd test/clients && make)
+ # Run HTTP/2 tests.
+ MPM=event py.test-3 test/modules/http2
+ RV=$?
+ if test $RV -eq 0; then
+ MPM=worker py.test-3 test/modules/http2
+ RV=$?
+ fi
+ fi
+
+ if test -v TEST_MD -a $RV -eq 0; then
+ # Run ACME tests.
+        # pebble (Go-based) is needed as the ACME test server; it is
+        # packaged on Debian sid, but not on focal, so fetch it via go get.
+ export GOPATH=${PREFIX}/gocode
+ mkdir -p "${GOPATH}"
+ export PATH="${GOROOT}/bin:${GOPATH}/bin:${PATH}"
+ go get -u github.com/letsencrypt/pebble/...
+ (cd $GOPATH/src/github.com/letsencrypt/pebble && go install ./...)
+
+ py.test-3 test/modules/md
+ RV=$?
+ fi
+
+ if test -v TEST_MOD_TLS -a $RV -eq 0; then
+        # Run mod_tls tests. The underlying librustls was built
+        # and installed before we configured the server (see top of file).
+        # This will be replaced once librustls is available as a package.
+ py.test-3 test/modules/tls
+ RV=$?
+ fi
+
+ # Catch cases where abort()s get logged to stderr by libraries but
+ # only cause child processes to terminate e.g. during shutdown,
+ # which may not otherwise trigger test failures.
+
+ # "glibc detected": printed with LIBC_FATAL_STDERR_/MALLOC_CHECK_
+ # glibc will abort when malloc errors are detected. This will get
+ # caught by the segfault grep as well.
+
+ # "pool concurrency check": printed by APR built with
+ # --enable-thread-debug when an APR pool concurrency check aborts
+
+ for phrase in 'Segmentation fault' 'glibc detected' 'pool concurrency check:' 'Assertion.*failed'; do
+ # Ignore IO/debug logs
+ if grep -v ':\(debug\|trace[12345678]\)\]' test/perl-framework/t/logs/error_log | grep -q "$phrase"; then
+ grep --color=always -C5 "$phrase" test/perl-framework/t/logs/error_log
+ RV=2
+ fi
+ done
+
+ if test -v TEST_UBSAN && test -n "`ls ubsan.log.* 2>/dev/null`"; then
+ cat ubsan.log.*
+ RV=3
+ fi
+
+ if test -v TEST_ASAN && test -n "`ls asan.log.* 2>/dev/null`"; then
+ cat asan.log.*
+
+ # ASan can report memory leaks, fail on errors only
+ if grep -q "ERROR: AddressSanitizer:" `ls asan.log.*`; then
+ RV=4
+ fi
+ fi
+
+ for core in `ls test/perl-framework/t/core{,.*} 2>/dev/null`; do
+ gdb -ex 'thread apply all backtrace full' -batch ./httpd "$core"
+ RV=5
+ done
+
+ exit $RV
+fi