author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 19:09:22 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-08 19:09:22 +0000
commit     2faa747e2303ee774a4b4aace961188e950e185a (patch)
tree       604e79c7481956ce48f458e3546eaf1090b3ffff /test/modules/http2
parent     Initial commit. (diff)
Adding upstream version 2.4.58. (tag: upstream/2.4.58)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat
-rw-r--r--  test/modules/http2/.gitignore | 3
-rw-r--r--  test/modules/http2/Makefile.in | 20
-rw-r--r--  test/modules/http2/__init__.py | 0
-rw-r--r--  test/modules/http2/conftest.py | 40
-rw-r--r--  test/modules/http2/env.py | 169
-rw-r--r--  test/modules/http2/htdocs/cgi/alive.json | 4
-rw-r--r--  test/modules/http2/htdocs/cgi/echo.py | 16
-rw-r--r--  test/modules/http2/htdocs/cgi/echohd.py | 22
-rw-r--r--  test/modules/http2/htdocs/cgi/env.py | 41
-rw-r--r--  test/modules/http2/htdocs/cgi/files/empty.txt | 0
-rw-r--r--  test/modules/http2/htdocs/cgi/hecho.py | 48
-rw-r--r--  test/modules/http2/htdocs/cgi/hello.py | 25
-rw-r--r--  test/modules/http2/htdocs/cgi/mnot164.py | 16
-rw-r--r--  test/modules/http2/htdocs/cgi/necho.py | 56
-rw-r--r--  test/modules/http2/htdocs/cgi/requestparser.py | 57
-rw-r--r--  test/modules/http2/htdocs/cgi/ssi/include.inc | 1
-rw-r--r--  test/modules/http2/htdocs/cgi/ssi/test.html | 9
-rw-r--r--  test/modules/http2/htdocs/cgi/upload.py | 55
-rw-r--r--  test/modules/http2/htdocs/cgi/xxx/test.json | 1
-rw-r--r--  test/modules/http2/htdocs/noh2/alive.json | 5
-rw-r--r--  test/modules/http2/htdocs/noh2/index.html | 9
-rw-r--r--  test/modules/http2/mod_h2test/mod_h2test.c | 588
-rw-r--r--  test/modules/http2/mod_h2test/mod_h2test.h | 21
-rw-r--r--  test/modules/http2/test_001_httpd_alive.py | 22
-rw-r--r--  test/modules/http2/test_002_curl_basics.py | 71
-rw-r--r--  test/modules/http2/test_003_get.py | 265
-rw-r--r--  test/modules/http2/test_004_post.py | 201
-rw-r--r--  test/modules/http2/test_005_files.py | 48
-rw-r--r--  test/modules/http2/test_006_assets.py | 75
-rw-r--r--  test/modules/http2/test_007_ssi.py | 43
-rw-r--r--  test/modules/http2/test_008_ranges.py | 189
-rw-r--r--  test/modules/http2/test_009_timing.py | 74
-rw-r--r--  test/modules/http2/test_100_conn_reuse.py | 57
-rw-r--r--  test/modules/http2/test_101_ssl_reneg.py | 138
-rw-r--r--  test/modules/http2/test_102_require.py | 41
-rw-r--r--  test/modules/http2/test_103_upgrade.py | 118
-rw-r--r--  test/modules/http2/test_104_padding.py | 104
-rw-r--r--  test/modules/http2/test_105_timeout.py | 152
-rw-r--r--  test/modules/http2/test_106_shutdown.py | 75
-rw-r--r--  test/modules/http2/test_107_frame_lengths.py | 51
-rw-r--r--  test/modules/http2/test_200_header_invalid.py | 207
-rw-r--r--  test/modules/http2/test_201_header_conditional.py | 70
-rw-r--r--  test/modules/http2/test_202_trailer.py | 92
-rw-r--r--  test/modules/http2/test_203_rfc9113.py | 56
-rw-r--r--  test/modules/http2/test_300_interim.py | 40
-rw-r--r--  test/modules/http2/test_400_push.py | 200
-rw-r--r--  test/modules/http2/test_401_early_hints.py | 83
-rw-r--r--  test/modules/http2/test_500_proxy.py | 157
-rw-r--r--  test/modules/http2/test_501_proxy_serverheader.py | 36
-rw-r--r--  test/modules/http2/test_502_proxy_port.py | 41
-rw-r--r--  test/modules/http2/test_503_proxy_fwd.py | 79
-rw-r--r--  test/modules/http2/test_600_h2proxy.py | 201
-rw-r--r--  test/modules/http2/test_601_h2proxy_twisted.py | 99
-rw-r--r--  test/modules/http2/test_700_load_get.py | 63
-rw-r--r--  test/modules/http2/test_710_load_post_static.py | 65
-rw-r--r--  test/modules/http2/test_711_load_post_cgi.py | 73
-rw-r--r--  test/modules/http2/test_712_buffering.py | 48
-rw-r--r--  test/modules/http2/test_800_websockets.py | 363
-rw-r--r--  test/modules/http2/ws_server.py | 104
59 files changed, 5007 insertions, 0 deletions
diff --git a/test/modules/http2/.gitignore b/test/modules/http2/.gitignore
new file mode 100644
index 0000000..d68cd09
--- /dev/null
+++ b/test/modules/http2/.gitignore
@@ -0,0 +1,3 @@
+gen
+config.ini
+__pycache__
diff --git a/test/modules/http2/Makefile.in b/test/modules/http2/Makefile.in
new file mode 100644
index 0000000..15d404d
--- /dev/null
+++ b/test/modules/http2/Makefile.in
@@ -0,0 +1,20 @@
+
+# no targets: we don't want to build anything by default. If you want the
+# test programs, run "make test".
+TARGETS =
+
+bin_PROGRAMS =
+
+PROGRAM_LDADD = $(EXTRA_LDFLAGS) $(PROGRAM_DEPENDENCIES) $(EXTRA_LIBS)
+PROGRAM_DEPENDENCIES = \
+ $(top_srcdir)/srclib/apr-util/libaprutil.la \
+ $(top_srcdir)/srclib/apr/libapr.la
+
+include $(top_builddir)/build/rules.mk
+
+test: $(bin_PROGRAMS)
+
+# example for building a test proggie
+# dbu_OBJECTS = dbu.lo
+# dbu: $(dbu_OBJECTS)
+# $(LINK) $(dbu_OBJECTS) $(PROGRAM_LDADD)
diff --git a/test/modules/http2/__init__.py b/test/modules/http2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/http2/__init__.py
diff --git a/test/modules/http2/conftest.py b/test/modules/http2/conftest.py
new file mode 100644
index 0000000..55d0c3a
--- /dev/null
+++ b/test/modules/http2/conftest.py
@@ -0,0 +1,40 @@
+import logging
+import os
+
+import pytest
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
+
+from .env import H2TestEnv
+
+
+def pytest_report_header(config, startdir):
+ env = H2TestEnv()
+ return f"mod_h2 [apache: {env.get_httpd_version()}, mpm: {env.mpm_module}, {env.prefix}]"
+
+
+@pytest.fixture(scope="package")
+def env(pytestconfig) -> H2TestEnv:
+ level = logging.INFO
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ logging.getLogger('').addHandler(console)
+ logging.getLogger('').setLevel(level=level)
+ env = H2TestEnv(pytestconfig=pytestconfig)
+ env.setup_httpd()
+ env.apache_access_log_clear()
+ env.httpd_error_log.clear_log()
+ return env
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _session_scope(env):
+ yield
+ assert env.apache_stop() == 0
+ errors, warnings = env.httpd_error_log.get_missed()
+ assert (len(errors), len(warnings)) == (0, 0),\
+ f"apache logged {len(errors)} errors and {len(warnings)} warnings: \n"\
+ "{0}\n{1}\n".format("\n".join(errors), "\n".join(warnings))
+
diff --git a/test/modules/http2/env.py b/test/modules/http2/env.py
new file mode 100644
index 0000000..34d196d
--- /dev/null
+++ b/test/modules/http2/env.py
@@ -0,0 +1,169 @@
+import inspect
+import logging
+import os
+import re
+import subprocess
+from typing import Dict, Any
+
+from pyhttpd.certs import CertificateSpec
+from pyhttpd.conf import HttpdConf
+from pyhttpd.env import HttpdTestEnv, HttpdTestSetup
+
+log = logging.getLogger(__name__)
+
+
+class H2TestSetup(HttpdTestSetup):
+
+ def __init__(self, env: 'HttpdTestEnv'):
+ super().__init__(env=env)
+ self.add_source_dir(os.path.dirname(inspect.getfile(H2TestSetup)))
+ self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl", "include"])
+
+ def make(self):
+ super().make()
+ self._add_h2test()
+ self._setup_data_1k_1m()
+
+ def _add_h2test(self):
+ local_dir = os.path.dirname(inspect.getfile(H2TestSetup))
+ p = subprocess.run([self.env.apxs, '-c', 'mod_h2test.c'],
+ capture_output=True,
+ cwd=os.path.join(local_dir, 'mod_h2test'))
+ rv = p.returncode
+ if rv != 0:
+ log.error(f"compiling mod_h2test failed: {p.stderr}")
+ raise Exception(f"compiling mod_h2test failed: {p.stderr}")
+
+ modules_conf = os.path.join(self.env.server_dir, 'conf/modules.conf')
+ with open(modules_conf, 'a') as fd:
+ # load our test module which is not installed
+ fd.write(f"LoadModule h2test_module \"{local_dir}/mod_h2test/.libs/mod_h2test.so\"\n")
+
+ def _setup_data_1k_1m(self):
+ s90 = "01234567890123456789012345678901234567890123456789012345678901234567890123456789012345678\n"
+ with open(os.path.join(self.env.gen_dir, "data-1k"), 'w') as f:
+ for i in range(10):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-10k"), 'w') as f:
+ for i in range(100):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-100k"), 'w') as f:
+ for i in range(1000):
+ f.write(f"{i:09d}-{s90}")
+ with open(os.path.join(self.env.gen_dir, "data-1m"), 'w') as f:
+ for i in range(10000):
+ f.write(f"{i:09d}-{s90}")
+
+
+class H2TestEnv(HttpdTestEnv):
+
+ @classmethod
+ @property
+ def is_unsupported(cls):
+ mpm_module = f"mpm_{os.environ['MPM']}" if 'MPM' in os.environ else 'mpm_event'
+ return mpm_module == 'mpm_prefork'
+
+ def __init__(self, pytestconfig=None):
+ super().__init__(pytestconfig=pytestconfig)
+ self.add_httpd_conf([
+ "H2MinWorkers 1",
+ "H2MaxWorkers 64",
+ "Protocols h2 http/1.1 h2c",
+ ])
+ self.add_httpd_log_modules(["http2", "proxy_http2", "h2test", "proxy", "proxy_http"])
+ self.add_cert_specs([
+ CertificateSpec(domains=[
+ f"push.{self._http_tld}",
+ f"hints.{self._http_tld}",
+ f"ssl.{self._http_tld}",
+ f"pad0.{self._http_tld}",
+ f"pad1.{self._http_tld}",
+ f"pad2.{self._http_tld}",
+ f"pad3.{self._http_tld}",
+ f"pad8.{self._http_tld}",
+ ]),
+ CertificateSpec(domains=[f"noh2.{self.http_tld}"], key_type='rsa2048'),
+ ])
+
+ self.httpd_error_log.set_ignored_lognos([
+ 'AH02032',
+ 'AH01276',
+ 'AH01630',
+ 'AH00135',
+ 'AH02261', # Re-negotiation handshake failed (our test_101)
+ 'AH03490', # scoreboard full, happens on limit tests
+ 'AH02429', # invalid chars in response header names, see test_h2_200
+ 'AH02430', # invalid chars in response header values, see test_h2_200
+ 'AH10373', # SSL errors on uncompleted handshakes, see test_h2_105
+ 'AH01247', # mod_cgid sometimes freaks out on load tests
+ 'AH01110', # error by proxy reading response
+ 'AH10400', # warning that 'enablereuse' has no effect in certain configs, see test_h2_600
+ 'AH00045', # child did not exit in time, SIGTERM was sent
+ ])
+ self.httpd_error_log.add_ignored_patterns([
+ re.compile(r'.*malformed header from script \'hecho.py\': Bad header: x.*'),
+ re.compile(r'.*:tls_post_process_client_hello:.*'),
+ # OSSL 3 dropped the function name from the error description. Use the code instead:
+ # 0A0000C1 = no shared cipher -- Too restrictive SSLCipherSuite or using DSA server certificate?
+ re.compile(r'.*SSL Library Error: error:0A0000C1:.*'),
+ re.compile(r'.*:tls_process_client_certificate:.*'),
+ # OSSL 3 dropped the function name from the error description. Use the code instead:
+ # 0A0000C7 = peer did not return a certificate -- No CAs known to server for verification?
+ re.compile(r'.*SSL Library Error: error:0A0000C7:.*'),
+ re.compile(r'.*have incompatible TLS configurations.'),
+ ])
+
+ def setup_httpd(self, setup: HttpdTestSetup = None):
+ super().setup_httpd(setup=H2TestSetup(env=self))
+
+
+class H2Conf(HttpdConf):
+
+ def __init__(self, env: HttpdTestEnv, extras: Dict[str, Any] = None):
+ super().__init__(env=env, extras=HttpdConf.merge_extras(extras, {
+ f"cgi.{env.http_tld}": [
+ "SSLOptions +StdEnvVars",
+ "AddHandler cgi-script .py",
+ "<Location \"/h2test/echo\">",
+ " SetHandler h2test-echo",
+ "</Location>",
+ "<Location \"/h2test/delay\">",
+ " SetHandler h2test-delay",
+ "</Location>",
+ "<Location \"/h2test/error\">",
+ " SetHandler h2test-error",
+ "</Location>",
+ ]
+ }))
+
+ def start_vhost(self, domains, port=None, doc_root="htdocs", with_ssl=None,
+ ssl_module=None, with_certificates=None):
+ super().start_vhost(domains=domains, port=port, doc_root=doc_root,
+ with_ssl=with_ssl, ssl_module=ssl_module,
+ with_certificates=with_certificates)
+ if f"noh2.{self.env.http_tld}" in domains:
+ protos = ["http/1.1"]
+ elif port == self.env.https_port or with_ssl is True:
+ protos = ["h2", "http/1.1"]
+ else:
+ protos = ["h2c", "http/1.1"]
+ if f"test2.{self.env.http_tld}" in domains:
+ protos = reversed(protos)
+ self.add(f"Protocols {' '.join(protos)}")
+ return self
+
+ def add_vhost_noh2(self):
+ domains = [f"noh2.{self.env.http_tld}", f"noh2-alias.{self.env.http_tld}"]
+ self.start_vhost(domains=domains, port=self.env.https_port, doc_root="htdocs/noh2")
+ self.add(["Protocols http/1.1", "SSLOptions +StdEnvVars"])
+ self.end_vhost()
+ self.start_vhost(domains=domains, port=self.env.http_port, doc_root="htdocs/noh2")
+ self.add(["Protocols http/1.1", "SSLOptions +StdEnvVars"])
+ self.end_vhost()
+ return self
+
+ def add_vhost_test1(self, proxy_self=False, h2proxy_self=False):
+ return super().add_vhost_test1(proxy_self=proxy_self, h2proxy_self=h2proxy_self)
+
+ def add_vhost_test2(self):
+ return super().add_vhost_test2()
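
The H2TestEnv / H2Conf pair defined above is what every test module further down builds on. A condensed sketch of that pattern (the real thing is test_001_httpd_alive.py later in this patch; `env`, `env.mkurl` and `env.curl_get` come from the package-scoped fixture in conftest.py and the pyhttpd base classes):

    import pytest
    from .env import H2Conf, H2TestEnv

    @pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
    class TestSketch:

        @pytest.fixture(autouse=True, scope='class')
        def _class_scope(self, env):
            # install a vhost configuration built with H2Conf, then restart httpd
            H2Conf(env).add_vhost_test1().install()
            assert env.apache_restart() == 0

        def test_sketch_alive(self, env):
            # fetch a document over h2 and check the JSON body
            r = env.curl_get(env.mkurl("https", "test1", "/alive.json"), 5)
            assert r.response["status"] == 200
            assert r.response["json"]["alive"] is True
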
diff --git a/test/modules/http2/htdocs/cgi/alive.json b/test/modules/http2/htdocs/cgi/alive.json
new file mode 100644
index 0000000..defe2c2
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/alive.json
@@ -0,0 +1,4 @@
+{
+ "host" : "cgi",
+ "alive" : true
+}
diff --git a/test/modules/http2/htdocs/cgi/echo.py b/test/modules/http2/htdocs/cgi/echo.py
new file mode 100644
index 0000000..c9083e1
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/echo.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+import os, sys
+import multipart
+
+status = '200 Ok'
+
+content = ''
+for line in sys.stdin:
+ content += line
+
+# Just echo what we get
+print("Status: 200")
+print(f"Request-Length: {len(content)}")
+print("Content-Type: application/data\n")
+sys.stdout.write(content)
+
diff --git a/test/modules/http2/htdocs/cgi/echohd.py b/test/modules/http2/htdocs/cgi/echohd.py
new file mode 100644
index 0000000..a85a4e3
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/echohd.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+name = forms['name'] if 'name' in forms else None
+
+if name:
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+ print("""%s: %s""" % (name, os.environ['HTTP_'+name]))
+else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+<html><body>
+<p>No name was specified</p>
+</body></html>""")
+
+
diff --git a/test/modules/http2/htdocs/cgi/env.py b/test/modules/http2/htdocs/cgi/env.py
new file mode 100644
index 0000000..455c623
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/env.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+try:
+ ename = forms['name']
+
+ # Test if the file was uploaded
+ if ename is not None:
+ val = os.environ[ename] if ename in os.environ else ""
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+ print(f"{ename}={val}")
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No name was specified: name</p>
+ </body></html>""")
+
+except KeyError:
+ print("Status: 200 Ok")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="name">
+ <button type="submit">submit</button></form>
+ </body></html>""")
+ pass
+
+
+
diff --git a/test/modules/http2/htdocs/cgi/files/empty.txt b/test/modules/http2/htdocs/cgi/files/empty.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/files/empty.txt
diff --git a/test/modules/http2/htdocs/cgi/hecho.py b/test/modules/http2/htdocs/cgi/hecho.py
new file mode 100644
index 0000000..abffd33
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/hecho.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+try:
+
+ # A nested FieldStorage instance holds the file
+ name = forms['name']
+ value = ''
+
+ try:
+ value = forms['value']
+ except KeyError:
+ value = os.environ.get("HTTP_"+name, "unset")
+
+ # Test if a value was given
+ if name:
+ print("Status: 200")
+ print("%s: %s" % (name, value,))
+ print ("""\
+Content-Type: text/plain\n""")
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No name and value was specified: %s %s</p>
+ </body></html>""" % (name, value))
+
+except KeyError:
+ print("Status: 200 Ok")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="name">
+ <input type="text" name="value">
+ <button type="submit">Echo</button></form>
+ </body></html>""")
+ pass
+
+
diff --git a/test/modules/http2/htdocs/cgi/hello.py b/test/modules/http2/htdocs/cgi/hello.py
new file mode 100644
index 0000000..a96da8a
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/hello.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import os
+import json
+
+resp = {
+ 'https': os.getenv('HTTPS', ''),
+ 'host': os.getenv('X_HOST', '') if 'X_HOST' in os.environ else os.getenv('SERVER_NAME', ''),
+ 'server': os.getenv('SERVER_NAME', ''),
+ 'h2_original_host': os.getenv('H2_ORIGINAL_HOST', ''),
+ 'port': os.getenv('SERVER_PORT', ''),
+ 'protocol': os.getenv('SERVER_PROTOCOL', ''),
+ 'ssl_protocol': os.getenv('SSL_PROTOCOL', ''),
+ 'h2': os.getenv('HTTP2', ''),
+ 'h2push': os.getenv('H2PUSH', ''),
+ 'h2_stream_id': os.getenv('H2_STREAM_ID', ''),
+ 'x-forwarded-for': os.getenv('HTTP_X_FORWARDED_FOR', ''),
+ 'x-forwarded-host': os.getenv('HTTP_X_FORWARDED_HOST', ''),
+ 'x-forwarded-server': os.getenv('HTTP_X_FORWARDED_SERVER', ''),
+}
+
+print("Content-Type: application/json")
+print()
+print(json.JSONEncoder(indent=2).encode(resp))
+
diff --git a/test/modules/http2/htdocs/cgi/mnot164.py b/test/modules/http2/htdocs/cgi/mnot164.py
new file mode 100644
index 0000000..43a86ea
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/mnot164.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+text = forms['text'] if 'text' in forms else "a"
+count = int(forms['count']) if 'count' in forms else 77784
+
+print("Status: 200 OK")
+print("Content-Type: text/html")
+print()
+sys.stdout.flush()
+for _ in range(count):
+ sys.stdout.write(text)
+
diff --git a/test/modules/http2/htdocs/cgi/necho.py b/test/modules/http2/htdocs/cgi/necho.py
new file mode 100644
index 0000000..715904b
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/necho.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+import time
+import os, sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+status = '200 Ok'
+
+try:
+ count = forms['count']
+ text = forms['text']
+
+ waitsec = float(forms['wait1']) if 'wait1' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ if int(count):
+ print("Status: 200")
+ print("""\
+Content-Type: text/plain\n""")
+
+ waitsec = float(forms['wait2']) if 'wait2' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ i = 0
+ for i in range(0, int(count)):
+ print("%s" % (text))
+
+ waitsec = float(forms['wait3']) if 'wait3' in forms else 0.0
+ if waitsec > 0:
+ time.sleep(waitsec)
+
+ else:
+ print("Status: 400 Parameter Missing")
+ print("""\
+Content-Type: text/html\n
+ <html><body>
+ <p>No count was specified: %s</p>
+ </body></html>""" % (count))
+
+except KeyError as ex:
+ print("Status: 200 Ok")
+ print(f"""\
+Content-Type: text/html\n
+ <html><body>uri: uri={os.environ['REQUEST_URI']} ct={os.environ['CONTENT_TYPE']} ex={ex}
+ forms={forms}
+ Echo <form method="POST" enctype="application/x-www-form-urlencoded">
+ <input type="text" name="count">
+ <input type="text" name="text">
+ <button type="submit">Echo</button></form>
+ </body></html>""")
+ pass
+
+
diff --git a/test/modules/http2/htdocs/cgi/requestparser.py b/test/modules/http2/htdocs/cgi/requestparser.py
new file mode 100644
index 0000000..c7e0648
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/requestparser.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+import os
+import sys
+from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
+import shutil
+
+
+try: # Windows needs stdio set for binary mode.
+ import msvcrt
+
+ msvcrt.setmode(0, os.O_BINARY) # stdin = 0
+ msvcrt.setmode(1, os.O_BINARY) # stdout = 1
+except ImportError:
+ pass
+
+
+class FileItem:
+
+ def __init__(self, mparse_item):
+ self.item = mparse_item
+
+ @property
+ def file_name(self):
+ return os.path.basename(self.item.file_name.decode())
+
+ def save_to(self, destpath: str):
+ fsrc = self.item.file_object
+ fsrc.seek(0)
+ with open(destpath, 'wb') as fd:
+ shutil.copyfileobj(fsrc, fd)
+
+
+def get_request_params():
+ oforms = {}
+ ofiles = {}
+ if "REQUEST_URI" in os.environ:
+ qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ if "CONTENT_TYPE" in os.environ:
+ ctype = os.environ["CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ s = sys.stdin.read()
+ qforms = parse.parse_qs(s)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name.decode()] = field.value.decode()
+ def on_file(file):
+ ofiles[file.field_name.decode()] = FileItem(file)
+ multipart.parse_form(headers={"Content-Type": ctype},
+ input_stream=sys.stdin.buffer,
+ on_field=on_field, on_file=on_file)
+ return oforms, ofiles
+
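
A quick, hypothetical way to exercise this parser outside of httpd (it assumes the python3-multipart package noted above is installed): fake the CGI environment and stdin, then check that query-string and urlencoded body parameters end up merged into the same forms dict:

    import io, os, sys
    import requestparser  # the module above, importable from htdocs/cgi/

    os.environ["REQUEST_URI"] = "/necho.py?count=3"
    os.environ["CONTENT_TYPE"] = "application/x-www-form-urlencoded"
    sys.stdin = io.StringIO("text=hello")   # stands in for the request body
    forms, files = requestparser.get_request_params()
    assert forms == {"count": "3", "text": "hello"}
    assert files == {}                      # no multipart upload in this case
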
diff --git a/test/modules/http2/htdocs/cgi/ssi/include.inc b/test/modules/http2/htdocs/cgi/ssi/include.inc
new file mode 100644
index 0000000..8bd8689
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/include.inc
@@ -0,0 +1 @@
+Hello include<br>
diff --git a/test/modules/http2/htdocs/cgi/ssi/test.html b/test/modules/http2/htdocs/cgi/ssi/test.html
new file mode 100644
index 0000000..1782358
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/test.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ <!--#include virtual="./include.inc"-->
+ hello<br>
+</body>
+</html>
diff --git a/test/modules/http2/htdocs/cgi/upload.py b/test/modules/http2/htdocs/cgi/upload.py
new file mode 100644
index 0000000..fa1e5d6
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/upload.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+import os
+import sys
+from requestparser import get_request_params
+
+
+forms, files = get_request_params()
+
+status = '200 Ok'
+
+# Test if the file was uploaded
+if 'file' in files:
+ fitem = files['file']
+ # strip leading path from file name to avoid directory traversal attacks
+ fname = os.path.basename(fitem.file_name)
+ fpath = f'{os.environ["DOCUMENT_ROOT"]}/files/{fname}'
+ fitem.save_to(fpath)
+ message = "The file %s was uploaded successfully" % (fname)
+ print("Status: 201 Created")
+ print("Content-Type: text/html")
+ print("Location: %s://%s/files/%s" % (os.environ["REQUEST_SCHEME"], os.environ["HTTP_HOST"], fname))
+ print("")
+ print("<html><body><p>%s</p></body></html>" % (message))
+
+elif 'remove' in forms:
+ remove = forms['remove']
+ try:
+ fname = os.path.basename(remove)
+ os.remove('./files/' + fname)
+ message = 'The file "' + fname + '" was removed successfully'
+ except OSError as e:
+ message = 'Error removing ' + fname + ': ' + e.strerror
+ status = '404 File Not Found'
+ print("Status: %s" % (status))
+ print("""
+Content-Type: text/html
+
+<html><body>
+<p>%s</p>
+</body></html>""" % (message))
+
+else:
+ message = '''\
+ Upload File<form method="POST" enctype="multipart/form-data">
+ <input type="file" name="file">
+ <button type="submit">Upload</button></form>
+ '''
+ print("Status: %s" % (status))
+ print("""\
+Content-Type: text/html
+
+<html><body>
+<p>%s</p>
+</body></html>""" % (message))
+
diff --git a/test/modules/http2/htdocs/cgi/xxx/test.json b/test/modules/http2/htdocs/cgi/xxx/test.json
new file mode 100644
index 0000000..ceafd0a
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/xxx/test.json
@@ -0,0 +1 @@
+{"name": "test.json"}
\ No newline at end of file
diff --git a/test/modules/http2/htdocs/noh2/alive.json b/test/modules/http2/htdocs/noh2/alive.json
new file mode 100644
index 0000000..7b54893
--- /dev/null
+++ b/test/modules/http2/htdocs/noh2/alive.json
@@ -0,0 +1,5 @@
+{
+ "host" : "noh2",
+ "alive" : true
+}
+
diff --git a/test/modules/http2/htdocs/noh2/index.html b/test/modules/http2/htdocs/noh2/index.html
new file mode 100644
index 0000000..696068e
--- /dev/null
+++ b/test/modules/http2/htdocs/noh2/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>mod_h2 test site noh2</title>
+ </head>
+ <body>
+ <h1>mod_h2 test site noh2</h1>
+ </body>
+</html>
+
diff --git a/test/modules/http2/mod_h2test/mod_h2test.c b/test/modules/http2/mod_h2test/mod_h2test.c
new file mode 100644
index 0000000..f20b954
--- /dev/null
+++ b/test/modules/http2/mod_h2test/mod_h2test.c
@@ -0,0 +1,588 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <apr_optional.h>
+#include <apr_optional_hooks.h>
+#include <apr_strings.h>
+#include <apr_cstr.h>
+#include <apr_time.h>
+#include <apr_want.h>
+
+#include <httpd.h>
+#include <http_protocol.h>
+#include <http_request.h>
+#include <http_log.h>
+
+#include "mod_h2test.h"
+
+static void h2test_hooks(apr_pool_t *pool);
+
+AP_DECLARE_MODULE(h2test) = {
+ STANDARD20_MODULE_STUFF,
+ NULL, /* func to create per dir config */
+ NULL, /* func to merge per dir config */
+ NULL, /* func to create per server config */
+ NULL, /* func to merge per server config */
+ NULL, /* command handlers */
+ h2test_hooks,
+#if defined(AP_MODULE_FLAG_NONE)
+ AP_MODULE_FLAG_ALWAYS_MERGE
+#endif
+};
+
+#define SECS_PER_HOUR (60*60)
+#define SECS_PER_DAY (24*SECS_PER_HOUR)
+
+static apr_status_t duration_parse(apr_interval_time_t *ptimeout, const char *value,
+ const char *def_unit)
+{
+ char *endp;
+ apr_int64_t n;
+
+ n = apr_strtoi64(value, &endp, 10);
+ if (errno) {
+ return errno;
+ }
+ if (!endp || !*endp) {
+ if (!def_unit) def_unit = "s";
+ }
+ else if (endp == value) {
+ return APR_EINVAL;
+ }
+ else {
+ def_unit = endp;
+ }
+
+ switch (*def_unit) {
+ case 'D':
+ case 'd':
+ *ptimeout = apr_time_from_sec(n * SECS_PER_DAY);
+ break;
+ case 's':
+ case 'S':
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n);
+ break;
+ case 'h':
+ case 'H':
+ /* Time is in hours */
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * SECS_PER_HOUR);
+ break;
+ case 'm':
+ case 'M':
+ switch (*(++def_unit)) {
+ /* Time is in milliseconds */
+ case 's':
+ case 'S':
+ *ptimeout = (apr_interval_time_t) n * 1000;
+ break;
+ /* Time is in minutes */
+ case 'i':
+ case 'I':
+ *ptimeout = (apr_interval_time_t) apr_time_from_sec(n * 60);
+ break;
+ default:
+ return APR_EGENERAL;
+ }
+ break;
+ default:
+ return APR_EGENERAL;
+ }
+ return APR_SUCCESS;
+}
+
+static int h2test_post_config(apr_pool_t *p, apr_pool_t *plog,
+ apr_pool_t *ptemp, server_rec *s)
+{
+ void *data = NULL;
+ const char *mod_h2_init_key = "mod_h2test_init_counter";
+
+ (void)plog;(void)ptemp;
+
+ apr_pool_userdata_get(&data, mod_h2_init_key, s->process->pool);
+ if ( data == NULL ) {
+ /* dry run */
+ apr_pool_userdata_set((const void *)1, mod_h2_init_key,
+ apr_pool_cleanup_null, s->process->pool);
+ return APR_SUCCESS;
+ }
+
+
+ return APR_SUCCESS;
+}
+
+static void h2test_child_init(apr_pool_t *pool, server_rec *s)
+{
+ (void)pool;
+ (void)s;
+}
+
+static int h2test_echo_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ const char *ct;
+ long l;
+ int i;
+ apr_time_t chunk_delay = 0;
+ apr_array_header_t *args = NULL;
+ apr_size_t blen, fail_after = 0;
+ int fail_requested = 0, error_bucket = 1;
+
+ if (strcmp(r->handler, "h2test-echo")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if(r->args) {
+ args = apr_cstr_split(r->args, "&", 1, r->pool);
+ for(i = 0; i < args->nelts; ++i) {
+ char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+ s = strchr(arg, '=');
+ if(s) {
+ *s = '\0';
+ val = s + 1;
+ if(!strcmp("id", arg)) {
+ /* accepted, but not processed */
+ continue;
+ }
+ else if(!strcmp("chunk_delay", arg)) {
+ rv = duration_parse(&chunk_delay, val, "s");
+ if(APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ else if(!strcmp("fail_after", arg)) {
+ fail_after = (int)apr_atoi64(val);
+ if(fail_after >= 0) {
+ fail_requested = 1;
+ continue;
+ }
+ }
+ }
+ ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
+ "understood: '%s' in %s",
+ arg, r->args);
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing request");
+ r->status = 200;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ct = apr_table_get(r->headers_in, "content-type");
+ ap_set_content_type(r, ct? ct : "application/octet-stream");
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ /* copy any request body into the response */
+ if ((rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK))) goto cleanup;
+ if (ap_should_client_block(r)) {
+ while (0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "echo_handler: copying %ld bytes from request body", l);
+ blen = (apr_size_t)l;
+ if (fail_requested) {
+ if (blen > fail_after) {
+ blen = fail_after;
+ }
+ fail_after -= blen;
+ }
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, blen);
+ if (APR_SUCCESS != rv) goto cleanup;
+ if (chunk_delay) {
+ apr_sleep(chunk_delay);
+ }
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "echo_handler: passed %ld bytes from request body", l);
+ if (fail_requested && fail_after == 0) {
+ rv = APR_EINVAL;
+ goto cleanup;
+ }
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: request read");
+
+ if (r->trailers_in && !apr_is_empty_table(r->trailers_in)) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE2, 0, r,
+ "echo_handler: seeing incoming trailers");
+ apr_table_setn(r->trailers_out, "h2test-trailers-in",
+ apr_itoa(r->pool, 1));
+ }
+
+ rv = ap_pass_brigade(r->output_filters, bb);
+
+cleanup:
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: request handled");
+ return OK;
+ }
+ else if (error_bucket) {
+ int status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+ b = ap_bucket_error_create(status, NULL, r->pool, c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_pass_brigade(r->output_filters, bb);
+ }
+ else {
+ /* no way to know what type of error occurred */
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "h2test_echo_handler failed");
+ return AP_FILTER_ERROR;
+ }
+ return DECLINED;
+}
+
+static int h2test_delay_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ int i, chunks = 3;
+ long l;
+ apr_time_t delay = 0;
+
+ if (strcmp(r->handler, "h2test-delay")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if (r->args) {
+ rv = duration_parse(&delay, r->args, "s");
+ if (APR_SUCCESS != rv) {
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "delay_handler: processing request, %ds delay",
+ (int)apr_time_sec(delay));
+ r->status = 200;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ap_set_content_type(r, "application/octet-stream");
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ /* copy any request body into the response */
+ if ((rv = ap_setup_client_block(r, REQUEST_CHUNKED_DECHUNK))) goto cleanup;
+ if (ap_should_client_block(r)) {
+ do {
+ l = ap_get_client_block(r, &buffer[0], sizeof(buffer));
+ if (l > 0) {
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "delay_handler: reading %ld bytes from request body", l);
+ }
+ } while (l > 0);
+ if (l < 0) {
+ return AP_FILTER_ERROR;
+ }
+ }
+
+ memset(buffer, 0, sizeof(buffer));
+ l = sizeof(buffer);
+ for (i = 0; i < chunks; ++i) {
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "delay_handler: passed %ld bytes as response body", l);
+ if (delay) {
+ apr_sleep(delay);
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "delay_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+ "delay_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ return OK;
+ }
+ return AP_FILTER_ERROR;
+}
+
+static int h2test_trailer_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ long l;
+ int body_len = 0;
+
+ if (strcmp(r->handler, "h2test-trailer")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if (r->args) {
+ body_len = (int)apr_atoi64(r->args);
+ if (body_len < 0) body_len = 0;
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "trailer_handler: processing request, %d body length",
+ body_len);
+ r->status = 200;
+ r->clength = body_len;
+ ap_set_content_length(r, body_len);
+
+ ap_set_content_type(r, "application/octet-stream");
+ apr_table_mergen(r->headers_out, "Trailer", "trailer-content-length");
+ apr_table_set(r->trailers_out, "trailer-content-length",
+ apr_psprintf(r->pool, "%d", body_len));
+
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+ memset(buffer, 0, sizeof(buffer));
+ while (body_len > 0) {
+ l = (sizeof(buffer) > body_len)? body_len : sizeof(buffer);
+ body_len -= l;
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "trailer_handler: passed %ld bytes as response body", l);
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "trailer_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+ "trailer_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS
+ || r->status != HTTP_OK
+ || c->aborted) {
+ return OK;
+ }
+ return AP_FILTER_ERROR;
+}
+
+static int status_from_str(const char *s, apr_status_t *pstatus)
+{
+ if (!strcmp("timeout", s)) {
+ *pstatus = APR_TIMEUP;
+ return 1;
+ }
+ else if (!strcmp("reset", s)) {
+ *pstatus = APR_ECONNRESET;
+ return 1;
+ }
+ return 0;
+}
+
+static int h2test_error_handler(request_rec *r)
+{
+ conn_rec *c = r->connection;
+ apr_bucket_brigade *bb;
+ apr_bucket *b;
+ apr_status_t rv;
+ char buffer[8192];
+ int i, chunks = 3, error_bucket = 1;
+ long l;
+ apr_time_t delay = 0, body_delay = 0;
+ apr_array_header_t *args = NULL;
+ int http_status = 200;
+ apr_status_t error = APR_SUCCESS, body_error = APR_SUCCESS;
+
+ if (strcmp(r->handler, "h2test-error")) {
+ return DECLINED;
+ }
+ if (r->method_number != M_GET && r->method_number != M_POST) {
+ return DECLINED;
+ }
+
+ if (r->args) {
+ args = apr_cstr_split(r->args, "&", 1, r->pool);
+ for (i = 0; i < args->nelts; ++i) {
+ char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+ s = strchr(arg, '=');
+ if (s) {
+ *s = '\0';
+ val = s + 1;
+ if (!strcmp("status", arg)) {
+ http_status = (int)apr_atoi64(val);
+ if (http_status > 0) {
+ continue;
+ }
+ }
+ else if (!strcmp("error", arg)) {
+ if (status_from_str(val, &error)) {
+ continue;
+ }
+ }
+ else if (!strcmp("error_bucket", arg)) {
+ error_bucket = (int)apr_atoi64(val);
+ if (error_bucket >= 0) {
+ continue;
+ }
+ }
+ else if (!strcmp("body_error", arg)) {
+ if (status_from_str(val, &body_error)) {
+ continue;
+ }
+ }
+ else if (!strcmp("delay", arg)) {
+ rv = duration_parse(&delay, val, "s");
+ if (APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ else if (!strcmp("body_delay", arg)) {
+ rv = duration_parse(&body_delay, val, "s");
+ if (APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ }
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: "
+ "did not understand '%s'", arg);
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: processing request, %s",
+ r->args? r->args : "(no args)");
+ r->status = http_status;
+ r->clength = -1;
+ r->chunked = 1;
+ apr_table_unset(r->headers_out, "Content-Length");
+ /* Discourage content-encodings */
+ apr_table_unset(r->headers_out, "Content-Encoding");
+ apr_table_setn(r->subprocess_env, "no-brotli", "1");
+ apr_table_setn(r->subprocess_env, "no-gzip", "1");
+
+ ap_set_content_type(r, "application/octet-stream");
+ bb = apr_brigade_create(r->pool, c->bucket_alloc);
+
+ if (delay) {
+ apr_sleep(delay);
+ }
+ if (error != APR_SUCCESS) {
+ return ap_map_http_request_error(error, HTTP_BAD_REQUEST);
+ }
+ /* flush response */
+ b = apr_bucket_flush_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+
+ memset(buffer, 'X', sizeof(buffer));
+ l = sizeof(buffer);
+ for (i = 0; i < chunks; ++i) {
+ if (body_delay) {
+ apr_sleep(body_delay);
+ }
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ if (APR_SUCCESS != rv) goto cleanup;
+ rv = ap_pass_brigade(r->output_filters, bb);
+ if (APR_SUCCESS != rv) goto cleanup;
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
+ "error_handler: passed %ld bytes as response body", l);
+ if (body_error != APR_SUCCESS) {
+ rv = body_error;
+ goto cleanup;
+ }
+ }
+ /* we are done */
+ b = apr_bucket_eos_create(c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ rv = ap_pass_brigade(r->output_filters, bb);
+ apr_brigade_cleanup(bb);
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "error_handler: response passed");
+
+cleanup:
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r,
+ "error_handler: request cleanup, r->status=%d, aborted=%d",
+ r->status, c->aborted);
+ if (rv == APR_SUCCESS) {
+ return OK;
+ }
+ if (error_bucket) {
+ http_status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+ b = ap_bucket_error_create(http_status, NULL, r->pool, c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_pass_brigade(r->output_filters, bb);
+ }
+ return AP_FILTER_ERROR;
+}
+
+/* Install this module into the apache2 infrastructure.
+ */
+static void h2test_hooks(apr_pool_t *pool)
+{
+ static const char *const mod_h2[] = { "mod_h2.c", NULL};
+
+ ap_log_perror(APLOG_MARK, APLOG_TRACE1, 0, pool, "installing hooks and handlers");
+
+ /* Run once after configuration is set, but before mpm children initialize.
+ */
+ ap_hook_post_config(h2test_post_config, mod_h2, NULL, APR_HOOK_MIDDLE);
+
+ /* Run once after a child process has been created.
+ */
+ ap_hook_child_init(h2test_child_init, NULL, NULL, APR_HOOK_MIDDLE);
+
+ /* test h2 handlers */
+ ap_hook_handler(h2test_echo_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_delay_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_trailer_handler, NULL, NULL, APR_HOOK_MIDDLE);
+ ap_hook_handler(h2test_error_handler, NULL, NULL, APR_HOOK_MIDDLE);
+}
+
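
The handlers registered above are mapped to /h2test/echo, /h2test/delay and /h2test/error by the H2Conf defaults earlier in this patch, and the query string selects the behaviour. A rough sketch of how the pytest suite drives the error handler (the actual tests are test_h2_003_70 ff. below):

    def test_error_handler_sketch(env):
        # status=500: the handler sets the response status before sending the body
        r = env.curl_get(env.mkurl("https", "cgi", "/h2test/error?status=500"))
        assert r.response['status'] == 500
        # error=timeout: the request fails before any response body is produced
        r = env.curl_get(env.mkurl("https", "cgi", "/h2test/error?error=timeout"))
        assert r.response['status'] == 408
        # body_error=reset: the error happens mid-body, so curl itself errors out
        r = env.curl_get(env.mkurl("https", "cgi", "/h2test/error?body_error=reset"))
        assert r.exit_code != 0
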
diff --git a/test/modules/http2/mod_h2test/mod_h2test.h b/test/modules/http2/mod_h2test/mod_h2test.h
new file mode 100644
index 0000000..a886d29
--- /dev/null
+++ b/test/modules/http2/mod_h2test/mod_h2test.h
@@ -0,0 +1,21 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MOD_H2TEST_H__
+#define __MOD_H2TEST_H__
+
+
+#endif
diff --git a/test/modules/http2/test_001_httpd_alive.py b/test/modules/http2/test_001_httpd_alive.py
new file mode 100644
index 0000000..b5708d2
--- /dev/null
+++ b/test/modules/http2/test_001_httpd_alive.py
@@ -0,0 +1,22 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestBasicAlive:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # we expect to see the document from the generic server
+ def test_h2_001_01(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.exit_code == 0, r.stderr + r.stdout
+ assert r.response["json"]
+ assert r.response["json"]["alive"] is True
+ assert r.response["json"]["host"] == "test1"
+
diff --git a/test/modules/http2/test_002_curl_basics.py b/test/modules/http2/test_002_curl_basics.py
new file mode 100644
index 0000000..91be772
--- /dev/null
+++ b/test/modules/http2/test_002_curl_basics.py
@@ -0,0 +1,71 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestCurlBasics:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_test1()
+ conf.add_vhost_test2()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that we see the correct documents when using the test1 server name over http:
+ def test_h2_002_01(self, env):
+ url = env.mkurl("http", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["protocol"]
+ assert r.response["json"]["alive"] is True
+ assert r.response["json"]["host"] == "test1"
+
+ # check that we see the correct documents when using the test1 server name over https:
+ def test_h2_002_02(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["alive"] is True
+ assert "test1" == r.response["json"]["host"]
+ assert r.response["header"]["content-type"] == "application/json"
+
+ # enforce HTTP/1.1
+ def test_h2_002_03(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--http1.1"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+
+ # enforce HTTP/2
+ def test_h2_002_04(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--http2"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/2"
+
+ # default is HTTP/2 on this host
+ def test_h2_002_04b(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/2"
+ assert r.response["json"]["host"] == "test1"
+
+ # although, without ALPN, we cannot select it
+ def test_h2_002_05(self, env):
+ url = env.mkurl("https", "test1", "/alive.json")
+ r = env.curl_get(url, 5, options=["--no-alpn"])
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+ assert r.response["json"]["host"] == "test1"
+
+ # default is HTTP/1.1 on the other
+ def test_h2_002_06(self, env):
+ url = env.mkurl("https", "test2", "/alive.json")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["protocol"] == "HTTP/1.1"
+ assert r.response["json"]["host"] == "test2"
diff --git a/test/modules/http2/test_003_get.py b/test/modules/http2/test_003_get.py
new file mode 100644
index 0000000..572c4fb
--- /dev/null
+++ b/test/modules/http2/test_003_get.py
@@ -0,0 +1,265 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestGet:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ # check SSL environment variables from CGI script
+ def test_h2_003_01(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ r = env.curl_get(url, 5, options=["--tlsv1.2"])
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == "on"
+ tls_version = r.response["json"]["ssl_protocol"]
+ assert tls_version in ["TLSv1.2", "TLSv1.3"]
+ assert r.response["json"]["h2"] == "on"
+ assert r.response["json"]["h2push"] == "off"
+
+ r = env.curl_get(url, 5, options=["--http1.1", "--tlsv1.2"])
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert "on" == r.response["json"]["https"]
+ tls_version = r.response["json"]["ssl_protocol"]
+ assert tls_version in ["TLSv1.2", "TLSv1.3"]
+ assert "" == r.response["json"]["h2"]
+ assert "" == r.response["json"]["h2push"]
+
+ # retrieve a html file from the server and compare it to its source
+ def test_h2_003_02(self, env):
+ with open(env.htdocs_src("test1/index.html"), mode='rb') as file:
+ src = file.read()
+
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert src == r.response["body"]
+
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5, options=["--http1.1"])
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["protocol"]
+ assert src == r.response["body"]
+
+ # retrieve chunked content from a cgi script
+ def check_necho(self, env, n, text):
+ url = env.mkurl("https", "cgi", "/necho.py")
+ r = env.curl_get(url, 5, options=["-F", f"count={n}", "-F", f"text={text}"])
+ assert r.response["status"] == 200
+ exp = ""
+ for i in range(n):
+ exp += text + "\n"
+ assert exp == r.response["body"].decode('utf-8')
+
+ def test_h2_003_10(self, env):
+ self.check_necho(env, 10, "0123456789")
+
+ def test_h2_003_11(self, env):
+ self.check_necho(env, 100, "0123456789")
+
+ def test_h2_003_12(self, env):
+ self.check_necho(env, 1000, "0123456789")
+
+ def test_h2_003_13(self, env):
+ self.check_necho(env, 10000, "0123456789")
+
+ def test_h2_003_14(self, env):
+ self.check_necho(env, 100000, "0123456789")
+
+ # github issue #126
+ def test_h2_003_20(self, env):
+ url = env.mkurl("https", "test1", "/006/")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ body = r.response["body"].decode('utf-8')
+ # our doctype varies between branches and over time, let's not compare it
+ body = re.sub(r'^<!DOCTYPE[^>]+>', '', body)
+ assert '''
+<html>
+ <head>
+ <title>Index of /006</title>
+ </head>
+ <body>
+<h1>Index of /006</h1>
+<ul><li><a href="/"> Parent Directory</a></li>
+<li><a href="006.css"> 006.css</a></li>
+<li><a href="006.js"> 006.js</a></li>
+<li><a href="header.html"> header.html</a></li>
+</ul>
+</body></html>
+''' == body
+
+ # github issue #133
+ def clean_header(self, s):
+ s = re.sub(r'\r\n', '\n', s, flags=re.MULTILINE)
+ s = re.sub(r'^date:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^server:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^last-modified:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^etag:.*\n', '', s, flags=re.MULTILINE)
+ s = re.sub(r'^vary:.*\n', '', s, flags=re.MULTILINE)
+ return re.sub(r'^accept-ranges:.*\n', '', s, flags=re.MULTILINE)
+
+ def test_h2_003_21(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, 5, options=["-I"])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ s = self.clean_header(r.response["body"].decode('utf-8'))
+ assert '''HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+''' == s
+
+ r = env.curl_get(url, 5, options=["-I", url])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ s = self.clean_header(r.response["body"].decode('utf-8'))
+ assert '''HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+HTTP/2 200
+content-length: 2007
+content-type: text/html
+
+''' == s
+
+ # test conditionals: if-modified-since
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_30(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "last-modified" in h
+ lastmod = h["last-modified"]
+ r = env.curl_get(url, 5, options=['-H', ("if-modified-since: %s" % lastmod)])
+ assert 304 == r.response["status"]
+
+ # test conditionals: if-etag
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_31(self, env, path):
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "etag" in h
+ etag = h["etag"]
+ r = env.curl_get(url, 5, options=['-H', ("if-none-match: %s" % etag)])
+ assert 304 == r.response["status"]
+
+ # test that various response body lengths are handled correctly
+ def test_h2_003_40(self, env):
+ n = 1001
+ while n <= 1025024:
+ url = env.mkurl("https", "cgi", f"/mnot164.py?count={n}&text=X")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert n == len(r.response["body"])
+ n *= 2
+
+ # test that various response body lengths are handled correctly
+ @pytest.mark.parametrize("n", [
+ 0, 1, 1291, 1292, 80000, 80123, 81087, 98452
+ ])
+ def test_h2_003_41(self, env, n):
+ url = env.mkurl("https", "cgi", f"/mnot164.py?count={n}&text=X")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert n == len(r.response["body"])
+
+ # test ranges
+ @pytest.mark.parametrize("path", [
+ "/004.html", "/proxy/004.html", "/h2proxy/004.html"
+ ])
+ def test_h2_003_50(self, env, path, repeat):
+ # check that the resource supports ranges and we see its raw content-length
+ url = env.mkurl("https", "test1", path)
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "accept-ranges" in h
+ assert "bytes" == h["accept-ranges"]
+ assert "content-length" in h
+ clen = h["content-length"]
+ # get the first 1024 bytes of the resource, 206 status, but content-length as original
+ r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+ assert 206 == r.response["status"]
+ assert "HTTP/2" == r.response["protocol"]
+ assert 1024 == len(r.response["body"])
+ assert "content-length" in h
+ assert clen == h["content-length"]
+
+ # use an invalid scheme
+ def test_h2_003_51(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:scheme: invalid"]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 400
+
+ # use a differing scheme, but one that is acceptable
+ def test_h2_003_52(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:scheme: http"]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 200
+
+ # Test that we get proper `Date` and `Server` headers on responses
+ def test_h2_003_60(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 200
+ assert 'date' in r.response['header']
+ assert 'server' in r.response['header']
+
+ # let's do some error tests
+ def test_h2_003_70(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_003_71(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=reset")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_003_72(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, f"{r}"
diff --git a/test/modules/http2/test_004_post.py b/test/modules/http2/test_004_post.py
new file mode 100644
index 0000000..295f989
--- /dev/null
+++ b/test/modules/http2/test_004_post.py
@@ -0,0 +1,201 @@
+import difflib
+import email.parser
+import inspect
+import json
+import os
+import re
+import sys
+import time
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPost:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestPost._local_dir = os.path.dirname(inspect.getfile(TestPost))
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'<Directory {env.server_docs_dir}/cgi/xxx>',
+ ' RewriteEngine On',
+ ' RewriteRule .* /proxy/echo.py [QSA]',
+ '</Directory>',
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def local_src(self, fname):
+ return os.path.join(TestPost._local_dir, fname)
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0, f"{r}"
+ assert 200 <= r.response["status"] < 300
+
+ r2 = env.curl_get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
+ def test_h2_004_01(self, env):
+ self.curl_upload_and_verify(env, "data-1k", ["-vvv", "--http1.1"])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2"])
+
+ def test_h2_004_02(self, env):
+ self.curl_upload_and_verify(env, "data-10k", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-10k", ["--http2"])
+
+ def test_h2_004_03(self, env):
+ self.curl_upload_and_verify(env, "data-100k", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-100k", ["--http2"])
+
+ def test_h2_004_04(self, env):
+ self.curl_upload_and_verify(env, "data-1m", ["--http1.1"])
+ self.curl_upload_and_verify(env, "data-1m", ["--http2"])
+
+ def test_h2_004_05(self, env):
+ self.curl_upload_and_verify(env, "data-1k", ["-v", "--http1.1", "-H", "Expect: 100-continue"])
+ self.curl_upload_and_verify(env, "data-1k", ["-v", "--http2", "-H", "Expect: 100-continue"])
+
+ def test_h2_004_06(self, env):
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
+
+ @pytest.mark.parametrize("name, value", [
+ ("HTTP2", "on"),
+ ("H2PUSH", "off"),
+ ("H2_PUSHED", ""),
+ ("H2_PUSHED_ON", ""),
+ ("H2_STREAM_ID", "1"),
+ ("H2_STREAM_TAG", r'\d+-\d+-1'),
+ ])
+ def test_h2_004_07(self, env, name, value):
+ url = env.mkurl("https", "cgi", "/env.py")
+ r = env.curl_post_value(url, "name", name)
+ assert r.exit_code == 0
+ assert r.response["status"] == 200
+ m = re.match("{0}=(.*)".format(name), r.response["body"].decode('utf-8'))
+ assert m
+ assert re.match(value, m.group(1))
+
+ # POST some data using nghttp and see it echoed back properly
+ def nghttp_post_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert 'request-length' in r.response["header"]
+ assert int(r.response["header"]['request-length']) == len(src)
+ if len(r.response["body"]) != len(src):
+ sys.stderr.writelines(difflib.unified_diff(
+ src.decode().splitlines(True),
+ r.response["body"].decode().splitlines(True),
+ fromfile='source',
+ tofile='response'
+ ))
+ assert len(r.response["body"]) == len(src)
+ assert r.response["body"] == src, f"expected '{src}', got '{r.response['body']}'"
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_21(self, env, name):
+ self.nghttp_post_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_004_22(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, ["--no-content-length"])
+
+ # upload and GET again using nghttp, compare to original content
+ def nghttp_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+ assert 'location' in r.response["header"], f'{r}'
+ assert r.response["header"]["location"]
+
+ r2 = env.nghttp().get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"], f'GET {r.response["header"]["location"]}'
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_23(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_24(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, ["--expect-continue"])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m"
+ ])
+ def test_h2_004_25(self, env, name, repeat):
+ self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
+
+ def test_h2_004_40(self, env):
+ # echo content using h2test_module "echo" handler
+ def post_and_verify(fname, options=None):
+ url = env.mkurl("https", "cgi", "/h2test/echo")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert r.response["status"] >= 200 and r.response["status"] < 300
+
+ ct = r.response["header"]["content-type"]
+ mail_hd = "Content-Type: " + ct + "\r\nMIME-Version: 1.0\r\n\r\n"
+ mime_msg = mail_hd.encode() + r.response["body"]
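+ # note (added for clarity): the echo handler returns the multipart/form-data
+ # request body verbatim; prepending the response content-type as a MIME
+ # header lets the email parser below reconstruct the multipart structure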
+ # this MIME API is from hell
+ body = email.parser.BytesParser().parsebytes(mime_msg)
+ assert body
+ assert body.is_multipart()
+ filepart = None
+ for part in body.walk():
+ if fname == part.get_filename():
+ filepart = part
+ assert filepart
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == filepart.get_payload(decode=True)
+
+ post_and_verify("data-1k", [])
+
+ def test_h2_004_41(self, env):
+ # reproduce PR66597, double chunked encoding on redirects
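+ # note: the class fixture rewrites /xxx/* to /proxy/echo.py, so the POSTed
+ # body is proxied to the echo script and must come back unchanged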
+ url = env.mkurl("https", "cgi", "/xxx/test.json")
+ r = env.curl_post_data(url, data="0123456789", options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
+ r = env.curl_post_data(url, data="0123456789", options=["-H", "Content-Length:"])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
diff --git a/test/modules/http2/test_005_files.py b/test/modules/http2/test_005_files.py
new file mode 100644
index 0000000..e761836
--- /dev/null
+++ b/test/modules/http2/test_005_files.py
@@ -0,0 +1,48 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
+ t110 = ""
+ for _ in range(11):
+ t110 += "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
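+# note (added): each line written above is 128 bytes (a 15-digit index,
+# ": ", 110 digits and a newline), so 8 * fsize lines yield exactly fsize KiB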
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFiles:
+
+ URI_PATHS = []
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+ uris = []
+ file_count = 32
+ file_sizes = [1, 10, 100, 10000]
+ for i in range(file_count):
+ fsize = file_sizes[i % len(file_sizes)]
+ if fsize is None:
+ raise Exception("file sizes?: {0} {1}".format(i, fsize))
+ fname = "{0}-{1}k.txt".format(i, fsize)
+ mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+ self.URI_PATHS.append(f"/files/{fname}")
+
+ H2Conf(env).add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ def test_h2_005_01(self, env):
+ url = env.mkurl("https", "cgi", self.URI_PATHS[2])
+ r = env.curl_get(url)
+ assert r.response, r.stderr + r.stdout
+ assert r.response["status"] == 200
diff --git a/test/modules/http2/test_006_assets.py b/test/modules/http2/test_006_assets.py
new file mode 100644
index 0000000..778314e
--- /dev/null
+++ b/test/modules/http2/test_006_assets.py
@@ -0,0 +1,75 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestAssets:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # single page without any assets
+ def test_h2_006_01(self, env):
+ url = env.mkurl("https", "test1", "/001.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 1 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "251", "path": "/001.html"}
+ ]
+
+ # single image without any assets
+ def test_h2_006_02(self, env):
+ url = env.mkurl("https", "test1", "/002.jpg")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 1 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "88K", "path": "/002.jpg"}
+ ]
+
+ # gophertiles, yea!
+ def test_h2_006_03(self, env):
+ # create the tiles files we originally had checked in
+ exp_assets = [
+ {"status": 200, "size": "10K", "path": "/004.html"},
+ {"status": 200, "size": "742", "path": "/004/gophertiles.jpg"},
+ ]
+ for i in range(2, 181):
+ with open(f"{env.server_docs_dir}/test1/004/gophertiles_{i:03d}.jpg", "w") as fd:
+ fd.write("0123456789\n")
+ exp_assets.append(
+ {"status": 200, "size": "11", "path": f"/004/gophertiles_{i:03d}.jpg"},
+ )
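+ # each generated tile is just "0123456789\n" (11 bytes), which is why the
+ # expected asset size above is the string "11"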
+
+ url = env.mkurl("https", "test1", "/004.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 181 == len(r.assets)
+ assert r.assets == exp_assets
+
+ # page with js and css
+ def test_h2_006_04(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.nghttp().assets(url, options=["-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 3 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "543", "path": "/006.html"},
+ {"status": 200, "size": "216", "path": "/006/006.css"},
+ {"status": 200, "size": "839", "path": "/006/006.js"}
+ ]
+
+ # page with image, try different window size
+ def test_h2_006_05(self, env):
+ url = env.mkurl("https", "test1", "/003.html")
+ r = env.nghttp().assets(url, options=["--window-bits=24", "-Haccept-encoding: none"])
+ assert 0 == r.exit_code
+ assert 2 == len(r.assets)
+ assert r.assets == [
+ {"status": 200, "size": "316", "path": "/003.html"},
+ {"status": 200, "size": "88K", "path": "/003/003_img.jpg"}
+ ]
diff --git a/test/modules/http2/test_007_ssi.py b/test/modules/http2/test_007_ssi.py
new file mode 100644
index 0000000..97e38df
--- /dev/null
+++ b/test/modules/http2/test_007_ssi.py
@@ -0,0 +1,43 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestSSI:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ 'AddOutputFilter INCLUDES .html',
+ '<Location "/ssi">',
+ ' Options +Includes',
+ '</Location>',
+ ],
+ })
+ conf.add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ # SSI test from https://bz.apache.org/bugzilla/show_bug.cgi?id=66483
+ def test_h2_007_01(self, env):
+ url = env.mkurl("https", "cgi", "/ssi/test.html")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.stdout == '''<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ Hello include<br>
+
+ hello<br>
+</body>
+</html>
+''' , f'{r}'
+
diff --git a/test/modules/http2/test_008_ranges.py b/test/modules/http2/test_008_ranges.py
new file mode 100644
index 0000000..4dcdcc8
--- /dev/null
+++ b/test/modules/http2/test_008_ranges.py
@@ -0,0 +1,189 @@
+import inspect
+import json
+import os
+import re
+import time
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRanges:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestRanges.LOGFILE = os.path.join(env.server_logs_dir, "test_008")
+ TestRanges.SRCDIR = os.path.dirname(inspect.getfile(TestRanges))
+ if os.path.isfile(TestRanges.LOGFILE):
+ os.remove(TestRanges.LOGFILE)
+ destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
+ env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
+ conf = H2Conf(env=env, extras={
+ 'base': [
+ 'CustomLog logs/test_008 combined'
+ ],
+ f'test1.{env.http_tld}': [
+ '<Location /status>',
+ ' SetHandler server-status',
+ '</Location>',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_h2_008_01(self, env):
+ # issue: #203
+ resource = "data-1k"
+ full_length = 1000
+ chunk = 200
+ self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01full")
+ r = env.curl_get(url, 5, options=["--http2"])
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01range")
+ r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ detected = {}
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /files/{resource}?01full HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == full_length
+ assert e['bytes_tx_O'] > full_length
+ detected['h2full'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == chunk
+ assert e['bytes_tx_O'] > chunk
+ assert e['bytes_tx_O'] < chunk + 256 # response + frame stuff
+ detected['h2range'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/1.1':
+ assert e['bytes_rx_I'] > 0 # input bytes received
+ assert e['bytes_resp_B'] == chunk # response bytes sent (payload)
+ assert e['bytes_tx_O'] > chunk # output bytes sent
+ detected['h1range'] = 1
+ assert 'h1range' in detected, f'HTTP/1.1 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2range' in detected, f'HTTP/2 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2full' in detected, f'HTTP/2 full request not found in {TestRanges.LOGFILE}'
+
+ def test_h2_008_02(self, env, repeat):
+ path = '/002.jpg'
+ res_len = 90364
+ url = env.mkurl("https", "test1", f'{path}?02full')
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "accept-ranges" in h
+ assert "bytes" == h["accept-ranges"]
+ assert "content-length" in h
+ clen = h["content-length"]
+ assert int(clen) == res_len
+ # get the first 1024 bytes of the resource, 206 status, but content-length as original
+ url = env.mkurl("https", "test1", f'{path}?02range')
+ r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+ assert 206 == r.response["status"]
+ assert "HTTP/2" == r.response["protocol"]
+ assert 1024 == len(r.response["body"])
+ assert "content-length" in h
+ assert clen == h["content-length"]
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?02range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 1024
+ assert e['bytes_tx_O'] > 1024
+ assert e['bytes_tx_O'] < 1024 + 256 # response and frame stuff
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # send a paced curl download that aborts in the middle of the transfer
+ def test_h2_008_03(self, env, repeat):
+ path = '/data-100m'
+ url = env.mkurl("https", "test1", f'{path}?03broken')
+ r = env.curl_get(url, 5, options=[
+ '--limit-rate', '2k', '-m', '2'
+ ])
+ assert r.exit_code != 0, f'{r}'
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?03broken HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 100*1024*1024
+ assert e['bytes_tx_O'] > 1024
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # test server-status reporting
+ # see <https://bz.apache.org/bugzilla/show_bug.cgi?id=66801>
+ def test_h2_008_04(self, env, repeat):
+ path = '/data-100m'
+ assert env.apache_restart() == 0
+ stats = self.get_server_status(env)
+ # we see the server uptime check request here
+ assert 1 == int(stats['Total Accesses']), f'{stats}'
+ assert 1 == int(stats['Total kBytes']), f'{stats}'
+ count = 10
+ url = env.mkurl("https", "test1", f'/data-100m?[0-{count-1}]')
+ r = env.curl_get(url, 5, options=['--http2', '-H', f'Range: bytes=0-{4096}'])
+ assert r.exit_code == 0, f'{r}'
+ for _ in range(10):
+ # a slow cpu might not succeed on the first read
+ stats = self.get_server_status(env)
+ if (4*count)+1 <= int(stats['Total kBytes']):
+ break
+ time.sleep(0.1)
+ # the amount reported is larger than (count * 4k), the net payload,
+ # but does not exceed it by more than an additional 4k
+ assert (4*count)+1 <= int(stats['Total kBytes'])
+ assert (4*(count+1))+1 > int(stats['Total kBytes'])
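+ # with count == 10 this accepts 'Total kBytes' in the range 41..44; the
+ # exact value depends on header and frame overhead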
+ # total accesses are now the 1 from the start, plus the stat check,
+ # plus the count transfers we did.
+ assert (2+count) == int(stats['Total Accesses'])
+
+ def get_server_status(self, env):
+ status_url = env.mkurl("https", "test1", '/status?auto')
+ r = env.curl_get(status_url, 5)
+ assert r.exit_code == 0, f'{r}'
+ stats = {}
+ for line in r.stdout.splitlines():
+ m = re.match(r'([^:]+): (.*)', line)
+ if m:
+ stats[m.group(1)] = m.group(2)
+ return stats
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0, f"{r}"
+ assert 200 <= r.response["status"] < 300
+
+ r2 = env.curl_get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(os.path.join(TestRanges.SRCDIR, fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
diff --git a/test/modules/http2/test_009_timing.py b/test/modules/http2/test_009_timing.py
new file mode 100644
index 0000000..2c62bb0
--- /dev/null
+++ b/test/modules/http2/test_009_timing.py
@@ -0,0 +1,74 @@
+import inspect
+import json
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestTiming:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestTiming.LOGFILE = os.path.join(env.server_logs_dir, "test_009")
+ if os.path.isfile(TestTiming.LOGFILE):
+ os.remove(TestTiming.LOGFILE)
+ conf = H2Conf(env=env)
+ conf.add([
+ "CustomLog logs/test_009 combined"
+ ])
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that we get a positive time_taken reported on a simple GET
+ def test_h2_009_01(self, env):
+ path = '/002.jpg'
+ url = env.mkurl("https", "test1", f'{path}?01')
+ args = [
+ env.h2load, "-n", "1", "-c", "1", "-m", "1",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={url}", url
+ ]
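+ # (h2load flags: -n one request, -c one client connection, -m one
+ # concurrent stream)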
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?01 HTTP/2.0':
+ assert e['time_taken'] > 0
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
+
+ # test issue #253, where time_taken in a keepalive situation is not
+ # reported until the next request arrives
+ def test_h2_009_02(self, env):
+ baseurl = env.mkurl("https", "test1", '/')
+ tscript = os.path.join(env.gen_dir, 'h2load-timing-009_02')
+ with open(tscript, 'w') as fd:
+ fd.write('\n'.join([
+ f'0.0\t/002.jpg?02a', # 1st request right away
+ f'1000.0\t/002.jpg?02b', # 2nd a second later
+ ]))
+ args = [
+ env.h2load,
+ f'--timing-script-file={tscript}',
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={baseurl}"
+ ]
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /002.jpg?02a HTTP/2.0':
+ assert e['time_taken'] > 0
+ assert e['time_taken'] < 500 * 1000, f'time for 1st request not reported correctly'
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
diff --git a/test/modules/http2/test_100_conn_reuse.py b/test/modules/http2/test_100_conn_reuse.py
new file mode 100644
index 0000000..3ebac24
--- /dev/null
+++ b/test/modules/http2/test_100_conn_reuse.py
@@ -0,0 +1,57 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestConnReuse:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_noh2().add_vhost_test1().add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ # make sure the protocol selection on the different hosts works as expected
+ def test_h2_100_01(self, env):
+ # this host defaults to h2, but we can request h1
+ url = env.mkurl("https", "cgi", "/hello.py")
+ assert "2" == env.curl_protocol_version( url )
+ assert "1.1" == env.curl_protocol_version( url, options=[ "--http1.1" ] )
+
+ # this host does not enable h2, it always falls back to h1
+ url = env.mkurl("https", "noh2", "/hello.py")
+ assert "1.1" == env.curl_protocol_version( url )
+ assert "1.1" == env.curl_protocol_version( url, options=[ "--http2" ] )
+
+ # access a ServerAlias, after using ServerName in SNI
+ def test_h2_100_02(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("cgi-alias.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=["-H", f"Host: {hostname}"])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert hostname == r.response["json"]["host"]
+
+ # access another vhost, after using ServerName in SNI, that uses same SSL setup
+ def test_h2_100_03(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ hostname = ("test1.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ assert "text/html" == r.response["header"]["content-type"]
+
+ # access another vhost, after using ServerName in SNI,
+ # that has a different SSL certificate. This triggers a 421 (misdirected request) response.
+ def test_h2_100_04(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("noh2.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert 421 == r.response["status"]
+
+ # access an unknown vhost, after using ServerName in SNI
+ def test_h2_100_05(self, env):
+ url = env.mkurl("https", "cgi", "/hello.py")
+ hostname = ("unknown.%s" % env.http_tld)
+ r = env.curl_get(url, 5, options=[ "-H", "Host:%s" % hostname ])
+ assert 421 == r.response["status"]
diff --git a/test/modules/http2/test_101_ssl_reneg.py b/test/modules/http2/test_101_ssl_reneg.py
new file mode 100644
index 0000000..528002f
--- /dev/null
+++ b/test/modules/http2/test_101_ssl_reneg.py
@@ -0,0 +1,138 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(H2TestEnv.get_ssl_module() != "mod_ssl", reason="only for mod_ssl")
+class TestSslRenegotiation:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ domain = f"ssl.{env.http_tld}"
+ conf = H2Conf(env, extras={
+ 'base': [
+ "SSLCipherSuite ECDHE-RSA-AES256-GCM-SHA384",
+ f"<Directory \"{env.server_dir}/htdocs/ssl-client-verify\">",
+ " Require all granted",
+ " SSLVerifyClient require",
+ " SSLVerifyDepth 0",
+ "</Directory>"
+ ],
+ domain: [
+ "Protocols h2 http/1.1",
+ "<Location /renegotiate/cipher>",
+ " SSLCipherSuite ECDHE-RSA-CHACHA20-POLY1305",
+ "</Location>",
+ "<Location /renegotiate/err-doc-cipher>",
+ " SSLCipherSuite ECDHE-RSA-CHACHA20-POLY1305",
+ " ErrorDocument 403 /forbidden.html",
+ "</Location>",
+ "<Location /renegotiate/verify>",
+ " SSLVerifyClient require",
+ "</Location>",
+ f"<Directory \"{env.server_dir}/htdocs/sslrequire\">",
+ " SSLRequireSSL",
+ "</Directory>",
+ f"<Directory \"{env.server_dir}/htdocs/requiressl\">",
+ " Require ssl",
+ "</Directory>",
+ ]})
+ conf.add_vhost(domains=[domain], port=env.https_port,
+ doc_root=f"{env.server_dir}/htdocs")
+ conf.install()
+ # the dir needs to exist for the configuration to have effect
+ env.mkpath("%s/htdocs/ssl-client-verify" % env.server_dir)
+ env.mkpath("%s/htdocs/renegotiate/cipher" % env.server_dir)
+ env.mkpath("%s/htdocs/sslrequire" % env.server_dir)
+ env.mkpath("%s/htdocs/requiressl" % env.server_dir)
+ assert env.apache_restart() == 0
+
+ # access a resource with SSL renegotiation, using HTTP/1.1
+ def test_h2_101_01(self, env):
+ url = env.mkurl("https", "ssl", "/renegotiate/cipher/")
+ r = env.curl_get(url, options=["-v", "--http1.1", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 == r.exit_code, f"{r}"
+ assert r.response
+ assert 403 == r.response["status"]
+
+ # try to renegotiate the cipher, should fail with correct code
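+ # (TLS renegotiation is not allowed on HTTP/2 connections, so the server
+ # is expected to reset the stream with HTTP_1_1_REQUIRED, error code 13)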
+ def test_h2_101_02(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/cipher/")
+ r = env.curl_get(url, options=[
+ "-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
+ ])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # try to renegotiate a client certificate from Location
+ # needs to fail with correct code
+ def test_h2_101_03(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/verify/")
+ r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # try to renegotiate a client certificate from Directory
+ # needs to fail with correct code
+ def test_h2_101_04(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/ssl-client-verify/index.html")
+ r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
+ assert 0 != r.exit_code, f"{r}"
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
+
+ # make 10 requests on the same connection, none of which should produce a status code
+ # reported by erki@example.ee
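+ # (the /ssl-client-verify directory requires a client certificate that
+ # h2load does not present, so no request can produce an HTTP status)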
+ def test_h2_101_05(self, env):
+ r = env.run([env.h2load, "-n", "10", "-c", "1", "-m", "1", "-vvvv",
+ f"{env.https_base_url}/ssl-client-verify/index.html"])
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert 10 == r.results["h2load"]["requests"]["total"]
+ assert 10 == r.results["h2load"]["requests"]["started"]
+ assert 10 == r.results["h2load"]["requests"]["done"]
+ assert 0 == r.results["h2load"]["requests"]["succeeded"]
+ assert 0 == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # Check that "SSLRequireSSL" works on h2 connections
+ # See <https://bz.apache.org/bugzilla/show_bug.cgi?id=62654>
+ def test_h2_101_10a(self, env):
+ url = env.mkurl("https", "ssl", "/sslrequire/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ # Check that "require ssl" works on h2 connections
+ # See <https://bz.apache.org/bugzilla/show_bug.cgi?id=62654>
+ def test_h2_101_10b(self, env):
+ url = env.mkurl("https", "ssl", "/requiressl/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ # Check that status works with ErrorDoc, see pull #174, fixes #172
+ def test_h2_101_11(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
+ url = env.mkurl("https", "ssl", "/renegotiate/err-doc-cipher")
+ r = env.curl_get(url, options=[
+ "-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
+ ])
+ assert 0 != r.exit_code
+ assert not r.response
+ assert re.search(r'HTTP_1_1_REQUIRED \(err 13\)', r.stderr)
diff --git a/test/modules/http2/test_102_require.py b/test/modules/http2/test_102_require.py
new file mode 100644
index 0000000..b7e4eae
--- /dev/null
+++ b/test/modules/http2/test_102_require.py
@@ -0,0 +1,41 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRequire:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ domain = f"ssl.{env.http_tld}"
+ conf = H2Conf(env)
+ conf.start_vhost(domains=[domain], port=env.https_port)
+ conf.add("""
+ Protocols h2 http/1.1
+ SSLOptions +StdEnvVars
+ <Location /h2only.html>
+ Require expr \"%{HTTP2} == 'on'\"
+ </Location>
+ <Location /noh2.html>
+ Require expr \"%{HTTP2} == 'off'\"
+ </Location>""")
+ conf.end_vhost()
+ conf.install()
+ # the dir needs to exist for the configuration to have effect
+ env.mkpath(f"{env.server_dir}/htdocs/ssl-client-verify")
+ assert env.apache_restart() == 0
+
+ def test_h2_102_01(self, env):
+ url = env.mkurl("https", "ssl", "/h2only.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 404 == r.response["status"]
+
+ def test_h2_102_02(self, env):
+ url = env.mkurl("https", "ssl", "/noh2.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert 403 == r.response["status"]
diff --git a/test/modules/http2/test_103_upgrade.py b/test/modules/http2/test_103_upgrade.py
new file mode 100644
index 0000000..2fa7d1d
--- /dev/null
+++ b/test/modules/http2/test_103_upgrade.py
@@ -0,0 +1,118 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestUpgrade:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().add_vhost_test2().add_vhost_noh2(
+ ).start_vhost(domains=[f"test3.{env.http_tld}"], port=env.https_port, doc_root="htdocs/test1"
+ ).add(
+ """
+ Protocols h2 http/1.1
+ Header unset Upgrade"""
+ ).end_vhost(
+ ).start_vhost(domains=[f"test1b.{env.http_tld}"], port=env.http_port, doc_root="htdocs/test1"
+ ).add(
+ """
+ Protocols h2c http/1.1
+ H2Upgrade off
+ <Location /006.html>
+ H2Upgrade on
+ </Location>"""
+ ).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ # accessing http://test1, curl will not try h2, but the server advertises h2c in the response
+ def test_h2_103_01(self, env):
+ url = env.mkurl("http", "test1", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" in r.response["header"]
+ assert "h2c" == r.response["header"]["upgrade"]
+
+ # accessing http://noh2, will not advertise, because noh2 host does not have it enabled
+ def test_h2_103_02(self, env):
+ url = env.mkurl("http", "noh2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing http://test2, will not advertise, because h2 has less preference than http/1.1
+ def test_h2_103_03(self, env):
+ url = env.mkurl("http", "test2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://noh2, will not advertise, because noh2 host does not have it enabled
+ def test_h2_103_04(self, env):
+ url = env.mkurl("https", "noh2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://test2, will not advertise, because h2 has less preference than http/1.1
+ def test_h2_103_05(self, env):
+ url = env.mkurl("https", "test2", "/index.html")
+ r = env.curl_get(url)
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # accessing https://test1, will advertise h2 in the response
+ def test_h2_103_06(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, options=["--http1.1"])
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" in r.response["header"]
+ assert "h2" == r.response["header"]["upgrade"]
+
+ # accessing https://test3, will not send Upgrade since it is suppressed
+ def test_h2_103_07(self, env):
+ url = env.mkurl("https", "test3", "/index.html")
+ r = env.curl_get(url, options=["--http1.1"])
+ assert 0 == r.exit_code
+ assert r.response
+ assert "upgrade" not in r.response["header"]
+
+ # upgrade to h2c for a request, where h2c is preferred
+ def test_h2_103_20(self, env):
+ url = env.mkurl("http", "test1", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert r.response["status"] == 200
+
+ # upgrade to h2c for a request where http/1.1 is preferred, but the client's upgrade
+ # wish is honored nevertheless
+ def test_h2_103_21(self, env):
+ url = env.mkurl("http", "test2", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert 404 == r.response["status"]
+
+ # upgrading to h2c on a host where h2c is not enabled will fail
+ def test_h2_103_22(self, env):
+ url = env.mkurl("http", "noh2", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert not r.response
+
+ # upgrade to h2c on a host where h2c is preferred, but Upgrade is disabled
+ def test_h2_103_23(self, env):
+ url = env.mkurl("http", "test1b", "/index.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert not r.response
+
+ # upgrade to h2c on a host where h2c is preferred and Upgrade is disabled on the server,
+ # but allowed for a specific location
+ def test_h2_103_24(self, env):
+ url = env.mkurl("http", "test1b", "/006.html")
+ r = env.nghttp().get(url, options=["-u"])
+ assert r.response["status"] == 200
diff --git a/test/modules/http2/test_104_padding.py b/test/modules/http2/test_104_padding.py
new file mode 100644
index 0000000..401804a
--- /dev/null
+++ b/test/modules/http2/test_104_padding.py
@@ -0,0 +1,104 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def frame_padding(payload, padbits):
+ mask = (1 << padbits) - 1
+ return ((payload + 9 + mask) & ~mask) - (payload + 9)
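+
+# A small illustrative self-check of the helper above (an editorial addition,
+# not part of the original test suite): with 3 padding bits, a 1-byte payload
+# makes a 10-byte frame (9-byte frame header plus payload), which is rounded
+# up to 16 bytes and thus needs 6 bytes of padding; with 0 bits nothing is added.
+assert frame_padding(1, 3) == 6
+assert frame_padding(1, 0) == 0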
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPadding:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ def add_echo_handler(conf):
+ conf.add([
+ "<Location \"/h2test/echo\">",
+ " SetHandler h2test-echo",
+ "</Location>",
+ ])
+
+ conf = H2Conf(env)
+ conf.start_vhost(domains=[f"ssl.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad0.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 0")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad1.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 1")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad2.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 2")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad3.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 3")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.start_vhost(domains=[f"pad8.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
+ conf.add("H2Padding 8")
+ add_echo_handler(conf)
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # default padding settings: 0 bits
+ def test_h2_104_01(self, env, repeat):
+ url = env.mkurl("https", "ssl", "/h2test/echo")
+ # we get 2 frames back: one with data and an empty one with EOF
+ # check the number of padding bytes is as expected
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i == frame_padding(len(data)+1, 0)
+
+ # 0 bits of padding
+ def test_h2_104_02(self, env):
+ url = env.mkurl("https", "pad0", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i == 0
+
+ # 1 bit of padding
+ def test_h2_104_03(self, env):
+ url = env.mkurl("https", "pad1", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 2)
+
+ # 2 bits of padding
+ def test_h2_104_04(self, env):
+ url = env.mkurl("https", "pad2", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 4)
+
+ # 3 bits of padding
+ def test_h2_104_05(self, env):
+ url = env.mkurl("https", "pad3", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 8)
+
+ # 8 bits of padding
+ def test_h2_104_06(self, env):
+ url = env.mkurl("https", "pad8", "/h2test/echo")
+ for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
+ r = env.nghttp().post_data(url, data, 5)
+ assert r.response["status"] == 200
+ for i in r.results["paddings"]:
+ assert i in range(0, 256)
diff --git a/test/modules/http2/test_105_timeout.py b/test/modules/http2/test_105_timeout.py
new file mode 100644
index 0000000..f7d3859
--- /dev/null
+++ b/test/modules/http2/test_105_timeout.py
@@ -0,0 +1,152 @@
+import socket
+import time
+
+import pytest
+
+from .env import H2Conf
+from pyhttpd.curl import CurlPiper
+
+
+class TestTimeout:
+
+ # Check that the base server's 'Timeout' setting is observed on the SSL handshake
+ def test_h2_105_01(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ AcceptFilter http none
+ Timeout 1.5
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ host = 'localhost'
+ # read with a longer timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ # on some OS, the server does not see our connection until there is
+ # something incoming
+ sock.send(b'0')
+ sock.settimeout(4)
+ buff = sock.recv(1024)
+ assert buff == b''
+ except Exception as ex:
+ print(f"server did not close in time: {ex}")
+ assert False
+ sock.close()
+ # read with a shorter timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ sock.settimeout(0.5)
+ sock.recv(1024)
+ assert False
+ except Exception as ex:
+ print(f"as expected: {ex}")
+ sock.close()
+
+ # Check that mod_reqtimeout handshake setting takes effect
+ def test_h2_105_02(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ AcceptFilter http none
+ Timeout 10
+ RequestReadTimeout handshake=1 header=5 body=10
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ host = 'localhost'
+ # read with a longer timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ # on some OS, the server does not see our connection until there is
+ # something incoming
+ sock.send(b'0')
+ sock.settimeout(4)
+ buff = sock.recv(1024)
+ assert buff == b''
+ except Exception as ex:
+ print(f"server did not close in time: {ex}")
+ assert False
+ sock.close()
+ # read with a shorter timeout than the server
+ sock = socket.create_connection((host, int(env.https_port)))
+ try:
+ sock.settimeout(0.5)
+ sock.recv(1024)
+ assert False
+ except Exception as ex:
+ print(f"as expected: {ex}")
+ sock.close()
+
+ # Check that the mod_reqtimeout handshake settings no longer apply to handshaked
+ # connections. See <https://github.com/icing/mod_h2/issues/196>.
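+ # (the request makes the CGI pause for 1.5s (wait1=1.5), longer than
+ # handshake=1; it must still succeed because the connection is already
+ # past the TLS handshake)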
+ def test_h2_105_03(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ Timeout 10
+ RequestReadTimeout handshake=1 header=5 body=10
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/necho.py")
+ r = env.curl_get(url, 5, options=[
+ "-vvv",
+ "-F", ("count=%d" % 100),
+ "-F", ("text=%s" % "abcdefghijklmnopqrstuvwxyz"),
+ "-F", ("wait1=%f" % 1.5),
+ ])
+ assert r.response["status"] == 200
+
+ def test_h2_105_10(self, env):
+ # just a check, without any delays, that all is fine
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ stdout, stderr = piper.close()
+ assert piper.exitcode == 0
+ assert len("".join(stdout)) == 3 * 8192
+
+ def test_h2_105_11(self, env):
+ # short connection timeout, longer stream delay
+ # connection timeout must not abort ongoing streams
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.add("Timeout 1")
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay?1200ms")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ stdout, stderr = piper.close()
+ assert len("".join(stdout)) == 3 * 8192
+
+ def test_h2_105_12(self, env):
+ # long connection timeout, short stream timeout
+ # sending a slow POST
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ if not env.httpd_is_at_least("2.5.0"):
+ pytest.skip(f'need at least httpd 2.5.0 for this')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.add("Timeout 10")
+ conf.add("H2StreamTimeout 1")
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay?5")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ for _ in range(3):
+ time.sleep(2)
+ try:
+ piper.send("0123456789\n")
+ except BrokenPipeError:
+ break
+ piper.close()
+ assert piper.response, f'{piper}'
+ assert piper.response['status'] == 408, f"{piper.response}"
diff --git a/test/modules/http2/test_106_shutdown.py b/test/modules/http2/test_106_shutdown.py
new file mode 100644
index 0000000..83e143c
--- /dev/null
+++ b/test/modules/http2/test_106_shutdown.py
@@ -0,0 +1,75 @@
+#
+# mod-h2 test suite
+# check HTTP/2 behaviour during graceful shutdown and reload
+#
+import time
+from threading import Thread
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+from pyhttpd.result import ExecResult
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestShutdown:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+
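+ # start a long-running request and reload the server gracefully while it is
+ # in flight; the stream is expected to complete with the full response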
+ def test_h2_106_01(self, env):
+ url = env.mkurl("https", "cgi", "/necho.py")
+ lines = 100000
+ text = "123456789"
+ wait2 = 1.0
+ self.r = None
+ def long_request():
+ args = ["-vvv",
+ "-F", f"count={lines}",
+ "-F", f"text={text}",
+ "-F", f"wait2={wait2}",
+ ]
+ self.r = env.curl_get(url, 5, options=args)
+
+ t = Thread(target=long_request)
+ t.start()
+ time.sleep(0.5)
+ assert env.apache_reload() == 0
+ t.join()
+ # noinspection PyTypeChecker
+ time.sleep(1)
+ r: ExecResult = self.r
+ assert r.exit_code == 0
+ assert r.response, f"no response via {r.args} in {r.stderr}\nstdout: {len(r.stdout)} bytes"
+ assert r.response["status"] == 200, f"{r}"
+ assert len(r.response["body"]) == (lines * (len(text)+1)), f"{r}"
+
+ def test_h2_106_02(self, env):
+ # PR65731: invalid GOAWAY frame at session start when
+ # MaxRequestsPerChild is reached
+ # Create a low limit and only 2 children, so we'll encounter this easily
+ conf = H2Conf(env, extras={
+ 'base': [
+ "ServerLimit 2",
+ "MaxRequestsPerChild 3"
+ ]
+ })
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "test1", "/index.html")
+ for i in range(7):
+ r = env.curl_get(url, options=['-v'])
+ # requests should succeed, but rarely connections get closed
+ # before the response is received
+ if r.exit_code in [16, 55]:
+ # curl send error
+ assert r.response is None
+ else:
+ assert r.exit_code == 0, f"failed on {i}. request: {r.stdout} {r.stderr}"
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
\ No newline at end of file
diff --git a/test/modules/http2/test_107_frame_lengths.py b/test/modules/http2/test_107_frame_lengths.py
new file mode 100644
index 0000000..d636093
--- /dev/null
+++ b/test/modules/http2/test_107_frame_lengths.py
@@ -0,0 +1,51 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
+ t110 = ""
+ for _ in range(11):
+ t110 += "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFrameLengths:
+
+ URI_PATHS = []
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+ for fsize in [10, 100]:
+ fname = f'0-{fsize}k.txt'
+ mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+ self.URI_PATHS.append(f"/files/{fname}")
+
+ @pytest.mark.parametrize("data_frame_len", [
+ 99, 1024, 8192
+ ])
+ def test_h2_107_01(self, env, data_frame_len):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'H2MaxDataFrameLen {data_frame_len}',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ for p in self.URI_PATHS:
+ url = env.mkurl("https", "cgi", p)
+ r = env.nghttp().get(url, options=[
+ '--header=Accept-Encoding: none',
+ ])
+ assert r.response["status"] == 200
+ assert len(r.results["data_lengths"]) > 0, f'{r}'
+ too_large = [ x for x in r.results["data_lengths"] if x > data_frame_len]
+ assert len(too_large) == 0, f'{p}: {r.results["data_lengths"]}'
diff --git a/test/modules/http2/test_200_header_invalid.py b/test/modules/http2/test_200_header_invalid.py
new file mode 100644
index 0000000..5b3aafd
--- /dev/null
+++ b/test/modules/http2/test_200_header_invalid.py
@@ -0,0 +1,207 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestInvalidHeaders:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ # let the hecho.py CGI echo chars < 0x20 in field name
+ # for almost all such characters, the stream returns a 500
+ # or in httpd >= 2.5.0 gets aborted with an h2 error
+ # cr is handled specially
+ def test_h2_200_01(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for x in range(1, 32):
+ data = f'name=x%{x:02x}x&value=yz'
+ r = env.curl_post_data(url, data)
+ if x in [13]:
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+ assert 200 == r.response["status"], f'unexpected status for char 0x{x:02}'
+ elif x in [10] or env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
+ else:
+ assert 0 != r.exit_code, f'unexpected exit code for char 0x{x:02}'
+
+ # let the hecho.py CGI echo chars < 0x20 in field value
+ # for almost all such characters, the stream returns a 500
+ # or in httpd >= 2.5.0 gets aborted with an h2 error
+ # cr and lf are handled specially
+ def test_h2_200_02(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for x in range(1, 32):
+ if 9 != x:
+ r = env.curl_post_data(url, "name=x&value=y%%%02x" % x)
+ if x in [10, 13]:
+ assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
+ assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+ elif env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
+ else:
+ assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
+
+ # let the hecho.py CGI echo 0x10 and 0x7f in field name and value
+ def test_h2_200_03(self, env):
+ url = env.mkurl("https", "cgi", "/hecho.py")
+ for h in ["10", "7f"]:
+ r = env.curl_post_data(url, "name=x%%%s&value=yz" % h)
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+ assert 500 == r.response["status"], f"unexpected status for char 0x{h:02}"
+ else:
+ assert 0 != r.exit_code
+ r = env.curl_post_data(url, "name=x&value=y%%%sz" % h)
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+ assert 500 == r.response["status"], f"unexpected status for char 0x{h:02}"
+ else:
+ assert 0 != r.exit_code
+
+ # test header field lengths check, LimitRequestLine
+ def test_h2_200_10(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestLine 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ val = 200*"1234567890"
+ url = env.mkurl("https", "cgi", f'/?{val[:1022]}')
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", f'/?{val[:1023]}')
+ r = env.curl_get(url)
+ # URI too long
+ assert 414 == r.response["status"]
+
+ # test header field lengths check, LimitRequestFieldSize (default 8190)
+ def test_h2_200_11(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFieldSize 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ val = 200*"1234567890"
+ # two fields, concatenated with ', '
+ # LimitRequestFieldSize, one more char -> 400 in HTTP/1.1
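+ # e.g. 'x' + ': ' + 500 chars + ', ' + 519 chars is exactly 1024 bytes,
+ # just within the configured LimitRequestFieldSize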
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:519]}'
+ ])
+ assert r.exit_code == 0, f'{r}'
+ assert r.response["status"] == 200, f'{r}'
+ r = env.curl_get(url, options=[
+ '--http1.1', '-H', f'x: {val[:500]}', '-H', f'x: {val[:523]}'
+ ])
+ assert 400 == r.response["status"]
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:520]}'
+ ])
+ assert 431 == r.response["status"]
+
+ # test header field count, LimitRequestFields (default 100)
+ # see #201: several headers with the same name are merged and count only once
+ def test_h2_200_12(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ # curl sends 3 headers itself (user-agent, accept, and our AP-Test-Name)
+ for i in range(97):
+ opt += ["-H", "x: 1"]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+ r = env.curl_get(url, options=(opt + ["-H", "y: 2"]))
+ assert r.response["status"] == 200
+
+ # test header field count, LimitRequestFields (default 100)
+ # different header names count each
+ def test_h2_200_13(self, env):
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ # curl sends 3 headers itself (user-agent, accept, and our AP-Test-Name)
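+ # 97 distinct custom headers plus those 3 add up to 100, the default
+ # LimitRequestFields; one more distinct name exceeds it and draws a 431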
+ for i in range(97):
+ opt += ["-H", f"x{i}: 1"]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+ r = env.curl_get(url, options=(opt + ["-H", "y: 2"]))
+ assert 431 == r.response["status"]
+
+ # test "LimitRequestFields 0" setting, see #200
+ def test_h2_200_14(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFields 20
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ for i in range(21):
+ opt += ["-H", "x{0}: 1".format(i)]
+ r = env.curl_get(url, options=opt)
+ assert 431 == r.response["status"]
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFields 0
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ opt = []
+ for i in range(100):
+ opt += ["-H", "x{0}: 1".format(i)]
+ r = env.curl_get(url, options=opt)
+ assert r.response["status"] == 200
+
+ # the uri limits
+ def test_h2_200_15(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestLine 48
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", "/" + (48*"x"))
+ r = env.curl_get(url)
+ assert 414 == r.response["status"]
+ # nghttp sends the :method: header first (so far)
+ # trigger a too long request line on it
+ # the stream will RST and we get no response
+ url = env.mkurl("https", "cgi", "/")
+ opt = ["-H:method: {0}".format(100*"x")]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert not r.response
+
+ # invalid chars in method
+ def test_h2_200_16(self, env):
+ if not env.h2load_is_at_least('1.45.0'):
+ pytest.skip(f'nghttp2 version too old')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/hello.py")
+ opt = ["-H:method: GET /hello.py"]
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response is None
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.nghttp().get(url, options=opt)
+ assert r.exit_code == 0, r
+ assert r.response is None
diff --git a/test/modules/http2/test_201_header_conditional.py b/test/modules/http2/test_201_header_conditional.py
new file mode 100644
index 0000000..f103268
--- /dev/null
+++ b/test/modules/http2/test_201_header_conditional.py
@@ -0,0 +1,70 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestConditionalHeaders:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add(
+ """
+ KeepAlive on
+ MaxKeepAliveRequests 30
+ KeepAliveTimeout 30"""
+ ).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # check handling of 'if-modified-since' header
+ def test_h2_201_01(self, env):
+ url = env.mkurl("https", "test1", "/006/006.css")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ lm = r.response["header"]["last-modified"]
+ assert lm
+ r = env.curl_get(url, options=["-H", "if-modified-since: %s" % lm])
+ assert 304 == r.response["status"]
+ r = env.curl_get(url, options=["-H", "if-modified-since: Tue, 04 Sep 2010 11:51:59 GMT"])
+ assert r.response["status"] == 200
+
+ # check handling of 'if-none-match' header
+ def test_h2_201_02(self, env):
+ url = env.mkurl("https", "test1", "/006/006.css")
+ r = env.curl_get(url)
+ assert r.response["status"] == 200
+ etag = r.response["header"]["etag"]
+ assert etag
+ r = env.curl_get(url, options=["-H", "if-none-match: %s" % etag])
+ assert 304 == r.response["status"]
+ r = env.curl_get(url, options=["-H", "if-none-match: dummy"])
+ assert r.response["status"] == 200
+
+ @pytest.mark.skipif(True, reason="304 misses the Vary header in trunk and 2.4.x")
+ def test_h2_201_03(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.curl_get(url, options=["-H", "Accept-Encoding: gzip"])
+ assert r.response["status"] == 200
+ for h in r.response["header"]:
+ print("%s: %s" % (h, r.response["header"][h]))
+ lm = r.response["header"]["last-modified"]
+ assert lm
+ assert "gzip" == r.response["header"]["content-encoding"]
+ assert "Accept-Encoding" in r.response["header"]["vary"]
+
+ r = env.curl_get(url, options=["-H", "if-modified-since: %s" % lm,
+ "-H", "Accept-Encoding: gzip"])
+ assert 304 == r.response["status"]
+ for h in r.response["header"]:
+ print("%s: %s" % (h, r.response["header"][h]))
+ assert "vary" in r.response["header"]
+
+ # Check if "Keep-Alive" response header is removed in HTTP/2.
+ def test_h2_201_04(self, env):
+ url = env.mkurl("https", "test1", "/006.html")
+ r = env.curl_get(url, options=["--http1.1", "-H", "Connection: keep-alive"])
+ assert r.response["status"] == 200
+ assert "timeout=30, max=30" == r.response["header"]["keep-alive"]
+ r = env.curl_get(url, options=["-H", "Connection: keep-alive"])
+ assert r.response["status"] == 200
+ assert "keep-alive" not in r.response["header"]
diff --git a/test/modules/http2/test_202_trailer.py b/test/modules/http2/test_202_trailer.py
new file mode 100644
index 0000000..4b4fc42
--- /dev/null
+++ b/test/modules/http2/test_202_trailer.py
@@ -0,0 +1,92 @@
+import os
+import pytest
+
+from .env import H2Conf
+
+
+def setup_data(env):
+ s100 = "012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678\n"
+ with open(os.path.join(env.gen_dir, "data-1k"), 'w') as f:
+ for i in range(10):
+ f.write(s100)
+
+
+# The trailer tests depend on "nghttp", as no other client seems able to
+# send these rarely used trailer fields.
+class TestTrailers:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ setup_data(env)
+ conf = H2Conf(env, extras={
+ f"cgi.{env.http_tld}": [
+ "<Location \"/h2test/trailer\">",
+ " SetHandler h2test-trailer",
+ "</Location>"
+ ],
+ })
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check if the server survives a trailer or two
+ def test_h2_202_01(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, "data-1k")
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 1"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 1b", "--trailer", "XXX: test"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ # check if the server survives a trailer without content-length
+ def test_h2_202_02(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ fpath = os.path.join(env.gen_dir, "data-1k")
+ r = env.nghttp().upload(url, fpath, options=["--trailer", "test: 2", "--no-content-length"])
+ assert r.response["status"] < 300
+ assert len(r.response["body"]) == 1000
+
+ # check if echoing request headers in response from GET works
+ def test_h2_202_03(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().get(url, options=["--header", "X: 3"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 3\n"
+
+ # check if echoing request headers in response from POST works
+ def test_h2_202_03b(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().post_name(url, "Y", options=["--header", "X: 3b"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 3b\n"
+
+ # check if echoing request headers in response from POST works, but trailers are not seen
+ # This is the way CGI invocation works.
+ def test_h2_202_04(self, env):
+ url = env.mkurl("https", "cgi", "/echohd.py?name=X")
+ r = env.nghttp().post_name(url, "Y", options=["--header", "X: 4a", "--trailer", "X: 4b"])
+ assert r.response["status"] < 300
+ assert r.response["body"] == b"X: 4a\n"
+
+ # check that our h2test-trailer handler works
+ def test_h2_202_10(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/trailer?1024")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ assert len(r.response["body"]) == 1024
+ assert 'trailer' in r.response
+ assert 'trailer-content-length' in r.response['trailer']
+ assert r.response['trailer']['trailer-content-length'] == '1024'
+
+    # check that trailers also work with empty bodies
+ def test_h2_202_11(self, env):
+ url = env.mkurl("https", "cgi", "/h2test/trailer?0")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ assert len(r.response["body"]) == 0, f'{r.response["body"]}'
+ assert 'trailer' in r.response
+ assert 'trailer-content-length' in r.response['trailer']
+ assert r.response['trailer']['trailer-content-length'] == '0'
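+
+# Illustrative sketch only: the uploads above rely on nghttp's repeated
+# '--trailer' option; this hypothetical helper shows how such an option
+# list could be built from a dict of trailer fields.
+def nghttp_trailer_options(trailers):
+    opts = []
+    for name, value in trailers.items():
+        opts += ['--trailer', f'{name}: {value}']
+    return opts  # e.g. {'test': '1'} -> ['--trailer', 'test: 1']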
diff --git a/test/modules/http2/test_203_rfc9113.py b/test/modules/http2/test_203_rfc9113.py
new file mode 100644
index 0000000..9fc8f3b
--- /dev/null
+++ b/test/modules/http2/test_203_rfc9113.py
@@ -0,0 +1,56 @@
+import pytest
+
+from pyhttpd.env import HttpdTestEnv
+from .env import H2Conf
+
+
+class TestRfc9113:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ # by default, we accept leading/trailing ws in request fields
+ def test_h2_203_01_ws_ignore(self, env):
+ url = env.mkurl("https", "test1", "/")
+ r = env.curl_get(url, options=['-H', 'trailing-space: must not '])
+ assert r.exit_code == 0, f'curl output: {r.stderr}'
+ assert r.response["status"] == 200, f'curl output: {r.stdout}'
+ r = env.curl_get(url, options=['-H', 'trailing-space: must not\t'])
+ assert r.exit_code == 0, f'curl output: {r.stderr}'
+ assert r.response["status"] == 200, f'curl output: {r.stdout}'
+
+    # response headers are also handled, but we strip ws before sending
+ @pytest.mark.parametrize(["hvalue", "expvalue", "status"], [
+ ['"123"', '123', 200],
+ ['"123 "', '123', 200], # trailing space stripped
+ ['"123\t"', '123', 200], # trailing tab stripped
+ ['" 123"', '123', 200], # leading space is stripped
+ ['" 123"', '123', 200], # leading spaces are stripped
+ ['"\t123"', '123', 200], # leading tab is stripped
+ ['"expr=%{unescape:123%0A 123}"', '', 500], # illegal char
+ ['" \t "', '', 200], # just ws
+ ])
+ def test_h2_203_02(self, env, hvalue, expvalue, status):
+ hname = 'ap-test-007'
+ conf = H2Conf(env, extras={
+ f'test1.{env.http_tld}': [
+ '<Location /index.html>',
+ f'Header add {hname} {hvalue}',
+ '</Location>',
+ ]
+ })
+ conf.add_vhost_test1(proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_get(url, options=['--http2'])
+ if status == 500 and r.exit_code != 0:
+ # in 2.4.x we fail late on control chars in a response
+ # and RST_STREAM. That's also ok
+ return
+ assert r.response["status"] == status
+ if int(status) < 400:
+ assert r.response["header"][hname] == expvalue
+
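+
+# Rough sketch of the rule under test, not mod_http2's implementation:
+# RFC 9113 forbids leading/trailing SP and HTAB in field values, so the
+# server strips them from response header values before sending.
+def rfc9113_clean_field_value(value: str) -> str:
+    return value.strip(' \t')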
diff --git a/test/modules/http2/test_300_interim.py b/test/modules/http2/test_300_interim.py
new file mode 100644
index 0000000..774ab88
--- /dev/null
+++ b/test/modules/http2/test_300_interim.py
@@ -0,0 +1,40 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestInterimResponses:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().add_vhost_cgi().install()
+ assert env.apache_restart() == 0
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ # check that we normally do not see an interim response
+ def test_h2_300_01(self, env):
+ url = env.mkurl("https", "test1", "/index.html")
+ r = env.curl_post_data(url, 'XYZ')
+ assert r.response["status"] == 200
+ assert "previous" not in r.response
+
+ # check that we see an interim response when we ask for it
+ def test_h2_300_02(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ r = env.curl_post_data(url, 'XYZ', options=["-H", "expect: 100-continue"])
+ assert r.response["status"] == 200
+ assert "previous" in r.response
+ assert 100 == r.response["previous"]["status"]
+
+    # check the proper answer (417) on an unsupported Expect value
+ def test_h2_300_03(self, env):
+ url = env.mkurl("https", "cgi", "/echo.py")
+ r = env.curl_post_data(url, 'XYZ', options=["-H", "expect: the-unexpected"])
+ assert 417 == r.response["status"]
+ assert "previous" not in r.response
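+
+# Minimal sketch of the decision exercised above, not httpd's code: only
+# 'Expect: 100-continue' earns a 100 interim response; any other Expect
+# value is answered with 417 Expectation Failed.
+def expect_yields_interim(expect_value: str) -> bool:
+    return expect_value.strip().lower() == '100-continue'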
diff --git a/test/modules/http2/test_400_push.py b/test/modules/http2/test_400_push.py
new file mode 100644
index 0000000..9c61608
--- /dev/null
+++ b/test/modules/http2/test_400_push.py
@@ -0,0 +1,200 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+# The push tests depend on "nghttp"
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestPush:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).start_vhost(domains=[f"push.{env.http_tld}"],
+ port=env.https_port, doc_root="htdocs/test1"
+ ).add(r"""
+ RewriteEngine on
+ RewriteRule ^/006-push(.*)?\.html$ /006.html
+ <Location /006-push.html>
+ Header add Link "</006/006.css>;rel=preload"
+ Header add Link "</006/006.js>;rel=preloadX"
+ </Location>
+ <Location /006-push2.html>
+ Header add Link "</006/006.css>;rel=preloadX, </006/006.js>; rel=preload"
+ </Location>
+ <Location /006-push3.html>
+ Header add Link "</006/006.css>;rel=preloa,</006/006.js>;rel=preload"
+ </Location>
+ <Location /006-push4.html>
+ Header add Link "</006/006.css;rel=preload, </006/006.js>; preload"
+ </Location>
+ <Location /006-push5.html>
+ Header add Link '</006/006.css>;rel="preload push"'
+ </Location>
+ <Location /006-push6.html>
+ Header add Link '</006/006.css>;rel="push preload"'
+ </Location>
+ <Location /006-push7.html>
+ Header add Link '</006/006.css>;rel="abc preload push"'
+ </Location>
+ <Location /006-push8.html>
+ Header add Link '</006/006.css>;rel="preload"; nopush'
+ </Location>
+ <Location /006-push20.html>
+ H2PushResource "/006/006.css" critical
+ H2PushResource "/006/006.js"
+ </Location>
+ <Location /006-push30.html>
+ H2Push off
+ Header add Link '</006/006.css>;rel="preload"'
+ </Location>
+ <Location /006-push31.html>
+ H2PushResource "/006/006.css" critical
+ </Location>
+ <Location /006-push32.html>
+ Header add Link "</006/006.css>;rel=preload"
+ </Location>
+ """).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ ############################
+ # Link: header handling, various combinations
+
+ # plain resource without configured pushes
+ def test_h2_400_00(self, env):
+ url = env.mkurl("https", "push", "/006.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # 2 link headers configured, only 1 triggers push
+ def test_h2_400_01(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=["-Haccept-encoding: none"])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+ assert 216 == len(promises[0]["response"]["body"])
+
+    # Same as 400_01, but with a single Link header line configured
+ def test_h2_400_02(self, env):
+ url = env.mkurl("https", "push", "/006-push2.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.js' == promises[0]["request"]["header"][":path"]
+
+ # 2 Links, only one with correct rel attribute
+ def test_h2_400_03(self, env):
+ url = env.mkurl("https", "push", "/006-push3.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.js' == promises[0]["request"]["header"][":path"]
+
+ # Missing > in Link header, PUSH not triggered
+ def test_h2_400_04(self, env):
+ url = env.mkurl("https", "push", "/006-push4.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # More than one value in "rel" parameter
+ def test_h2_400_05(self, env):
+ url = env.mkurl("https", "push", "/006-push5.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Another "rel" parameter variation
+ def test_h2_400_06(self, env):
+ url = env.mkurl("https", "push", "/006-push6.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Another "rel" parameter variation
+ def test_h2_400_07(self, env):
+ url = env.mkurl("https", "push", "/006-push7.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+
+ # Pushable link header with "nopush" attribute
+ def test_h2_400_08(self, env):
+ url = env.mkurl("https", "push", "/006-push8.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+    # 2 H2PushResource configs trigger on GET, but not on POST
+ def test_h2_400_20(self, env):
+ url = env.mkurl("https", "push", "/006-push20.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 2 == len(promises)
+
+ fpath = os.path.join(env.gen_dir, "data-400-20")
+ with open(fpath, 'w') as f:
+ f.write("test upload data")
+ r = env.nghttp().upload(url, fpath)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # H2Push configured Off in location
+ def test_h2_400_30(self, env):
+ url = env.mkurl("https", "push", "/006-push30.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # - suppress PUSH
+ def test_h2_400_50(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: none'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+
+ # - default pushes desired
+ def test_h2_400_51(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: default'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+
+ # - HEAD pushes desired
+ def test_h2_400_52(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: head'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert '/006/006.css' == promises[0]["request"]["header"][":path"]
+ assert b"" == promises[0]["response"]["body"]
+ assert 0 == len(promises[0]["response"]["body"])
+
+ # - fast-load pushes desired
+ def test_h2_400_53(self, env):
+ url = env.mkurl("https", "push", "/006-push.html")
+ r = env.nghttp().get(url, options=['-H', 'accept-push-policy: fast-load'])
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
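+
+# Illustrative sketch only, not mod_http2's Link parser: the cases above
+# boil down to "push a linked resource iff it is enclosed in <...>, has a
+# rel containing 'preload', and is not marked 'nopush'".
+def pushable_links(link_value: str):
+    pushes = []
+    for part in link_value.split(','):
+        segs = [s.strip() for s in part.split(';')]
+        if not segs[0].startswith('<') or not segs[0].endswith('>'):
+            continue
+        params = [s.lower() for s in segs[1:]]
+        rels = []
+        for p in params:
+            if p.startswith('rel='):
+                rels = p[len('rel='):].strip('"').split()
+        if 'preload' in rels and 'nopush' not in params:
+            pushes.append(segs[0][1:-1])
+    return pushes  # e.g. '</006/006.css>;rel=preload' -> ['/006/006.css']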
diff --git a/test/modules/http2/test_401_early_hints.py b/test/modules/http2/test_401_early_hints.py
new file mode 100644
index 0000000..5704305
--- /dev/null
+++ b/test/modules/http2/test_401_early_hints.py
@@ -0,0 +1,83 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+# The early hints tests depend on "nghttp"
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestEarlyHints:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ H2Conf(env).start_vhost(domains=[f"hints.{env.http_tld}"],
+ port=env.https_port, doc_root="htdocs/test1"
+ ).add("""
+ H2EarlyHints on
+ RewriteEngine on
+ RewriteRule ^/006-(.*)?\\.html$ /006.html
+ <Location /006-hints.html>
+ H2PushResource "/006/006.css" critical
+ </Location>
+ <Location /006-nohints.html>
+ Header add Link "</006/006.css>;rel=preload"
+ </Location>
+ <Location /006-early.html>
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
+ <Location /006-early-no-push.html>
+ H2Push off
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
+ """).end_vhost(
+ ).install()
+ assert env.apache_restart() == 0
+
+ # H2EarlyHints enabled in general, check that it works for H2PushResource
+ def test_h2_401_31(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-hints.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"]
+
+ # H2EarlyHints enabled in general, but does not trigger on added response headers
+ def test_h2_401_32(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-nohints.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ assert "previous" not in r.response
+
+ # H2EarlyHints enabled in general, check that it works for H2EarlyHint
+ def test_h2_401_33(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-early.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
+ # H2EarlyHints enabled, no PUSH, check that it works for H2EarlyHint
+ def test_h2_401_34(self, env, repeat):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ url = env.mkurl("https", "hints", "/006-early-no-push.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
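+# Small sketch of what tests 401_31/33 assert, using the names from above:
+# the 103 interim response is exposed as r.response['previous'] and carries
+# the early Link header(s).
+def early_hint_link(response):
+    early = response.get('previous')
+    if early and int(early['header'][':status']) == 103:
+        return early['header'].get('link')
+    return None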
diff --git a/test/modules/http2/test_500_proxy.py b/test/modules/http2/test_500_proxy.py
new file mode 100644
index 0000000..88a8ece
--- /dev/null
+++ b/test/modules/http2/test_500_proxy.py
@@ -0,0 +1,157 @@
+import inspect
+import os
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxy:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def local_src(self, fname):
+ return os.path.join(os.path.dirname(inspect.getfile(TestProxy)), fname)
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ def test_h2_500_01(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert r.response["json"]["https"] == ""
+ assert r.response["json"]["ssl_protocol"] == ""
+ assert r.response["json"]["h2"] == ""
+ assert r.response["json"]["h2push"] == ""
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+
+ # why is the scheme wrong?
+ r2 = env.curl_get(re.sub(r'http:', 'https:', r.response["header"]["location"]))
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert r2.response["body"] == src
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_10(self, env, name, repeat):
+ self.curl_upload_and_verify(env, name, ["--http2"])
+
+ def test_h2_500_11(self, env):
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
+
+    # POST some data using nghttp and see it echoed properly back
+ def nghttp_post_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/echo.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.nghttp().upload(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ if r.response["body"] != src:
+ with open(os.path.join(env.gen_dir, "nghttp.out"), 'w') as fd:
+ fd.write(r.outraw.decode())
+ fd.write("\nstderr:\n")
+ fd.write(r.stderr)
+ assert r.response["body"] == src
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_20(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_21(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, ["--no-content-length"])
+
+ # upload and GET again using nghttp, compare to original content
+ def nghttp_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response["header"]["location"]
+
+ # why is the scheme wrong?
+ r2 = env.nghttp().get(re.sub(r'http:', 'https:', r.response["header"]["location"]))
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(self.local_src(fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_22(self, env, name):
+ self.nghttp_upload_and_verify(env, name, [])
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_23(self, env, name):
+ self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
+
+ # upload using nghttp and check returned status
+ def nghttp_upload_stat(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/proxy/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+
+ r = env.nghttp().upload_file(url, fpath, options=options)
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response["header"]["location"]
+
+ def test_h2_500_24(self, env):
+ for i in range(50):
+ self.nghttp_upload_stat(env, "data-1k", ["--no-content-length"])
+
+    # let's do some error tests
+ def test_h2_500_30(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_500_31(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, r
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_500_32(self, env, repeat):
+ url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+ assert r.exit_code != 0, r
diff --git a/test/modules/http2/test_501_proxy_serverheader.py b/test/modules/http2/test_501_proxy_serverheader.py
new file mode 100644
index 0000000..0d7c188
--- /dev/null
+++ b/test/modules/http2/test_501_proxy_serverheader.py
@@ -0,0 +1,36 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyServerHeader:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "Header unset Server",
+ "Header always set Server cgi",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True, h2proxy_self=False)
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def setup_method(self, method):
+ print("setup_method: %s" % method.__name__)
+
+ def teardown_method(self, method):
+ print("teardown_method: %s" % method.__name__)
+
+ def test_h2_501_01(self, env):
+ url = env.mkurl("https", "cgi", "/proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/1.1" == r.response["json"]["protocol"]
+ assert "" == r.response["json"]["https"]
+ assert "" == r.response["json"]["ssl_protocol"]
+ assert "" == r.response["json"]["h2"]
+ assert "" == r.response["json"]["h2push"]
+ assert "cgi" == r.response["header"]["server"]
diff --git a/test/modules/http2/test_502_proxy_port.py b/test/modules/http2/test_502_proxy_port.py
new file mode 100644
index 0000000..f6c6db1
--- /dev/null
+++ b/test/modules/http2/test_502_proxy_port.py
@@ -0,0 +1,41 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyPort:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ 'base': [
+ f'Listen {env.proxy_port}',
+ 'Protocols h2c http/1.1',
+ 'LogLevel proxy_http2:trace2 proxy:trace2',
+ ],
+ f'cgi.{env.http_tld}': [
+ "Header unset Server",
+ "Header always set Server cgi",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=False, h2proxy_self=False)
+ conf.start_vhost(domains=[f"test1.{env.http_tld}"], port=env.proxy_port)
+ conf.add([
+ 'Protocols h2c',
+ 'RewriteEngine On',
+ 'RewriteRule "^/(.*)$" "h2c://%{HTTP_HOST}/$1"[NC,P]',
+ 'ProxyPassMatch / "h2c://$1/"',
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # Test PR 65881
+ # h2c upgraded request via a dynamic proxy onto another port
+ def test_h2_502_01(self, env):
+ url = f'http://localhost:{env.http_port}/hello.py'
+ r = env.curl_get(url, 5, options=['--http2',
+ '--proxy', f'localhost:{env.proxy_port}'])
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
diff --git a/test/modules/http2/test_503_proxy_fwd.py b/test/modules/http2/test_503_proxy_fwd.py
new file mode 100644
index 0000000..478a52d
--- /dev/null
+++ b/test/modules/http2/test_503_proxy_fwd.py
@@ -0,0 +1,79 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyFwd:
+
+ @classmethod
+ def config_fwd_proxy(cls, env, h2_enabled=False):
+ conf = H2Conf(env, extras={
+ 'base': [
+ f'Listen {env.proxy_port}',
+ 'Protocols h2c http/1.1',
+ 'LogLevel proxy_http2:trace2 proxy:trace2',
+ ],
+ })
+ conf.add_vhost_cgi(proxy_self=False, h2proxy_self=False)
+ conf.start_vhost(domains=[f"test1.{env.http_tld}"],
+ port=env.proxy_port, with_ssl=True)
+ conf.add([
+ 'Protocols h2c http/1.1',
+ 'ProxyRequests on',
+ f'H2ProxyRequests {"on" if h2_enabled else "off"}',
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(cls, env):
+ cls.config_fwd_proxy(env)
+
+    # test that the HTTP/1.1 setup is working
+ def test_h2_503_01_proxy_fwd_h1(self, env):
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
+
+ def test_h2_503_02_fwd_proxy_h2_off(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 404
+
+    # test that the HTTP/2 setup is working
+ def test_h2_503_03_proxy_fwd_h2_on(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ self.config_fwd_proxy(env, h2_enabled=True)
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
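+
+# Sketch of the curl option set used in this class, values taken from the
+# tests above (helper name is made up): talk to the TLS forward proxy, pin
+# its hostname to 127.0.0.1, trust the test CA, and optionally negotiate
+# HTTP/2 towards the proxy itself via '--proxy-http2' (curl >= 8.1.0).
+def fwd_proxy_curl_options(env, proxy_host, via_h2=False):
+    opts = [
+        '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+        '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+        '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+    ]
+    return (['--proxy-http2'] + opts) if via_h2 else opts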
diff --git a/test/modules/http2/test_600_h2proxy.py b/test/modules/http2/test_600_h2proxy.py
new file mode 100644
index 0000000..040aef6
--- /dev/null
+++ b/test/modules/http2/test_600_h2proxy.py
@@ -0,0 +1,201 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2Proxy:
+
+ def test_h2_600_01(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ ]
+ })
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == "on"
+ assert r.response["json"]["ssl_protocol"] != ""
+ assert r.response["json"]["h2"] == "on"
+ assert r.response["json"]["h2push"] == "off"
+ assert r.response["json"]["host"] == f"cgi.{env.http_tld}:{env.https_port}"
+
+ def test_h2_600_02(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost on",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == ""
+        # the proxied backend sees the Host header as passed in on the frontend
+ assert r.response["json"]["host"] == f"cgi.{env.http_tld}:{env.https_port}"
+ assert r.response["json"]["h2_original_host"] == ""
+
+ def test_h2_600_03(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost off",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.response["json"]["protocol"] == "HTTP/2.0"
+ assert r.response["json"]["https"] == ""
+        # the proxied backend sees the Host header as used when connecting to it
+ assert r.response["json"]["host"] == f"127.0.0.1:{env.http_port}"
+ assert r.response["json"]["h2_original_host"] == ""
+
+ # check that connection reuse actually happens as configured
+ @pytest.mark.parametrize("enable_reuse", [ "on", "off" ])
+ def test_h2_600_04(self, env, enable_reuse):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f"ProxyPassMatch ^/h2proxy/([0-9]+)/(.*)$ "
+ f" h2c://127.0.0.1:$1/$2 enablereuse={enable_reuse} keepalive=on",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port}/hello.py")
+        # httpd 2.5.0 disables reuse, no matter the config
+ if enable_reuse == "on" and not env.httpd_is_at_least("2.5.0"):
+ # reuse is not guaranteed for each request, but we expect some
+ # to do it and run on a h2 stream id > 1
+ reused = False
+ count = 10
+ r = env.curl_raw([url] * count, 5)
+ response = r.response
+ for n in range(count):
+ assert response["status"] == 200
+ if n == (count - 1):
+ break
+ response = response["previous"]
+ assert r.json[0]["h2_stream_id"] == "1"
+ for n in range(1, count):
+ if int(r.json[n]["h2_stream_id"]) > 1:
+ reused = True
+ break
+ assert reused
+ else:
+ r = env.curl_raw([url, url], 5)
+ assert r.response["previous"]["status"] == 200
+ assert r.response["status"] == 200
+ assert r.json[0]["h2_stream_id"] == "1"
+ assert r.json[1]["h2_stream_id"] == "1"
+
+    # do a more flexible setup from #235 to check proper connection selection
+ @pytest.mark.parametrize("enable_reuse", [ "on", "off" ])
+ def test_h2_600_05(self, env, enable_reuse):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f"ProxyPassMatch ^/h2proxy/([0-9]+)/(.*)$ "
+ f" h2c://127.0.0.1:$1/$2 enablereuse={enable_reuse} keepalive=on",
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.add([
+ f'Listen {env.http_port2}',
+ 'UseCanonicalName On',
+ 'UseCanonicalPhysicalPort On'
+ ])
+ conf.start_vhost(domains=[f'cgi.{env.http_tld}'],
+ port=5004, doc_root="htdocs/cgi")
+ conf.add("AddHandler cgi-script .py")
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port}/hello.py")
+ url2 = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port2}/hello.py")
+ r = env.curl_raw([url, url2], 5)
+ assert r.response["previous"]["status"] == 200
+ assert int(r.json[0]["port"]) == env.http_port
+ assert r.response["status"] == 200
+ exp_port = env.http_port if enable_reuse == "on" \
+ and not env.httpd_is_at_least("2.5.0")\
+ else env.http_port2
+ assert int(r.json[1]["port"]) == exp_port
+
+ # test X-Forwarded-* headers
+ def test_h2_600_06(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost on",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ f"ProxyPass /h1c/ http://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h1c/hello.py")
+ r1 = env.curl_get(url, 5)
+ assert r1.response["status"] == 200
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r2 = env.curl_get(url, 5)
+ assert r2.response["status"] == 200
+ for key in ['x-forwarded-for', 'x-forwarded-host','x-forwarded-server']:
+ assert r1.json[key] == r2.json[key], f'{key} differs proxy_http != proxy_http2'
+
+    # let's do some error tests
+ def test_h2_600_30(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?status=500")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 500
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?error=timeout")
+ r = env.curl_get(url)
+ assert r.exit_code == 0, r
+ assert r.response['status'] == 408
+
+ # produce an error during response body
+ def test_h2_600_31(self, env, repeat):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?body_error=timeout")
+ r = env.curl_get(url)
+        # depending on when the error is detected in proxying, it may RST the
+        # stream (exit_code != 0) or produce a 502 response.
+ if r.exit_code == 0:
+ assert r.response['status'] == 502
+
+ # produce an error, fail to generate an error bucket
+ def test_h2_600_32(self, env, repeat):
+ pytest.skip('only works reliable with r1911964 from trunk')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/error?body_error=timeout&error_bucket=0")
+ r = env.curl_get(url)
+        # depending on when the error is detected in proxying, it may RST the
+        # stream (exit_code != 0) or produce a 502/503 response.
+ if r.exit_code == 0:
+ assert r.response['status'] in [502, 503]
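+
+# Minimal sketch of the reuse check in test_h2_600_04: the first request on
+# a fresh h2 backend connection runs on stream id 1; any id > 1 in later
+# responses implies the proxy reused the backend connection.
+def saw_backend_reuse(stream_ids) -> bool:
+    return any(int(sid) > 1 for sid in stream_ids)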
diff --git a/test/modules/http2/test_601_h2proxy_twisted.py b/test/modules/http2/test_601_h2proxy_twisted.py
new file mode 100644
index 0000000..60f5f7d
--- /dev/null
+++ b/test/modules/http2/test_601_h2proxy_twisted.py
@@ -0,0 +1,99 @@
+import json
+import logging
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2ProxyTwisted:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_01_echo_uploads(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_02_echo_delayed(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_03_echo_fail_early(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?fail_after=512")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ assert r.exit_code == 92 or r.response["status"] == 502
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_04_echo_fail_late(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?fail_after={os.path.getsize(fpath)}")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ if r.exit_code != 0:
+ # H2 stream or partial file error
+ assert r.exit_code == 92 or r.exit_code == 18, f'{r}'
+ else:
+ assert r.response["status"] == 502, f'{r}'
+
+ def test_h2_601_05_echo_fail_many(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ count = 200
+ fpath = os.path.join(env.gen_dir, "data-100k")
+ args = [env.curl, '--parallel', '--parallel-max', '20']
+ for i in range(count):
+ if i > 0:
+ args.append('--next')
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?id={i}&fail_after={os.path.getsize(fpath)}")
+ args.extend(env.curl_resolve_args(url=url))
+ args.extend([
+ '-o', '/dev/null', '-w', '%{json}\\n', '--form', f'file=@{fpath}', url
+ ])
+ log.error(f'run: {args}')
+ r = env.run(args)
+ stats = []
+ for line in r.stdout.splitlines():
+ stats.append(json.loads(line))
+ assert len(stats) == count
+ for st in stats:
+ if st['exitcode'] != 0:
+ # H2 stream or partial file error
+ assert st['exitcode'] == 92 or st['exitcode'] == 18, f'{r}'
+ else:
+ assert st['http_code'] == 502, f'{r}'
diff --git a/test/modules/http2/test_700_load_get.py b/test/modules/http2/test_700_load_get.py
new file mode 100644
index 0000000..78760fb
--- /dev/null
+++ b/test/modules/http2/test_700_load_get.py
@@ -0,0 +1,63 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(not H2TestEnv().h2load_is_at_least('1.41.0'),
+ reason="h2load misses --connect-to option")
+class TestLoadGet:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi().add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["started"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["done"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["succeeded"], f'{r.results}'
+ assert n == r.results["h2load"]["status"]["2xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["3xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["4xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["5xx"], f'{r.results}'
+
+ # test load on cgi script, single connection, different sizes
+ @pytest.mark.parametrize("start", [
+ 1000, 80000
+ ])
+ def test_h2_700_10(self, env, start):
+ assert env.is_live()
+ text = "X"
+ chunk = 32
+ for n in range(0, 5):
+ args = [env.h2load, "-n", "%d" % chunk, "-c", "1", "-m", "10",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={env.mkurl('https', 'cgi', '/')}",
+ ]
+ for i in range(0, chunk):
+ args.append(env.mkurl("https", "cgi", ("/mnot164.py?count=%d&text=%s" % (start+(n*chunk)+i, text))))
+ r = env.run(args)
+ self.check_h2load_ok(env, r, chunk)
+
+    # test load on cgi script, varying number of connections
+ @pytest.mark.parametrize("conns", [
+ 1, 2, 16
+ ])
+ def test_h2_700_11(self, env, conns):
+ assert env.is_live()
+ text = "X"
+ start = 1200
+ chunk = 64
+ for n in range(0, 5):
+ args = [env.h2load, "-n", "%d" % chunk, "-c", "%d" % conns, "-m", "10",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={env.mkurl('https', 'cgi', '/')}",
+ ]
+ for i in range(0, chunk):
+ args.append(env.mkurl("https", "cgi", ("/mnot164.py?count=%d&text=%s" % (start+(n*chunk)+i, text))))
+ r = env.run(args)
+ self.check_h2load_ok(env, r, chunk)
diff --git a/test/modules/http2/test_710_load_post_static.py b/test/modules/http2/test_710_load_post_static.py
new file mode 100644
index 0000000..ad8ae96
--- /dev/null
+++ b/test/modules/http2/test_710_load_post_static.py
@@ -0,0 +1,65 @@
+import pytest
+import os
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestLoadPostStatic:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_test1().install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"]
+ assert n == r.results["h2load"]["requests"]["started"]
+ assert n == r.results["h2load"]["requests"]["done"]
+ assert n == r.results["h2load"]["requests"]["succeeded"]
+ assert n == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # test POST on static file, slurped in by server
+ def test_h2_710_00(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 10
+ m = 1
+ conn = 1
+ fname = "data-10k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ def test_h2_710_01(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 1000
+ m = 100
+ conn = 1
+ fname = "data-1k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ def test_h2_710_02(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/index.html")
+ n = 100
+ m = 50
+ conn = 1
+ fname = "data-100k"
+ args = [env.h2load, "-n", f"{n}", "-c", f"{conn}", "-m", f"{m}",
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
diff --git a/test/modules/http2/test_711_load_post_cgi.py b/test/modules/http2/test_711_load_post_cgi.py
new file mode 100644
index 0000000..82529d1
--- /dev/null
+++ b/test/modules/http2/test_711_load_post_cgi.py
@@ -0,0 +1,73 @@
+import pytest
+import os
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestLoadCgi:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def check_h2load_ok(self, env, r, n):
+ assert 0 == r.exit_code
+ r = env.h2load_status(r)
+ assert n == r.results["h2load"]["requests"]["total"]
+ assert n == r.results["h2load"]["requests"]["started"]
+ assert n == r.results["h2load"]["requests"]["done"]
+ assert n == r.results["h2load"]["requests"]["succeeded"]
+ assert n == r.results["h2load"]["status"]["2xx"]
+ assert 0 == r.results["h2load"]["status"]["3xx"]
+ assert 0 == r.results["h2load"]["status"]["4xx"]
+ assert 0 == r.results["h2load"]["status"]["5xx"]
+
+ # test POST on cgi, where input is read
+ def test_h2_711_10(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ # test POST on cgi via http/1.1 proxy, where input is read
+ def test_h2_711_11(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/proxy/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
+
+ # test POST on cgi via h2proxy, where input is read
+ def test_h2_711_12(self, env, repeat):
+ assert env.is_live()
+ url = env.mkurl("https", "test1", "/h2proxy/echo.py")
+ n = 100
+ m = 5
+ conn = 1
+ fname = "data-100k"
+ args = [
+ env.h2load, "-n", str(n), "-c", str(conn), "-m", str(m),
+ f"--base-uri={env.https_base_url}",
+ "-d", os.path.join(env.gen_dir, fname), url
+ ]
+ r = env.run(args)
+ self.check_h2load_ok(env, r, n)
diff --git a/test/modules/http2/test_712_buffering.py b/test/modules/http2/test_712_buffering.py
new file mode 100644
index 0000000..6658441
--- /dev/null
+++ b/test/modules/http2/test_712_buffering.py
@@ -0,0 +1,48 @@
+from datetime import timedelta
+
+import pytest
+
+from .env import H2Conf, H2TestEnv
+from pyhttpd.curl import CurlPiper
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestBuffering:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env)
+ conf.add_vhost_cgi(h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.skip(reason="this test shows unreliable jitter")
+ def test_h2_712_01(self, env):
+ # test gRPC like requests that do not end, but give answers, see #207
+ #
+ # this test works like this:
+ # - use curl to POST data to the server /h2test/echo
+ # - feed curl the data in chunks, wait a bit between chunks
+ # - since some buffering on curl's stdout to Python is involved,
+ # we will see the response data only at the end.
+ # - therefore, we enable tracing with timestamps in curl on stderr
+ # and see when the response chunks arrive
+ # - if the server sends the incoming data chunks back right away,
+ # as it should, we see receiving timestamps separated roughly by the
+ # wait time between sends.
+ #
+ url = env.mkurl("https", "cgi", "/h2test/echo")
+ base_chunk = "0123456789"
+ chunks = ["chunk-{0:03d}-{1}\n".format(i, base_chunk) for i in range(5)]
+ stutter = timedelta(seconds=0.2) # this is short, but works on my machine (tm)
+ piper = CurlPiper(env=env, url=url)
+ piper.stutter_check(chunks, stutter)
+
+ def test_h2_712_02(self, env):
+ # same as 712_01 but via mod_proxy_http2
+ #
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+ base_chunk = "0123456789"
+ chunks = ["chunk-{0:03d}-{1}\n".format(i, base_chunk) for i in range(3)]
+ stutter = timedelta(seconds=1) # need a bit more delay since we have the extra connection
+ piper = CurlPiper(env=env, url=url)
+ piper.stutter_check(chunks, stutter)
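+
+# Sketch of the idea behind CurlPiper.stutter_check() as used above: given
+# per-chunk receive timestamps, the gaps should roughly match the stutter
+# between sends if the server echoes without buffering the whole body.
+def arrival_gaps(timestamps):
+    return [t2 - t1 for t1, t2 in zip(timestamps, timestamps[1:])]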
diff --git a/test/modules/http2/test_800_websockets.py b/test/modules/http2/test_800_websockets.py
new file mode 100644
index 0000000..5b46da8
--- /dev/null
+++ b/test/modules/http2/test_800_websockets.py
@@ -0,0 +1,363 @@
+import inspect
+import logging
+import os
+import shutil
+import subprocess
+import time
+from datetime import timedelta, datetime
+from typing import Tuple, List
+import packaging.version
+
+import pytest
+import websockets
+from pyhttpd.result import ExecResult
+from pyhttpd.ws_util import WsFrameReader, WsFrame
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+ws_version = packaging.version.parse(websockets.version.version)
+ws_version_min = packaging.version.Version('10.4')
+
+
+def ws_run(env: H2TestEnv, path, authority=None, do_input=None, inbytes=None,
+ send_close=True, timeout=5, scenario='ws-stdin',
+ wait_close: float = 0.0) -> Tuple[ExecResult, List[str], List[WsFrame]]:
+ """ Run the h2ws test client in various scenarios with given input and
+ timings.
+ :param env: the test environment
+    :param path: the path on the Apache server to CONNECT to
+    :param authority: the host:port to use as the request ':authority'
+ :param do_input: a Callable for sending input to h2ws
+ :param inbytes: fixed bytes to send to h2ws, unless do_input is given
+ :param send_close: send a CLOSE WebSockets frame at the end
+ :param timeout: timeout for waiting on h2ws to finish
+ :param scenario: name of scenario h2ws should run in
+ :param wait_close: time to wait before closing input
+ :return: ExecResult with exit_code/stdout/stderr of run
+ """
+ h2ws = os.path.join(env.clients_dir, 'h2ws')
+ if not os.path.exists(h2ws):
+        pytest.fail(f'test client not built: {h2ws}')
+ if authority is None:
+ authority = f'cgi.{env.http_tld}:{env.http_port}'
+ args = [
+ h2ws, '-vv', '-c', f'localhost:{env.http_port}',
+ f'ws://{authority}{path}',
+ scenario
+ ]
+    # we write all output to files, because we manipulate input timings
+    # and would otherwise run into deadlocks with h2ws blocking on writes
+    # when its output is not consumed
+ start = datetime.now()
+ with open(f'{env.gen_dir}/h2ws.stdout', 'w') as fdout:
+ with open(f'{env.gen_dir}/h2ws.stderr', 'w') as fderr:
+ proc = subprocess.Popen(args=args, stdin=subprocess.PIPE,
+ stdout=fdout, stderr=fderr)
+ if do_input is not None:
+ do_input(proc)
+ elif inbytes is not None:
+ proc.stdin.write(inbytes)
+ proc.stdin.flush()
+
+ if wait_close > 0:
+ time.sleep(wait_close)
+ try:
+ inbytes = WsFrame.client_close(code=1000).to_network() if send_close else None
+ proc.communicate(input=inbytes, timeout=timeout)
+ except subprocess.TimeoutExpired:
+ log.error(f'ws_run: timeout expired')
+ proc.kill()
+ proc.communicate(timeout=timeout)
+ end = datetime.now()
+ lines = open(f'{env.gen_dir}/h2ws.stdout').read().splitlines()
+ infos = [line for line in lines if line.startswith('[1] ')]
+ hex_content = ' '.join([line for line in lines if not line.startswith('[1] ')])
+ if len(infos) > 0 and infos[0] == '[1] :status: 200':
+ frames = WsFrameReader.parse(bytearray.fromhex(hex_content))
+ else:
+ frames = bytearray.fromhex(hex_content)
+ return ExecResult(args=args, exit_code=proc.returncode,
+ stdout=b'', stderr=b'', duration=end - start), infos, frames
+
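+# Convenience sketch, not used by the tests below: pull the ':status' of
+# stream 1 out of the info lines ws_run() returns, or None if the stream
+# was reset before a response arrived.
+def ws_status(infos):
+    for line in infos:
+        if line.startswith('[1] :status: '):
+            return int(line.rsplit(': ', 1)[-1])
+    return None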
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(condition=not H2TestEnv().httpd_is_at_least("2.4.58"),
+ reason=f'need at least httpd 2.4.58 for this')
+@pytest.mark.skipif(condition=ws_version < ws_version_min,
+ reason=f'websockets is {ws_version}, need at least {ws_version_min}')
+class TestWebSockets:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # Apache config that CONNECT proxies a WebSocket server for paths starting
+ # with '/ws/'
+ # The WebSocket server is started in pytest fixture 'ws_server' below.
+ conf = H2Conf(env, extras={
+ 'base': [
+ 'Timeout 1',
+ ],
+ f'cgi.{env.http_tld}': [
+ f' H2WebSockets on',
+ f' ProxyPass /ws/ http://127.0.0.1:{env.ws_port}/ \\',
+ f' upgrade=websocket timeout=10',
+ f' ReadBufferSize 65535'
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ conf.add_vhost_test1(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def ws_check_alive(self, env, timeout=5):
+ url = f'http://localhost:{env.ws_port}/'
+ end = datetime.now() + timedelta(seconds=timeout)
+ while datetime.now() < end:
+ r = env.curl_get(url, 5)
+ if r.exit_code == 0:
+ return True
+ time.sleep(.1)
+ return False
+
+ def _mkpath(self, path):
+ if not os.path.exists(path):
+ return os.makedirs(path)
+
+ def _rmrf(self, path):
+ if os.path.exists(path):
+ return shutil.rmtree(path)
+
+ @pytest.fixture(autouse=True, scope='class')
+ def ws_server(self, env):
+ # Run our python websockets server that has some special behaviour
+ # for the different path to CONNECT to.
+ run_dir = os.path.join(env.gen_dir, 'ws-server')
+ err_file = os.path.join(run_dir, 'stderr')
+ self._rmrf(run_dir)
+ self._mkpath(run_dir)
+ with open(err_file, 'w') as cerr:
+ cmd = os.path.join(os.path.dirname(inspect.getfile(TestWebSockets)),
+ 'ws_server.py')
+ args = ['python3', cmd, '--port', str(env.ws_port)]
+ p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr,
+ stdout=cerr)
+ if not self.ws_check_alive(env):
+ p.kill()
+ p.wait()
+ pytest.fail(f'ws_server did not start. stderr={open(err_file).readlines()}')
+ yield
+ p.terminate()
+
+ # CONNECT with invalid :protocol header, must fail
+ def test_h2_800_01_fail_proto(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='fail-proto')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+ env.httpd_error_log.ignore_recent()
+
+ # a correct CONNECT, send CLOSE, expect CLOSE, basic success
+ def test_h2_800_02_ws_empty(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a URL path that does not exist on the server
+ def test_h2_800_03_not_found(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/does-not-exist')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 404', '[1] EOF'], f'{r}'
+
+ # CONNECT to a URL path that is a normal HTTP file resource
+ # we do not want to receive the body of that
+ def test_h2_800_04_non_ws_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/alive.json')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+ # CONNECT to a URL path that sends a delayed HTTP response body
+ # we do not want to receive the body of that
+ def test_h2_800_05_non_ws_delay_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/h2test/error?body_delay=100ms')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+ # CONNECT missing the sec-websocket-version header
+ def test_h2_800_06_miss_version(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-version')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 400', '[1] EOF'], f'{r}'
+
+ # CONNECT missing the :path header
+ def test_h2_800_07_miss_path(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-path')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :scheme header
+ def test_h2_800_08_miss_scheme(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-scheme')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :authority header
+ def test_h2_800_09a_miss_authority(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-authority')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT to authority with disabled websockets
+ def test_h2_800_09b_unsupported(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/',
+ authority=f'test1.{env.http_tld}:{env.http_port}')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+
+ # CONNECT and exchange a PING
+ def test_h2_800_10_ws_ping(self, env: H2TestEnv, ws_server):
+ ping = WsFrame.client_ping(b'12345')
+ r, infos, frames = ws_run(env, path='/ws/echo/', inbytes=ping.to_network())
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[0].data == ping.data, f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
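+
+ # WsFrame.client_ping(...).to_network() is assumed to produce a masked
+ # client frame as defined in RFC 6455: FIN + PING opcode, MASK bit, a
+ # 4 byte masking key and the XOR-masked payload. A minimal sketch of
+ # that layout (for payloads shorter than 126 bytes):
+ #
+ #   def masked_ping(payload: bytes) -> bytes:
+ #       key = os.urandom(4)                  # client frames must be masked
+ #       head = bytes([0x80 | 0x09,           # FIN=1, opcode=0x9 (PING)
+ #                     0x80 | len(payload)])  # MASK=1, 7-bit length
+ #       return head + key + bytes(b ^ key[i % 4] for i, b in enumerate(payload))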
+
+ # CONNECT and send several PINGs with a delay of 200ms
+ def test_h2_800_11_ws_timed_pings(self, env: H2TestEnv, ws_server):
+ frame_count = 5
+ ping = WsFrame.client_ping(b'12345')
+
+ def do_send(proc):
+ for _ in range(frame_count):
+ try:
+ proc.stdin.write(ping.to_network())
+ proc.stdin.flush()
+ # use the 0.2s wait timeout as the inter-PING delay; TimeoutExpired
+ # is the expected case while the client is still running
+ proc.wait(timeout=0.2)
+ except subprocess.TimeoutExpired:
+ pass
+
+ r, infos, frames = ws_run(env, path='/ws/echo/', do_input=do_send)
+ assert r.exit_code == 0
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == frame_count + 1, f'{frames}'
+ assert frames[-1].opcode == WsFrame.CLOSE, f'{frames}'
+ for i in range(frame_count):
+ assert frames[i].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[i].data == ping.data, f'{frames}'
+
+ # CONNECT to path that closes immediately
+ def test_h2_800_12_ws_unknown(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/unknown')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ # expect a CLOSE with code=4999, reason='path unknown'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+ assert frames[0].data[2:].decode() == 'path unknown', f'{frames}'
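+
+ # The CLOSE payload is laid out as defined in RFC 6455: a 2 byte big-endian
+ # status code followed by a UTF-8 encoded reason, which is why the reason
+ # starts at data[2:]. A sketch of decoding both parts:
+ #
+ #   code = int.from_bytes(frames[0].data[:2], 'big')  # expected: 4999
+ #   reason = frames[0].data[2:].decode()              # 'path unknown'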
+
+ # CONNECT to a path that sends us 1 TEXT frame
+ def test_h2_800_13_ws_text(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/text/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.TEXT, f'{frames}'
+ assert frames[0].data.decode() == 'hello!', f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a path that sends us a named file in BINARY frames
+ @pytest.mark.parametrize("fname,flen", [
+ ("data-1k", 1000),
+ ("data-10k", 10000),
+ ("data-100k", 100*1000),
+ ("data-1m", 1000*1000),
+ ])
+ def test_h2_800_14_ws_file(self, env: H2TestEnv, ws_server, fname, flen):
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with a 1MB file, requesting varying BINARY frame lengths
+ @pytest.mark.parametrize("frame_len", [
+ 1000 * 1024,
+ 100 * 1024,
+ 10 * 1024,
+ 1 * 1024,
+ 512,
+ ])
+ def test_h2_800_15_ws_frame_len(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with 1MB file and trigger delays between BINARY frame writes
+ @pytest.mark.parametrize("frame_delay", [
+ 1,
+ 10,
+ 50,
+ 100,
+ ])
+ def test_h2_800_16_ws_frame_delay(self, env: H2TestEnv, ws_server, frame_delay):
+ fname = "data-1m"
+ flen = 1000*1000
+ # adjust frame_len to allow for 1 second overall duration
+ frame_len = int(flen / (1000 / frame_delay))
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=1.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
+ # CONNECT to path sending a 1MB file several times, measuring throughput
+ # for varying BINARY frame lengths
+ @pytest.mark.parametrize("frame_len", [
+ 64 * 1024,
+ 16 * 1024,
+ 1 * 1024,
+ ])
+ def test_h2_800_17_ws_throughput(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ ncount = 5
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/0/{ncount}',
+ wait_close=0.1, send_close=False, timeout=30)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == ncount * flen, f'{frames}\n{r}'
+ # to see these logged, invoke: `pytest -o log_cli=true`
+ log.info(f'throughput (frame-len={frame_len}): '
+ f'{(total_len / (1024*1024)) / r.duration.total_seconds():0.2f} MB/s')
+
+ # Check that the tunnel timeout is observed, e.g. the longer ProxyPass
+ # timeout holds and the 1sec client connection Timeout does not trigger
+ def test_h2_800_18_timeout(self, env: H2TestEnv, ws_server):
+ fname = "data-10k"
+ frame_delay = 1500
+ flen = 10*1000
+ frame_len = 8192
+ # the 1.5s delay between frames exceeds the base 'Timeout 1', so only the
+ # tunnel timeout keeps the connection open
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=2)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
diff --git a/test/modules/http2/ws_server.py b/test/modules/http2/ws_server.py
new file mode 100644
index 0000000..99fb9cf
--- /dev/null
+++ b/test/modules/http2/ws_server.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+import argparse
+import asyncio
+import logging
+import os
+import sys
+import time
+
+import websockets.server as ws_server
+from websockets.exceptions import ConnectionClosedError
+
+log = logging.getLogger(__name__)
+
+logging.basicConfig(
+ format="[%(asctime)s] %(message)s",
+ level=logging.DEBUG,
+)
+
+
+async def echo(websocket):
+ try:
+ async for message in websocket:
+ try:
+ log.info(f'got request {message}')
+ except Exception as e:
+ log.error(f'error {e} getting path from {message}')
+ await websocket.send(message)
+ except ConnectionClosedError:
+ pass
+
+
+async def on_async_conn(conn):
+ rpath = str(conn.path)
+ pcomps = rpath[1:].split('/')
+ if len(pcomps) == 0 or pcomps[0] == '':
+ pcomps = ['echo'] # default handler
+ log.info(f'connection for {pcomps}')
+ if pcomps[0] == 'echo':
+ log.info('/echo endpoint')
+ # echo back every incoming message until the peer closes
+ async for message in conn:
+ await conn.send(message)
+ elif pcomps[0] == 'text':
+ await conn.send('hello!')
+ elif pcomps[0] == 'file':
+ if len(pcomps) < 2:
+ await conn.close(code=4999, reason='unknown file')
+ return
+ fpath = os.path.join('../', pcomps[1])
+ if not os.path.exists(fpath):
+ await conn.close(code=4999, reason='file not found')
+ return
+ bufsize = 0
+ if len(pcomps) > 2:
+ bufsize = int(pcomps[2])
+ if bufsize <= 0:
+ bufsize = 16*1024
+ delay_ms = 0
+ if len(pcomps) > 3:
+ delay_ms = int(pcomps[3])
+ n = 1
+ if len(pcomps) > 4:
+ n = int(pcomps[4])
+ for _ in range(n):
+ with open(fpath, 'rb') as fd:
+ while True:
+ buf = fd.read(bufsize)
+ if buf is None or len(buf) == 0:
+ break
+ await conn.send(buf)
+ if delay_ms > 0:
+ # pace the frames without blocking the event loop
+ await asyncio.sleep(delay_ms/1000)
+ else:
+ log.info(f'unknown endpoint: {rpath}')
+ await conn.close(code=4999, reason='path unknown')
+ await conn.close(code=1000, reason='')
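+
+
+# The '/file/...' endpoint above encodes its parameters in the request path:
+# /file/<name>[/<frame_len>[/<delay_ms>[/<repeat>]]]. A minimal client sketch
+# (not used by the tests, purely illustrative; host/port are assumptions)
+# that drains such a transfer with the same 'websockets' package:
+#
+#   import asyncio, websockets
+#
+#   async def fetch(port, name, frame_len=16384, delay_ms=0, repeat=1):
+#       uri = f'ws://localhost:{port}/file/{name}/{frame_len}/{delay_ms}/{repeat}'
+#       total = 0
+#       async with websockets.connect(uri) as conn:
+#           async for msg in conn:
+#               total += len(msg)
+#       return total
+#
+#   # asyncio.run(fetch(8082, 'data-1m'))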
+
+
+async def run_server(port):
+ log.info(f'starting server on port {port}')
+ async with ws_server.serve(ws_handler=on_async_conn,
+ host="localhost", port=port):
+ await asyncio.Future()
+
+
+async def main():
+ parser = argparse.ArgumentParser(prog='ws_server',
+ description="Run a websocket test server.")
+ parser.add_argument("--port", type=int,
+ default=0, help="port to listen on")
+ args = parser.parse_args()
+
+ if args.port == 0:
+ sys.stderr.write('need --port\n')
+ sys.exit(1)
+
+ logging.basicConfig(
+ format="%(asctime)s %(message)s",
+ level=logging.DEBUG,
+ )
+ await run_server(args.port)
+
+
+if __name__ == "__main__":
+ asyncio.run(main())