author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 10:03:17 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 10:03:17 +0000
commit    69f568eb1183a2a1f5148e6db34a8d42e0e52ff6 (patch)
tree      fdeb0b5ff80273f95ce61607fc3613dff0b9a235 /test
parent    Adding upstream version 2.4.57. (diff)
download  apache2-69f568eb1183a2a1f5148e6db34a8d42e0e52ff6.tar.xz
          apache2-69f568eb1183a2a1f5148e6db34a8d42e0e52ff6.zip

Adding upstream version 2.4.59. (upstream/2.4.59, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test')
-rw-r--r--  test/clients/.gitignore | 1
-rw-r--r--  test/clients/Makefile.in | 20
-rw-r--r--  test/clients/h2ws.c | 1110
-rw-r--r--  test/modules/http2/env.py | 3
-rw-r--r--  test/modules/http2/htdocs/cgi/echohd.py | 17
-rw-r--r--  test/modules/http2/htdocs/cgi/env.py | 17
-rw-r--r--  test/modules/http2/htdocs/cgi/hecho.py | 17
-rw-r--r--  test/modules/http2/htdocs/cgi/hello.py | 31
-rw-r--r--  test/modules/http2/htdocs/cgi/mnot164.py | 17
-rw-r--r--  test/modules/http2/htdocs/cgi/necho.py | 24
-rw-r--r--  test/modules/http2/htdocs/cgi/requestparser.py | 57
-rw-r--r--  test/modules/http2/htdocs/cgi/ssi/include.inc | 1
-rw-r--r--  test/modules/http2/htdocs/cgi/ssi/test.html | 9
-rw-r--r--  test/modules/http2/htdocs/cgi/upload.py | 29
-rw-r--r--  test/modules/http2/htdocs/cgi/xxx/test.json | 1
-rw-r--r--  test/modules/http2/mod_h2test/mod_h2test.c | 69
-rw-r--r--  test/modules/http2/test_003_get.py | 2
-rw-r--r--  test/modules/http2/test_004_post.py | 72
-rw-r--r--  test/modules/http2/test_007_ssi.py | 43
-rw-r--r--  test/modules/http2/test_008_ranges.py | 189
-rw-r--r--  test/modules/http2/test_009_timing.py | 74
-rw-r--r--  test/modules/http2/test_101_ssl_reneg.py | 8
-rw-r--r--  test/modules/http2/test_104_padding.py | 42
-rw-r--r--  test/modules/http2/test_105_timeout.py | 41
-rw-r--r--  test/modules/http2/test_106_shutdown.py | 14
-rw-r--r--  test/modules/http2/test_107_frame_lengths.py | 51
-rw-r--r--  test/modules/http2/test_200_header_invalid.py | 113
-rw-r--r--  test/modules/http2/test_401_early_hints.py | 36
-rw-r--r--  test/modules/http2/test_500_proxy.py | 58
-rw-r--r--  test/modules/http2/test_503_proxy_fwd.py | 79
-rw-r--r--  test/modules/http2/test_600_h2proxy.py | 34
-rw-r--r--  test/modules/http2/test_601_h2proxy_twisted.py | 99
-rw-r--r--  test/modules/http2/test_700_load_get.py | 18
-rw-r--r--  test/modules/http2/test_712_buffering.py | 12
-rw-r--r--  test/modules/http2/test_800_websockets.py | 363
-rw-r--r--  test/modules/http2/ws_server.py | 104
-rw-r--r--  test/modules/proxy/test_01_http.py | 2
-rw-r--r--  test/modules/proxy/test_02_unix.py | 2
-rw-r--r--  test/modules/tls/env.py | 4
-rwxr-xr-x  test/modules/tls/htdocs/a.mod-tls.test/vars.py | 20
-rwxr-xr-x  test/modules/tls/htdocs/b.mod-tls.test/vars.py | 20
-rw-r--r--  test/modules/tls/test_04_get.py | 2
-rw-r--r--  test/modules/tls/test_05_proto.py | 12
-rw-r--r--  test/pyhttpd/conf/httpd.conf.template | 2
-rw-r--r--  test/pyhttpd/conf/mime.types | 2
-rw-r--r--  test/pyhttpd/config.ini.in | 1
-rw-r--r--  test/pyhttpd/curl.py | 9
-rw-r--r--  test/pyhttpd/env.py | 122
-rw-r--r--  test/pyhttpd/nghttp.py | 44
-rw-r--r--  test/pyhttpd/result.py | 9
-rw-r--r--  test/pyhttpd/ws_util.py | 137
-rwxr-xr-x  test/travis_before_linux.sh | 19
-rwxr-xr-x  test/travis_run_linux.sh | 32
53 files changed, 2915 insertions, 399 deletions
diff --git a/test/clients/.gitignore b/test/clients/.gitignore
new file mode 100644
index 0000000..18b1263
--- /dev/null
+++ b/test/clients/.gitignore
@@ -0,0 +1 @@
+h2ws
\ No newline at end of file
diff --git a/test/clients/Makefile.in b/test/clients/Makefile.in
new file mode 100644
index 0000000..a322a58
--- /dev/null
+++ b/test/clients/Makefile.in
@@ -0,0 +1,20 @@
+DISTCLEAN_TARGETS = h2ws
+
+CLEAN_TARGETS = h2ws
+
+bin_PROGRAMS = h2ws
+TARGETS = $(bin_PROGRAMS)
+
+PROGRAM_LDADD = $(UTIL_LDFLAGS) $(PROGRAM_DEPENDENCIES) $(EXTRA_LIBS) $(AP_LIBS)
+PROGRAM_DEPENDENCIES =
+
+include $(top_builddir)/build/rules.mk
+
+h2ws.lo: h2ws.c
+ $(LIBTOOL) --mode=compile $(CC) $(ab_CFLAGS) $(ALL_CFLAGS) $(ALL_CPPFLAGS) \
+ $(ALL_INCLUDES) $(PICFLAGS) $(LTCFLAGS) -c $< && touch $@
+h2ws_OBJECTS = h2ws.lo
+h2ws_LDADD = -lnghttp2
+h2ws: $(h2ws_OBJECTS)
+ $(LIBTOOL) --mode=link $(CC) $(ALL_CFLAGS) $(PILDFLAGS) \
+ $(LT_LDFLAGS) $(ALL_LDFLAGS) -o $@ $(h2ws_LTFLAGS) $(h2ws_OBJECTS) $(h2ws_LDADD)
diff --git a/test/clients/h2ws.c b/test/clients/h2ws.c
new file mode 100644
index 0000000..1de3876
--- /dev/null
+++ b/test/clients/h2ws.c
@@ -0,0 +1,1110 @@
+/* Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <apr.h>
+
+#include <assert.h>
+#include <inttypes.h>
+#include <stdlib.h>
+#ifdef APR_HAVE_UNISTD_H
+# include <unistd.h>
+#endif /* HAVE_UNISTD_H */
+#ifdef APR_HAVE_FCNTL_H
+# include <fcntl.h>
+#endif /* HAVE_FCNTL_H */
+#include <sys/types.h>
+#include <sys/time.h>
+#ifdef APR_HAVE_SYS_SOCKET_H
+# include <sys/socket.h>
+#endif /* HAVE_SYS_SOCKET_H */
+#ifdef APR_HAVE_NETDB_H
+# include <netdb.h>
+#endif /* HAVE_NETDB_H */
+#ifdef APR_HAVE_NETINET_IN_H
+# include <netinet/in.h>
+#endif /* HAVE_NETINET_IN_H */
+#include <netinet/tcp.h>
+#include <poll.h>
+#include <signal.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <errno.h>
+
+#include <nghttp2/nghttp2.h>
+
+#define MAKE_NV(NAME, VALUE) \
+ { \
+ (uint8_t *)NAME, (uint8_t *)VALUE, sizeof(NAME) - 1, sizeof(VALUE) - 1, \
+ NGHTTP2_NV_FLAG_NONE \
+ }
+
+#define MAKE_NV_CS(NAME, VALUE) \
+ { \
+ (uint8_t *)NAME, (uint8_t *)VALUE, sizeof(NAME) - 1, strlen(VALUE), \
+ NGHTTP2_NV_FLAG_NONE \
+ }
+
+
+static int verbose;
+static const char *cmd;
+
+static void log_out(const char *level, const char *where, const char *msg)
+{
+ struct timespec tp;
+ struct tm tm;
+ char timebuf[128];
+
+ clock_gettime(CLOCK_REALTIME, &tp);
+ localtime_r(&tp.tv_sec, &tm);
+ strftime(timebuf, sizeof(timebuf)-1, "%H:%M:%S", &tm);
+ fprintf(stderr, "[%s.%09lu][%s][%s] %s\n", timebuf, tp.tv_nsec, level, where, msg);
+}
+
+static void log_err(const char *where, const char *msg)
+{
+ log_out("ERROR", where, msg);
+}
+
+static void log_info(const char *where, const char *msg)
+{
+ if (verbose)
+ log_out("INFO", where, msg);
+}
+
+static void log_debug(const char *where, const char *msg)
+{
+ if (verbose > 1)
+ log_out("DEBUG", where, msg);
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_errf(const char *where, const char *msg, ...)
+{
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_err(where, buffer);
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_infof(const char *where, const char *msg, ...)
+{
+ if (verbose) {
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_info(where, buffer);
+ }
+}
+
+#if defined(__GNUC__)
+ __attribute__((format(printf, 2, 3)))
+#endif
+static void log_debugf(const char *where, const char *msg, ...)
+{
+ if (verbose > 1) {
+ char buffer[8*1024];
+ va_list ap;
+
+ va_start(ap, msg);
+ vsnprintf(buffer, sizeof(buffer), msg, ap);
+ va_end(ap);
+ log_debug(where, buffer);
+ }
+}
+
+static int parse_host_port(const char **phost, uint16_t *pport,
+ int *pipv6, size_t *pconsumed,
+ const char *s, size_t len, uint16_t def_port)
+{
+ size_t i, offset;
+ char *host = NULL;
+ int port = 0;
+ int rv = 1, ipv6 = 0;
+
+ if (!len)
+ goto leave;
+ offset = 0;
+ if (s[offset] == '[') {
+ ipv6 = 1;
+ for (i = offset++; i < len; ++i) {
+ if (s[i] == ']')
+ break;
+ }
+ if (i >= len || i == offset)
+ goto leave;
+ host = strndup(s + offset, i - offset);
+ offset = i + 1;
+ }
+ else {
+ for (i = offset; i < len; ++i) {
+ if (strchr(":/?#", s[i]))
+ break;
+ }
+ if (i == offset) {
+ log_debugf("parse_uri", "empty host name in '%.*s", (int)len, s);
+ goto leave;
+ }
+ host = strndup(s + offset, i - offset);
+ offset = i;
+ }
+ if (offset < len && s[offset] == ':') {
+ port = 0;
+ ++offset;
+ for (i = offset; i < len; ++i) {
+ if (strchr("/?#", s[i]))
+ break;
+ if (s[i] < '0' || s[i] > '9') {
+ log_debugf("parse_uri", "invalid port char '%c'", s[i]);
+ goto leave;
+ }
+ port *= 10;
+ port += s[i] - '0';
+ if (port > 65535) {
+ log_debugf("parse_uri", "invalid port number '%d'", port);
+ goto leave;
+ }
+ }
+ offset = i;
+ }
+ rv = 0;
+
+leave:
+ *phost = rv? NULL : host;
+ *pport = rv? 0 : (port? (uint16_t)port : def_port);
+ if (pipv6)
+ *pipv6 = ipv6;
+ if (pconsumed)
+ *pconsumed = offset;
+ return rv;
+}
+
+struct uri {
+ const char *scheme;
+ const char *host;
+ const char *authority;
+ const char *path;
+ uint16_t port;
+ int ipv6;
+};
+
+static int parse_uri(struct uri *uri, const char *s, size_t len)
+{
+ char tmp[8192];
+ size_t n, offset = 0;
+ uint16_t def_port = 0;
+ int rv = 1;
+
+ /* NOT A REAL URI PARSER */
+ memset(uri, 0, sizeof(*uri));
+ if (len > 5 && !memcmp("ws://", s, 5)) {
+ uri->scheme = "ws";
+ def_port = 80;
+ offset = 5;
+ }
+ else if (len > 6 && !memcmp("wss://", s, 6)) {
+ uri->scheme = "wss";
+ def_port = 443;
+ offset = 6;
+ }
+ else {
+ /* not a scheme we process */
+ goto leave;
+ }
+
+ if (parse_host_port(&uri->host, &uri->port, &uri->ipv6, &n, s + offset,
+ len - offset, def_port))
+ goto leave;
+ offset += n;
+
+ if (uri->port == def_port)
+ uri->authority = uri->host;
+ else if (uri->ipv6) {
+ snprintf(tmp, sizeof(tmp), "[%s]:%u", uri->host, uri->port);
+ uri->authority = strdup(tmp);
+ }
+ else {
+ snprintf(tmp, sizeof(tmp), "%s:%u", uri->host, uri->port);
+ uri->authority = strdup(tmp);
+ }
+
+ if (offset < len) {
+ uri->path = strndup(s + offset, len - offset);
+ }
+ rv = 0;
+
+leave:
+ return rv;
+}
+
+static int sock_nonblock_nodelay(int fd) {
+ int flags, rv;
+ int val = 1;
+
+ while ((flags = fcntl(fd, F_GETFL, 0)) == -1 && errno == EINTR)
+ ;
+ if (flags == -1) {
+ log_errf("sock_nonblock_nodelay", "fcntl get error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ while ((rv = fcntl(fd, F_SETFL, flags | O_NONBLOCK)) == -1 && errno == EINTR)
+ ;
+ if (rv == -1) {
+ log_errf("sock_nonblock_nodelay", "fcntl set error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ rv = setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, &val, (socklen_t)sizeof(val));
+ if (rv == -1) {
+ log_errf("sock_nonblock_nodelay", "set nodelay error %d (%s)",
+ errno, strerror(errno));
+ return -1;
+ }
+ return 0;
+}
+
+static int open_connection(const char *host, uint16_t port)
+{
+ char service[NI_MAXSERV];
+ struct addrinfo hints;
+ struct addrinfo *res = NULL, *rp;
+ int rv, fd = -1;
+
+ memset(&hints, 0, sizeof(hints));
+ snprintf(service, sizeof(service), "%u", port);
+ hints.ai_family = AF_UNSPEC;
+ hints.ai_socktype = SOCK_STREAM;
+ rv = getaddrinfo(host, service, &hints, &res);
+ if (rv) {
+ log_err("getaddrinfo", gai_strerror(rv));
+ goto leave;
+ }
+
+ for (rp = res; rp; rp = rp->ai_next) {
+ fd = socket(rp->ai_family, rp->ai_socktype, rp->ai_protocol);
+ if (fd == -1) {
+ continue;
+ }
+ while ((rv = connect(fd, rp->ai_addr, rp->ai_addrlen)) == -1 &&
+ errno == EINTR)
+ ;
+ if (!rv) /* connected */
+ break;
+ close(fd);
+ fd = -1;
+ }
+
+leave:
+ if (res)
+ freeaddrinfo(res);
+ return fd;
+}
+
+struct h2_stream;
+
+#define IO_WANT_NONE 0
+#define IO_WANT_READ 1
+#define IO_WANT_WRITE 2
+
+struct h2_session {
+ const char *server_name;
+ const char *connect_host;
+ uint16_t connect_port;
+ int fd;
+ nghttp2_session *ngh2;
+ struct h2_stream *streams;
+ int aborted;
+ int want_io;
+};
+
+typedef void h2_stream_closed_cb(struct h2_stream *stream);
+typedef void h2_stream_recv_data(struct h2_stream *stream,
+ const uint8_t *data, size_t len);
+
+struct h2_stream {
+ struct h2_stream *next;
+ struct uri *uri;
+ int32_t id;
+ int fdin;
+ int http_status;
+ uint32_t error_code;
+ unsigned input_closed : 1;
+ unsigned closed : 1;
+ unsigned reset : 1;
+ h2_stream_closed_cb *on_close;
+ h2_stream_recv_data *on_recv_data;
+};
+
+static void h2_session_stream_add(struct h2_session *session,
+ struct h2_stream *stream)
+{
+ struct h2_stream *s;
+ for (s = session->streams; s; s = s->next) {
+ if (s == stream) /* already there? */
+ return;
+ }
+ stream->next = session->streams;
+ session->streams = stream;
+}
+
+static void h2_session_stream_remove(struct h2_session *session,
+ struct h2_stream *stream)
+{
+ struct h2_stream *s, **pnext;
+ pnext = &session->streams;
+ s = session->streams;
+ while (s) {
+ if (s == stream) {
+ *pnext = s->next;
+ s->next = NULL;
+ break;
+ }
+ pnext = &s->next;
+ s = s->next;
+ }
+}
+
+static struct h2_stream *h2_session_stream_get(struct h2_session *session,
+ int32_t id)
+{
+ struct h2_stream *s;
+ for (s = session->streams; s; s = s->next) {
+ if (s->id == id)
+ return s;
+ }
+ return NULL;
+}
+
+static ssize_t h2_session_send(nghttp2_session *ngh2, const uint8_t *data,
+ size_t length, int flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ ssize_t nwritten;
+ (void)ngh2;
+ (void)flags;
+
+ session->want_io = IO_WANT_NONE;
+ nwritten = send(session->fd, data, length, 0);
+ if (nwritten < 0) {
+ int err = errno;
+ if ((EWOULDBLOCK == err) || (EAGAIN == err) ||
+ (EINTR == err) || (EINPROGRESS == err)) {
+ return NGHTTP2_ERR_WOULDBLOCK;
+ }
+ log_errf("h2_session_send", "error sending %ld bytes: %d (%s)",
+ (long)length, err, strerror(err));
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ return nwritten;
+}
+
+static ssize_t h2_session_recv(nghttp2_session *ngh2, uint8_t *buf,
+ size_t length, int flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ ssize_t nread;
+ (void)ngh2;
+ (void)flags;
+
+ session->want_io = IO_WANT_NONE;
+ nread = recv(session->fd, buf, length, 0);
+ if (nread < 0) {
+ int err = errno;
+ if ((EWOULDBLOCK == err) || (EAGAIN == err) || (EINTR == err)) {
+ return NGHTTP2_ERR_WOULDBLOCK;
+ }
+ log_errf("h2_session_recv", "error reading %ld bytes: %d (%s)",
+ (long)length, err, strerror(err));
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ return nread;
+}
+
+static int h2_session_on_frame_send(nghttp2_session *session,
+ const nghttp2_frame *frame,
+ void *user_data)
+{
+ size_t i;
+ (void)user_data;
+
+ switch (frame->hd.type) {
+ case NGHTTP2_HEADERS:
+ if (nghttp2_session_get_stream_user_data(session, frame->hd.stream_id)) {
+ const nghttp2_nv *nva = frame->headers.nva;
+ log_infof("frame send", "FRAME[HEADERS, stream=%d",
+ frame->hd.stream_id);
+ for (i = 0; i < frame->headers.nvlen; ++i) {
+ log_infof("frame send", " %.*s: %.*s",
+ (int)nva[i].namelen, nva[i].name,
+ (int)nva[i].valuelen, nva[i].value);
+ }
+ log_infof("frame send", "]");
+ }
+ break;
+ case NGHTTP2_DATA:
+ log_infof("frame send", "FRAME[DATA, stream=%d, length=%d, flags=%d]",
+ frame->hd.stream_id, (int)frame->hd.length,
+ (int)frame->hd.flags);
+ break;
+ case NGHTTP2_RST_STREAM:
+ log_infof("frame send", "FRAME[RST, stream=%d]",
+ frame->hd.stream_id);
+ break;
+ case NGHTTP2_WINDOW_UPDATE:
+ log_infof("frame send", "FRAME[WINDOW_UPDATE, stream=%d]",
+ frame->hd.stream_id);
+ break;
+ case NGHTTP2_GOAWAY:
+ log_infof("frame send", "FRAME[GOAWAY]");
+ break;
+ }
+ return 0;
+}
+
+static int h2_session_on_frame_recv(nghttp2_session *ngh2,
+ const nghttp2_frame *frame,
+ void *user_data)
+{
+ (void)user_data;
+
+ switch (frame->hd.type) {
+ case NGHTTP2_HEADERS:
+ if (frame->headers.cat == NGHTTP2_HCAT_RESPONSE) {
+ log_infof("frame recv", "FRAME[HEADERS, stream=%d]",
+ frame->hd.stream_id);
+ }
+ break;
+ case NGHTTP2_DATA:
+ log_infof("frame recv", "FRAME[DATA, stream=%d, len=%lu, eof=%d]",
+ frame->hd.stream_id, frame->hd.length,
+ (frame->hd.flags & NGHTTP2_FLAG_END_STREAM) != 0);
+ break;
+ case NGHTTP2_RST_STREAM:
+ log_infof("frame recv", "FRAME[RST, stream=%d]",
+ frame->hd.stream_id);
+ fprintf(stdout, "[%d] RST\n", frame->hd.stream_id);
+ break;
+ case NGHTTP2_GOAWAY:
+ log_infof("frame recv", "FRAME[GOAWAY]");
+ break;
+ }
+ return 0;
+}
+
+static int h2_session_on_header(nghttp2_session *ngh2,
+ const nghttp2_frame *frame,
+ const uint8_t *name, size_t namelen,
+ const uint8_t *value, size_t valuelen,
+ uint8_t flags, void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+ (void)flags;
+ (void)user_data;
+ log_infof("frame recv", "stream=%d, HEADER %.*s: %.*s",
+ frame->hd.stream_id, (int)namelen, name,
+ (int)valuelen, value);
+ stream = h2_session_stream_get(session, frame->hd.stream_id);
+ if (stream) {
+ if (namelen == 7 && !strncmp(":status", (const char *)name, namelen)) {
+ stream->http_status = 0;
+ if (valuelen < 10) {
+ char tmp[10], *endp;
+ memcpy(tmp, value, valuelen);
+ tmp[valuelen] = 0;
+ stream->http_status = (int)strtol(tmp, &endp, 10);
+ }
+ if (stream->http_status < 100 || stream->http_status >= 600) {
+ log_errf("on header recv", "stream=%d, invalid :status: %.*s",
+ frame->hd.stream_id, (int)valuelen, value);
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ else {
+ fprintf(stdout, "[%d] :status: %d\n", stream->id,
+ stream->http_status);
+ }
+ }
+ }
+ return 0;
+}
+
+static int h2_session_on_stream_close(nghttp2_session *ngh2, int32_t stream_id,
+ uint32_t error_code, void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+
+ stream = h2_session_stream_get(session, stream_id);
+ if (stream) {
+ /* closed known stream */
+ stream->error_code = error_code;
+ stream->closed = 1;
+ if (error_code)
+ stream->reset = 1;
+ if (error_code) {
+ log_errf("stream close", "stream %d closed with error %d",
+ stream_id, error_code);
+ }
+
+ h2_session_stream_remove(session, stream);
+ if (stream->on_close)
+ stream->on_close(stream);
+ /* last one? */
+ if (!session->streams) {
+ int rv;
+ rv = nghttp2_session_terminate_session(ngh2, NGHTTP2_NO_ERROR);
+ if (rv) {
+ log_errf("terminate session", "error %d (%s)",
+ rv, nghttp2_strerror(rv));
+ session->aborted = 1;
+ }
+ }
+ }
+ return 0;
+}
+
+static int h2_session_on_data_chunk_recv(nghttp2_session *ngh2, uint8_t flags,
+ int32_t stream_id, const uint8_t *data,
+ size_t len, void *user_data) {
+ struct h2_session *session = user_data;
+ struct h2_stream *stream;
+
+ stream = h2_session_stream_get(session, stream_id);
+ if (stream && stream->on_recv_data) {
+ stream->on_recv_data(stream, data, len);
+ }
+ return 0;
+}
+
+static int h2_session_open(struct h2_session *session, const char *server_name,
+ const char *host, uint16_t port)
+{
+ nghttp2_session_callbacks *cbs = NULL;
+ nghttp2_settings_entry settings[2];
+ int rv = -1;
+
+ memset(session, 0, sizeof(*session));
+ session->server_name = server_name;
+ session->connect_host = host;
+ session->connect_port = port;
+ /* establish socket */
+ session->fd = open_connection(session->connect_host, session->connect_port);
+ if (session->fd < 0) {
+ log_errf(cmd, "could not connect to %s:%u",
+ session->connect_host, session->connect_port);
+ goto leave;
+ }
+ if (sock_nonblock_nodelay(session->fd))
+ goto leave;
+ session->want_io = IO_WANT_NONE;
+
+ log_infof(cmd, "connected to %s via %s:%u", session->server_name,
+ session->connect_host, session->connect_port);
+
+ rv = nghttp2_session_callbacks_new(&cbs);
+ if (rv) {
+ log_errf("setup callbacks", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ /* setup session callbacks */
+ nghttp2_session_callbacks_set_send_callback(cbs, h2_session_send);
+ nghttp2_session_callbacks_set_recv_callback(cbs, h2_session_recv);
+ nghttp2_session_callbacks_set_on_frame_send_callback(
+ cbs, h2_session_on_frame_send);
+ nghttp2_session_callbacks_set_on_frame_recv_callback(
+ cbs, h2_session_on_frame_recv);
+ nghttp2_session_callbacks_set_on_header_callback(
+ cbs, h2_session_on_header);
+ nghttp2_session_callbacks_set_on_stream_close_callback(
+ cbs, h2_session_on_stream_close);
+ nghttp2_session_callbacks_set_on_data_chunk_recv_callback(
+ cbs, h2_session_on_data_chunk_recv);
+ /* create the ngh2 session */
+ rv = nghttp2_session_client_new(&session->ngh2, cbs, session);
+ if (rv) {
+ log_errf("client new", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ /* submit initial settings */
+ settings[0].settings_id = NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS;
+ settings[0].value = 100;
+ settings[1].settings_id = NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE;
+ settings[1].value = 10 * 1024 * 1024;
+
+ rv = nghttp2_submit_settings(session->ngh2, NGHTTP2_FLAG_NONE, settings, 2);
+ if (rv) {
+ log_errf("submit settings", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ rv = nghttp2_session_set_local_window_size(session->ngh2, NGHTTP2_FLAG_NONE,
+ 0, 10 * 1024 * 1024);
+ if (rv) {
+ log_errf("set connection window size", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ rv = -1;
+ goto leave;
+ }
+ rv = 0;
+
+leave:
+ if (cbs)
+ nghttp2_session_callbacks_del(cbs);
+ return rv;
+}
+
+static int h2_session_io(struct h2_session *session) {
+ int rv;
+ rv = nghttp2_session_recv(session->ngh2);
+ if (rv) {
+ log_errf("session recv", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ return 1;
+ }
+ rv = nghttp2_session_send(session->ngh2);
+ if (rv) {
+ log_errf("session send", "error_code=%d, msg=%s\n", rv,
+ nghttp2_strerror(rv));
+ }
+ return 0;
+}
+
+struct h2_poll_ctx;
+typedef int h2_poll_ev_cb(struct h2_poll_ctx *pctx, struct pollfd *pfd);
+
+struct h2_poll_ctx {
+ struct h2_session *session;
+ struct h2_stream *stream;
+ h2_poll_ev_cb *on_ev;
+};
+
+static int h2_session_ev(struct h2_poll_ctx *pctx, struct pollfd *pfd)
+{
+ if (pfd->revents & (POLLIN | POLLOUT)) {
+ h2_session_io(pctx->session);
+ }
+ else if (pfd->revents & POLLHUP) {
+ log_errf("session run", "connection closed");
+ return -1;
+ }
+ else if (pfd->revents & POLLERR) {
+ log_errf("session run", "connection error");
+ return -1;
+ }
+ return 0;
+}
+
+static int h2_stream_ev(struct h2_poll_ctx *pctx, struct pollfd *pfd)
+{
+ if (pfd->revents & (POLLIN | POLLHUP)) {
+ nghttp2_session_resume_data(pctx->session->ngh2, pctx->stream->id);
+ }
+ else if (pfd->revents & (POLLERR)) {
+ nghttp2_submit_rst_stream(pctx->session->ngh2, NGHTTP2_FLAG_NONE,
+ pctx->stream->id, NGHTTP2_STREAM_CLOSED);
+ }
+ return 0;
+}
+
+static nfds_t h2_session_set_poll(struct h2_session *session,
+ struct h2_poll_ctx *pollctxs,
+ struct pollfd *pfds)
+{
+ nfds_t n = 0;
+ int want_read, want_write;
+ struct h2_stream *stream;
+
+ want_read = (nghttp2_session_want_read(session->ngh2) ||
+ session->want_io == IO_WANT_READ);
+ want_write = (nghttp2_session_want_write(session->ngh2) ||
+ session->want_io == IO_WANT_WRITE);
+ if (want_read || want_write) {
+ pollctxs[n].session = session;
+ pollctxs[n].stream = NULL;
+ pollctxs[n].on_ev = h2_session_ev;
+ pfds[n].fd = session->fd;
+ pfds[n].events = pfds[n].revents = 0;
+ if (want_read)
+ pfds[n].events |= (POLLIN | POLLHUP);
+ if (want_write)
+ pfds[n].events |= (POLLOUT | POLLERR);
+ ++n;
+ }
+
+ for (stream = session->streams; stream; stream = stream->next) {
+ if (stream->fdin >= 0 && !stream->input_closed && !stream->closed) {
+ pollctxs[n].session = session;
+ pollctxs[n].stream = stream;
+ pollctxs[n].on_ev = h2_stream_ev;
+ pfds[n].fd = stream->fdin;
+ pfds[n].revents = 0;
+ pfds[n].events = (POLLIN | POLLHUP);
+ ++n;
+ }
+ }
+ return n;
+}
+
+static void h2_session_run(struct h2_session *session)
+{
+ struct h2_poll_ctx pollctxs[5];
+ struct pollfd pfds[5];
+ nfds_t npollfds, i;
+
+ npollfds = h2_session_set_poll(session, pollctxs, pfds);
+ while (npollfds) {
+ if (poll(pfds, npollfds, -1) == -1) {
+ log_errf("session run", "poll error %d (%s)", errno, strerror(errno));
+ break;
+ }
+ for (i = 0; i < npollfds; ++i) {
+ if (pfds[i].revents) {
+ if (pollctxs[i].on_ev(&pollctxs[i], &pfds[i])) {
+ break;
+ }
+ }
+ }
+ npollfds = h2_session_set_poll(session, pollctxs, pfds);
+ if (!session->streams)
+ break;
+ }
+}
+
+static void h2_session_close(struct h2_session *session)
+{
+ log_infof(cmd, "closed session to %s:%u",
+ session->connect_host, session->connect_port);
+}
+
+/* websocket stream */
+
+struct ws_stream {
+ struct h2_stream s;
+};
+
+static void ws_stream_on_close(struct h2_stream *stream)
+{
+ log_infof("ws stream", "stream %d closed", stream->id);
+ if (!stream->reset)
+ fprintf(stdout, "[%d] EOF\n", stream->id);
+}
+
+static void ws_stream_on_recv_data(struct h2_stream *stream,
+ const uint8_t *data, size_t len)
+{
+ size_t i;
+
+ log_infof("ws stream", "stream %d recv %lu data bytes",
+ stream->id, (unsigned long)len);
+ for (i = 0; i < len; ++i) {
+ fprintf(stdout, "%s%02x", (i&0xf)? " " : (i? "\n" : ""), data[i]);
+ }
+ fprintf(stdout, "\n");
+}
+
+static int ws_stream_create(struct ws_stream **pstream, struct uri *uri)
+{
+ struct ws_stream *stream;
+
+ stream = calloc(1, sizeof(*stream));
+ if (!stream) {
+ log_errf("ws stream create", "out of memory");
+ *pstream = NULL;
+ return -1;
+ }
+ stream->s.uri = uri;
+ stream->s.id = -1;
+ stream->s.on_close = ws_stream_on_close;
+ stream->s.on_recv_data = ws_stream_on_recv_data;
+ *pstream = stream;
+ return 0;
+}
+
+static ssize_t ws_stream_read_req_body(nghttp2_session *ngh2,
+ int32_t stream_id,
+ uint8_t *buf, size_t buflen,
+ uint32_t *pflags,
+ nghttp2_data_source *source,
+ void *user_data)
+{
+ struct h2_session *session = user_data;
+ struct ws_stream *stream;
+ ssize_t nread = 0;
+ int eof = 0;
+
+ stream = (struct ws_stream *)h2_session_stream_get(session, stream_id);
+ if (!stream) {
+ log_errf("stream req body", "stream not known");
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+
+ (void)source;
+ assert(stream->s.fdin >= 0);
+ nread = read(stream->s.fdin, buf, buflen);
+ log_debugf("stream req body", "fread(len=%lu) -> %ld",
+ (unsigned long)buflen, (long)nread);
+
+ if (nread < 0) {
+ if (errno == EAGAIN) {
+ nread = 0;
+ }
+ else {
+ log_errf("stream req body", "error on input");
+ return NGHTTP2_ERR_CALLBACK_FAILURE;
+ }
+ }
+ else if (nread == 0) {
+ eof = 1;
+ stream->s.input_closed = 1;
+ }
+
+ *pflags = stream->s.input_closed? NGHTTP2_DATA_FLAG_EOF : 0;
+ if (nread == 0 && !eof) {
+ return NGHTTP2_ERR_DEFERRED;
+ }
+ return nread;
+}
+
+static int ws_stream_submit(struct ws_stream *stream,
+ struct h2_session *session,
+ const nghttp2_nv *nva, size_t nvalen,
+ int fdin)
+{
+ nghttp2_data_provider provider, *req_body = NULL;
+
+ if (fdin >= 0) {
+ sock_nonblock_nodelay(fdin);
+ stream->s.fdin = fdin;
+ provider.read_callback = ws_stream_read_req_body;
+ provider.source.ptr = NULL;
+ req_body = &provider;
+ }
+ else {
+ stream->s.input_closed = 1;
+ }
+
+ stream->s.id = nghttp2_submit_request(session->ngh2, NULL, nva, nvalen,
+ req_body, stream);
+ if (stream->s.id < 0) {
+ log_errf("ws stream submit", "nghttp2_submit_request: error %d",
+ stream->s.id);
+ return -1;
+ }
+
+ h2_session_stream_add(session, &stream->s);
+ log_infof("ws stream submit", "stream %d opened for %s%s",
+ stream->s.id, stream->s.uri->authority, stream->s.uri->path);
+ return 0;
+}
+
+static void usage(const char *msg)
+{
+ if(msg)
+ fprintf(stderr, "%s\n", msg);
+ fprintf(stderr,
+ "usage: [options] ws-uri scenario\n"
+ " run a websocket scenario to the ws-uri, options:\n"
+ " -c host:port connect to host:port\n"
+ " -v increase verbosity\n"
+ "scenarios are:\n"
+ " * fail-proto: CONNECT using wrong :protocol\n"
+ " * miss-authority: CONNECT without :authority header\n"
+ " * miss-path: CONNECT without :path header\n"
+ " * miss-scheme: CONNECT without :scheme header\n"
+ " * miss-version: CONNECT without sec-webSocket-version header\n"
+ " * ws-empty: open valid websocket, do not send anything\n"
+ );
+}
+
+int main(int argc, char *argv[])
+{
+ const char *host = NULL, *scenario;
+ uint16_t port = 80;
+ struct uri uri;
+ struct h2_session session;
+ struct ws_stream *stream;
+ char ch;
+
+ cmd = argv[0];
+ while((ch = getopt(argc, argv, "c:vh")) != -1) {
+ switch(ch) {
+ case 'c':
+ if (parse_host_port(&host, &port, NULL, NULL,
+ optarg, strlen(optarg), 80)) {
+ log_errf(cmd, "could not parse connect '%s'", optarg);
+ return 1;
+ }
+ break;
+ case 'h':
+ usage(NULL);
+ return 2;
+ break;
+ case 'v':
+ ++verbose;
+ break;
+ default:
+ usage("invalid option");
+ return 1;
+ }
+ }
+ argc -= optind;
+ argv += optind;
+
+ if (argc < 1) {
+ usage("need URL");
+ return 1;
+ }
+ if (argc < 2) {
+ usage("need scenario");
+ return 1;
+ }
+ if (parse_uri(&uri, argv[0], strlen(argv[0]))) {
+ log_errf(cmd, "could not parse uri '%s'", argv[0]);
+ return 1;
+ }
+ log_debugf(cmd, "normalized uri: %s://%s:%u%s", uri.scheme, uri.host,
+ uri.port, uri.path? uri.path : "");
+ scenario = argv[1];
+
+ if (!host) {
+ host = uri.host;
+ port = uri.port;
+ }
+
+ if (h2_session_open(&session, uri.host, host, port))
+ return 1;
+
+ if (ws_stream_create(&stream, &uri))
+ return 1;
+
+ if (!strcmp(scenario, "ws-stdin")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), 0))
+ return 1;
+ }
+ else if (!strcmp(scenario, "fail-proto")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websockets"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-version")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-path")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-scheme")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":authority", stream->s.uri->authority),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else if (!strcmp(scenario, "miss-authority")) {
+ const nghttp2_nv nva[] = {
+ MAKE_NV(":method", "CONNECT"),
+ MAKE_NV_CS(":path", stream->s.uri->path),
+ MAKE_NV_CS(":scheme", "http"),
+ MAKE_NV_CS(":protocol", "websocket"),
+ MAKE_NV("accept", "*/*"),
+ MAKE_NV("user-agent", "mod_h2/h2ws-test"),
+ MAKE_NV("sec-webSocket-version", "13"),
+ MAKE_NV("sec-webSocket-protocol", "chat"),
+ };
+ if (ws_stream_submit(stream, &session,
+ nva, sizeof(nva) / sizeof(nva[0]), -1))
+ return 1;
+ }
+ else {
+ log_errf(cmd, "unknown scenario: %s", scenario);
+ return 1;
+ }
+
+ h2_session_run(&session);
+ h2_session_close(&session);
+ return 0;
+}
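
For orientation: the client is normally driven by the pytest suite (test/modules/http2/test_800_websockets.py in the diffstat above), but it can also be run by hand with the options documented in usage(). Below is a minimal sketch of such an invocation from Python; the connect address, port and ws:// URI are placeholders rather than values taken from this commit, and since h2ws opens a plain TCP connection (no TLS), only cleartext HTTP/2 endpoints are reachable.

    # hypothetical invocation of test/clients/h2ws after building it;
    # host, port and path are placeholders, the scenario name comes from usage()
    import subprocess

    r = subprocess.run([
        "./test/clients/h2ws",
        "-v", "-v",                        # raise verbosity to INFO, then DEBUG
        "-c", "127.0.0.1:5004",            # TCP connect target, overrides the URI host
        "ws://cgi.example.test:5004/ws/",  # ws-uri used for :authority and :path
        "fail-proto",                      # sends CONNECT with :protocol "websockets"
    ], capture_output=True, text=True)
    print(r.stdout)  # stream events such as "[1] :status: ..." or "[1] RST"
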
diff --git a/test/modules/http2/env.py b/test/modules/http2/env.py
index 537d3bf..34d196d 100644
--- a/test/modules/http2/env.py
+++ b/test/modules/http2/env.py
@@ -17,7 +17,7 @@ class H2TestSetup(HttpdTestSetup):
def __init__(self, env: 'HttpdTestEnv'):
super().__init__(env=env)
self.add_source_dir(os.path.dirname(inspect.getfile(H2TestSetup)))
- self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl"])
+ self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl", "include"])
def make(self):
super().make()
@@ -98,6 +98,7 @@ class H2TestEnv(HttpdTestEnv):
'AH01247', # mod_cgid sometimes freaks out on load tests
'AH01110', # error by proxy reading response
'AH10400', # warning that 'enablereuse' has no effect in certain configs test_h2_600
+ 'AH00045', # child did not exit in time, SIGTERM was sent
])
self.httpd_error_log.add_ignored_patterns([
re.compile(r'.*malformed header from script \'hecho.py\': Bad header: x.*'),
diff --git a/test/modules/http2/htdocs/cgi/echohd.py b/test/modules/http2/htdocs/cgi/echohd.py
index 2a138cd..a85a4e3 100644
--- a/test/modules/http2/htdocs/cgi/echohd.py
+++ b/test/modules/http2/htdocs/cgi/echohd.py
@@ -1,21 +1,6 @@
#!/usr/bin/env python3
import os, sys
-import multipart
-from urllib import parse
-
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
diff --git a/test/modules/http2/htdocs/cgi/env.py b/test/modules/http2/htdocs/cgi/env.py
index 3af5764..455c623 100644
--- a/test/modules/http2/htdocs/cgi/env.py
+++ b/test/modules/http2/htdocs/cgi/env.py
@@ -1,21 +1,6 @@
#!/usr/bin/env python3
import os, sys
-import multipart
-from urllib import parse
-
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
diff --git a/test/modules/http2/htdocs/cgi/hecho.py b/test/modules/http2/htdocs/cgi/hecho.py
index fb9e330..abffd33 100644
--- a/test/modules/http2/htdocs/cgi/hecho.py
+++ b/test/modules/http2/htdocs/cgi/hecho.py
@@ -1,21 +1,6 @@
#!/usr/bin/env python3
import os, sys
-import multipart
-from urllib import parse
-
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
diff --git a/test/modules/http2/htdocs/cgi/hello.py b/test/modules/http2/htdocs/cgi/hello.py
index 20974bf..a96da8a 100644
--- a/test/modules/http2/htdocs/cgi/hello.py
+++ b/test/modules/http2/htdocs/cgi/hello.py
@@ -1,20 +1,25 @@
#!/usr/bin/env python3
import os
+import json
+
+resp = {
+ 'https': os.getenv('HTTPS', ''),
+ 'host': os.getenv('X_HOST', '') if 'X_HOST' in os.environ else os.getenv('SERVER_NAME', ''),
+ 'server': os.getenv('SERVER_NAME', ''),
+ 'h2_original_host': os.getenv('H2_ORIGINAL_HOST', ''),
+ 'port': os.getenv('SERVER_PORT', ''),
+ 'protocol': os.getenv('SERVER_PROTOCOL', ''),
+ 'ssl_protocol': os.getenv('SSL_PROTOCOL', ''),
+ 'h2': os.getenv('HTTP2', ''),
+ 'h2push': os.getenv('H2PUSH', ''),
+ 'h2_stream_id': os.getenv('H2_STREAM_ID', ''),
+ 'x-forwarded-for': os.getenv('HTTP_X_FORWARDED_FOR', ''),
+ 'x-forwarded-host': os.getenv('HTTP_X_FORWARDED_HOST', ''),
+ 'x-forwarded-server': os.getenv('HTTP_X_FORWARDED_SERVER', ''),
+}
print("Content-Type: application/json")
print()
-print("{")
-print(" \"https\" : \"%s\"," % (os.getenv('HTTPS', '')))
-print(" \"host\" : \"%s\"," % (os.getenv('X_HOST', '') \
- if 'X_HOST' in os.environ else os.getenv('SERVER_NAME', '')))
-print(" \"server\" : \"%s\"," % (os.getenv('SERVER_NAME', '')))
-print(" \"h2_original_host\" : \"%s\"," % (os.getenv('H2_ORIGINAL_HOST', '')))
-print(" \"port\" : \"%s\"," % (os.getenv('SERVER_PORT', '')))
-print(" \"protocol\" : \"%s\"," % (os.getenv('SERVER_PROTOCOL', '')))
-print(" \"ssl_protocol\" : \"%s\"," % (os.getenv('SSL_PROTOCOL', '')))
-print(" \"h2\" : \"%s\"," % (os.getenv('HTTP2', '')))
-print(" \"h2push\" : \"%s\"," % (os.getenv('H2PUSH', '')))
-print(" \"h2_stream_id\" : \"%s\"" % (os.getenv('H2_STREAM_ID', '')))
-print("}")
+print(json.JSONEncoder(indent=2).encode(resp))
diff --git a/test/modules/http2/htdocs/cgi/mnot164.py b/test/modules/http2/htdocs/cgi/mnot164.py
index c29ccc1..43a86ea 100644
--- a/test/modules/http2/htdocs/cgi/mnot164.py
+++ b/test/modules/http2/htdocs/cgi/mnot164.py
@@ -1,21 +1,6 @@
#!/usr/bin/env python3
import os, sys
-import multipart
-from urllib import parse
-
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
diff --git a/test/modules/http2/htdocs/cgi/necho.py b/test/modules/http2/htdocs/cgi/necho.py
index 78e2aad..715904b 100644
--- a/test/modules/http2/htdocs/cgi/necho.py
+++ b/test/modules/http2/htdocs/cgi/necho.py
@@ -1,22 +1,7 @@
#!/usr/bin/env python3
import time
import os, sys
-import multipart
-from urllib import parse
-
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
@@ -55,11 +40,12 @@ Content-Type: text/html\n
<p>No count was specified: %s</p>
</body></html>""" % (count))
-except KeyError:
+except KeyError as ex:
print("Status: 200 Ok")
- print("""\
+ print(f"""\
Content-Type: text/html\n
- <html><body>
+ <html><body>uri: uri={os.environ['REQUEST_URI']} ct={os.environ['CONTENT_TYPE']} ex={ex}
+ forms={forms}
Echo <form method="POST" enctype="application/x-www-form-urlencoded">
<input type="text" name="count">
<input type="text" name="text">
diff --git a/test/modules/http2/htdocs/cgi/requestparser.py b/test/modules/http2/htdocs/cgi/requestparser.py
new file mode 100644
index 0000000..c7e0648
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/requestparser.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+import os
+import sys
+from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
+import shutil
+
+
+try: # Windows needs stdio set for binary mode.
+ import msvcrt
+
+ msvcrt.setmode(0, os.O_BINARY) # stdin = 0
+ msvcrt.setmode(1, os.O_BINARY) # stdout = 1
+except ImportError:
+ pass
+
+
+class FileItem:
+
+ def __init__(self, mparse_item):
+ self.item = mparse_item
+
+ @property
+ def file_name(self):
+ return os.path.basename(self.item.file_name.decode())
+
+ def save_to(self, destpath: str):
+ fsrc = self.item.file_object
+ fsrc.seek(0)
+ with open(destpath, 'wb') as fd:
+ shutil.copyfileobj(fsrc, fd)
+
+
+def get_request_params():
+ oforms = {}
+ ofiles = {}
+ if "REQUEST_URI" in os.environ:
+ qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ if "CONTENT_TYPE" in os.environ:
+ ctype = os.environ["CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ s = sys.stdin.read()
+ qforms = parse.parse_qs(s)
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name.decode()] = field.value.decode()
+ def on_file(file):
+ ofiles[file.field_name.decode()] = FileItem(file)
+ multipart.parse_form(headers={"Content-Type": ctype},
+ input_stream=sys.stdin.buffer,
+ on_field=on_field, on_file=on_file)
+ return oforms, ofiles
+
diff --git a/test/modules/http2/htdocs/cgi/ssi/include.inc b/test/modules/http2/htdocs/cgi/ssi/include.inc
new file mode 100644
index 0000000..8bd8689
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/include.inc
@@ -0,0 +1 @@
+Hello include<br>
diff --git a/test/modules/http2/htdocs/cgi/ssi/test.html b/test/modules/http2/htdocs/cgi/ssi/test.html
new file mode 100644
index 0000000..1782358
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/ssi/test.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ <!--#include virtual="./include.inc"-->
+ hello<br>
+</body>
+</html>
diff --git a/test/modules/http2/htdocs/cgi/upload.py b/test/modules/http2/htdocs/cgi/upload.py
index 59fbb58..fa1e5d6 100644
--- a/test/modules/http2/htdocs/cgi/upload.py
+++ b/test/modules/http2/htdocs/cgi/upload.py
@@ -1,30 +1,7 @@
#!/usr/bin/env python3
import os
import sys
-import multipart
-from urllib import parse
-
-
-try: # Windows needs stdio set for binary mode.
- import msvcrt
-
- msvcrt.setmode(0, os.O_BINARY) # stdin = 0
- msvcrt.setmode(1, os.O_BINARY) # stdout = 1
-except ImportError:
- pass
-
-def get_request_params():
- oforms = {}
- if "REQUEST_URI" in os.environ:
- qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
- for name, values in qforms.items():
- oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
- return oforms, ofiles
+from requestparser import get_request_params
forms, files = get_request_params()
@@ -35,9 +12,9 @@ status = '200 Ok'
if 'file' in files:
fitem = files['file']
# strip leading path from file name to avoid directory traversal attacks
- fname = fitem.filename
+ fname = os.path.basename(fitem.file_name)
fpath = f'{os.environ["DOCUMENT_ROOT"]}/files/{fname}'
- fitem.save_as(fpath)
+ fitem.save_to(fpath)
message = "The file %s was uploaded successfully" % (fname)
print("Status: 201 Created")
print("Content-Type: text/html")
diff --git a/test/modules/http2/htdocs/cgi/xxx/test.json b/test/modules/http2/htdocs/cgi/xxx/test.json
new file mode 100644
index 0000000..ceafd0a
--- /dev/null
+++ b/test/modules/http2/htdocs/cgi/xxx/test.json
@@ -0,0 +1 @@
+{"name": "test.json"} \ No newline at end of file
diff --git a/test/modules/http2/mod_h2test/mod_h2test.c b/test/modules/http2/mod_h2test/mod_h2test.c
index b5ee8ad..f20b954 100644
--- a/test/modules/http2/mod_h2test/mod_h2test.c
+++ b/test/modules/http2/mod_h2test/mod_h2test.c
@@ -138,7 +138,12 @@ static int h2test_echo_handler(request_rec *r)
char buffer[8192];
const char *ct;
long l;
-
+ int i;
+ apr_time_t chunk_delay = 0;
+ apr_array_header_t *args = NULL;
+ apr_size_t blen, fail_after = 0;
+ int fail_requested = 0, error_bucket = 1;
+
if (strcmp(r->handler, "h2test-echo")) {
return DECLINED;
}
@@ -146,6 +151,40 @@ static int h2test_echo_handler(request_rec *r)
return DECLINED;
}
+ if(r->args) {
+ args = apr_cstr_split(r->args, "&", 1, r->pool);
+ for(i = 0; i < args->nelts; ++i) {
+ char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+ s = strchr(arg, '=');
+ if(s) {
+ *s = '\0';
+ val = s + 1;
+ if(!strcmp("id", arg)) {
+ /* accepted, but not processed */
+ continue;
+ }
+ else if(!strcmp("chunk_delay", arg)) {
+ rv = duration_parse(&chunk_delay, val, "s");
+ if(APR_SUCCESS == rv) {
+ continue;
+ }
+ }
+ else if(!strcmp("fail_after", arg)) {
+ fail_after = (int)apr_atoi64(val);
+ if(fail_after >= 0) {
+ fail_requested = 1;
+ continue;
+ }
+ }
+ }
+ ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
+ "understood: '%s' in %s",
+ arg, r->args);
+ ap_die(HTTP_BAD_REQUEST, r);
+ return OK;
+ }
+ }
+
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing request");
r->status = 200;
r->clength = -1;
@@ -166,12 +205,26 @@ static int h2test_echo_handler(request_rec *r)
while (0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"echo_handler: copying %ld bytes from request body", l);
- rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+ blen = (apr_size_t)l;
+ if (fail_requested) {
+ if (blen > fail_after) {
+ blen = fail_after;
+ }
+ fail_after -= blen;
+ }
+ rv = apr_brigade_write(bb, NULL, NULL, buffer, blen);
if (APR_SUCCESS != rv) goto cleanup;
+ if (chunk_delay) {
+ apr_sleep(chunk_delay);
+ }
rv = ap_pass_brigade(r->output_filters, bb);
if (APR_SUCCESS != rv) goto cleanup;
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
"echo_handler: passed %ld bytes from request body", l);
+ if (fail_requested && fail_after == 0) {
+ rv = APR_EINVAL;
+ goto cleanup;
+ }
}
}
/* we are done */
@@ -195,6 +248,12 @@ cleanup:
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: request handled");
return OK;
}
+ else if (error_bucket) {
+ int status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+ b = ap_bucket_error_create(status, NULL, r->pool, c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, b);
+ ap_pass_brigade(r->output_filters, bb);
+ }
else {
/* no way to know what type of error occurred */
ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "h2test_echo_handler failed");
@@ -419,18 +478,20 @@ static int h2test_error_handler(request_rec *r)
}
}
else if (!strcmp("delay", arg)) {
- rv = duration_parse(&delay, r->args, "s");
+ rv = duration_parse(&delay, val, "s");
if (APR_SUCCESS == rv) {
continue;
}
}
else if (!strcmp("body_delay", arg)) {
- rv = duration_parse(&body_delay, r->args, "s");
+ rv = duration_parse(&body_delay, val, "s");
if (APR_SUCCESS == rv) {
continue;
}
}
}
+ ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: "
+ "did not understand '%s'", arg);
ap_die(HTTP_BAD_REQUEST, r);
return OK;
}
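
With the new fail_after and chunk_delay query parameters, the h2test-echo handler can simulate failures and delays in the middle of an echoed response. A rough sketch of how a test could drive it, in the style of the pytest cases in this commit; the /h2test/echo path, the payload size and the exact failure symptom are assumptions, not verified against the test configuration:

    # hypothetical test sketch; mkurl()/curl_post_data() are the env helpers
    # already used by test_004_post.py in this diff
    def test_echo_fail_after(env):
        url = env.mkurl("https", "cgi", "/h2test/echo?fail_after=1024")
        r = env.curl_post_data(url, data="x" * 10000, options=["--http2"])
        # after echoing 1024 bytes the handler appends an error bucket, so the
        # client should see the stream abort instead of a complete echo body
        assert r.exit_code != 0 or len(r.response["body"]) < 10000, f'{r}'
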
diff --git a/test/modules/http2/test_003_get.py b/test/modules/http2/test_003_get.py
index 410097a..572c4fb 100644
--- a/test/modules/http2/test_003_get.py
+++ b/test/modules/http2/test_003_get.py
@@ -194,7 +194,7 @@ content-type: text/html
@pytest.mark.parametrize("path", [
"/004.html", "/proxy/004.html", "/h2proxy/004.html"
])
- def test_h2_003_50(self, env, path):
+ def test_h2_003_50(self, env, path, repeat):
# check that the resource supports ranges and we see its raw content-length
url = env.mkurl("https", "test1", path)
r = env.curl_get(url, 5)
diff --git a/test/modules/http2/test_004_post.py b/test/modules/http2/test_004_post.py
index 44f31d2..295f989 100644
--- a/test/modules/http2/test_004_post.py
+++ b/test/modules/http2/test_004_post.py
@@ -18,7 +18,15 @@ class TestPost:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env):
TestPost._local_dir = os.path.dirname(inspect.getfile(TestPost))
- H2Conf(env).add_vhost_cgi().install()
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'<Directory {env.server_docs_dir}/cgi/xxx>',
+ ' RewriteEngine On',
+ ' RewriteRule .* /proxy/echo.py [QSA]',
+ '</Directory>',
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True).install()
assert env.apache_restart() == 0
def local_src(self, fname):
@@ -59,10 +67,11 @@ class TestPost:
self.curl_upload_and_verify(env, "data-1k", ["-v", "--http1.1", "-H", "Expect: 100-continue"])
self.curl_upload_and_verify(env, "data-1k", ["-v", "--http2", "-H", "Expect: 100-continue"])
- @pytest.mark.skipif(True, reason="python3 regresses in chunked inputs to cgi")
def test_h2_004_06(self, env):
- self.curl_upload_and_verify(env, "data-1k", ["--http1.1", "-H", "Content-Length: "])
- self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length: "])
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
@pytest.mark.parametrize("name, value", [
("HTTP2", "on"),
@@ -124,6 +133,7 @@ class TestPost:
r = env.nghttp().upload_file(url, fpath, options=options)
assert r.exit_code == 0
assert r.response["status"] >= 200 and r.response["status"] < 300
+ assert 'location' in r.response["header"], f'{r}'
assert r.response["header"]["location"]
r2 = env.nghttp().get(r.response["header"]["location"])
@@ -131,7 +141,7 @@ class TestPost:
assert r2.response["status"] == 200
with open(self.local_src(fpath), mode='rb') as file:
src = file.read()
- assert src == r2.response["body"]
+ assert src == r2.response["body"], f'GET {r.response["header"]["location"]}'
@pytest.mark.parametrize("name", [
"data-1k", "data-10k", "data-100k", "data-1m"
@@ -151,46 +161,6 @@ class TestPost:
def test_h2_004_25(self, env, name, repeat):
self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
- def test_h2_004_30(self, env):
- # issue: #203
- resource = "data-1k"
- full_length = 1000
- chunk = 200
- self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
- logfile = os.path.join(env.server_logs_dir, "test_004_30")
- if os.path.isfile(logfile):
- os.remove(logfile)
- H2Conf(env).add("""
-LogFormat "{ \\"request\\": \\"%r\\", \\"status\\": %>s, \\"bytes_resp_B\\": %B, \\"bytes_tx_O\\": %O, \\"bytes_rx_I\\": %I, \\"bytes_rx_tx_S\\": %S }" issue_203
-CustomLog logs/test_004_30 issue_203
- """).add_vhost_cgi().install()
- assert env.apache_restart() == 0
- url = env.mkurl("https", "cgi", "/files/{0}".format(resource))
- r = env.curl_get(url, 5, options=["--http2"])
- assert r.response["status"] == 200
- r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
- assert 206 == r.response["status"]
- assert chunk == len(r.response["body"].decode('utf-8'))
- r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
- assert 206 == r.response["status"]
- assert chunk == len(r.response["body"].decode('utf-8'))
- # Wait for log completeness
- time.sleep(1)
- # now check what response lengths have actually been reported
- lines = open(logfile).readlines()
- log_h2_full = json.loads(lines[-3])
- log_h1 = json.loads(lines[-2])
- log_h2 = json.loads(lines[-1])
- assert log_h2_full['bytes_rx_I'] > 0
- assert log_h2_full['bytes_resp_B'] == full_length
- assert log_h2_full['bytes_tx_O'] > full_length
- assert log_h1['bytes_rx_I'] > 0 # input bytes received
- assert log_h1['bytes_resp_B'] == chunk # response bytes sent (payload)
- assert log_h1['bytes_tx_O'] > chunk # output bytes sent
- assert log_h2['bytes_rx_I'] > 0
- assert log_h2['bytes_resp_B'] == chunk
- assert log_h2['bytes_tx_O'] > chunk
-
def test_h2_004_40(self, env):
# echo content using h2test_module "echo" handler
def post_and_verify(fname, options=None):
@@ -217,3 +187,15 @@ CustomLog logs/test_004_30 issue_203
assert src == filepart.get_payload(decode=True)
post_and_verify("data-1k", [])
+
+ def test_h2_004_41(self, env):
+ # reproduce PR66597, double chunked encoding on redirects
+ url = env.mkurl("https", "cgi", "/xxx/test.json")
+ r = env.curl_post_data(url, data="0123456789", options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
+ r = env.curl_post_data(url, data="0123456789", options=["-H", "Content-Length:"])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ assert r.response['body'] == b'0123456789'
diff --git a/test/modules/http2/test_007_ssi.py b/test/modules/http2/test_007_ssi.py
new file mode 100644
index 0000000..97e38df
--- /dev/null
+++ b/test/modules/http2/test_007_ssi.py
@@ -0,0 +1,43 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestSSI:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ 'AddOutputFilter INCLUDES .html',
+ '<Location "/ssi">',
+ ' Options +Includes',
+ '</Location>',
+ ],
+ })
+ conf.add_vhost_cgi(
+ proxy_self=True, h2proxy_self=True
+ ).add_vhost_test1(
+ proxy_self=True, h2proxy_self=True
+ ).install()
+ assert env.apache_restart() == 0
+
+ # SSI test from https://bz.apache.org/bugzilla/show_bug.cgi?id=66483
+ def test_h2_007_01(self, env):
+ url = env.mkurl("https", "cgi", "/ssi/test.html")
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert r.stdout == '''<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+ test<br>
+ Hello include<br>
+
+ hello<br>
+</body>
+</html>
+''' , f'{r}'
+
diff --git a/test/modules/http2/test_008_ranges.py b/test/modules/http2/test_008_ranges.py
new file mode 100644
index 0000000..4dcdcc8
--- /dev/null
+++ b/test/modules/http2/test_008_ranges.py
@@ -0,0 +1,189 @@
+import inspect
+import json
+import os
+import re
+import time
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRanges:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestRanges.LOGFILE = os.path.join(env.server_logs_dir, "test_008")
+ TestRanges.SRCDIR = os.path.dirname(inspect.getfile(TestRanges))
+ if os.path.isfile(TestRanges.LOGFILE):
+ os.remove(TestRanges.LOGFILE)
+ destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
+ env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
+ conf = H2Conf(env=env, extras={
+ 'base': [
+ 'CustomLog logs/test_008 combined'
+ ],
+ f'test1.{env.http_tld}': [
+ '<Location /status>',
+ ' SetHandler server-status',
+ '</Location>',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ def test_h2_008_01(self, env):
+ # issue: #203
+ resource = "data-1k"
+ full_length = 1000
+ chunk = 200
+ self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01full")
+ r = env.curl_get(url, 5, options=["--http2"])
+ assert r.response["status"] == 200
+ url = env.mkurl("https", "cgi", f"/files/{resource}?01range")
+ r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+ assert 206 == r.response["status"]
+ assert chunk == len(r.response["body"].decode('utf-8'))
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ detected = {}
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /files/{resource}?01full HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == full_length
+ assert e['bytes_tx_O'] > full_length
+ detected['h2full'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == chunk
+ assert e['bytes_tx_O'] > chunk
+ assert e['bytes_tx_O'] < chunk + 256 # response + frame stuff
+ detected['h2range'] = 1
+ elif e['request'] == f'GET /files/{resource}?01range HTTP/1.1':
+ assert e['bytes_rx_I'] > 0 # input bytes received
+ assert e['bytes_resp_B'] == chunk # response bytes sent (payload)
+ assert e['bytes_tx_O'] > chunk # output bytes sent
+ detected['h1range'] = 1
+ assert 'h1range' in detected, f'HTTP/1.1 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2range' in detected, f'HTTP/2 range request not found in {TestRanges.LOGFILE}'
+ assert 'h2full' in detected, f'HTTP/2 full request not found in {TestRanges.LOGFILE}'
+
+ def test_h2_008_02(self, env, repeat):
+ path = '/002.jpg'
+ res_len = 90364
+ url = env.mkurl("https", "test1", f'{path}?02full')
+ r = env.curl_get(url, 5)
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"]
+ h = r.response["header"]
+ assert "accept-ranges" in h
+ assert "bytes" == h["accept-ranges"]
+ assert "content-length" in h
+ clen = h["content-length"]
+ assert int(clen) == res_len
+ # get the first 1024 bytes of the resource, 206 status, but content-length as original
+ url = env.mkurl("https", "test1", f'{path}?02range')
+ r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+ assert 206 == r.response["status"]
+ assert "HTTP/2" == r.response["protocol"]
+ assert 1024 == len(r.response["body"])
+ assert "content-length" in h
+ assert clen == h["content-length"]
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ # now check what response lengths have actually been reported
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?02range HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 1024
+ assert e['bytes_tx_O'] > 1024
+ assert e['bytes_tx_O'] < 1024 + 256 # response and frame stuff
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # send a paced curl download that aborts in the middle of the transfer
+ def test_h2_008_03(self, env, repeat):
+ path = '/data-100m'
+ url = env.mkurl("https", "test1", f'{path}?03broken')
+ r = env.curl_get(url, 5, options=[
+ '--limit-rate', '2k', '-m', '2'
+ ])
+ assert r.exit_code != 0, f'{r}'
+ found = False
+ for line in open(TestRanges.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?03broken HTTP/2.0':
+ assert e['bytes_rx_I'] > 0
+ assert e['bytes_resp_B'] == 100*1024*1024
+ assert e['bytes_tx_O'] > 1024
+ found = True
+ break
+ assert found, f'request not found in {self.LOGFILE}'
+
+ # test server-status reporting
+ # see <https://bz.apache.org/bugzilla/show_bug.cgi?id=66801>
+ def test_h2_008_04(self, env, repeat):
+ path = '/data-100m'
+ assert env.apache_restart() == 0
+ stats = self.get_server_status(env)
+ # we see the server uptime check request here
+ assert 1 == int(stats['Total Accesses']), f'{stats}'
+ assert 1 == int(stats['Total kBytes']), f'{stats}'
+ count = 10
+ url = env.mkurl("https", "test1", f'/data-100m?[0-{count-1}]')
+ r = env.curl_get(url, 5, options=['--http2', '-H', f'Range: bytes=0-{4096}'])
+ assert r.exit_code == 0, f'{r}'
+ for _ in range(10):
+ # a slow cpu might not succeed on the first read
+ stats = self.get_server_status(env)
+ if (4*count)+1 <= int(stats['Total kBytes']):
+ break
+ time.sleep(0.1)
+ # amount reported is larger than (count *4k), the net payload
+ # but does not exceed an additional 4k
+ assert (4*count)+1 <= int(stats['Total kBytes'])
+ assert (4*(count+1))+1 > int(stats['Total kBytes'])
+ # total requests is now at 1 from the start, plus the stat check,
+ # plus the count transfers we did.
+ assert (2+count) == int(stats['Total Accesses'])
+
+ def get_server_status(self, env):
+ status_url = env.mkurl("https", "test1", '/status?auto')
+ r = env.curl_get(status_url, 5)
+ assert r.exit_code == 0, f'{r}'
+ stats = {}
+ for line in r.stdout.splitlines():
+ m = re.match(r'([^:]+): (.*)', line)
+ if m:
+ stats[m.group(1)] = m.group(2)
+ return stats
+
+ # upload and GET again using curl, compare to original content
+ def curl_upload_and_verify(self, env, fname, options=None):
+ url = env.mkurl("https", "cgi", "/upload.py")
+ fpath = os.path.join(env.gen_dir, fname)
+ r = env.curl_upload(url, fpath, options=options)
+ assert r.exit_code == 0, f"{r}"
+ assert 200 <= r.response["status"] < 300
+
+ r2 = env.curl_get(r.response["header"]["location"])
+ assert r2.exit_code == 0
+ assert r2.response["status"] == 200
+ with open(os.path.join(TestRanges.SRCDIR, fpath), mode='rb') as file:
+ src = file.read()
+ assert src == r2.response["body"]
+
diff --git a/test/modules/http2/test_009_timing.py b/test/modules/http2/test_009_timing.py
new file mode 100644
index 0000000..2c62bb0
--- /dev/null
+++ b/test/modules/http2/test_009_timing.py
@@ -0,0 +1,74 @@
+import inspect
+import json
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestTiming:
+
+ LOGFILE = ""
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ TestTiming.LOGFILE = os.path.join(env.server_logs_dir, "test_009")
+ if os.path.isfile(TestTiming.LOGFILE):
+ os.remove(TestTiming.LOGFILE)
+ conf = H2Conf(env=env)
+ conf.add([
+ "CustomLog logs/test_009 combined"
+ ])
+ conf.add_vhost_cgi()
+ conf.add_vhost_test1()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ # check that we get a positive time_taken reported on a simple GET
+ def test_h2_009_01(self, env):
+ path = '/002.jpg'
+ url = env.mkurl("https", "test1", f'{path}?01')
+ args = [
+ env.h2load, "-n", "1", "-c", "1", "-m", "1",
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={url}", url
+ ]
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET {path}?01 HTTP/2.0':
+ assert e['time_taken'] > 0
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
+
+ # test issue #253, where time_taken in a keepalive situation is not
+ # reported until the next request arrives
+ def test_h2_009_02(self, env):
+ baseurl = env.mkurl("https", "test1", '/')
+ tscript = os.path.join(env.gen_dir, 'h2load-timing-009_02')
+ with open(tscript, 'w') as fd:
+ fd.write('\n'.join([
+ f'0.0\t/002.jpg?02a', # 1st request right away
+ f'1000.0\t/002.jpg?02b', # 2nd a second later
+ ]))
+ args = [
+ env.h2load,
+ f'--timing-script-file={tscript}',
+ f"--connect-to=localhost:{env.https_port}",
+ f"--base-uri={baseurl}"
+ ]
+ r = env.run(args)
+ # Restart for logs to be flushed out
+ assert env.apache_restart() == 0
+ found = False
+ for line in open(TestTiming.LOGFILE).readlines():
+ e = json.loads(line)
+ if e['request'] == f'GET /002.jpg?02a HTTP/2.0':
+ assert e['time_taken'] > 0
+ assert e['time_taken'] < 500 * 1000, f'time for 1st request not reported correctly'
+ found = True
+ assert found, f'request not found in {TestTiming.LOGFILE}'
diff --git a/test/modules/http2/test_101_ssl_reneg.py b/test/modules/http2/test_101_ssl_reneg.py
index 66f2638..528002f 100644
--- a/test/modules/http2/test_101_ssl_reneg.py
+++ b/test/modules/http2/test_101_ssl_reneg.py
@@ -59,6 +59,8 @@ class TestSslRenegotiation:
# try to renegotiate the cipher, should fail with correct code
def test_h2_101_02(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
url = env.mkurl("https", "ssl", "/renegotiate/cipher/")
r = env.curl_get(url, options=[
"-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
@@ -70,6 +72,8 @@ class TestSslRenegotiation:
# try to renegotiate a client certificate from Location
# needs to fail with correct code
def test_h2_101_03(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
url = env.mkurl("https", "ssl", "/renegotiate/verify/")
r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
assert 0 != r.exit_code
@@ -79,6 +83,8 @@ class TestSslRenegotiation:
# try to renegotiate a client certificate from Directory
# needs to fail with correct code
def test_h2_101_04(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
url = env.mkurl("https", "ssl", "/ssl-client-verify/index.html")
r = env.curl_get(url, options=["-vvv", "--tlsv1.2", "--tls-max", "1.2"])
assert 0 != r.exit_code, f"{r}"
@@ -121,6 +127,8 @@ class TestSslRenegotiation:
# Check that status works with ErrorDoc, see pull #174, fixes #172
def test_h2_101_11(self, env):
+ if not (env.curl_is_at_least('8.2.0') or env.curl_is_less_than('8.1.0')):
+ pytest.skip("need curl != 8.1.x version")
url = env.mkurl("https", "ssl", "/renegotiate/err-doc-cipher")
r = env.curl_get(url, options=[
"-vvv", "--tlsv1.2", "--tls-max", "1.2", "--ciphers", "ECDHE-RSA-AES256-GCM-SHA384"
diff --git a/test/modules/http2/test_104_padding.py b/test/modules/http2/test_104_padding.py
index 7b874ed..401804a 100644
--- a/test/modules/http2/test_104_padding.py
+++ b/test/modules/http2/test_104_padding.py
@@ -13,57 +13,63 @@ class TestPadding:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env):
+ def add_echo_handler(conf):
+ conf.add([
+ "<Location \"/h2test/echo\">",
+ " SetHandler h2test-echo",
+ "</Location>",
+ ])
+
conf = H2Conf(env)
conf.start_vhost(domains=[f"ssl.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.start_vhost(domains=[f"pad0.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
conf.add("H2Padding 0")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.start_vhost(domains=[f"pad1.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
conf.add("H2Padding 1")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.start_vhost(domains=[f"pad2.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
conf.add("H2Padding 2")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.start_vhost(domains=[f"pad3.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
conf.add("H2Padding 3")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.start_vhost(domains=[f"pad8.{env.http_tld}"], port=env.https_port, doc_root="htdocs/cgi")
conf.add("H2Padding 8")
- conf.add("AddHandler cgi-script .py")
+ add_echo_handler(conf)
conf.end_vhost()
conf.install()
assert env.apache_restart() == 0
# default paddings settings: 0 bits
- def test_h2_104_01(self, env):
- url = env.mkurl("https", "ssl", "/echo.py")
+ def test_h2_104_01(self, env, repeat):
+ url = env.mkurl("https", "ssl", "/h2test/echo")
# we get 2 frames back: one with data and an empty one with EOF
# check the number of padding bytes is as expected
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
- assert r.results["paddings"] == [
- frame_padding(len(data)+1, 0),
- frame_padding(0, 0)
- ]
+ for i in r.results["paddings"]:
+ assert i == frame_padding(len(data)+1, 0)
# 0 bits of padding
def test_h2_104_02(self, env):
- url = env.mkurl("https", "pad0", "/echo.py")
+ url = env.mkurl("https", "pad0", "/h2test/echo")
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
- assert r.results["paddings"] == [0, 0]
+ for i in r.results["paddings"]:
+ assert i == 0
# 1 bit of padding
def test_h2_104_03(self, env):
- url = env.mkurl("https", "pad1", "/echo.py")
+ url = env.mkurl("https", "pad1", "/h2test/echo")
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
@@ -72,7 +78,7 @@ class TestPadding:
# 2 bits of padding
def test_h2_104_04(self, env):
- url = env.mkurl("https", "pad2", "/echo.py")
+ url = env.mkurl("https", "pad2", "/h2test/echo")
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
@@ -81,7 +87,7 @@ class TestPadding:
# 3 bits of padding
def test_h2_104_05(self, env):
- url = env.mkurl("https", "pad3", "/echo.py")
+ url = env.mkurl("https", "pad3", "/h2test/echo")
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
@@ -90,7 +96,7 @@ class TestPadding:
# 8 bits of padding
def test_h2_104_06(self, env):
- url = env.mkurl("https", "pad8", "/echo.py")
+ url = env.mkurl("https", "pad8", "/h2test/echo")
for data in ["x", "xx", "xxx", "xxxx", "xxxxx", "xxxxxx", "xxxxxxx", "xxxxxxxx"]:
r = env.nghttp().post_data(url, data, 5)
assert r.response["status"] == 200
diff --git a/test/modules/http2/test_105_timeout.py b/test/modules/http2/test_105_timeout.py
index 13aa8ed..f7d3859 100644
--- a/test/modules/http2/test_105_timeout.py
+++ b/test/modules/http2/test_105_timeout.py
@@ -128,22 +128,25 @@ class TestTimeout:
def test_h2_105_12(self, env):
# long connection timeout, short stream timeout
# sending a slow POST
- if env.httpd_is_at_least("2.5.0"):
- conf = H2Conf(env)
- conf.add_vhost_cgi()
- conf.add("Timeout 10")
- conf.add("H2StreamTimeout 1")
- conf.install()
- assert env.apache_restart() == 0
- url = env.mkurl("https", "cgi", "/h2test/delay?5")
- piper = CurlPiper(env=env, url=url)
- piper.start()
- for _ in range(3):
- time.sleep(2)
- try:
- piper.send("0123456789\n")
- except BrokenPipeError:
- break
- piper.close()
- assert piper.response
- assert piper.response['status'] == 408, f"{piper.response}"
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ if not env.httpd_is_at_least("2.5.0"):
+ pytest.skip(f'need at least httpd 2.5.0 for this')
+ conf = H2Conf(env)
+ conf.add_vhost_cgi()
+ conf.add("Timeout 10")
+ conf.add("H2StreamTimeout 1")
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h2test/delay?5")
+ piper = CurlPiper(env=env, url=url)
+ piper.start()
+ for _ in range(3):
+ time.sleep(2)
+ try:
+ piper.send("0123456789\n")
+ except BrokenPipeError:
+ break
+ piper.close()
+ assert piper.response, f'{piper}'
+ assert piper.response['status'] == 408, f"{piper.response}"
diff --git a/test/modules/http2/test_106_shutdown.py b/test/modules/http2/test_106_shutdown.py
index b119292..83e143c 100644
--- a/test/modules/http2/test_106_shutdown.py
+++ b/test/modules/http2/test_106_shutdown.py
@@ -63,7 +63,13 @@ class TestShutdown:
assert env.apache_restart() == 0
url = env.mkurl("https", "test1", "/index.html")
for i in range(7):
- r = env.curl_get(url, options=['-vvv'])
- assert r.exit_code == 0, f"failed on {i}. request: {r.stdout} {r.stderr}"
- assert r.response["status"] == 200
- assert "HTTP/2" == r.response["protocol"] \ No newline at end of file
+ r = env.curl_get(url, options=['-v'])
+ # requests should succeed, but in rare cases the connection gets closed
+ # before the response is received
+ if r.exit_code in [16, 55]:
+ # curl send error
+ assert r.response is None
+ else:
+ assert r.exit_code == 0, f"failed on {i}. request: {r.stdout} {r.stderr}"
+ assert r.response["status"] == 200
+ assert "HTTP/2" == r.response["protocol"] \ No newline at end of file
diff --git a/test/modules/http2/test_107_frame_lengths.py b/test/modules/http2/test_107_frame_lengths.py
new file mode 100644
index 0000000..d636093
--- /dev/null
+++ b/test/modules/http2/test_107_frame_lengths.py
@@ -0,0 +1,51 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
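+ # write `lines` lines of 128 bytes each: a 15 digit counter, ': ', 110 digits and a newline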
+ t110 = ""
+ for _ in range(11):
+ t110 += "0123456789"
+ with open(fpath, "w") as fd:
+ for i in range(lines):
+ fd.write("{0:015d}: ".format(i)) # total 128 bytes per line
+ fd.write(t110)
+ fd.write("\n")
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFrameLengths:
+
+ URI_PATHS = []
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+ for fsize in [10, 100]:
+ fname = f'0-{fsize}k.txt'
+ mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+ self.URI_PATHS.append(f"/files/{fname}")
+
+ @pytest.mark.parametrize("data_frame_len", [
+ 99, 1024, 8192
+ ])
+ def test_h2_107_01(self, env, data_frame_len):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ f'H2MaxDataFrameLen {data_frame_len}',
+ ]
+ })
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ for p in self.URI_PATHS:
+ url = env.mkurl("https", "cgi", p)
+ r = env.nghttp().get(url, options=[
+ '--header=Accept-Encoding: none',
+ ])
+ assert r.response["status"] == 200
+ assert len(r.results["data_lengths"]) > 0, f'{r}'
+ too_large = [ x for x in r.results["data_lengths"] if x > data_frame_len]
+ assert len(too_large) == 0, f'{p}: {r.results["data_lengths"]}'
diff --git a/test/modules/http2/test_200_header_invalid.py b/test/modules/http2/test_200_header_invalid.py
index fdbfbe4..5b3aafd 100644
--- a/test/modules/http2/test_200_header_invalid.py
+++ b/test/modules/http2/test_200_header_invalid.py
@@ -12,24 +12,27 @@ class TestInvalidHeaders:
assert env.apache_restart() == 0
# let the hecho.py CGI echo chars < 0x20 in field name
- # for almost all such characters, the stream gets aborted with a h2 error and
- # there will be no http status, cr and lf are handled special
+ # for almost all such characters, the stream gets aborted with an h2 error
+ # or, in httpd >= 2.5.0, answered with a 500 response
+ # cr is handled specially
def test_h2_200_01(self, env):
url = env.mkurl("https", "cgi", "/hecho.py")
for x in range(1, 32):
- r = env.curl_post_data(url, "name=x%%%02xx&value=yz" % x)
- if x in [10]:
- assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
- assert 500 == r.response["status"], "unexpected status for char 0x%02x" % x
- elif x in [13]:
- assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
- assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+ data = f'name=x%{x:02x}x&value=yz'
+ r = env.curl_post_data(url, data)
+ if x in [13]:
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 200 == r.response["status"], f'unexpected status for char 0x{x:02x}'
+ elif x in [10] or env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02x}'
else:
- assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
+ assert 0 != r.exit_code, f'unexpected exit code for char 0x{x:02x}'
# let the hecho.py CGI echo chars < 0x20 in field value
- # for almost all such characters, the stream gets aborted with a h2 error and
- # there will be no http status, cr and lf are handled special
+ # for almost all such characters, the stream gets aborted with an h2 error
+ # or, in httpd >= 2.5.0, answered with a 500 response
+ # cr and lf are handled specially
def test_h2_200_02(self, env):
url = env.mkurl("https", "cgi", "/hecho.py")
for x in range(1, 32):
@@ -38,6 +41,9 @@ class TestInvalidHeaders:
if x in [10, 13]:
assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+ elif env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02x}'
+ assert 500 == r.response["status"], f'unexpected status for char 0x{x:02x}'
else:
assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
@@ -46,40 +52,61 @@ class TestInvalidHeaders:
url = env.mkurl("https", "cgi", "/hecho.py")
for h in ["10", "7f"]:
r = env.curl_post_data(url, "name=x%%%s&value=yz" % h)
- assert 0 != r.exit_code
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h}"
+ assert 500 == r.response["status"], f"unexpected status for char 0x{h}"
+ else:
+ assert 0 != r.exit_code
r = env.curl_post_data(url, "name=x&value=y%%%sz" % h)
- assert 0 != r.exit_code
-
- # test header field lengths check, LimitRequestLine (default 8190)
+ if env.httpd_is_at_least('2.5.0'):
+ assert 0 == r.exit_code, f"unexpected exit code for char 0x{h}"
+ assert 500 == r.response["status"], f"unexpected status for char 0x{h}"
+ else:
+ assert 0 != r.exit_code
+
+ # test request line length check, LimitRequestLine
def test_h2_200_10(self, env):
- url = env.mkurl("https", "cgi", "/")
- val = "1234567890" # 10 chars
- for i in range(3): # make a 10000 char string
- val = "%s%s%s%s%s%s%s%s%s%s" % (val, val, val, val, val, val, val, val, val, val)
- # LimitRequestLine 8190 ok, one more char -> 431
- r = env.curl_get(url, options=["-H", "x: %s" % (val[:8187])])
- assert r.response["status"] == 200
- r = env.curl_get(url, options=["-H", "x: %sx" % (val[:8188])])
- assert 431 == r.response["status"]
- # same with field name
- r = env.curl_get(url, options=["-H", "y%s: 1" % (val[:8186])])
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestLine 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
+ val = 200*"1234567890"
+ url = env.mkurl("https", "cgi", f'/?{val[:1022]}')
+ r = env.curl_get(url)
assert r.response["status"] == 200
- r = env.curl_get(url, options=["-H", "y%s: 1" % (val[:8188])])
- assert 431 == r.response["status"]
+ url = env.mkurl("https", "cgi", f'/?{val[:1023]}')
+ r = env.curl_get(url)
+ # URI too long
+ assert 414 == r.response["status"]
# test header field lengths check, LimitRequestFieldSize (default 8190)
def test_h2_200_11(self, env):
+ conf = H2Conf(env)
+ conf.add("""
+ LimitRequestFieldSize 1024
+ """)
+ conf.add_vhost_cgi()
+ conf.install()
+ assert env.apache_restart() == 0
url = env.mkurl("https", "cgi", "/")
- val = "1234567890" # 10 chars
- for i in range(3): # make a 10000 char string
- val = "%s%s%s%s%s%s%s%s%s%s" % (val, val, val, val, val, val, val, val, val, val)
- # LimitRequestFieldSize 8190 ok, one more char -> 400 in HTTP/1.1
- # (we send 4000+4185 since they are concatenated by ", " and start with "x: "
- r = env.curl_get(url, options=["-H", "x: %s" % (val[:4000]), "-H", "x: %s" % (val[:4185])])
- assert r.response["status"] == 200
- r = env.curl_get(url, options=["--http1.1", "-H", "x: %s" % (val[:4000]), "-H", "x: %s" % (val[:4189])])
+ val = 200*"1234567890"
+ # two fields, concatenated with ', '
+ # LimitRequestFieldSize, one more char -> 400 in HTTP/1.1
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:519]}'
+ ])
+ assert r.exit_code == 0, f'{r}'
+ assert r.response["status"] == 200, f'{r}'
+ r = env.curl_get(url, options=[
+ '--http1.1', '-H', f'x: {val[:500]}', '-H', f'x: {val[:523]}'
+ ])
assert 400 == r.response["status"]
- r = env.curl_get(url, options=["-H", "x: %s" % (val[:4000]), "-H", "x: %s" % (val[:4191])])
+ r = env.curl_get(url, options=[
+ '-H', f'x: {val[:500]}', '-H', f'x: {val[:520]}'
+ ])
assert 431 == r.response["status"]
# test header field count, LimitRequestFields (default 100)
@@ -163,6 +190,8 @@ class TestInvalidHeaders:
# invalid chars in method
def test_h2_200_16(self, env):
+ if not env.h2load_is_at_least('1.45.0'):
+ pytest.skip('nghttp2 version too old')
conf = H2Conf(env)
conf.add_vhost_cgi()
conf.install()
@@ -171,12 +200,8 @@ class TestInvalidHeaders:
opt = ["-H:method: GET /hello.py"]
r = env.nghttp().get(url, options=opt)
assert r.exit_code == 0, r
- # nghttp version >= 1.45.0 check pseudo headers and RST streams,
- # which means we see no response.
- if r.response is not None:
- assert r.response["status"] == 400
+ assert r.response is None
url = env.mkurl("https", "cgi", "/proxy/hello.py")
r = env.nghttp().get(url, options=opt)
assert r.exit_code == 0, r
- if r.response is not None:
- assert r.response["status"] == 400
+ assert r.response is None
diff --git a/test/modules/http2/test_401_early_hints.py b/test/modules/http2/test_401_early_hints.py
index f73dcc4..5704305 100644
--- a/test/modules/http2/test_401_early_hints.py
+++ b/test/modules/http2/test_401_early_hints.py
@@ -9,6 +9,8 @@ class TestEarlyHints:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
H2Conf(env).start_vhost(domains=[f"hints.{env.http_tld}"],
port=env.https_port, doc_root="htdocs/test1"
).add("""
@@ -21,6 +23,13 @@ class TestEarlyHints:
<Location /006-nohints.html>
Header add Link "</006/006.css>;rel=preload"
</Location>
+ <Location /006-early.html>
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
+ <Location /006-early-no-push.html>
+ H2Push off
+ H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+ </Location>
""").end_vhost(
).install()
assert env.apache_restart() == 0
@@ -45,3 +54,30 @@ class TestEarlyHints:
promises = r.results["streams"][r.response["id"]]["promises"]
assert 1 == len(promises)
assert "previous" not in r.response
+
+ # H2EarlyHints enabled in general, check that it works for H2EarlyHint
+ def test_h2_401_33(self, env, repeat):
+ url = env.mkurl("https", "hints", "/006-early.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 1 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
+ # H2EarlyHints enabled, no PUSH, check that it works for H2EarlyHint
+ def test_h2_401_34(self, env, repeat):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ url = env.mkurl("https", "hints", "/006-early-no-push.html")
+ r = env.nghttp().get(url)
+ assert r.response["status"] == 200
+ promises = r.results["streams"][r.response["id"]]["promises"]
+ assert 0 == len(promises)
+ early = r.response["previous"]
+ assert early
+ assert 103 == int(early["header"][":status"])
+ assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
diff --git a/test/modules/http2/test_500_proxy.py b/test/modules/http2/test_500_proxy.py
index 2e61415..88a8ece 100644
--- a/test/modules/http2/test_500_proxy.py
+++ b/test/modules/http2/test_500_proxy.py
@@ -49,11 +49,17 @@ class TestProxy:
src = file.read()
assert r2.response["body"] == src
- def test_h2_500_10(self, env, repeat):
- self.curl_upload_and_verify(env, "data-1k", ["--http2"])
- self.curl_upload_and_verify(env, "data-10k", ["--http2"])
- self.curl_upload_and_verify(env, "data-100k", ["--http2"])
- self.curl_upload_and_verify(env, "data-1m", ["--http2"])
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_10(self, env, name, repeat):
+ self.curl_upload_and_verify(env, name, ["--http2"])
+
+ def test_h2_500_11(self, env):
+ self.curl_upload_and_verify(env, "data-1k", [
+ "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+ ])
+ self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
# POST some data using nghttp and see it echo'ed properly back
def nghttp_post_and_verify(self, env, fname, options=None):
@@ -71,17 +77,17 @@ class TestProxy:
fd.write(r.stderr)
assert r.response["body"] == src
- def test_h2_500_20(self, env, repeat):
- self.nghttp_post_and_verify(env, "data-1k", [])
- self.nghttp_post_and_verify(env, "data-10k", [])
- self.nghttp_post_and_verify(env, "data-100k", [])
- self.nghttp_post_and_verify(env, "data-1m", [])
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_20(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, [])
- def test_h2_500_21(self, env, repeat):
- self.nghttp_post_and_verify(env, "data-1k", ["--no-content-length"])
- self.nghttp_post_and_verify(env, "data-10k", ["--no-content-length"])
- self.nghttp_post_and_verify(env, "data-100k", ["--no-content-length"])
- self.nghttp_post_and_verify(env, "data-1m", ["--no-content-length"])
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_21(self, env, name, repeat):
+ self.nghttp_post_and_verify(env, name, ["--no-content-length"])
# upload and GET again using nghttp, compare to original content
def nghttp_upload_and_verify(self, env, fname, options=None):
@@ -101,17 +107,17 @@ class TestProxy:
src = file.read()
assert src == r2.response["body"]
- def test_h2_500_22(self, env):
- self.nghttp_upload_and_verify(env, "data-1k", [])
- self.nghttp_upload_and_verify(env, "data-10k", [])
- self.nghttp_upload_and_verify(env, "data-100k", [])
- self.nghttp_upload_and_verify(env, "data-1m", [])
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_22(self, env, name):
+ self.nghttp_upload_and_verify(env, name, [])
- def test_h2_500_23(self, env):
- self.nghttp_upload_and_verify(env, "data-1k", ["--no-content-length"])
- self.nghttp_upload_and_verify(env, "data-10k", ["--no-content-length"])
- self.nghttp_upload_and_verify(env, "data-100k", ["--no-content-length"])
- self.nghttp_upload_and_verify(env, "data-1m", ["--no-content-length"])
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_500_23(self, env, name):
+ self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
# upload using nghttp and check returned status
def nghttp_upload_stat(self, env, fname, options=None):
@@ -124,7 +130,7 @@ class TestProxy:
assert r.response["header"]["location"]
def test_h2_500_24(self, env):
- for i in range(100):
+ for i in range(50):
self.nghttp_upload_stat(env, "data-1k", ["--no-content-length"])
# lets do some error tests
diff --git a/test/modules/http2/test_503_proxy_fwd.py b/test/modules/http2/test_503_proxy_fwd.py
new file mode 100644
index 0000000..478a52d
--- /dev/null
+++ b/test/modules/http2/test_503_proxy_fwd.py
@@ -0,0 +1,79 @@
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestProxyFwd:
+
+ @classmethod
+ def config_fwd_proxy(cls, env, h2_enabled=False):
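+ # set up a forward proxy vhost on env.proxy_port; h2_enabled toggles H2ProxyRequests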
+ conf = H2Conf(env, extras={
+ 'base': [
+ f'Listen {env.proxy_port}',
+ 'Protocols h2c http/1.1',
+ 'LogLevel proxy_http2:trace2 proxy:trace2',
+ ],
+ })
+ conf.add_vhost_cgi(proxy_self=False, h2proxy_self=False)
+ conf.start_vhost(domains=[f"test1.{env.http_tld}"],
+ port=env.proxy_port, with_ssl=True)
+ conf.add([
+ 'Protocols h2c http/1.1',
+ 'ProxyRequests on',
+ f'H2ProxyRequests {"on" if h2_enabled else "off"}',
+ ])
+ conf.end_vhost()
+ conf.install()
+ assert env.apache_restart() == 0
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(cls, env):
+ cls.config_fwd_proxy(env)
+
+ # test the HTTP/1.1 setup working
+ def test_h2_503_01_proxy_fwd_h1(self, env):
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
+
+ def test_h2_503_02_fwd_proxy_h2_off(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 404
+
+ # test the HTTP/2 setup working
+ def test_h2_503_03_proxy_fwd_h2_on(self, env):
+ if not env.curl_is_at_least('8.1.0'):
+ pytest.skip(f'need at least curl v8.1.0 for this')
+ self.config_fwd_proxy(env, h2_enabled=True)
+ url = f'http://localhost:{env.http_port}/hello.py'
+ proxy_host = f'test1.{env.http_tld}'
+ options = [
+ '--proxy-http2', '-v',
+ '--proxy', f'https://{proxy_host}:{env.proxy_port}',
+ '--resolve', f'{proxy_host}:{env.proxy_port}:127.0.0.1',
+ '--proxy-cacert', f'{env.get_ca_pem_file(proxy_host)}',
+ ]
+ r = env.curl_get(url, 5, options=options)
+ assert r.exit_code == 0, f'{r}'
+ assert r.response['status'] == 200
+ assert r.json['port'] == f'{env.http_port}'
diff --git a/test/modules/http2/test_600_h2proxy.py b/test/modules/http2/test_600_h2proxy.py
index e93ba1a..18d5d1d 100644
--- a/test/modules/http2/test_600_h2proxy.py
+++ b/test/modules/http2/test_600_h2proxy.py
@@ -78,7 +78,8 @@ class TestH2Proxy:
conf.install()
assert env.apache_restart() == 0
url = env.mkurl("https", "cgi", f"/h2proxy/{env.http_port}/hello.py")
- if enable_reuse == "on":
+ # httpd 2.4.59 disables reuse, no matter the config
+ if enable_reuse == "on" and not env.httpd_is_at_least("2.4.59"):
# reuse is not guaranteed for each request, but we expect some
# to do it and run on a h2 stream id > 1
reused = False
@@ -130,9 +131,33 @@ class TestH2Proxy:
assert r.response["previous"]["status"] == 200
assert int(r.json[0]["port"]) == env.http_port
assert r.response["status"] == 200
- exp_port = env.http_port if enable_reuse == "on" else env.http_port2
+ exp_port = env.http_port if enable_reuse == "on" \
+ and not env.httpd_is_at_least("2.4.59")\
+ else env.http_port2
assert int(r.json[1]["port"]) == exp_port
+ # test X-Forwarded-* headers
+ def test_h2_600_06(self, env):
+ conf = H2Conf(env, extras={
+ f'cgi.{env.http_tld}': [
+ "SetEnvIf Host (.+) X_HOST=$1",
+ f"ProxyPreserveHost on",
+ f"ProxyPass /h2c/ h2c://127.0.0.1:{env.http_port}/",
+ f"ProxyPass /h1c/ http://127.0.0.1:{env.http_port}/",
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True)
+ conf.install()
+ assert env.apache_restart() == 0
+ url = env.mkurl("https", "cgi", "/h1c/hello.py")
+ r1 = env.curl_get(url, 5)
+ assert r1.response["status"] == 200
+ url = env.mkurl("https", "cgi", "/h2c/hello.py")
+ r2 = env.curl_get(url, 5)
+ assert r2.response["status"] == 200
+ for key in ['x-forwarded-for', 'x-forwarded-host','x-forwarded-server']:
+ assert r1.json[key] == r2.json[key], f'{key} differs proxy_http != proxy_http2'
+
# lets do some error tests
def test_h2_600_30(self, env):
conf = H2Conf(env)
@@ -159,10 +184,11 @@ class TestH2Proxy:
# depending on when the error is detect in proxying, if may RST the
# stream (exit_code != 0) or give a 503 response.
if r.exit_code == 0:
- assert r.response['status'] == 503
+ assert r.response['status'] == 502
# produce an error, fail to generate an error bucket
def test_h2_600_32(self, env, repeat):
+ pytest.skip('only works reliably with r1911964 from trunk')
conf = H2Conf(env)
conf.add_vhost_cgi(h2proxy_self=True)
conf.install()
@@ -172,4 +198,4 @@ class TestH2Proxy:
# depending on when the error is detect in proxying, if may RST the
# stream (exit_code != 0) or give a 503 response.
if r.exit_code == 0:
- assert r.response['status'] == 503
+ assert r.response['status'] in [502, 503]
diff --git a/test/modules/http2/test_601_h2proxy_twisted.py b/test/modules/http2/test_601_h2proxy_twisted.py
new file mode 100644
index 0000000..60f5f7d
--- /dev/null
+++ b/test/modules/http2/test_601_h2proxy_twisted.py
@@ -0,0 +1,99 @@
+import json
+import logging
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2ProxyTwisted:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_01_echo_uploads(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_02_echo_delayed(self, env, name):
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms")
+ r = env.curl_upload(url, fpath, options=[])
+ assert r.exit_code == 0
+ assert 200 <= r.response["status"] < 300
+ # we POST a form, so echoed input is larger than the file itself
+ assert len(r.response["body"]) > os.path.getsize(fpath)
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_03_echo_fail_early(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?fail_after=512")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ assert r.exit_code == 92 or r.response["status"] == 502
+
+ @pytest.mark.parametrize("name", [
+ "data-1k", "data-10k", "data-100k", "data-1m",
+ ])
+ def test_h2_601_04_echo_fail_late(self, env, name):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ fpath = os.path.join(env.gen_dir, name)
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?fail_after={os.path.getsize(fpath)}")
+ r = env.curl_upload(url, fpath, options=[])
+ # 92 is curl's CURLE_HTTP2_STREAM
+ if r.exit_code != 0:
+ # H2 stream or partial file error
+ assert r.exit_code == 92 or r.exit_code == 18, f'{r}'
+ else:
+ assert r.response["status"] == 502, f'{r}'
+
+ def test_h2_601_05_echo_fail_many(self, env):
+ if not env.httpd_is_at_least('2.4.58'):
+ pytest.skip(f'needs httpd 2.4.58')
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
+ count = 200
+ fpath = os.path.join(env.gen_dir, "data-100k")
+ args = [env.curl, '--parallel', '--parallel-max', '20']
+ for i in range(count):
+ if i > 0:
+ args.append('--next')
+ url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?id={i}&fail_after={os.path.getsize(fpath)}")
+ args.extend(env.curl_resolve_args(url=url))
+ args.extend([
+ '-o', '/dev/null', '-w', '%{json}\\n', '--form', f'file=@{fpath}', url
+ ])
+ log.error(f'run: {args}')
+ r = env.run(args)
+ stats = []
+ for line in r.stdout.splitlines():
+ stats.append(json.loads(line))
+ assert len(stats) == count
+ for st in stats:
+ if st['exitcode'] != 0:
+ # H2 stream or partial file error
+ assert st['exitcode'] == 92 or st['exitcode'] == 18, f'{r}'
+ else:
+ assert st['http_code'] == 502, f'{r}'
diff --git a/test/modules/http2/test_700_load_get.py b/test/modules/http2/test_700_load_get.py
index 9ee8898..78760fb 100644
--- a/test/modules/http2/test_700_load_get.py
+++ b/test/modules/http2/test_700_load_get.py
@@ -16,14 +16,14 @@ class TestLoadGet:
def check_h2load_ok(self, env, r, n):
assert 0 == r.exit_code
r = env.h2load_status(r)
- assert n == r.results["h2load"]["requests"]["total"]
- assert n == r.results["h2load"]["requests"]["started"]
- assert n == r.results["h2load"]["requests"]["done"]
- assert n == r.results["h2load"]["requests"]["succeeded"]
- assert n == r.results["h2load"]["status"]["2xx"]
- assert 0 == r.results["h2load"]["status"]["3xx"]
- assert 0 == r.results["h2load"]["status"]["4xx"]
- assert 0 == r.results["h2load"]["status"]["5xx"]
+ assert n == r.results["h2load"]["requests"]["total"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["started"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["done"], f'{r.results}'
+ assert n == r.results["h2load"]["requests"]["succeeded"], f'{r.results}'
+ assert n == r.results["h2load"]["status"]["2xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["3xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["4xx"], f'{r.results}'
+ assert 0 == r.results["h2load"]["status"]["5xx"], f'{r.results}'
# test load on cgi script, single connection, different sizes
@pytest.mark.parametrize("start", [
@@ -45,7 +45,7 @@ class TestLoadGet:
# test load on cgi script, single connection
@pytest.mark.parametrize("conns", [
- 1, 2, 16, 32
+ 1, 2, 16
])
def test_h2_700_11(self, env, conns):
assert env.is_live()
diff --git a/test/modules/http2/test_712_buffering.py b/test/modules/http2/test_712_buffering.py
index 9eb2689..6658441 100644
--- a/test/modules/http2/test_712_buffering.py
+++ b/test/modules/http2/test_712_buffering.py
@@ -43,16 +43,6 @@ class TestBuffering:
url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
base_chunk = "0123456789"
chunks = ["chunk-{0:03d}-{1}\n".format(i, base_chunk) for i in range(3)]
- stutter = timedelta(seconds=0.4) # need a bit more delay since we have the extra connection
- piper = CurlPiper(env=env, url=url)
- piper.stutter_check(chunks, stutter)
-
- def test_h2_712_03(self, env):
- # same as 712_02 but with smaller chunks
- #
- url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
- base_chunk = "0"
- chunks = ["ck{0}-{1}\n".format(i, base_chunk) for i in range(3)]
- stutter = timedelta(seconds=0.4) # need a bit more delay since we have the extra connection
+ stutter = timedelta(seconds=1) # need a bit more delay since we have the extra connection
piper = CurlPiper(env=env, url=url)
piper.stutter_check(chunks, stutter)
diff --git a/test/modules/http2/test_800_websockets.py b/test/modules/http2/test_800_websockets.py
new file mode 100644
index 0000000..5b46da8
--- /dev/null
+++ b/test/modules/http2/test_800_websockets.py
@@ -0,0 +1,363 @@
+import inspect
+import logging
+import os
+import shutil
+import subprocess
+import time
+from datetime import timedelta, datetime
+from typing import Tuple, List
+import packaging.version
+
+import pytest
+import websockets
+from pyhttpd.result import ExecResult
+from pyhttpd.ws_util import WsFrameReader, WsFrame
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+ws_version = packaging.version.parse(websockets.version.version)
+ws_version_min = packaging.version.Version('10.4')
+
+
+def ws_run(env: H2TestEnv, path, authority=None, do_input=None, inbytes=None,
+ send_close=True, timeout=5, scenario='ws-stdin',
+ wait_close: float = 0.0) -> Tuple[ExecResult, List[str], List[WsFrame]]:
+ """ Run the h2ws test client in various scenarios with given input and
+ timings.
+ :param env: the test environment
+ :param path: the path on the Apache server to CONNECT to
+ :param authority: the host:port to use as the :authority of the request
+ :param do_input: a Callable for sending input to h2ws
+ :param inbytes: fixed bytes to send to h2ws, unless do_input is given
+ :param send_close: send a CLOSE WebSockets frame at the end
+ :param timeout: timeout for waiting on h2ws to finish
+ :param scenario: name of scenario h2ws should run in
+ :param wait_close: time to wait before closing input
+ :return: ExecResult with exit_code/stdout/stderr of run
+ """
+ h2ws = os.path.join(env.clients_dir, 'h2ws')
+ if not os.path.exists(h2ws):
+ pytest.fail(f'test client not built: {h2ws}')
+ if authority is None:
+ authority = f'cgi.{env.http_tld}:{env.http_port}'
+ args = [
+ h2ws, '-vv', '-c', f'localhost:{env.http_port}',
+ f'ws://{authority}{path}',
+ scenario
+ ]
+ # we write all output to files, because we manipulate input timings
+ # and would run in deadlock situations with h2ws blocking operations
+ # because its output is not consumed
+ start = datetime.now()
+ with open(f'{env.gen_dir}/h2ws.stdout', 'w') as fdout:
+ with open(f'{env.gen_dir}/h2ws.stderr', 'w') as fderr:
+ proc = subprocess.Popen(args=args, stdin=subprocess.PIPE,
+ stdout=fdout, stderr=fderr)
+ if do_input is not None:
+ do_input(proc)
+ elif inbytes is not None:
+ proc.stdin.write(inbytes)
+ proc.stdin.flush()
+
+ if wait_close > 0:
+ time.sleep(wait_close)
+ try:
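+ # optionally finish with a client CLOSE frame, then wait for h2ws to exit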
+ inbytes = WsFrame.client_close(code=1000).to_network() if send_close else None
+ proc.communicate(input=inbytes, timeout=timeout)
+ except subprocess.TimeoutExpired:
+ log.error(f'ws_run: timeout expired')
+ proc.kill()
+ proc.communicate(timeout=timeout)
+ end = datetime.now()
+ lines = open(f'{env.gen_dir}/h2ws.stdout').read().splitlines()
+ infos = [line for line in lines if line.startswith('[1] ')]
+ hex_content = ' '.join([line for line in lines if not line.startswith('[1] ')])
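+ # only a successful CONNECT (status 200) yields parseable WebSocket frames; otherwise keep the raw bytes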
+ if len(infos) > 0 and infos[0] == '[1] :status: 200':
+ frames = WsFrameReader.parse(bytearray.fromhex(hex_content))
+ else:
+ frames = bytearray.fromhex(hex_content)
+ return ExecResult(args=args, exit_code=proc.returncode,
+ stdout=b'', stderr=b'', duration=end - start), infos, frames
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+@pytest.mark.skipif(condition=not H2TestEnv().httpd_is_at_least("2.4.58"),
+ reason=f'need at least httpd 2.4.58 for this')
+@pytest.mark.skipif(condition=ws_version < ws_version_min,
+ reason=f'websockets is {ws_version}, need at least {ws_version_min}')
+class TestWebSockets:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env):
+ # Apache config that proxies CONNECT requests for paths starting
+ # with '/ws/' to a WebSocket server.
+ # The WebSocket server is started in pytest fixture 'ws_server' below.
+ conf = H2Conf(env, extras={
+ 'base': [
+ 'Timeout 1',
+ ],
+ f'cgi.{env.http_tld}': [
+ f' H2WebSockets on',
+ f' ProxyPass /ws/ http://127.0.0.1:{env.ws_port}/ \\',
+ f' upgrade=websocket timeout=10',
+ f' ReadBufferSize 65535'
+ ]
+ })
+ conf.add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+ conf.add_vhost_test1(proxy_self=True, h2proxy_self=True).install()
+ assert env.apache_restart() == 0
+
+ def ws_check_alive(self, env, timeout=5):
+ url = f'http://localhost:{env.ws_port}/'
+ end = datetime.now() + timedelta(seconds=timeout)
+ while datetime.now() < end:
+ r = env.curl_get(url, 5)
+ if r.exit_code == 0:
+ return True
+ time.sleep(.1)
+ return False
+
+ def _mkpath(self, path):
+ if not os.path.exists(path):
+ return os.makedirs(path)
+
+ def _rmrf(self, path):
+ if os.path.exists(path):
+ return shutil.rmtree(path)
+
+ @pytest.fixture(autouse=True, scope='class')
+ def ws_server(self, env):
+ # Run our python websockets server that has some special behaviour
+ # for the different path to CONNECT to.
+ run_dir = os.path.join(env.gen_dir, 'ws-server')
+ err_file = os.path.join(run_dir, 'stderr')
+ self._rmrf(run_dir)
+ self._mkpath(run_dir)
+ with open(err_file, 'w') as cerr:
+ cmd = os.path.join(os.path.dirname(inspect.getfile(TestWebSockets)),
+ 'ws_server.py')
+ args = ['python3', cmd, '--port', str(env.ws_port)]
+ p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr,
+ stdout=cerr)
+ if not self.ws_check_alive(env):
+ p.kill()
+ p.wait()
+ pytest.fail(f'ws_server did not start. stderr={open(err_file).readlines()}')
+ yield
+ p.terminate()
+
+ # CONNECT with invalid :protocol header, must fail
+ def test_h2_800_01_fail_proto(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='fail-proto')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+ env.httpd_error_log.ignore_recent()
+
+ # a correct CONNECT, send CLOSE, expect CLOSE, basic success
+ def test_h2_800_02_ws_empty(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a URL path that does not exist on the server
+ def test_h2_800_03_not_found(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/does-not-exist')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 404', '[1] EOF'], f'{r}'
+
+ # CONNECT to a URL path that is a normal HTTP file resource
+ # we do not want to receive the body of that
+ def test_h2_800_04_non_ws_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/alive.json')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+ # CONNECT to a URL path that sends a delayed HTTP response body
+ # we do not want to receive the body of that
+ def test_h2_800_05_non_ws_delay_resource(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/h2test/error?body_delay=100ms')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 502', '[1] EOF'], f'{r}'
+ assert frames == b''
+
+ # CONNECT missing the sec-webSocket-version header
+ def test_h2_800_06_miss_version(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-version')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 400', '[1] EOF'], f'{r}'
+
+ # CONNECT missing the :path header
+ def test_h2_800_07_miss_path(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-path')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :scheme header
+ def test_h2_800_08_miss_scheme(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-scheme')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT missing the :authority header
+ def test_h2_800_09a_miss_authority(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/', scenario='miss-authority')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] RST'], f'{r}'
+
+ # CONNECT to authority with disabled websockets
+ def test_h2_800_09b_unsupported(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/echo/',
+ authority=f'test1.{env.http_tld}:{env.http_port}')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 501', '[1] EOF'], f'{r}'
+
+ # CONNECT and exchange a PING
+ def test_h2_800_10_ws_ping(self, env: H2TestEnv, ws_server):
+ ping = WsFrame.client_ping(b'12345')
+ r, infos, frames = ws_run(env, path='/ws/echo/', inbytes=ping.to_network())
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[0].data == ping.data, f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT and send several PINGs with a delay of 200ms
+ def test_h2_800_11_ws_timed_pings(self, env: H2TestEnv, ws_server):
+ frame_count = 5
+ ping = WsFrame.client_ping(b'12345')
+
+ def do_send(proc):
+ for _ in range(frame_count):
+ try:
+ proc.stdin.write(ping.to_network())
+ proc.stdin.flush()
+ proc.wait(timeout=0.2)
+ except subprocess.TimeoutExpired:
+ pass
+
+ r, infos, frames = ws_run(env, path='/ws/echo/', do_input=do_send)
+ assert r.exit_code == 0
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == frame_count + 1, f'{frames}'
+ assert frames[-1].opcode == WsFrame.CLOSE, f'{frames}'
+ for i in range(frame_count):
+ assert frames[i].opcode == WsFrame.PONG, f'{frames}'
+ assert frames[i].data == ping.data, f'{frames}'
+
+ # CONNECT to path that closes immediately
+ def test_h2_800_12_ws_unknown(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/unknown')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 1, f'{frames}'
+ # expect a CLOSE with code=4999, reason='path unknown'
+ assert frames[0].opcode == WsFrame.CLOSE, f'{frames}'
+ assert frames[0].data[2:].decode() == 'path unknown', f'{frames}'
+
+ # CONNECT to a path that sends us 1 TEXT frame
+ def test_h2_800_13_ws_text(self, env: H2TestEnv, ws_server):
+ r, infos, frames = ws_run(env, path='/ws/text/')
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) == 2, f'{frames}'
+ assert frames[0].opcode == WsFrame.TEXT, f'{frames}'
+ assert frames[0].data.decode() == 'hello!', f'{frames}'
+ assert frames[1].opcode == WsFrame.CLOSE, f'{frames}'
+
+ # CONNECT to a path that sends us a named file in BINARY frames
+ @pytest.mark.parametrize("fname,flen", [
+ ("data-1k", 1000),
+ ("data-10k", 10000),
+ ("data-100k", 100*1000),
+ ("data-1m", 1000*1000),
+ ])
+ def test_h2_800_14_ws_file(self, env: H2TestEnv, ws_server, fname, flen):
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with 1MB file and trigger varying BINARY frame lengths
+ @pytest.mark.parametrize("frame_len", [
+ 1000 * 1024,
+ 100 * 1024,
+ 10 * 1024,
+ 1 * 1024,
+ 512,
+ ])
+ def test_h2_800_15_ws_frame_len(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}', wait_close=0.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}'
+
+ # CONNECT to path with 1MB file and trigger delays between BINARY frame writes
+ @pytest.mark.parametrize("frame_delay", [
+ 1,
+ 10,
+ 50,
+ 100,
+ ])
+ def test_h2_800_16_ws_frame_delay(self, env: H2TestEnv, ws_server, frame_delay):
+ fname = "data-1m"
+ flen = 1000*1000
+ # adjust frame_len to allow for 1 second overall duration
+ frame_len = int(flen / (1000 / frame_delay))
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=1.5)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
+ # CONNECT to path with 1MB file and measure throughput for varying BINARY frame lengths
+ @pytest.mark.parametrize("frame_len", [
+ 64 * 1024,
+ 16 * 1024,
+ 1 * 1024,
+ ])
+ def test_h2_800_17_ws_throughput(self, env: H2TestEnv, ws_server, frame_len):
+ fname = "data-1m"
+ flen = 1000*1000
+ ncount = 5
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/0/{ncount}',
+ wait_close=0.1, send_close=False, timeout=30)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == ncount * flen, f'{frames}\n{r}'
+ # to see these logged, invoke: `pytest -o log_cli=true`
+ log.info(f'throughput (frame-len={frame_len}): '
+ f'{(total_len / (1024*1024)) / r.duration.total_seconds():0.2f} MB/s')
+
+ # Check that the tunnel timeout is observed, e.g. the longer proxy timeout holds and
+ # the 1sec client connection timeout does not trigger
+ def test_h2_800_18_timeout(self, env: H2TestEnv, ws_server):
+ fname = "data-10k"
+ frame_delay = 1500
+ flen = 10*1000
+ frame_len = 8192
+ # frame_delay of 1.5s exceeds the 1s connection Timeout; the tunnel must stay open
+ r, infos, frames = ws_run(env, path=f'/ws/file/{fname}/{frame_len}/{frame_delay}',
+ wait_close=2)
+ assert r.exit_code == 0, f'{r}'
+ assert infos == ['[1] :status: 200', '[1] EOF'], f'{r}'
+ assert len(frames) > 0
+ total_len = sum([f.data_len for f in frames if f.opcode == WsFrame.BINARY])
+ assert total_len == flen, f'{frames}\n{r}'
+
diff --git a/test/modules/http2/ws_server.py b/test/modules/http2/ws_server.py
new file mode 100644
index 0000000..99fb9cf
--- /dev/null
+++ b/test/modules/http2/ws_server.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+import argparse
+import asyncio
+import logging
+import os
+import sys
+import time
+
+import websockets.server as ws_server
+from websockets.exceptions import ConnectionClosedError
+
+log = logging.getLogger(__name__)
+
+logging.basicConfig(
+ format="[%(asctime)s] %(message)s",
+ level=logging.DEBUG,
+)
+
+
+async def echo(websocket):
+ try:
+ async for message in websocket:
+ try:
+ log.info(f'got request {message}')
+ except Exception as e:
+ log.error(f'error {e} getting path from {message}')
+ await websocket.send(message)
+ except ConnectionClosedError:
+ pass
+
+
+async def on_async_conn(conn):
+ rpath = str(conn.path)
+ pcomps = rpath[1:].split('/')
+ if len(pcomps) == 0 or pcomps[0] == '':
+ pcomps = ['echo'] # default handler
+ log.info(f'connection for {pcomps}')
+ if pcomps[0] == 'echo':
+ log.info(f'/echo endpoint')
+ message = await conn.recv()
+ await conn.send(message)
+ elif pcomps[0] == 'text':
+ await conn.send('hello!')
+ elif pcomps[0] == 'file':
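+ # path: /file/<fname>[/<bufsize>[/<delay_ms>[/<count>]]]; the file is read in
+ # bufsize chunks and sent <count> times with delay_ms pauses between sends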
+ if len(pcomps) < 2:
+ await conn.close(code=4999, reason='unknown file')
+ return
+ fpath = os.path.join('../', pcomps[1])
+ if not os.path.exists(fpath):
+ await conn.close(code=4999, reason='file not found')
+ return
+ bufsize = 0
+ if len(pcomps) > 2:
+ bufsize = int(pcomps[2])
+ if bufsize <= 0:
+ bufsize = 16*1024
+ delay_ms = 0
+ if len(pcomps) > 3:
+ delay_ms = int(pcomps[3])
+ n = 1
+ if len(pcomps) > 4:
+ n = int(pcomps[4])
+ for _ in range(n):
+ with open(fpath, 'r+b') as fd:
+ while True:
+ buf = fd.read(bufsize)
+ if buf is None or len(buf) == 0:
+ break
+ await conn.send(buf)
+ if delay_ms > 0:
+ await asyncio.sleep(delay_ms/1000)
+ else:
+ log.info(f'unknown endpoint: {rpath}')
+ await conn.close(code=4999, reason='path unknown')
+ await conn.close(code=1000, reason='')
+
+
+async def run_server(port):
+ log.info(f'starting server on port {port}')
+ async with ws_server.serve(ws_handler=on_async_conn,
+ host="localhost", port=port):
+ await asyncio.Future()
+
+
+async def main():
+ parser = argparse.ArgumentParser(prog='ws_server',
+ description="Run a websocket echo server.")
+ parser.add_argument("--port", type=int,
+ default=0, help="port to listen on")
+ args = parser.parse_args()
+
+ if args.port == 0:
+ sys.stderr.write('need --port\n')
+ sys.exit(1)
+
+ logging.basicConfig(
+ format="%(asctime)s %(message)s",
+ level=logging.DEBUG,
+ )
+ await run_server(args.port)
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/test/modules/proxy/test_01_http.py b/test/modules/proxy/test_01_http.py
index 7763565..ef71b16 100644
--- a/test/modules/proxy/test_01_http.py
+++ b/test/modules/proxy/test_01_http.py
@@ -59,6 +59,8 @@ class TestProxyHttp:
# check that we see the document we expect there (host matching worked)
# we need to explicitly provide a Host: header since mod_proxy cannot
# resolve the name via DNS.
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
domain = f"{via}.{env.http_tld}"
r = env.curl_get(f"http://127.0.0.1:{env.http_port}/alive.json", 5, options=[
'-H', f"Host: {domain}",
diff --git a/test/modules/proxy/test_02_unix.py b/test/modules/proxy/test_02_unix.py
index a66cdf7..7f3d4d5 100644
--- a/test/modules/proxy/test_02_unix.py
+++ b/test/modules/proxy/test_02_unix.py
@@ -110,6 +110,8 @@ class TestProxyUds:
# check that we see the document we expect there (host matching worked)
# we need to explicitly provide a Host: header since mod_proxy cannot
# resolve the name via DNS.
+ if not env.curl_is_at_least('8.0.0'):
+ pytest.skip(f'need at least curl v8.0.0 for this')
domain = f"{via}.{env.http_tld}"
r = env.curl_get(f"http://127.0.0.1:{env.http_port}/alive.json", 5, options=[
'-H', f"Host: {domain}",
diff --git a/test/modules/tls/env.py b/test/modules/tls/env.py
index a39fcaa..0e457bf 100644
--- a/test/modules/tls/env.py
+++ b/test/modules/tls/env.py
@@ -145,11 +145,11 @@ class TlsTestEnv(HttpdTestEnv):
def domain_b(self) -> str:
return self._domain_b
- def tls_get(self, domain, paths: Union[str, List[str]], options: List[str] = None) -> ExecResult:
+ def tls_get(self, domain, paths: Union[str, List[str]], options: List[str] = None, no_stdout_list = False) -> ExecResult:
if isinstance(paths, str):
paths = [paths]
urls = [f"https://{domain}:{self.https_port}{path}" for path in paths]
- return self.curl_raw(urls=urls, options=options)
+ return self.curl_raw(urls=urls, options=options, no_stdout_list=no_stdout_list)
def tls_get_json(self, domain: str, path: str, options=None):
r = self.tls_get(domain=domain, paths=path, options=options)
diff --git a/test/modules/tls/htdocs/a.mod-tls.test/vars.py b/test/modules/tls/htdocs/a.mod-tls.test/vars.py
index f41ec6a..bd520e2 100755
--- a/test/modules/tls/htdocs/a.mod-tls.test/vars.py
+++ b/test/modules/tls/htdocs/a.mod-tls.test/vars.py
@@ -1,21 +1,29 @@
#!/usr/bin/env python3
import json
import os, sys
-import multipart
from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
def get_request_params():
oforms = {}
+ ofiles = {}
if "REQUEST_URI" in os.environ:
qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
for name, values in qforms.items():
oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
+ if "HTTP_CONTENT_TYPE" in os.environ:
+ ctype = os.environ["HTTP_CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ qforms = parse.parse_qs(sys.stdin.read())
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name] = field.value
+ def on_file(file):
+ ofiles[file.field_name] = file.file_object
+ multipart.parse_form(headers={"Content-Type": ctype}, input_stream=sys.stdin.buffer, on_field=on_field, on_file=on_file)
return oforms, ofiles
diff --git a/test/modules/tls/htdocs/b.mod-tls.test/vars.py b/test/modules/tls/htdocs/b.mod-tls.test/vars.py
index f41ec6a..bd520e2 100755
--- a/test/modules/tls/htdocs/b.mod-tls.test/vars.py
+++ b/test/modules/tls/htdocs/b.mod-tls.test/vars.py
@@ -1,21 +1,29 @@
#!/usr/bin/env python3
import json
import os, sys
-import multipart
from urllib import parse
+import multipart # https://github.com/andrew-d/python-multipart (`apt install python3-multipart`)
def get_request_params():
oforms = {}
+ ofiles = {}
if "REQUEST_URI" in os.environ:
qforms = parse.parse_qs(parse.urlsplit(os.environ["REQUEST_URI"]).query)
for name, values in qforms.items():
oforms[name] = values[0]
- myenv = os.environ.copy()
- myenv['wsgi.input'] = sys.stdin.buffer
- mforms, ofiles = multipart.parse_form_data(environ=myenv)
- for name, item in mforms.items():
- oforms[name] = item
+ if "HTTP_CONTENT_TYPE" in os.environ:
+ ctype = os.environ["HTTP_CONTENT_TYPE"]
+ if ctype == "application/x-www-form-urlencoded":
+ qforms = parse.parse_qs(sys.stdin.read())
+ for name, values in qforms.items():
+ oforms[name] = values[0]
+ elif ctype.startswith("multipart/"):
+ def on_field(field):
+ oforms[field.field_name] = field.value
+ def on_file(file):
+ ofiles[file.field_name] = file.file_object
+ multipart.parse_form(headers={"Content-Type": ctype}, input_stream=sys.stdin.buffer, on_field=on_field, on_file=on_file)
return oforms, ofiles
diff --git a/test/modules/tls/test_04_get.py b/test/modules/tls/test_04_get.py
index 4412a66..6944381 100644
--- a/test/modules/tls/test_04_get.py
+++ b/test/modules/tls/test_04_get.py
@@ -59,7 +59,7 @@ class TestGet:
# we'd like to check that we can do >1 requests on the same connection
# however curl hides that from us, unless we analyze its verbose output
docs_a = os.path.join(env.server_docs_dir, env.domain_a)
- r = env.tls_get(env.domain_a, paths=[
+ r = env.tls_get(env.domain_a, no_stdout_list=True, paths=[
"/{0}".format(fname),
"/{0}".format(fname)
])
diff --git a/test/modules/tls/test_05_proto.py b/test/modules/tls/test_05_proto.py
index 447d052..d874a90 100644
--- a/test/modules/tls/test_05_proto.py
+++ b/test/modules/tls/test_05_proto.py
@@ -33,16 +33,14 @@ class TestProto:
def test_tls_05_proto_1_2(self, env):
r = env.tls_get(env.domain_b, "/index.json", options=["--tlsv1.2"])
assert r.exit_code == 0, r.stderr
- if TlsTestEnv.curl_supports_tls_1_3():
- r = env.tls_get(env.domain_b, "/index.json", options=["--tlsv1.3"])
- assert r.exit_code == 0, r.stderr
+ @pytest.mark.skip('curl does not have TLSv1.3 on all platforms')
def test_tls_05_proto_1_3(self, env):
- r = env.tls_get(env.domain_a, "/index.json", options=["--tlsv1.3"])
- if TlsTestEnv.curl_supports_tls_1_3():
- assert r.exit_code == 0, r.stderr
+ r = env.tls_get(env.domain_a, "/index.json", options=["--tlsv1.3", '-v'])
+ if True: # testing TlsTestEnv.curl_supports_tls_1_3() is unreliable (curl should support TLS 1.3 nowadays)
+ assert r.exit_code == 0, f'{r}'
else:
- assert r.exit_code == 4, r.stderr
+ assert r.exit_code == 4, f'{r}'
def test_tls_05_proto_close(self, env):
s = socket.create_connection(('localhost', env.https_port))
diff --git a/test/pyhttpd/conf/httpd.conf.template b/test/pyhttpd/conf/httpd.conf.template
index f44935e..255b88a 100644
--- a/test/pyhttpd/conf/httpd.conf.template
+++ b/test/pyhttpd/conf/httpd.conf.template
@@ -6,7 +6,7 @@ Include "conf/modules.conf"
DocumentRoot "${server_dir}/htdocs"
<IfModule log_config_module>
- LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\" %k" combined
+ LogFormat "{ \"request\": \"%r\", \"status\": %>s, \"bytes_resp_B\": %B, \"bytes_tx_O\": %O, \"bytes_rx_I\": %I, \"bytes_rx_tx_S\": %S, \"time_taken\": %D }" combined
LogFormat "%h %l %u %t \"%r\" %>s %b" common
CustomLog "logs/access_log" combined
diff --git a/test/pyhttpd/conf/mime.types b/test/pyhttpd/conf/mime.types
index b90b165..2db5c09 100644
--- a/test/pyhttpd/conf/mime.types
+++ b/test/pyhttpd/conf/mime.types
@@ -1,6 +1,6 @@
# This file maps Internet media types to unique file extension(s).
# Although created for httpd, this file is used by many software systems
-# and has been placed in the public domain for unlimited redisribution.
+# and has been placed in the public domain for unlimited redistribution.
#
# The table below contains both registered and (common) unregistered types.
# A type that has no unique extension can be ignored -- they are listed
diff --git a/test/pyhttpd/config.ini.in b/test/pyhttpd/config.ini.in
index e1ae070..3f42248 100644
--- a/test/pyhttpd/config.ini.in
+++ b/test/pyhttpd/config.ini.in
@@ -26,6 +26,7 @@ http_port = 5002
https_port = 5001
proxy_port = 5003
http_port2 = 5004
+ws_port = 5100
http_tld = tests.httpd.apache.org
test_dir = @abs_srcdir@
test_src_dir = @abs_srcdir@
diff --git a/test/pyhttpd/curl.py b/test/pyhttpd/curl.py
index 2b6840b..5a215cd 100644
--- a/test/pyhttpd/curl.py
+++ b/test/pyhttpd/curl.py
@@ -31,9 +31,14 @@ class CurlPiper:
def response(self):
return self._r.response if self._r else None
+ def __repr__(self):
+ return f'CurlPiper[exitcode={self._exitcode}, stderr={self._stderr}, stdout={self._stdout}]'
+
def start(self):
self.args, self.headerfile = self.env.curl_complete_args([self.url], timeout=5, options=[
- "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"])
+ "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"
+ ])
+ self.args.append(self.url)
sys.stderr.write("starting: {0}\n".format(self.args))
self.proc = subprocess.Popen(self.args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
@@ -125,7 +130,7 @@ class CurlPiper:
delta_mics += datetime.time(23, 59, 59, 999999)
recv_deltas.append(datetime.timedelta(microseconds=delta_mics))
last_mics = mics
- stutter_td = datetime.timedelta(seconds=stutter.total_seconds() * 0.9) # 10% leeway
+ stutter_td = datetime.timedelta(seconds=stutter.total_seconds() * 0.75) # 25% leeway
# TODO: the first two chunks are often close together, it seems
# there still is a little buffering delay going on
for idx, td in enumerate(recv_deltas[1:]):
diff --git a/test/pyhttpd/env.py b/test/pyhttpd/env.py
index 2c91859..1d4e8b1 100644
--- a/test/pyhttpd/env.py
+++ b/test/pyhttpd/env.py
@@ -96,9 +96,8 @@ class HttpdTestSetup:
self.env.clear_curl_headerfiles()
def _make_dirs(self):
- if os.path.exists(self.env.gen_dir):
- shutil.rmtree(self.env.gen_dir)
- os.makedirs(self.env.gen_dir)
+ if not os.path.exists(self.env.gen_dir):
+ os.makedirs(self.env.gen_dir)
if not os.path.exists(self.env.server_logs_dir):
os.makedirs(self.env.server_logs_dir)
@@ -238,6 +237,8 @@ class HttpdTestEnv:
if HttpdTestEnv.LIBEXEC_DIR is None:
HttpdTestEnv.LIBEXEC_DIR = self._libexec_dir = self.get_apxs_var('LIBEXECDIR')
self._curl = self.config.get('global', 'curl_bin')
+ if 'CURL' in os.environ:
+ self._curl = os.environ['CURL']
self._nghttp = self.config.get('global', 'nghttp')
if self._nghttp is None:
self._nghttp = 'nghttp'
@@ -249,8 +250,10 @@ class HttpdTestEnv:
self._http_port2 = int(self.config.get('test', 'http_port2'))
self._https_port = int(self.config.get('test', 'https_port'))
self._proxy_port = int(self.config.get('test', 'proxy_port'))
+ self._ws_port = int(self.config.get('test', 'ws_port'))
self._http_tld = self.config.get('test', 'http_tld')
self._test_dir = self.config.get('test', 'test_dir')
+ self._clients_dir = os.path.join(os.path.dirname(self._test_dir), 'clients')
self._gen_dir = self.config.get('test', 'gen_dir')
self._server_dir = os.path.join(self._gen_dir, 'apache')
self._server_conf_dir = os.path.join(self._server_dir, "conf")
@@ -288,6 +291,7 @@ class HttpdTestEnv:
self._verify_certs = False
self._curl_headerfiles_n = 0
+ self._curl_version = None
self._h2load_version = None
self._current_test = None
@@ -321,6 +325,10 @@ class HttpdTestEnv:
self._log_interesting += f" {name}:{log_level}"
@property
+ def curl(self) -> str:
+ return self._curl
+
+ @property
def apxs(self) -> str:
return self._apxs
@@ -361,6 +369,10 @@ class HttpdTestEnv:
return self._proxy_port
@property
+ def ws_port(self) -> int:
+ return self._ws_port
+
+ @property
def http_tld(self) -> str:
return self._http_tld
@@ -385,6 +397,10 @@ class HttpdTestEnv:
return self._test_dir
@property
+ def clients_dir(self) -> str:
+ return self._clients_dir
+
+ @property
def server_dir(self) -> str:
return self._server_dir
@@ -473,6 +489,34 @@ class HttpdTestEnv:
return self._h2load_version >= self._versiontuple(minv)
return False
+ def curl_is_at_least(self, minv):
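+ # determine the curl version once from the first line of `curl -V`,
+ # e.g. "curl 8.5.0 (x86_64-pc-linux-gnu) libcurl/8.5.0 ..."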
+ if self._curl_version is None:
+ p = subprocess.run([self._curl, '-V'], capture_output=True, text=True)
+ if p.returncode != 0:
+ return False
+ for l in p.stdout.splitlines():
+ m = re.match(r'curl ([0-9.]+)[- ].*', l)
+ if m:
+ self._curl_version = self._versiontuple(m.group(1))
+ break
+ if self._curl_version is not None:
+ return self._curl_version >= self._versiontuple(minv)
+ return False
+
+ def curl_is_less_than(self, version):
+ if self._curl_version is None:
+ p = subprocess.run([self._curl, '-V'], capture_output=True, text=True)
+ if p.returncode != 0:
+ return False
+ for l in p.stdout.splitlines():
+ m = re.match(r'curl ([0-9.]+)[- ].*', l)
+ if m:
+ self._curl_version = self._versiontuple(m.group(1))
+ break
+ if self._curl_version is not None:
+ return self._curl_version < self._versiontuple(version)
+ return False
+
def has_nghttp(self):
return self._nghttp != ""
@@ -499,12 +543,14 @@ class HttpdTestEnv:
if not os.path.exists(path):
return os.makedirs(path)
- def run(self, args, stdout_list=False, intext=None, debug_log=True):
+ def run(self, args, stdout_list=False, intext=None, inbytes=None, debug_log=True):
if debug_log:
log.debug(f"run: {args}")
start = datetime.now()
+ if intext is not None:
+ inbytes = intext.encode()
p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
- input=intext.encode() if intext else None)
+ input=inbytes)
stdout_as_list = None
if stdout_list:
try:
@@ -529,8 +575,13 @@ class HttpdTestEnv:
with open(self._test_conf, 'w') as fd:
fd.write('\n'.join(self._httpd_base_conf))
fd.write('\n')
+ fd.write(f"CoreDumpDirectory {self._server_dir}\n")
if self._verbosity >= 2:
- fd.write(f"LogLevel core:trace5 {self.mpm_module}:trace5\n")
+ fd.write(f"LogLevel core:trace5 {self.mpm_module}:trace5 http:trace5\n")
+ if self._verbosity >= 3:
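+ # mod_dumpio logs the raw bytes read from and written to connections
+ # into the error log, which gets very verbose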
+ fd.write(f"LogLevel dumpio:trace7\n")
+ fd.write(f"DumpIoOutput on\n")
+ fd.write(f"DumpIoInput on\n")
if self._log_interesting:
fd.write(self._log_interesting)
fd.write('\n\n')
@@ -651,19 +702,11 @@ class HttpdTestEnv:
os.remove(os.path.join(self.gen_dir, fname))
self._curl_headerfiles_n = 0
- def curl_complete_args(self, urls, stdout_list=False,
- timeout=None, options=None,
- insecure=False, force_resolve=True):
- u = urlparse(urls[0])
- #assert u.hostname, f"hostname not in url: {urls[0]}"
- headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
- self._curl_headerfiles_n += 1
+ def curl_resolve_args(self, url, insecure=False, force_resolve=True, options=None):
+ u = urlparse(url)
args = [
- self._curl, "-s", "--path-as-is", "-D", headerfile,
]
- if stdout_list:
- args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
if u.scheme == 'http':
pass
elif insecure:
@@ -675,19 +718,33 @@ class HttpdTestEnv:
if ca_pem:
args.extend(["--cacert", ca_pem])
- if self._current_test is not None:
- args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
-
if force_resolve and u.hostname and u.hostname != 'localhost' \
and u.hostname != self._httpd_addr \
and not re.match(r'^(\d+|\[|:).*', u.hostname):
- assert u.port, f"port not in url: {urls[0]}"
+ assert u.port, f"port not in url: {url}"
args.extend(["--resolve", f"{u.hostname}:{u.port}:{self._httpd_addr}"])
+ return args
+
+ def curl_complete_args(self, urls, stdout_list=False,
+ timeout=None, options=None,
+ insecure=False, force_resolve=True):
+ headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
+ self._curl_headerfiles_n += 1
+
+ args = [
+ self._curl, "-s", "--path-as-is", "-D", headerfile,
+ ]
+ args.extend(self.curl_resolve_args(urls[0], insecure=insecure,
+ force_resolve=force_resolve,
+ options=options))
+ if stdout_list:
+ args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
+ if self._current_test is not None:
+ args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
if timeout is not None and int(timeout) > 0:
args.extend(["--connect-timeout", str(int(timeout))])
if options:
args.extend(options)
- args += urls
return args, headerfile
def curl_parse_headerfile(self, headerfile: str, r: ExecResult = None) -> ExecResult:
@@ -745,22 +802,24 @@ class HttpdTestEnv:
return r
def curl_raw(self, urls, timeout=10, options=None, insecure=False,
- force_resolve=True):
+ force_resolve=True, no_stdout_list=False):
if not isinstance(urls, list):
urls = [urls]
stdout_list = False
- if len(urls) > 1:
+ if len(urls) > 1 and not no_stdout_list:
stdout_list = True
args, headerfile = self.curl_complete_args(
urls=urls, stdout_list=stdout_list,
timeout=timeout, options=options, insecure=insecure,
force_resolve=force_resolve)
+ args += urls
r = self.run(args, stdout_list=stdout_list)
if r.exit_code == 0:
self.curl_parse_headerfile(headerfile, r=r)
if r.json:
r.response["json"] = r.json
- os.remove(headerfile)
+ if os.path.isfile(headerfile):
+ os.remove(headerfile)
return r
def curl_get(self, url, insecure=False, options=None):
@@ -822,3 +881,18 @@ class HttpdTestEnv:
}
run.add_results({"h2load": stats})
return run
+
+ def make_data_file(self, indir: str, fname: str, fsize: int) -> str:
+ fpath = os.path.join(indir, fname)
+ s10 = "0123456789"
+ s = (101 * s10) + s10[0:3]
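+ # each line is exactly 1024 bytes: 9-digit counter, '-', 1013 filler chars and a newline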
+ with open(fpath, 'w') as fd:
+ for i in range(int(fsize / 1024)):
+ fd.write(f"{i:09d}-{s}\n")
+ remain = int(fsize % 1024)
+ if remain != 0:
+ i = int(fsize / 1024) + 1
+ s = f"{i:09d}-{s}\n"
+ fd.write(s[0:remain])
+ return fpath
+
diff --git a/test/pyhttpd/nghttp.py b/test/pyhttpd/nghttp.py
index f27e40d..43721f5 100644
--- a/test/pyhttpd/nghttp.py
+++ b/test/pyhttpd/nghttp.py
@@ -37,6 +37,7 @@ class Nghttp:
"id": sid,
"body": b''
},
+ "data_lengths": [],
"paddings": [],
"promises": []
}
@@ -131,12 +132,13 @@ class Nghttp:
s = self.get_stream(streams, m.group(3))
blen = int(m.group(2))
if s:
- print("stream %d: %d DATA bytes added" % (s["id"], blen))
+ print(f'stream {s["id"]}: {blen} DATA bytes added via "{l}"')
padlen = 0
if len(lines) > lidx + 2:
mpad = re.match(r' +\(padlen=(\d+)\)', lines[lidx+2])
if mpad:
padlen = int(mpad.group(1))
+ s["data_lengths"].append(blen)
s["paddings"].append(padlen)
blen -= padlen
s["response"]["body"] += body[-blen:].encode()
@@ -196,6 +198,7 @@ class Nghttp:
if main_stream in streams:
output["response"] = streams[main_stream]["response"]
output["paddings"] = streams[main_stream]["paddings"]
+ output["data_lengths"] = streams[main_stream]["data_lengths"]
return output
def _raw(self, url, timeout, options):
@@ -244,11 +247,11 @@ class Nghttp:
def post_name(self, url, name, timeout=5, options=None):
reqbody = ("%s/nghttp.req.body" % self.TMP_DIR)
with open(reqbody, 'w') as f:
- f.write("--DSAJKcd9876\n")
- f.write("Content-Disposition: form-data; name=\"value\"; filename=\"xxxxx\"\n")
- f.write("Content-Type: text/plain\n")
- f.write("\n%s\n" % name)
- f.write("--DSAJKcd9876\n")
+ f.write("--DSAJKcd9876\r\n")
+ f.write("Content-Disposition: form-data; name=\"value\"; filename=\"xxxxx\"\r\n")
+ f.write("Content-Type: text/plain\r\n")
+ f.write(f"\r\n{name}")
+ f.write("\r\n--DSAJKcd9876\r\n")
if not options:
options = []
options.extend([
@@ -267,20 +270,23 @@ class Nghttp:
reqbody = ("%s/nghttp.req.body" % self.TMP_DIR)
with open(fpath, 'rb') as fin:
with open(reqbody, 'wb') as f:
- f.write(("""--DSAJKcd9876
-Content-Disposition: form-data; name="xxx"; filename="xxxxx"
-Content-Type: text/plain
-
-testing mod_h2
---DSAJKcd9876
-Content-Disposition: form-data; name="file"; filename="%s"
-Content-Type: application/octet-stream
-Content-Transfer-Encoding: binary
-
-""" % fname).encode('utf-8'))
+ preamble = [
+ '--DSAJKcd9876',
+ 'Content-Disposition: form-data; name="xxx"; filename="xxxxx"',
+ 'Content-Type: text/plain',
+ '',
+ 'testing mod_h2',
+ '\r\n--DSAJKcd9876',
+ f'Content-Disposition: form-data; name="file"; filename="{fname}"',
+ 'Content-Type: application/octet-stream',
+ 'Content-Transfer-Encoding: binary',
+ '', ''
+ ]
+ f.write('\r\n'.join(preamble).encode('utf-8'))
f.write(fin.read())
- f.write("""
---DSAJKcd9876""".encode('utf-8'))
+ f.write('\r\n'.join([
+ '\r\n--DSAJKcd9876', ''
+ ]).encode('utf-8'))
if not options:
options = []
options.extend([
diff --git a/test/pyhttpd/result.py b/test/pyhttpd/result.py
index 3789461..4bf9ff2 100644
--- a/test/pyhttpd/result.py
+++ b/test/pyhttpd/result.py
@@ -28,7 +28,14 @@ class ExecResult:
self._json_out = None
def __repr__(self):
- return f"ExecResult[code={self.exit_code}, args={self._args}, stdout={self._stdout}, stderr={self._stderr}]"
+ out = [
+ f"ExecResult[code={self.exit_code}, args={self._args}\n",
+ "----stdout---------------------------------------\n",
+ self._stdout.decode(),
+ "----stderr---------------------------------------\n",
+ self._stderr.decode()
+ ]
+ return ''.join(out)
@property
def exit_code(self) -> int:
diff --git a/test/pyhttpd/ws_util.py b/test/pyhttpd/ws_util.py
new file mode 100644
index 0000000..38a3cf7
--- /dev/null
+++ b/test/pyhttpd/ws_util.py
@@ -0,0 +1,137 @@
+import logging
+import struct
+
+
+log = logging.getLogger(__name__)
+
+
+class WsFrame:
+
+ CONT = 0
+ TEXT = 1
+ BINARY = 2
+ RSVD3 = 3
+ RSVD4 = 4
+ RSVD5 = 5
+ RSVD6 = 6
+ RSVD7 = 7
+ CLOSE = 8
+ PING = 9
+ PONG = 10
+ RSVD11 = 11
+ RSVD12 = 12
+ RSVD13 = 13
+ RSVD14 = 14
+ RSVD15 = 15
+
+ OP_NAMES = [
+ "CONT",
+ "TEXT",
+ "BINARY",
+ "RSVD3",
+ "RSVD4",
+ "RSVD5",
+ "RSVD6",
+ "RSVD7",
+ "CLOSE",
+ "PING",
+ "PONG",
+ "RSVD11",
+ "RSVD12",
+ "RSVD13",
+ "RSVD14",
+ "RSVD15",
+ ]
+
+ def __init__(self, opcode: int, fin: bool, mask: bytes, data: bytes):
+ self.opcode = opcode
+ self.fin = fin
+ self.mask = mask
+ self.data = data
+ self.length = len(data)
+
+ def __repr__(self):
+ return f'WsFrame[{self.OP_NAMES[self.opcode]} fin={self.fin}, mask={self.mask}, len={len(self.data)}]'
+
+ @property
+ def data_len(self) -> int:
+ return len(self.data) if self.data else 0
+
+ def to_network(self) -> bytes:
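+ # serialize per RFC 6455: FIN flag + opcode byte, then MASK flag + 7-bit length
+ # (126/127 select a 16/64 bit extended length), optional 4-byte mask, then payload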
+ nd = bytearray()
+ h1 = self.opcode
+ if self.fin:
+ h1 |= 0x80
+ nd.extend(struct.pack("!B", h1))
+ mask_bit = 0x80 if self.mask is not None else 0x0
+ h2 = self.data_len
+ if h2 > 65535:
+ nd.extend(struct.pack("!BQ", 127|mask_bit, h2))
+ elif h2 >= 126:  # length 126 itself must use the 16-bit extended form
+ nd.extend(struct.pack("!BH", 126|mask_bit, h2))
+ else:
+ nd.extend(struct.pack("!B", h2|mask_bit))
+ if self.mask is not None:
+ nd.extend(self.mask)
+ if self.data is not None:
+ nd.extend(self.data)
+ return nd
+
+ @classmethod
+ def client_ping(cls, data: bytes, mask: bytes = None) -> 'WsFrame':
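+ # RFC 6455 requires client frames to be masked; an all-zero mask leaves the payload bytes unchanged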
+ if mask is None:
+ mask = bytes.fromhex('00 00 00 00')
+ return WsFrame(opcode=WsFrame.PING, fin=True, mask=mask, data=data)
+
+ @classmethod
+ def client_close(cls, code: int, reason: str = None,
+ mask: bytes = None) -> 'WsFrame':
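+ # a CLOSE payload is a 2-byte status code, optionally followed by a UTF-8 reason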
+ data = bytearray(struct.pack("!H", code))
+ if reason is not None:
+ data.extend(reason.encode())
+ if mask is None:
+ mask = bytes.fromhex('00 00 00 00')
+ return WsFrame(opcode=WsFrame.CLOSE, fin=True, mask=mask, data=data)
+
+
+class WsFrameReader:
+
+ def __init__(self, data: bytes):
+ self.data = bytearray(data)  # mutable copy, so _read() can consume it
+
+ def _read(self, n: int):
+ if len(self.data) < n:
+ raise EOFError(f'have {len(self.data)} bytes left, but {n} requested')
+ elif n == 0:
+ return b''
+ chunk = self.data[:n]
+ del self.data[:n]
+ return chunk
+
+ def next_frame(self):
+ data = self._read(2)
+ h1, h2 = struct.unpack("!BB", data)
+ log.debug(f'parsed h1={h1} h2={h2} from {data}')
+ fin = True if h1 & 0x80 else False
+ opcode = h1 & 0xf
+ has_mask = True if h2 & 0x80 else False
+ mask = None
+ dlen = h2 & 0x7f
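+ # 7-bit length values 126/127 announce a 16/64 bit extended length field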
+ if dlen == 126:
+ (dlen,) = struct.unpack("!H", self._read(2))
+ elif dlen == 127:
+ (dlen,) = struct.unpack("!Q", self._read(8))
+ if has_mask:
+ mask = self._read(4)
+ return WsFrame(opcode=opcode, fin=fin, mask=mask, data=self._read(dlen))
+
+ def eof(self):
+ return len(self.data) == 0
+
+ @classmethod
+ def parse(cls, data: bytes):
+ frames = []
+ reader = WsFrameReader(data=data)
+ while not reader.eof():
+ frames.append(reader.next_frame())
+ return frames
diff --git a/test/travis_before_linux.sh b/test/travis_before_linux.sh
index bc4d659..2722c6a 100755
--- a/test/travis_before_linux.sh
+++ b/test/travis_before_linux.sh
@@ -88,10 +88,11 @@ function install_apx() {
}
# Allow to load $HOME/build/apache/httpd/.gdbinit
-echo "add-auto-load-safe-path $HOME/build/apache/httpd/.gdbinit" >> $HOME/.gdbinit
+echo "add-auto-load-safe-path $HOME/work/httpd/httpd/.gdbinit" >> $HOME/.gdbinit
-# Prepare perl-framework test environment
-if ! test -v SKIP_TESTING; then
+# Unless either SKIP_TESTING or NO_TEST_FRAMEWORK is set, install the
+# CPAN modules required to run the Perl test framework.
+if ! test -v SKIP_TESTING -o -v NO_TEST_FRAMEWORK; then
# Clear CPAN cache if necessary
if [ -v CLEAR_CACHE ]; then rm -rf ~/perl5; fi
@@ -171,3 +172,15 @@ if test -v APU_VERSION; then
install_apx apr-util ${APU_VERSION} "${APU_CONFIG}" --with-apr=$HOME/build/apr-${APR_VERSION}
ldd $HOME/root/apr-util-${APU_VERSION}/lib/libaprutil-?.so || true
fi
+
+# Since librustls is not a package (yet) on any platform, we
+# build the version we want from source
+if test -v TEST_MOD_TLS -a -v RUSTLS_VERSION; then
+ if ! test -d $HOME/root/rustls; then
+ RUSTLS_HOME="$HOME/build/rustls-ffi"
+ git clone -q --depth=1 -b "$RUSTLS_VERSION" https://github.com/rustls/rustls-ffi.git "$RUSTLS_HOME"
+ pushd "$RUSTLS_HOME"
+ make install DESTDIR="$HOME/root/rustls"
+ popd
+ fi
+fi
diff --git a/test/travis_run_linux.sh b/test/travis_run_linux.sh
index f7a72d2..373e666 100755
--- a/test/travis_run_linux.sh
+++ b/test/travis_run_linux.sh
@@ -31,9 +31,9 @@ fi
PREFIX=${PREFIX:-$HOME/build/httpd-root}
-# For trunk, "make check" is sufficient to run the test suite.
-# For 2.4.x, the test suite must be run manually
-if test ! -v SKIP_TESTING; then
+# If perl-framework testing is required, it has already been checked out
+# by travis_before_linux.sh:
+if test -d test/perl-framework; then
CONFIG="$CONFIG --enable-load-all-modules"
if grep -q ^check: Makefile.in; then
CONFIG="--with-test-suite=test/perl-framework $CONFIG"
@@ -54,19 +54,9 @@ else
CONFIG="$CONFIG --with-apr-util=/usr"
fi
-# Since librustls is not a package (yet) on any platform, we
-# build the version we want from source
-if test -v TEST_MOD_TLS; then
- RUSTLS_HOME="$HOME/build/rustls-ffi"
- RUSTLS_VERSION="v0.9.0"
- git clone https://github.com/rustls/rustls-ffi.git "$RUSTLS_HOME"
- pushd "$RUSTLS_HOME"
- # since v0.9.0, there is no longer a dependency on cbindgen
- git fetch origin
- git checkout tags/$RUSTLS_VERSION
- make install DESTDIR="$PREFIX"
- popd
- CONFIG="$CONFIG --with-tls --with-rustls=$PREFIX"
+# Pick up the rustls install built previously.
+if test -v TEST_MOD_TLS -a -v RUSTLS_VERSION; then
+ CONFIG="$CONFIG --with-tls --with-rustls=$HOME/root/rustls"
fi
if test -v TEST_OPENSSL3; then
@@ -109,11 +99,15 @@ if ! test -v SKIP_TESTING; then
fi
if test -v TEST_ASAN; then
- export ASAN_OPTIONS="log_path=$PWD/asan.log"
+ export ASAN_OPTIONS="log_path=$PWD/asan.log:detect_leaks=0"
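+ # detect_leaks=0 disables LeakSanitizer, so only memory errors are reported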
fi
# Try to keep all potential coredumps from all processes
sudo sysctl -w kernel.core_uses_pid=1 2>/dev/null || true
+ # Systemd based systems might process core dumps via systemd-coredump.
+ # But we want to have local unprocessed files.
+ sudo sysctl -w kernel.core_pattern=core || true
+ ulimit -c unlimited 2>/dev/null || true
if test -v WITH_TEST_SUITE; then
make check TESTS="${TESTS}" TEST_CONFIG="${TEST_ARGS}"
@@ -196,6 +190,8 @@ if ! test -v SKIP_TESTING; then
fi
if test -v TEST_H2 -a $RV -eq 0; then
+ # Build the test clients
+ (cd test/clients && make)
# Run HTTP/2 tests.
MPM=event py.test-3 test/modules/http2
RV=$?
@@ -260,7 +256,7 @@ if ! test -v SKIP_TESTING; then
fi
fi
- for core in `ls test/perl-framework/t/core{,.*} 2>/dev/null`; do
+ for core in `ls test/perl-framework/t/core{,.*} test/gen/apache/core{,.*} 2>/dev/null`; do
gdb -ex 'thread apply all backtrace full' -batch ./httpd "$core"
RV=5
done