author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
commit    26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree      f435a8308119effd964b339f76abb83a57c29483  /testing/mozbase/mozhttpd
parent    Initial commit. (diff)
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/mozbase/mozhttpd')
-rw-r--r--  testing/mozbase/mozhttpd/mozhttpd/__init__.py  |  47
-rw-r--r--  testing/mozbase/mozhttpd/mozhttpd/handlers.py  |  20
-rwxr-xr-x  testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py  | 350
-rw-r--r--  testing/mozbase/mozhttpd/setup.py              |  34
-rw-r--r--  testing/mozbase/mozhttpd/tests/api.py          | 381
-rw-r--r--  testing/mozbase/mozhttpd/tests/baseurl.py      |  33
-rw-r--r--  testing/mozbase/mozhttpd/tests/basic.py        |  50
-rw-r--r--  testing/mozbase/mozhttpd/tests/filelisting.py  |  68
-rw-r--r--  testing/mozbase/mozhttpd/tests/manifest.toml   |  16
-rw-r--r--  testing/mozbase/mozhttpd/tests/paths.py        | 121
-rw-r--r--  testing/mozbase/mozhttpd/tests/requestlog.py   |  62
11 files changed, 1182 insertions, 0 deletions
diff --git a/testing/mozbase/mozhttpd/mozhttpd/__init__.py b/testing/mozbase/mozhttpd/mozhttpd/__init__.py
new file mode 100644
index 0000000000..65c860f9c5
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/__init__.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Mozhttpd is a simple HTTP web server written in Python, designed expressly
+for use in automated testing scenarios. It can both serve static
+content and provide simple web services.
+
+The server is based on Python standard library modules such as
+SimpleHTTPServer and urlparse (accessed via six.moves), and uses
+ThreadingMixIn to serve each request on a discrete thread.
+
+Some existing uses of mozhttpd include Peptest_, Eideticker_, and Talos_.
+
+.. _Peptest: https://github.com/mozilla/peptest/
+
+.. _Eideticker: https://github.com/mozilla/eideticker/
+
+.. _Talos: http://hg.mozilla.org/build/
+
+The following simple example creates a basic HTTP server which serves
+content from the current directory, defines a single API endpoint
+`/api/resources/<resourceid>`, and then serves requests indefinitely:
+
+::
+
+ import mozhttpd
+
+ @mozhttpd.handlers.json_response
+ def resource_get(request, objid):
+ return (200, { 'id': objid,
+ 'query': request.query })
+
+
+ httpd = mozhttpd.MozHttpd(port=8080, docroot='.',
+ urlhandlers = [ { 'method': 'GET',
+ 'path': '/api/resources/([^/]+)/?',
+ 'function': resource_get } ])
+ print "Serving '%s' at %s:%s" % (httpd.docroot, httpd.host, httpd.port)
+ httpd.start(block=True)
+
+"""
+from .handlers import json_response
+from .mozhttpd import MozHttpd, Request, RequestHandler, main
+
+__all__ = ["MozHttpd", "Request", "RequestHandler", "main", "json_response"]
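
For reference, the endpoint registered in the docstring example above can be
exercised with a client sketch along these lines (assuming that example server
is running on port 8080; the resource id "42" and the query string are
purely illustrative):

::

    import json
    from six.moves.urllib.request import urlopen

    # Query the example endpoint registered under /api/resources/<resourceid>.
    f = urlopen("http://127.0.0.1:8080/api/resources/42?foo=bar")
    assert f.getcode() == 200
    print(json.loads(f.read()))  # expected: {'id': '42', 'query': 'foo=bar'}
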
diff --git a/testing/mozbase/mozhttpd/mozhttpd/handlers.py b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
new file mode 100644
index 0000000000..44f657031a
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+
+def json_response(func):
+ """Translates results of 'func' into a JSON response."""
+
+ def wrap(*a, **kw):
+ (code, data) = func(*a, **kw)
+ json_data = json.dumps(data)
+ return (
+ code,
+ {"Content-type": "application/json", "Content-Length": len(json_data)},
+ json_data,
+ )
+
+ return wrap
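
A sketch of how json_response is typically used (not part of the patch; the
handler name list_items is made up): the decorated function returns a
(status, data) pair, and the wrapper converts it into the
(status, headers, body) triple that the request handler in mozhttpd.py
writes out:

::

    import mozhttpd

    @mozhttpd.handlers.json_response
    def list_items(request):  # hypothetical handler
        return (200, {"items": [1, 2, 3], "query": request.query})

    # Calling the wrapped function yields roughly:
    #   (200,
    #    {"Content-type": "application/json", "Content-Length": <len(body)>},
    #    '{"items": [1, 2, 3], "query": "..."}')
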
diff --git a/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
new file mode 100755
index 0000000000..dd4e606a55
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
@@ -0,0 +1,350 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import logging
+import os
+import posixpath
+import re
+import socket
+import sys
+import threading
+import time
+import traceback
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+
+import moznetwork
+from six import ensure_binary, iteritems
+from six.moves.BaseHTTPServer import HTTPServer
+from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
+from six.moves.socketserver import ThreadingMixIn
+from six.moves.urllib.parse import unquote, urlsplit
+
+
+class EasyServer(ThreadingMixIn, HTTPServer):
+ allow_reuse_address = True
+ acceptable_errors = (errno.EPIPE, errno.ECONNABORTED)
+
+ def handle_error(self, request, client_address):
+ error = sys.exc_info()[1]
+
+ if (
+ isinstance(error, socket.error)
+ and isinstance(error.args, tuple)
+ and error.args[0] in self.acceptable_errors
+ ) or (isinstance(error, IOError) and error.errno in self.acceptable_errors):
+ pass  # remote end hung up before the result was sent
+ else:
+ logging.error(error)
+ # The error can be ambiguous because only a short description is logged,
+ # so we dump a stack trace to discover its origin.
+ traceback.print_exc()
+
+
+class Request(object):
+ """Details of a request."""
+
+ # attributes from urlsplit that this class also sets
+ uri_attrs = ("scheme", "netloc", "path", "query", "fragment")
+
+ def __init__(self, uri, headers, rfile=None):
+ self.uri = uri
+ self.headers = headers
+ parsed = urlsplit(uri)
+ for i, attr in enumerate(self.uri_attrs):
+ setattr(self, attr, parsed[i])
+ try:
+ body_len = int(self.headers.get("Content-length", 0))
+ except ValueError:
+ body_len = 0
+ if body_len and rfile:
+ self.body = rfile.read(body_len)
+ else:
+ self.body = None
+
+
+class RequestHandler(SimpleHTTPRequestHandler):
+ docroot = os.getcwd() # current working directory at time of import
+ proxy_host_dirs = False
+ request_log = []
+ log_requests = False
+ request = None
+
+ def __init__(self, *args, **kwargs):
+ SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
+ self.extensions_map[".svg"] = "image/svg+xml"
+
+ def _try_handler(self, method):
+ if self.log_requests:
+ self.request_log.append(
+ {"method": method, "path": self.request.path, "time": time.time()}
+ )
+
+ handlers = [
+ handler for handler in self.urlhandlers if handler["method"] == method
+ ]
+ for handler in handlers:
+ m = re.match(handler["path"], self.request.path)
+ if m:
+ (response_code, headerdict, data) = handler["function"](
+ self.request, *m.groups()
+ )
+ self.send_response(response_code)
+ for keyword, value in iteritems(headerdict):
+ self.send_header(keyword, value)
+ self.end_headers()
+ self.wfile.write(ensure_binary(data))
+
+ return True
+
+ return False
+
+ def _find_path(self):
+ """Find the on-disk path to serve this request from,
+ using self.path_mappings and self.docroot.
+ Return (url_path, disk_path)."""
+ path_components = list(filter(None, self.request.path.split("/")))
+ for prefix, disk_path in iteritems(self.path_mappings):
+ prefix_components = list(filter(None, prefix.split("/")))
+ if len(path_components) < len(prefix_components):
+ continue
+ if path_components[: len(prefix_components)] == prefix_components:
+ return ("/".join(path_components[len(prefix_components) :]), disk_path)
+ if self.docroot:
+ return self.request.path, self.docroot
+ return None
+
+ def parse_request(self):
+ retval = SimpleHTTPRequestHandler.parse_request(self)
+ self.request = Request(self.path, self.headers, self.rfile)
+ return retval
+
+ def do_GET(self):
+ if not self._try_handler("GET"):
+ res = self._find_path()
+ if res:
+ self.path, self.disk_root = res
+ # don't include query string and fragment, and prepend
+ # host directory if required.
+ if self.request.netloc and self.proxy_host_dirs:
+ self.path = "/" + self.request.netloc + self.path
+ SimpleHTTPRequestHandler.do_GET(self)
+ else:
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write(b"")
+
+ def do_POST(self):
+ # if we don't have a match, we always fall through to 404 (this may
+ # not be "technically" correct if we have a local file at the same
+ # path as the resource but... meh)
+ if not self._try_handler("POST"):
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write(b"")
+
+ def do_DEL(self):
+ # if we don't have a match, we always fall through to 404 (this may
+ # not be "technically" correct if we have a local file at the same
+ # path as the resource but... meh)
+ if not self._try_handler("DEL"):
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write(b"")
+
+ def translate_path(self, path):
+ # this is taken from SimpleHTTPRequestHandler.translate_path(),
+ # except we serve from self.docroot instead of os.getcwd(), and
+ # parse_request()/do_GET() have already stripped the query string and
+ # fragment and mangled the path for proxying, if required.
+ path = posixpath.normpath(unquote(self.path))
+ words = path.split("/")
+ words = list(filter(None, words))
+ path = self.disk_root
+ for word in words:
+ drive, word = os.path.splitdrive(word)
+ head, word = os.path.split(word)
+ if word in (os.curdir, os.pardir):
+ continue
+ path = os.path.join(path, word)
+ return path
+
+ # I found on my local network that calls to this were timing out;
+ # I believe all of these calls are from log_message.
+ def address_string(self):
+ return "a.b.c.d"
+
+ # This produces a LOT of noise
+ def log_message(self, format, *args):
+ pass
+
+
+class MozHttpd(object):
+ """
+ :param host: Host from which to serve (default 127.0.0.1)
+ :param port: Port from which to serve (default 8888)
+ :param docroot: Server root (default os.getcwd())
+ :param urlhandlers: Handlers that specify behavior for requests matching a given method and path (default None)
+ :param path_mappings: A dict mapping URL prefixes to additional on-disk paths.
+ :param proxy_host_dirs: Toggle proxy behavior (default False)
+ :param log_requests: Toggle logging behavior (default False)
+
+ Very basic HTTP server class. Takes a docroot (path on the filesystem)
+ and a set of urlhandler dictionaries of the form:
+
+ ::
+
+ {
+ 'method': HTTP method (string): GET, POST, or DEL,
+ 'path': PATH_INFO (regular expression string),
+ 'function': function of form fn(request, arg1, arg2, ...)
+ }
+
+ and serves HTTP. For each request, MozHttpd will either return a file
+ off the docroot, or dispatch to a handler function (if both path and
+ method match).
+
+ Note that either docroot or urlhandlers may be None (in which case no
+ local files or handlers, respectively, will be used). If docroot,
+ urlhandlers, and path_mappings are all None, MozHttpd will default to
+ serving the current working directory.
+
+ MozHttpd also handles proxy requests (i.e. with a full URI on the request
+ line). By default files are served from docroot according to the request
+ URI's path component, but if proxy_host_dirs is True, files are served
+ from <self.docroot>/<host>/.
+
+ For example, the request "GET http://foo.bar/dir/file.html" would
+ (assuming no handlers match) serve <docroot>/dir/file.html if
+ proxy_host_dirs is False, or <docroot>/foo.bar/dir/file.html if it is
+ True.
+ """
+
+ def __init__(
+ self,
+ host="127.0.0.1",
+ port=0,
+ docroot=None,
+ urlhandlers=None,
+ path_mappings=None,
+ proxy_host_dirs=False,
+ log_requests=False,
+ ):
+ self.host = host
+ self.port = int(port)
+ self.docroot = docroot
+ if not (urlhandlers or docroot or path_mappings):
+ self.docroot = os.getcwd()
+ self.proxy_host_dirs = proxy_host_dirs
+ self.httpd = None
+ self.urlhandlers = urlhandlers or []
+ self.path_mappings = path_mappings or {}
+ self.log_requests = log_requests
+ self.request_log = []
+
+ class RequestHandlerInstance(RequestHandler):
+ docroot = self.docroot
+ urlhandlers = self.urlhandlers
+ path_mappings = self.path_mappings
+ proxy_host_dirs = self.proxy_host_dirs
+ request_log = self.request_log
+ log_requests = self.log_requests
+
+ self.handler_class = RequestHandlerInstance
+
+ def start(self, block=False):
+ """
+ Starts the server.
+
+ If `block` is True, the call will not return. If `block` is False, the
+ server will be started on a separate thread that can be terminated by
+ a call to stop().
+ """
+ self.httpd = EasyServer((self.host, self.port), self.handler_class)
+ if block:
+ self.httpd.serve_forever()
+ else:
+ self.server = threading.Thread(target=self.httpd.serve_forever)
+ self.server.daemon = True  # don't hang on exit
+ self.server.start()
+
+ def stop(self):
+ """
+ Stops the server.
+
+ If the server is not running, this method has no effect.
+ """
+ if self.httpd:
+ # FIXME: There is no shutdown() method in Python 2.4...
+ try:
+ self.httpd.shutdown()
+ except AttributeError:
+ pass
+ self.httpd = None
+
+ def get_url(self, path="/"):
+ """
+ Returns a URL that can be used for accessing the server (e.g. http://192.168.1.3:4321/)
+
+ :param path: Path to append to URL (e.g. if path were /foobar.html you would get a URL like
+ http://192.168.1.3:4321/foobar.html). Default is `/`.
+ """
+ if not self.httpd:
+ return None
+
+ return "http://%s:%s%s" % (self.host, self.httpd.server_port, path)
+
+ __del__ = stop
+
+
+def main(args=sys.argv[1:]):
+ # parse command line options
+ parser = ArgumentParser(
+ description="Basic python webserver.",
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument(
+ "-p",
+ "--port",
+ dest="port",
+ type=int,
+ default=8888,
+ help="port to run the server on",
+ )
+ parser.add_argument(
+ "-H", "--host", dest="host", default="127.0.0.1", help="host address"
+ )
+ parser.add_argument(
+ "-i",
+ "--external-ip",
+ action="store_true",
+ dest="external_ip",
+ default=False,
+ help="find and use external ip for host",
+ )
+ parser.add_argument(
+ "-d",
+ "--docroot",
+ dest="docroot",
+ default=os.getcwd(),
+ help="directory to serve files from",
+ )
+ args = parser.parse_args(args)
+
+ if args.external_ip:
+ host = moznetwork.get_lan_ip()
+ else:
+ host = args.host
+
+ # create the server
+ server = MozHttpd(host=host, port=args.port, docroot=args.docroot)
+
+ print("Serving '%s' at %s:%s" % (server.docroot, server.host, server.port))
+ server.start(block=True)
+
+
+if __name__ == "__main__":
+ main()
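
To complement the MozHttpd class docstring above, here is a minimal sketch
(not part of the patch) combining docroot, path_mappings, and a URL handler;
the directory names, the /api/echo endpoint, and the echo_query handler are
illustrative assumptions:

::

    import mozhttpd

    @mozhttpd.handlers.json_response
    def echo_query(request):  # hypothetical handler
        return (200, {"query": request.query})

    httpd = mozhttpd.MozHttpd(
        port=0,                                    # 0 lets the OS pick a free port
        docroot=".",                               # serve files from the current directory
        path_mappings={"/static": "/tmp/static"},  # hypothetical extra on-disk path
        urlhandlers=[
            {"method": "GET", "path": "/api/echo/?", "function": echo_query},
        ],
    )
    httpd.start(block=False)
    print(httpd.get_url("/api/echo/"))     # e.g. http://127.0.0.1:<port>/api/echo/
    print(httpd.get_url("/static/x.txt"))  # would be served from /tmp/static/x.txt
    httpd.stop()
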
diff --git a/testing/mozbase/mozhttpd/setup.py b/testing/mozbase/mozhttpd/setup.py
new file mode 100644
index 0000000000..4d4f689113
--- /dev/null
+++ b/testing/mozbase/mozhttpd/setup.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = "0.7.1"
+deps = ["moznetwork >= 0.24", "mozinfo >= 1.0.0", "six >= 1.13.0"]
+
+setup(
+ name="mozhttpd",
+ version=PACKAGE_VERSION,
+ description="Python webserver intended for use with Mozilla testing",
+ long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html",
+ classifiers=[
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 2 :: Only",
+ ],
+ # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords="mozilla",
+ author="Mozilla Automation and Testing Team",
+ author_email="tools@lists.mozilla.org",
+ url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase",
+ license="MPL",
+ packages=["mozhttpd"],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozhttpd = mozhttpd:main
+ """,
+)
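
The console_scripts entry point above installs a ``mozhttpd`` command that
simply invokes mozhttpd.main(). A rough sketch of the equivalence (the port
and docroot values are arbitrary examples):

::

    # Shell:  mozhttpd --port 8888 --docroot .
    # is roughly equivalent to:
    import mozhttpd
    mozhttpd.main(["--port", "8888", "--docroot", "."])  # serves until interrupted
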
diff --git a/testing/mozbase/mozhttpd/tests/api.py b/testing/mozbase/mozhttpd/tests/api.py
new file mode 100644
index 0000000000..c2fce58be9
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/api.py
@@ -0,0 +1,381 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import collections
+import json
+import os
+
+import mozhttpd
+import mozunit
+import pytest
+from six import ensure_binary, ensure_str
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import (
+ HTTPHandler,
+ ProxyHandler,
+ Request,
+ build_opener,
+ install_opener,
+ urlopen,
+)
+
+
+def httpd_url(httpd, path, querystr=None):
+ """Return the URL to a started MozHttpd server for the given info."""
+
+ url = "http://127.0.0.1:{port}{path}".format(
+ port=httpd.httpd.server_port,
+ path=path,
+ )
+
+ if querystr is not None:
+ url = "{url}?{querystr}".format(
+ url=url,
+ querystr=querystr,
+ )
+
+ return url
+
+
+@pytest.fixture(name="num_requests")
+def fixture_num_requests():
+ """Return a defaultdict to count requests to HTTP handlers."""
+ return collections.defaultdict(int)
+
+
+@pytest.fixture(name="try_get")
+def fixture_try_get(num_requests):
+ """Return a function to try GET requests to the server."""
+
+ def try_get(httpd, querystr):
+ """Try GET requests to the server."""
+
+ num_requests["get_handler"] = 0
+
+ f = urlopen(httpd_url(httpd, "/api/resource/1", querystr))
+
+ assert f.getcode() == 200
+ assert json.loads(f.read()) == {"called": 1, "id": "1", "query": querystr}
+ assert num_requests["get_handler"] == 1
+
+ return try_get
+
+
+@pytest.fixture(name="try_post")
+def fixture_try_post(num_requests):
+ """Return a function to try POST calls to the server."""
+
+ def try_post(httpd, querystr):
+ """Try POST calls to the server."""
+
+ num_requests["post_handler"] = 0
+
+ postdata = {"hamburgers": "1234"}
+
+ f = urlopen(
+ httpd_url(httpd, "/api/resource/", querystr),
+ data=ensure_binary(json.dumps(postdata)),
+ )
+
+ assert f.getcode() == 201
+ assert json.loads(f.read()) == {
+ "called": 1,
+ "data": postdata,
+ "query": querystr,
+ }
+ assert num_requests["post_handler"] == 1
+
+ return try_post
+
+
+@pytest.fixture(name="try_del")
+def fixture_try_del(num_requests):
+ """Return a function to try DEL calls to the server."""
+
+ def try_del(httpd, querystr):
+ """Try DEL calls to the server."""
+
+ num_requests["del_handler"] = 0
+
+ opener = build_opener(HTTPHandler)
+ request = Request(httpd_url(httpd, "/api/resource/1", querystr))
+ request.get_method = lambda: "DEL"
+ f = opener.open(request)
+
+ assert f.getcode() == 200
+ assert json.loads(f.read()) == {"called": 1, "id": "1", "query": querystr}
+ assert num_requests["del_handler"] == 1
+
+ return try_del
+
+
+@pytest.fixture(name="httpd_no_urlhandlers")
+def fixture_httpd_no_urlhandlers():
+ """Yields a started MozHttpd server with no URL handlers."""
+ httpd = mozhttpd.MozHttpd(port=0)
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+
+
+@pytest.fixture(name="httpd_with_docroot")
+def fixture_httpd_with_docroot(num_requests):
+ """Yields a started MozHttpd server with docroot set."""
+
+ @mozhttpd.handlers.json_response
+ def get_handler(request, objid):
+ """Handler for HTTP GET requests."""
+
+ num_requests["get_handler"] += 1
+
+ return (
+ 200,
+ {
+ "called": num_requests["get_handler"],
+ "id": objid,
+ "query": request.query,
+ },
+ )
+
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ docroot=os.path.dirname(os.path.abspath(__file__)),
+ urlhandlers=[
+ {
+ "method": "GET",
+ "path": "/api/resource/([^/]+)/?",
+ "function": get_handler,
+ }
+ ],
+ )
+
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+
+
+@pytest.fixture(name="httpd")
+def fixture_httpd(num_requests):
+ """Yield a started MozHttpd server."""
+
+ @mozhttpd.handlers.json_response
+ def get_handler(request, objid):
+ """Handler for HTTP GET requests."""
+
+ num_requests["get_handler"] += 1
+
+ return (
+ 200,
+ {
+ "called": num_requests["get_handler"],
+ "id": objid,
+ "query": request.query,
+ },
+ )
+
+ @mozhttpd.handlers.json_response
+ def post_handler(request):
+ """Handler for HTTP POST requests."""
+
+ num_requests["post_handler"] += 1
+
+ return (
+ 201,
+ {
+ "called": num_requests["post_handler"],
+ "data": json.loads(request.body),
+ "query": request.query,
+ },
+ )
+
+ @mozhttpd.handlers.json_response
+ def del_handler(request, objid):
+ """Handler for HTTP DEL requests."""
+
+ num_requests["del_handler"] += 1
+
+ return (
+ 200,
+ {
+ "called": num_requests["del_handler"],
+ "id": objid,
+ "query": request.query,
+ },
+ )
+
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ urlhandlers=[
+ {
+ "method": "GET",
+ "path": "/api/resource/([^/]+)/?",
+ "function": get_handler,
+ },
+ {
+ "method": "POST",
+ "path": "/api/resource/?",
+ "function": post_handler,
+ },
+ {
+ "method": "DEL",
+ "path": "/api/resource/([^/]+)/?",
+ "function": del_handler,
+ },
+ ],
+ )
+
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+
+
+def test_api(httpd, try_get, try_post, try_del):
+ # GET requests
+ try_get(httpd, "")
+ try_get(httpd, "?foo=bar")
+
+ # POST requests
+ try_post(httpd, "")
+ try_post(httpd, "?foo=bar")
+
+ # DEL requests
+ try_del(httpd, "")
+ try_del(httpd, "?foo=bar")
+
+ # GET: By default we don't serve any files if we just define an API
+ with pytest.raises(HTTPError) as exc_info:
+ urlopen(httpd_url(httpd, "/"))
+
+ assert exc_info.value.code == 404
+
+
+def test_nonexistent_resources(httpd_no_urlhandlers):
+ # GET: Return 404 for non-existent endpoint
+ with pytest.raises(HTTPError) as excinfo:
+ urlopen(httpd_url(httpd_no_urlhandlers, "/api/resource/"))
+ assert excinfo.value.code == 404
+
+ # POST: POST should also return 404
+ with pytest.raises(HTTPError) as excinfo:
+ urlopen(
+ httpd_url(httpd_no_urlhandlers, "/api/resource/"),
+ data=ensure_binary(json.dumps({})),
+ )
+ assert excinfo.value.code == 404
+
+ # DEL: DEL should also return 404
+ opener = build_opener(HTTPHandler)
+ request = Request(httpd_url(httpd_no_urlhandlers, "/api/resource/"))
+ request.get_method = lambda: "DEL"
+
+ with pytest.raises(HTTPError) as excinfo:
+ opener.open(request)
+ assert excinfo.value.code == 404
+
+
+def test_api_with_docroot(httpd_with_docroot, try_get):
+ f = urlopen(httpd_url(httpd_with_docroot, "/"))
+ assert f.getcode() == 200
+ assert "Directory listing for" in ensure_str(f.read())
+
+ # Make sure API methods still work
+ try_get(httpd_with_docroot, "")
+ try_get(httpd_with_docroot, "?foo=bar")
+
+
+def index_contents(host):
+ """Return the expected index contents for the given host."""
+ return "{host} index".format(host=host)
+
+
+@pytest.fixture(name="hosts")
+def fixture_hosts():
+ """Returns a tuple of hosts."""
+ return ("mozilla.com", "mozilla.org")
+
+
+@pytest.fixture(name="docroot")
+def fixture_docroot(tmpdir):
+ """Returns a path object to a temporary docroot directory."""
+ docroot = tmpdir.mkdir("docroot")
+ index_file = docroot.join("index.html")
+ index_file.write(index_contents("*"))
+
+ yield docroot
+
+ docroot.remove()
+
+
+@pytest.fixture(name="httpd_with_proxy_handler")
+def fixture_httpd_with_proxy_handler(docroot):
+ """Yields a started MozHttpd server for the proxy test."""
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=str(docroot))
+ httpd.start(block=False)
+
+ port = httpd.httpd.server_port
+ proxy_support = ProxyHandler(
+ {
+ "http": "http://127.0.0.1:{port:d}".format(port=port),
+ }
+ )
+ install_opener(build_opener(proxy_support))
+
+ yield httpd
+
+ httpd.stop()
+
+ # Reset proxy opener in case it changed
+ install_opener(None)
+
+
+def test_proxy(httpd_with_proxy_handler, hosts):
+ for host in hosts:
+ f = urlopen("http://{host}/".format(host=host))
+ assert f.getcode() == 200
+ assert f.read() == ensure_binary(index_contents("*"))
+
+
+@pytest.fixture(name="httpd_with_proxy_host_dirs")
+def fixture_httpd_with_proxy_host_dirs(docroot, hosts):
+ for host in hosts:
+ index_file = docroot.mkdir(host).join("index.html")
+ index_file.write(index_contents(host))
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=str(docroot), proxy_host_dirs=True)
+
+ httpd.start(block=False)
+
+ port = httpd.httpd.server_port
+ proxy_support = ProxyHandler(
+ {"http": "http://127.0.0.1:{port:d}".format(port=port)}
+ )
+ install_opener(build_opener(proxy_support))
+
+ yield httpd
+
+ httpd.stop()
+
+ # Reset proxy opener in case it changed
+ install_opener(None)
+
+
+def test_proxy_separate_directories(httpd_with_proxy_host_dirs, hosts):
+ for host in hosts:
+ f = urlopen("http://{host}/".format(host=host))
+ assert f.getcode() == 200
+ assert f.read() == ensure_binary(index_contents(host))
+
+ unproxied_host = "notmozilla.org"
+
+ with pytest.raises(HTTPError) as excinfo:
+ urlopen("http://{host}/".format(host=unproxied_host))
+
+ assert excinfo.value.code == 404
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/testing/mozbase/mozhttpd/tests/baseurl.py b/testing/mozbase/mozhttpd/tests/baseurl.py
new file mode 100644
index 0000000000..4bf923a8d7
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/baseurl.py
@@ -0,0 +1,33 @@
+import mozhttpd
+import mozunit
+import pytest
+
+
+@pytest.fixture(name="httpd")
+def fixture_httpd():
+ """Yields a started MozHttpd server."""
+ httpd = mozhttpd.MozHttpd(port=0)
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+
+
+def test_base_url(httpd):
+ port = httpd.httpd.server_port
+
+ want = "http://127.0.0.1:{}/".format(port)
+ got = httpd.get_url()
+ assert got == want
+
+ want = "http://127.0.0.1:{}/cheezburgers.html".format(port)
+ got = httpd.get_url(path="/cheezburgers.html")
+ assert got == want
+
+
+def test_base_url_when_not_started():
+ httpd = mozhttpd.MozHttpd(port=0)
+ assert httpd.get_url() is None
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/testing/mozbase/mozhttpd/tests/basic.py b/testing/mozbase/mozhttpd/tests/basic.py
new file mode 100644
index 0000000000..a9dcf109e0
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/basic.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+import os
+
+import mozfile
+import mozhttpd
+import mozunit
+import pytest
+
+
+@pytest.fixture(name="files")
+def fixture_files():
+ """Return a list of tuples with name and binary_string."""
+ return [("small", os.urandom(128)), ("large", os.urandom(16384))]
+
+
+@pytest.fixture(name="docroot")
+def fixture_docroot(tmpdir, files):
+ """Yield a str path to docroot."""
+ docroot = tmpdir.mkdir("docroot")
+
+ for name, binary_string in files:
+ filename = docroot.join(name)
+ filename.write_binary(binary_string)
+
+ yield str(docroot)
+
+ docroot.remove()
+
+
+@pytest.fixture(name="httpd_url")
+def fixture_httpd_url(docroot):
+ """Yield the URL to a started MozHttpd server."""
+ httpd = mozhttpd.MozHttpd(docroot=docroot)
+ httpd.start()
+ yield httpd.get_url()
+ httpd.stop()
+
+
+def test_basic(httpd_url, files):
+ """Test that mozhttpd can serve files."""
+
+ # Retrieve each file and check that its contents match
+ for name, binary_string in files:
+ retrieved_content = mozfile.load(httpd_url + name).read()
+ assert retrieved_content == binary_string
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/testing/mozbase/mozhttpd/tests/filelisting.py b/testing/mozbase/mozhttpd/tests/filelisting.py
new file mode 100644
index 0000000000..195059a261
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/filelisting.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+
+import mozhttpd
+import mozunit
+import pytest
+from six import ensure_str
+from six.moves.urllib.request import urlopen
+
+
+@pytest.fixture(name="docroot")
+def fixture_docroot():
+ """Returns a docroot path."""
+ return os.path.dirname(os.path.abspath(__file__))
+
+
+@pytest.fixture(name="httpd")
+def fixture_httpd(docroot):
+ """Yields a started MozHttpd server."""
+ httpd = mozhttpd.MozHttpd(port=0, docroot=docroot)
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+
+
+@pytest.mark.parametrize(
+ "path",
+ [
+ pytest.param("", id="no_params"),
+ pytest.param("?foo=bar&fleem=&foo=fleem", id="with_params"),
+ ],
+)
+def test_filelist(httpd, docroot, path):
+ f = urlopen(
+ "http://{host}:{port}/{path}".format(
+ host="127.0.0.1", port=httpd.httpd.server_port, path=path
+ )
+ )
+
+ filelist = os.listdir(docroot)
+
+ pattern = r"\<[a-zA-Z0-9\-\_\.\=\"'\/\\\%\!\@\#\$\^\&\*\(\) :;]*\>"
+
+ for line in f.readlines():
+ subbed_line = re.sub(pattern, "", ensure_str(line).strip("\n"))
+ webline = subbed_line.strip("/").strip().strip("@")
+
+ if (
+ webline
+ and not webline.startswith("Directory listing for")
+ and not webline.startswith("<!DOCTYPE")
+ ):
+ msg = "File {} in dir listing corresponds to a file".format(webline)
+ assert webline in filelist, msg
+ filelist.remove(webline)
+
+ msg = "Should have no items in filelist ({}) unaccounted for".format(filelist)
+ assert len(filelist) == 0, msg
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/testing/mozbase/mozhttpd/tests/manifest.toml b/testing/mozbase/mozhttpd/tests/manifest.toml
new file mode 100644
index 0000000000..59c9be5ed0
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/manifest.toml
@@ -0,0 +1,16 @@
+[DEFAULT]
+subsuite = "mozbase"
+
+["api.py"]
+skip-if = ["true"]
+
+["baseurl.py"]
+
+["basic.py"]
+
+["filelisting.py"]
+skip-if = ["true"]
+
+["paths.py"]
+
+["requestlog.py"]
diff --git a/testing/mozbase/mozhttpd/tests/paths.py b/testing/mozbase/mozhttpd/tests/paths.py
new file mode 100644
index 0000000000..6d4c2ce953
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/paths.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import mozhttpd
+import mozunit
+import pytest
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
+
+
+def try_get(url, expected_contents):
+ f = urlopen(url)
+ assert f.getcode() == 200
+ assert f.read() == expected_contents
+
+
+def try_get_expect_404(url):
+ with pytest.raises(HTTPError) as excinfo:
+ urlopen(url)
+ assert excinfo.value.code == 404
+
+
+@pytest.fixture(name="httpd_basic")
+def fixture_httpd_basic(tmpdir):
+ d1 = tmpdir.mkdir("d1")
+ d1.join("test1.txt").write("test 1 contents")
+
+ d2 = tmpdir.mkdir("d2")
+ d2.join("test2.txt").write("test 2 contents")
+
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ docroot=str(d1),
+ path_mappings={"/files": str(d2)},
+ )
+ httpd.start(block=False)
+
+ yield httpd
+
+ httpd.stop()
+ d1.remove()
+ d2.remove()
+
+
+def test_basic(httpd_basic):
+ """Test that requests to docroot and a path mapping work as expected."""
+ try_get(httpd_basic.get_url("/test1.txt"), b"test 1 contents")
+ try_get(httpd_basic.get_url("/files/test2.txt"), b"test 2 contents")
+ try_get_expect_404(httpd_basic.get_url("/files/test2_nope.txt"))
+
+
+@pytest.fixture(name="httpd_substring_mappings")
+def fixture_httpd_substring_mappings(tmpdir):
+ d1 = tmpdir.mkdir("d1")
+ d1.join("test1.txt").write("test 1 contents")
+
+ d2 = tmpdir.mkdir("d2")
+ d2.join("test2.txt").write("test 2 contents")
+
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ path_mappings={"/abcxyz": str(d1), "/abc": str(d2)},
+ )
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+ d1.remove()
+ d2.remove()
+
+
+def test_substring_mappings(httpd_substring_mappings):
+ httpd = httpd_substring_mappings
+ try_get(httpd.get_url("/abcxyz/test1.txt"), b"test 1 contents")
+ try_get(httpd.get_url("/abc/test2.txt"), b"test 2 contents")
+
+
+@pytest.fixture(name="httpd_multipart_path_mapping")
+def fixture_httpd_multipart_path_mapping(tmpdir):
+ d1 = tmpdir.mkdir("d1")
+ d1.join("test1.txt").write("test 1 contents")
+
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ path_mappings={"/abc/def/ghi": str(d1)},
+ )
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+ d1.remove()
+
+
+def test_multipart_path_mapping(httpd_multipart_path_mapping):
+ """Test that a path mapping with multiple directories works."""
+ httpd = httpd_multipart_path_mapping
+ try_get(httpd.get_url("/abc/def/ghi/test1.txt"), b"test 1 contents")
+ try_get_expect_404(httpd.get_url("/abc/test1.txt"))
+ try_get_expect_404(httpd.get_url("/abc/def/test1.txt"))
+
+
+@pytest.fixture(name="httpd_no_docroot")
+def fixture_httpd_no_docroot(tmpdir):
+ d1 = tmpdir.mkdir("d1")
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ path_mappings={"/foo": str(d1)},
+ )
+ httpd.start(block=False)
+ yield httpd
+ httpd.stop()
+ d1.remove()
+
+
+def test_no_docroot(httpd_no_docroot):
+ """Test that path mappings with no docroot work."""
+ try_get_expect_404(httpd_no_docroot.get_url())
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/testing/mozbase/mozhttpd/tests/requestlog.py b/testing/mozbase/mozhttpd/tests/requestlog.py
new file mode 100644
index 0000000000..8e7b065f3d
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/requestlog.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozhttpd
+import mozunit
+import pytest
+from six.moves.urllib.request import urlopen
+
+
+def log_requests(enabled):
+ """Decorator to change the log_requests parameter for MozHttpd."""
+ param_id = "enabled" if enabled else "disabled"
+ return pytest.mark.parametrize("log_requests", [enabled], ids=[param_id])
+
+
+@pytest.fixture(name="docroot")
+def fixture_docroot():
+ """Return a docroot path."""
+ return os.path.dirname(os.path.abspath(__file__))
+
+
+@pytest.fixture(name="request_log")
+def fixture_request_log(docroot, log_requests):
+ """Yields the request log of a started MozHttpd server."""
+ httpd = mozhttpd.MozHttpd(
+ port=0,
+ docroot=docroot,
+ log_requests=log_requests,
+ )
+ httpd.start(block=False)
+
+ url = "http://{host}:{port}/".format(
+ host="127.0.0.1",
+ port=httpd.httpd.server_port,
+ )
+ f = urlopen(url)
+ f.read()
+
+ yield httpd.request_log
+
+ httpd.stop()
+
+
+@log_requests(True)
+def test_logging_enabled(request_log):
+ assert len(request_log) == 1
+ log_entry = request_log[0]
+ assert log_entry["method"] == "GET"
+ assert log_entry["path"] == "/"
+ assert type(log_entry["time"]) == float
+
+
+@log_requests(False)
+def test_logging_disabled(request_log):
+ assert len(request_log) == 0
+
+
+if __name__ == "__main__":
+ mozunit.main()