From 9e9d75224939029e63760bddc02d084846f49fe0 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Sat, 13 Apr 2024 14:18:06 +0200 Subject: Adding debian version 2.9.5-1. Signed-off-by: Daniel Baumann --- debian/changelog | 2624 ++++++++++++++++++++ debian/clean | 3 + debian/control | 60 + debian/copyright | 933 +++++++ debian/dconv/LICENSE | 202 ++ debian/dconv/NOTICE | 13 + debian/dconv/README.md | 21 + debian/dconv/css/check.png | Bin 0 -> 531 bytes debian/dconv/css/cross.png | Bin 0 -> 640 bytes debian/dconv/css/page.css | 223 ++ debian/dconv/haproxy-dconv.py | 534 ++++ debian/dconv/img/logo-med.png | Bin 0 -> 3522 bytes debian/dconv/js/typeahead.bundle.js | 2451 ++++++++++++++++++ debian/dconv/parser/__init__.py | 81 + debian/dconv/parser/arguments.py | 132 + debian/dconv/parser/example.py | 77 + debian/dconv/parser/keyword.py | 142 ++ debian/dconv/parser/seealso.py | 32 + debian/dconv/parser/table.py | 244 ++ debian/dconv/parser/underline.py | 16 + debian/dconv/templates/parser/arguments.tpl | 9 + debian/dconv/templates/parser/example.tpl | 12 + debian/dconv/templates/parser/example/comment.tpl | 1 + debian/dconv/templates/parser/seealso.tpl | 1 + debian/dconv/templates/parser/table.tpl | 11 + debian/dconv/templates/parser/table/header.tpl | 6 + debian/dconv/templates/parser/table/row.tpl | 36 + debian/dconv/templates/parser/underline.tpl | 1 + debian/dconv/templates/summary.html | 43 + debian/dconv/templates/template.html | 238 ++ debian/dconv/tools/generate-docs.sh | 177 ++ debian/gbp.conf | 3 + debian/halog.1 | 108 + debian/haproxy-doc.doc-base.haproxy | 9 + debian/haproxy-doc.doc-base.haproxy-lua | 9 + debian/haproxy-doc.docs | 1 + debian/haproxy-doc.install | 7 + debian/haproxy-doc.links | 6 + debian/haproxy-doc.maintscript | 2 + debian/haproxy.README.Debian | 29 + debian/haproxy.cfg | 34 + debian/haproxy.default | 10 + debian/haproxy.dirs | 4 + debian/haproxy.docs | 9 + debian/haproxy.examples | 1 + debian/haproxy.init | 197 ++ debian/haproxy.install | 3 
+ debian/haproxy.lintian-overrides | 2 + debian/haproxy.maintscript | 1 + debian/haproxy.manpages | 3 + debian/haproxy.postinst | 22 + debian/haproxy.postrm | 16 + debian/haproxy.tmpfile | 1 + debian/haproxy.vim | 2 + debian/logrotate.conf | 11 + debian/patches/debianize-dconv.patch | 170 ++ .../haproxy.service-add-documentation.patch | 23 + ...e-make-systemd-bind-dev-log-inside-chroot.patch | 21 + .../haproxy.service-start-after-syslog.patch | 27 + debian/patches/reproducible.patch | 13 + debian/patches/series | 7 + debian/rsyslog.conf | 9 + debian/rules | 98 + debian/salsa-ci.yml | 4 + debian/source/format | 1 + debian/source/include-binaries | 3 + debian/tests/cli | 7 + debian/tests/control | 15 + debian/tests/proxy-localhost | 44 + debian/tests/proxy-ssl-pass-through | 59 + debian/tests/proxy-ssl-termination | 48 + debian/tests/utils | 58 + debian/vim-haproxy.install | 3 + debian/vim-haproxy.yaml | 5 + debian/watch | 2 + 75 files changed, 9430 insertions(+) create mode 100644 debian/changelog create mode 100644 debian/clean create mode 100644 debian/control create mode 100644 debian/copyright create mode 100644 debian/dconv/LICENSE create mode 100644 debian/dconv/NOTICE create mode 100644 debian/dconv/README.md create mode 100644 debian/dconv/css/check.png create mode 100644 debian/dconv/css/cross.png create mode 100644 debian/dconv/css/page.css create mode 100755 debian/dconv/haproxy-dconv.py create mode 100644 debian/dconv/img/logo-med.png create mode 100644 debian/dconv/js/typeahead.bundle.js create mode 100644 debian/dconv/parser/__init__.py create mode 100644 debian/dconv/parser/arguments.py create mode 100644 debian/dconv/parser/example.py create mode 100644 debian/dconv/parser/keyword.py create mode 100644 debian/dconv/parser/seealso.py create mode 100644 debian/dconv/parser/table.py create mode 100644 debian/dconv/parser/underline.py create mode 100644 debian/dconv/templates/parser/arguments.tpl create mode 100644 
debian/dconv/templates/parser/example.tpl create mode 100644 debian/dconv/templates/parser/example/comment.tpl create mode 100644 debian/dconv/templates/parser/seealso.tpl create mode 100644 debian/dconv/templates/parser/table.tpl create mode 100644 debian/dconv/templates/parser/table/header.tpl create mode 100644 debian/dconv/templates/parser/table/row.tpl create mode 100644 debian/dconv/templates/parser/underline.tpl create mode 100644 debian/dconv/templates/summary.html create mode 100644 debian/dconv/templates/template.html create mode 100755 debian/dconv/tools/generate-docs.sh create mode 100644 debian/gbp.conf create mode 100644 debian/halog.1 create mode 100644 debian/haproxy-doc.doc-base.haproxy create mode 100644 debian/haproxy-doc.doc-base.haproxy-lua create mode 100644 debian/haproxy-doc.docs create mode 100644 debian/haproxy-doc.install create mode 100644 debian/haproxy-doc.links create mode 100644 debian/haproxy-doc.maintscript create mode 100644 debian/haproxy.README.Debian create mode 100644 debian/haproxy.cfg create mode 100644 debian/haproxy.default create mode 100644 debian/haproxy.dirs create mode 100644 debian/haproxy.docs create mode 100644 debian/haproxy.examples create mode 100644 debian/haproxy.init create mode 100644 debian/haproxy.install create mode 100644 debian/haproxy.lintian-overrides create mode 100644 debian/haproxy.maintscript create mode 100644 debian/haproxy.manpages create mode 100644 debian/haproxy.postinst create mode 100644 debian/haproxy.postrm create mode 100644 debian/haproxy.tmpfile create mode 100644 debian/haproxy.vim create mode 100644 debian/logrotate.conf create mode 100644 debian/patches/debianize-dconv.patch create mode 100644 debian/patches/haproxy.service-add-documentation.patch create mode 100644 debian/patches/haproxy.service-make-systemd-bind-dev-log-inside-chroot.patch create mode 100644 debian/patches/haproxy.service-start-after-syslog.patch create mode 100644 debian/patches/reproducible.patch create mode 
100644 debian/patches/series create mode 100644 debian/rsyslog.conf create mode 100755 debian/rules create mode 100644 debian/salsa-ci.yml create mode 100644 debian/source/format create mode 100644 debian/source/include-binaries create mode 100644 debian/tests/cli create mode 100644 debian/tests/control create mode 100644 debian/tests/proxy-localhost create mode 100644 debian/tests/proxy-ssl-pass-through create mode 100644 debian/tests/proxy-ssl-termination create mode 100644 debian/tests/utils create mode 100644 debian/vim-haproxy.install create mode 100644 debian/vim-haproxy.yaml create mode 100644 debian/watch (limited to 'debian') diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000..6ccec7d --- /dev/null +++ b/debian/changelog @@ -0,0 +1,2624 @@ +haproxy (2.9.5-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 15 Feb 2024 22:21:25 +0100 + +haproxy (2.9.4-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 01 Feb 2024 22:39:57 +0100 + +haproxy (2.9.3-1) unstable; urgency=medium + + * New upstream release. + * Upload to unstable. Let bite the bullet! + + -- Vincent Bernat Thu, 18 Jan 2024 21:25:36 +0100 + +haproxy (2.9.2-1) experimental; urgency=medium + + * New upstream release. + * Upload back to experimental. + + -- Vincent Bernat Fri, 12 Jan 2024 20:55:52 +0100 + +haproxy (2.9.1-1) unstable; urgency=medium + + [ Chris Hofstaedtler ] + * d/rules: use dh_installsystemd to install .service files. + Closes: #1057943. + + [ Vincent Bernat ] + * New upstream release. + + -- Vincent Bernat Fri, 15 Dec 2023 15:33:29 +0100 + +haproxy (2.9.0-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 09 Dec 2023 15:33:05 +0100 + +haproxy (2.8.5-2) UNRELEASED; urgency=medium + + * d/rules: use dh_installsystemd to install .service files. + Closes: #1057943. 
+ + -- Chris Hofstaedtler Sun, 10 Dec 2023 20:15:00 +0100 + +haproxy (2.8.5-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 09 Dec 2023 16:14:43 +0100 + +haproxy (2.8.4-2) experimental; urgency=medium + + * d/rules: really link against jemalloc. + * d/rules: enable USE_QUIC (with USE_QUIC_OPENSSL_COMPAT). + + -- Vincent Bernat Wed, 22 Nov 2023 22:52:31 +0100 + +haproxy (2.8.4-1) experimental; urgency=medium + + * New upstream release. + * d/rules: link against jemalloc. + + -- Vincent Bernat Fri, 17 Nov 2023 19:53:48 +0100 + +haproxy (2.8.3-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 08 Sep 2023 20:25:39 +0200 + +haproxy (2.8.2-1) experimental; urgency=medium + + * New upstream release. Fix CVE-2023-40225. + + -- Vincent Bernat Sat, 12 Aug 2023 13:33:36 +0200 + +haproxy (2.8.1-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 03 Jul 2023 21:13:18 +0200 + +haproxy (2.8.0-1) experimental; urgency=medium + + * New upstream release. + * d/NEWS: remove + * d/lintian-overrides: override warning about obsolete lsb-base dependency + + -- Vincent Bernat Sat, 03 Jun 2023 00:11:07 +0200 + +haproxy (2.7.8-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 06 May 2023 15:08:44 +0200 + +haproxy (2.7.7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 27 Apr 2023 21:22:39 +0200 + +haproxy (2.7.6-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 31 Mar 2023 08:46:51 +0200 + +haproxy (2.7.5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 18 Mar 2023 14:00:02 +0100 + +haproxy (2.7.4-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 10 Mar 2023 21:18:59 +0100 + +haproxy (2.7.3-1) experimental; urgency=medium + + * New upstream release. 
+ + -- Vincent Bernat Wed, 15 Feb 2023 08:18:45 +0100 + +haproxy (2.7.2-2) experimental; urgency=medium + + * BUG/CRITICAL: http: properly reject empty http header field names + (CVE-2023-25725). + + -- Vincent Bernat Mon, 13 Feb 2023 19:37:55 +0100 + +haproxy (2.7.2-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 20 Jan 2023 22:15:56 +0100 + +haproxy (2.7.1-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 19 Dec 2022 21:11:23 +0100 + +haproxy (2.7.0-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 01 Dec 2022 17:25:51 +0100 + +haproxy (2.6.6-2) unstable; urgency=medium + + * Upload to unstable. + + -- Vincent Bernat Sat, 22 Oct 2022 10:19:03 +0200 + +haproxy (2.6.6-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 22 Sep 2022 20:22:23 +0200 + +haproxy (2.6.5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 03 Sep 2022 19:33:51 +0200 + +haproxy (2.6.4-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 22 Aug 2022 19:01:25 +0200 + +haproxy (2.6.3-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 19 Aug 2022 19:16:11 +0200 + +haproxy (2.6.2-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 22 Jul 2022 18:21:43 +0200 + +haproxy (2.6.1-1) experimental; urgency=medium + + [ Lucas Kanashiro ] + * d/t/utils: add helper functions to be re-used in tests + * d/t/proxy-localhost: refactor to use the check_index_file helper function + * d/t/proxy-ssl-termination: add test for the SSL termination proxy feature + * d/t/proxy-ssl-pass-through: add test for the SSL Pass-Through proxy feature + + [ Vincent Bernat ] + * New upstream release. + + -- Vincent Bernat Wed, 22 Jun 2022 20:06:08 +0200 + +haproxy (2.6.0-1) experimental; urgency=medium + + * New upstream release. 
+ + -- Vincent Bernat Thu, 02 Jun 2022 08:49:38 +0200 + +haproxy (2.5.7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 14 May 2022 12:01:07 +0200 + +haproxy (2.5.6-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 26 Apr 2022 17:59:23 +0200 + +haproxy (2.5.5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 14 Mar 2022 19:26:46 +0100 + +haproxy (2.5.4-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 25 Feb 2022 17:39:11 +0100 + +haproxy (2.5.3-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 18 Feb 2022 20:22:25 +0100 + +haproxy (2.5.2-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Wed, 16 Feb 2022 19:09:04 +0100 + +haproxy (2.5.1-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 11 Jan 2022 19:23:50 +0100 + +haproxy (2.5.0-1) experimental; urgency=medium + + * New upstream release. + * d/patches: patch to make logging work without rsyslog with systemd + + -- Vincent Bernat Thu, 25 Nov 2021 21:20:30 +0100 + +haproxy (2.4.19-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 30 Sep 2022 09:07:13 +0200 + +haproxy (2.4.18-1) unstable; urgency=medium + + [ Lucas Kanashiro ] + * d/t/utils: add helper functions to be re-used in tests + * d/t/proxy-localhost: refactor to use the check_index_file helper function + * d/t/proxy-ssl-termination: add test for the SSL termination proxy feature + * d/t/proxy-ssl-pass-through: add test for the SSL Pass-Through proxy feature + + [ Vincent Bernat ] + * New upstream release. + + -- Vincent Bernat Wed, 27 Jul 2022 15:59:36 +0200 + +haproxy (2.4.17-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 14 May 2022 14:27:20 +0200 + +haproxy (2.4.16-1) unstable; urgency=medium + + * New upstream release. 
+ * d/install: do not install halog explicitely. + + -- Vincent Bernat Fri, 29 Apr 2022 17:42:42 +0200 + +haproxy (2.4.15-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 14 Mar 2022 20:17:04 +0100 + +haproxy (2.4.14-1) unstable; urgency=medium + + * New upstream release. + - Fix compilation with OpenSSL 3.0. Closes: #996423, #1006007. + + -- Vincent Bernat Fri, 25 Feb 2022 18:38:27 +0100 + +haproxy (2.4.13-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 17 Feb 2022 10:03:46 +0100 + +haproxy (2.4.12-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 11 Jan 2022 12:06:17 +0100 + +haproxy (2.4.11-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 07 Jan 2022 17:25:51 +0100 + +haproxy (2.4.10-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 23 Dec 2021 19:13:26 +0100 + +haproxy (2.4.9-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Wed, 24 Nov 2021 19:42:28 +0100 + +haproxy (2.4.8-3) unstable; urgency=medium + + * d/logrotate: only use rsyslog-rotate if present. Closes: #1000436. + + -- Vincent Bernat Wed, 24 Nov 2021 09:29:54 +0100 + +haproxy (2.4.8-2) unstable; urgency=medium + + * Non-maintainer upload. + * Enable OpenTracing support. + + -- Stephen Gelman Tue, 09 Nov 2021 23:06:46 -0600 + +haproxy (2.4.8-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 04 Nov 2021 08:36:56 +0100 + +haproxy (2.4.7-2) unstable; urgency=medium + + * Upload to unstable. + + -- Vincent Bernat Sat, 16 Oct 2021 20:43:13 +0200 + +haproxy (2.4.7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 07 Oct 2021 09:08:09 +0200 + +haproxy (2.4.4-1) experimental; urgency=medium + + * New upstream release. + * d/patches: remove patches applied upstream. 
+ + -- Vincent Bernat Wed, 08 Sep 2021 08:38:05 +0200 + +haproxy (2.4.3-2) experimental; urgency=high + + * d/patches: fix missing header name length check in HTX (CVE-2021-40346). + + -- Vincent Bernat Sat, 04 Sep 2021 11:56:31 +0200 + +haproxy (2.4.3-1) experimental; urgency=medium + + * New upstream release. + * d/patches: remove patches applied upstream. + * d/patches: h2: match absolute-path not path-absolute for :path. + + -- Vincent Bernat Sat, 21 Aug 2021 16:32:25 +0200 + +haproxy (2.4.2-2) experimental; urgency=medium + + * Fix HTTP request smuggling via HTTP/2 desync attacks. + + -- Vincent Bernat Fri, 13 Aug 2021 16:12:31 +0200 + +haproxy (2.4.2-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Wed, 07 Jul 2021 21:47:17 +0200 + +haproxy (2.4.1-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 17 Jun 2021 13:57:57 +0200 + +haproxy (2.4.0-1) experimental; urgency=medium + + * New upstream release. + * d/rules: switch to SLZ instead of zlib + * d/rules: update build for contrib → admin + * d/rules: remove use of USE_REGPARM (outdated) + * d/rules: remove hack around gcc_s + * d/copyright: update + + -- Vincent Bernat Tue, 18 May 2021 22:00:05 +0200 + +haproxy (2.3.10-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 24 Apr 2021 18:22:41 +0200 + +haproxy (2.3.9-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 30 Mar 2021 19:50:42 +0200 + +haproxy (2.3.8-1) experimental; urgency=medium + + * New upstream release. + * d/logrotate: reduce log retention to 7 days. Closes: #985441. + + -- Vincent Bernat Thu, 25 Mar 2021 18:17:18 +0100 + +haproxy (2.3.7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 16 Mar 2021 18:41:25 +0100 + +haproxy (2.3.6-1) experimental; urgency=medium + + * New upstream release. 
+ + -- Vincent Bernat Thu, 04 Mar 2021 13:57:49 +0100 + +haproxy (2.3.5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Sat, 06 Feb 2021 17:12:53 +0100 + +haproxy (2.3.4-1) experimental; urgency=medium + + * New upstream release: + - Revert "BUG/MINOR: dns: SRV records ignores duplicated AR records" + + -- Vincent Bernat Fri, 15 Jan 2021 14:13:28 +0100 + +haproxy (2.3.3-1) experimental; urgency=medium + + * d/tests: sleep before test to let Apache2 start. + Closes: #976997. + * New upstream release: + - BUG/MAJOR: ring: tcp forward on ring can break the reader counter. + - BUG/MAJOR: spoa/python: Fixing return None + - BUG/MEDIUM: local log format regression. Closes: #974977. + + -- Vincent Bernat Sat, 09 Jan 2021 15:18:10 +0100 + +haproxy (2.3.2-1) experimental; urgency=medium + + * New upstream release. + - BUG/MAJOR: connection: reset conn->owner when detaching from session + list + - BUG/MAJOR: filters: Always keep all offsets up to date during data + filtering + - BUG/MAJOR: peers: fix partial message decoding + - BUG/MAJOR: tcpcheck: Allocate input and output buffers from the buffer + pool + + -- Vincent Bernat Sat, 28 Nov 2020 20:25:34 +0100 + +haproxy (2.3.1-1) experimental; urgency=medium + + * New upstream release. + - BUG/MAJOR: spoe: Be sure to remove all references on a released spoe + applet + * d/patches: remove patches applied upstream. + + -- Vincent Bernat Sat, 14 Nov 2020 23:17:20 +0100 + +haproxy (2.3.0-1) experimental; urgency=medium + + * New upstream release. + * d/gbp, d/watch: prepare for 2.3.0 release + + -- Vincent Bernat Wed, 11 Nov 2020 16:30:10 +0100 + +haproxy (2.2.17-1) unstable; urgency=medium + + * New upstream release. + * d/patches: remove upstream-applied patch. + + -- Vincent Bernat Thu, 09 Sep 2021 19:42:08 +0200 + +haproxy (2.2.16-3) unstable; urgency=high + + * d/patches: fix missing header name length check in HTX (CVE-2021-40346). 
+ + -- Vincent Bernat Sat, 04 Sep 2021 16:14:51 +0200 + +haproxy (2.2.16-2) unstable; urgency=medium + + * d/patches: h2: match absolute-path not path-absolute for :path + + -- Vincent Bernat Sat, 21 Aug 2021 16:19:52 +0200 + +haproxy (2.2.16-1) unstable; urgency=high + + * New upstream release. + * Fix CVE-2021-39240, CVE-2021-39241, CVE-2021-39242. + * d/patches: remove upstream-applied patch. + + -- Vincent Bernat Thu, 19 Aug 2021 07:22:05 +0200 + +haproxy (2.2.15-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 16 Jul 2021 11:18:32 +0200 + +haproxy (2.2.14-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 29 Apr 2021 15:32:49 +0200 + +haproxy (2.2.13-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Fri, 02 Apr 2021 21:18:28 +0200 + +haproxy (2.2.12-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Wed, 31 Mar 2021 20:31:24 +0200 + +haproxy (2.2.11-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 18 Mar 2021 21:34:40 +0100 + +haproxy (2.2.10-1) UNRELEASED; urgency=medium + + * New upstream release. + + -- Vincent Bernat Thu, 04 Mar 2021 19:08:41 +0100 + +haproxy (2.2.9-2) unstable; urgency=medium + + * d/patches: fix agent-check regression putting down servers. + Closes: #988779. + + -- Vincent Bernat Thu, 27 May 2021 15:00:01 +0200 + +haproxy (2.2.9-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: connection: reset conn->owner when detaching from session + list + + -- Vincent Bernat Sat, 06 Feb 2021 18:52:20 +0100 + +haproxy (2.2.8-1) unstable; urgency=medium + + * New upstream release. + - Revert "BUG/MINOR: dns: SRV records ignores duplicated AR records" + + -- Vincent Bernat Thu, 14 Jan 2021 11:48:52 +0100 + +haproxy (2.2.7-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: ring: tcp forward on ring can break the reader counter. 
+ - BUG/MAJOR: spoa/python: Fixing return None + + -- Vincent Bernat Sat, 09 Jan 2021 15:31:08 +0100 + +haproxy (2.2.6-2) unstable; urgency=medium + + * d/tests: sleep before test to let Apache2 start. + Closes: #976997. + + -- Vincent Bernat Thu, 07 Jan 2021 07:56:14 +0100 + +haproxy (2.2.6-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: filters: Always keep all offsets up to date during data + filtering + - BUG/MAJOR: peers: fix partial message decoding + - BUG/MAJOR: spoe: Be sure to remove all references on a released spoe + applet + - BUG/MAJOR: tcpcheck: Allocate input and output buffers from the buffer + pool + * d/patches: remove patches applied upstream + + -- Vincent Bernat Mon, 30 Nov 2020 20:02:49 +0100 + +haproxy (2.2.5-2) unstable; urgency=medium + + * Upload to unstable. + + -- Vincent Bernat Wed, 11 Nov 2020 16:21:12 +0100 + +haproxy (2.2.5-1) experimental; urgency=medium + + * New upstream release. + - BUG/MAJOR: mux-h2: Don't try to send data if we know it is no longer + possible + * d/patches: warn if payload of an errorfile doesn't match the C-L + + -- Vincent Bernat Sun, 08 Nov 2020 19:12:02 +0100 + +haproxy (2.2.4-1) experimental; urgency=medium + + * New upstream release. + * d/patches: drop patch for ARM32 + + -- Vincent Bernat Fri, 02 Oct 2020 21:29:56 +0200 + +haproxy (2.2.3-2) experimental; urgency=medium + + * d/patches: add upstream patch to fix build on ARM32 + + -- Vincent Bernat Wed, 09 Sep 2020 19:38:52 +0200 + +haproxy (2.2.3-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: dns: disabled servers through SRV records never recover + + -- Vincent Bernat Tue, 08 Sep 2020 23:12:05 +0200 + +haproxy (2.2.2-1) experimental; urgency=medium + + * New upstream version. 
+ - BUG/MAJOR: dns: don't treat Authority records as an error + - BUG/MAJOR: dns: fix null pointer dereference in + snr_update_srv_status + + -- Vincent Bernat Sat, 01 Aug 2020 17:06:42 +0200 + +haproxy (2.2.1-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: tasks: don't requeue global tasks into the local + queue + - BUG/MAJOR: dns: Make the do-resolve action thread-safe + + -- Vincent Bernat Thu, 23 Jul 2020 13:39:14 +0200 + +haproxy (2.2.0-1) experimental; urgency=medium + + * New upstream version. + * Upload to experimental + * Update d/watch to look for 2.2 stable releases + * d/gbp.conf: set branch names for 2.2 + * d/patches: refresh patches + + -- Vincent Bernat Tue, 14 Jul 2020 16:53:23 +0200 + +haproxy (2.1.7-1) experimental; urgency=medium + + * New upstream version. + + -- Vincent Bernat Fri, 12 Jun 2020 07:50:48 +0200 + +haproxy (2.1.5-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: mux-fcgi: Stop sending loop if FCGI stream is blocked for + any reason + - Revert "BUG/MINOR: connection: always send address-less LOCAL PROXY + connections" + - Revert "BUG/MINOR: connection: make sure to correctly tag local + PROXY connections" + + -- Vincent Bernat Mon, 01 Jun 2020 08:52:56 +0200 + +haproxy (2.1.4-1) experimental; urgency=medium + + * New upstream version. + - BUG/CRITICAL: hpack: never index a header into the headroom after + wrapping + - BUG/MAJOR: http-ana: Always abort the request when a tarpit is + triggered + - BUG/MAJOR: list: fix invalid element address calculation + - BUG/MAJOR: proxy_protocol: Properly validate TLV lengths + * d/control: fix maintainer address. Closes: #955553. + + -- Vincent Bernat Sun, 12 Apr 2020 13:29:54 +0200 + +haproxy (2.1.3-3) experimental; urgency=medium + + * d/copryight: document OpenSSL exception. Closes: #951782. + * d/haproxy.cfg: use "ssl-min-ver" to set minimum version. + * d/patches: fix an overflow in HTTP/2 header handling. + Fix CVE-2020-11100. 
+ + -- Vincent Bernat Wed, 01 Apr 2020 21:18:57 +0200 + +haproxy (2.1.3-2) experimental; urgency=medium + + * d/dconv: use Python 3 to build the documentation. + Closes: #948296, #950435. + * d/dconv: replace cgi.escape by html.escape. Closes: #951416. + + -- Vincent Bernat Wed, 19 Feb 2020 07:53:53 +0100 + +haproxy (2.1.3-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: hashes: fix the signedness of the hash inputs + - BUG/MAJOR: memory: Don't forget to unlock the rwlock if the pool is + empty. + + -- Vincent Bernat Mon, 20 Jan 2020 06:53:23 +0100 + +haproxy (2.1.2-1) experimental; urgency=medium + + * New upstream version 2.1.2. + - BUG/MAJOR: task: add a new TASK_SHARED_WQ flag to fix foreign requeuing + * d/logrotate.conf: use rsyslog helper instead of SysV init script. + Closes: #946973. + + -- Vincent Bernat Fri, 20 Dec 2019 08:20:33 +0100 + +haproxy (2.1.1-1) experimental; urgency=medium + + * New upstream version 2.1.1. + - BUG/MAJOR: dns: add minimalist error processing on the Rx path + + -- Vincent Bernat Sat, 14 Dec 2019 11:20:32 +0100 + +haproxy (2.1.0-2) experimental; urgency=medium + + * Link against libatomic on riscv64 + + -- Apollon Oikonomopoulos Fri, 29 Nov 2019 14:03:49 +0200 + +haproxy (2.1.0-1) experimental; urgency=medium + + * New upstream version 2.1.0 + * Upload to experimental + * Update d/watch to look for 2.1 stable releases + * d/gbp.conf: set branch names for 2.1 + * Bump Standards-Version to 4.4.1; no changes needed + * Bump dh compat level to 12 + + B-D on debhelper-compat and remove debian/compat + + Override dh_installsystemd with the same args as dh_installinit + + Add ${misc:Pre-Depends} to haproxy's Pre-Depends + + -- Apollon Oikonomopoulos Wed, 27 Nov 2019 23:30:30 +0200 + +haproxy (2.0.19-1) unstable; urgency=medium + + * New upstream release. 
+ - BUG/MAJOR: mux-h2: Don't try to send data if we know it is no longer + possible + + -- Vincent Bernat Fri, 06 Nov 2020 19:33:59 +0100 + +haproxy (2.0.18-1) unstable; urgency=medium + + * New upstream release. + + -- Vincent Bernat Wed, 30 Sep 2020 13:41:09 +0200 + +haproxy (2.0.17-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: dns: Make the do-resolve action thread-safe + + -- Vincent Bernat Sat, 01 Aug 2020 20:05:01 +0200 + +haproxy (2.0.16-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: stream: Mark the server address as unset on new outgoing + connection + * d/patches: refresh patches. + + -- Vincent Bernat Sat, 18 Jul 2020 13:50:56 +0200 + +haproxy (2.0.15-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: stream-int: always detach a faulty endpoint on connect + failure + + -- Vincent Bernat Sat, 13 Jun 2020 18:48:25 +0200 + +haproxy (2.0.14-1) unstable; urgency=medium + + * New upstream release. + - BUG/CRITICAL: hpack: never index a header into the headroom after + wrapping + - BUG/MAJOR: http-ana: Always abort the request when a tarpit is + triggered + - BUG/MAJOR: list: fix invalid element address calculation + - BUG/MAJOR: proxy_protocol: Properly validate TLV lengths + * d/control: fix maintainer address. Closes: #955553. + + -- Vincent Bernat Thu, 16 Apr 2020 18:34:22 +0200 + +haproxy (2.0.13-2) unstable; urgency=medium + + * d/dconv: replace cgi.escape by html.escape. Closes: #951416. + * d/copryight: document OpenSSL exception. Closes: #951782. + * d/haproxy.cfg: use "ssl-min-ver" to set minimum version. + * Apply one patch to fix an overflow in HTTP/2 header handling. + Fix CVE-2020-11100. + + -- Vincent Bernat Wed, 01 Apr 2020 21:49:32 +0200 + +haproxy (2.0.13-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: hashes: fix the signedness of the hash inputs + - BUG/MAJOR: memory: Don't forget to unlock the rwlock if the pool is + empty. 
+ * d/dconv: use Python 3 to build the documentation. + Closes: #948296, #950435. + + -- Vincent Bernat Sat, 15 Feb 2020 15:32:32 +0100 + +haproxy (2.0.12-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: task: add a new TASK_SHARED_WQ flag to fix foreign requeuing + * d/logrotate.conf: use rsyslog helper instead of SysV init script. + Closes: #946973. + + -- Vincent Bernat Fri, 20 Dec 2019 08:20:33 +0100 + +haproxy (2.0.11-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: dns: add minimalist error processing on the Rx path + + -- Vincent Bernat Fri, 13 Dec 2019 19:22:03 +0100 + +haproxy (2.0.10-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: h2: make header field name filtering stronger + - BUG/MAJOR: h2: reject header values containing invalid chars + - BUG/MAJOR: mux-h2: don't try to decode a response HEADERS frame in + idle state + + -- Vincent Bernat Tue, 26 Nov 2019 13:22:17 +0100 + +haproxy (2.0.9-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: stream-int: Don't receive data from mux until SI_ST_EST + is reached + + -- Vincent Bernat Sat, 16 Nov 2019 17:38:51 +0100 + +haproxy (2.0.8-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: idle conns: schedule the cleanup task on the correct + threads + + -- Vincent Bernat Wed, 23 Oct 2019 08:55:55 +0200 + +haproxy (2.0.7-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: mux-h2: Handle HEADERS frames received after a RST_STREAM + frame + - BUG/MAJOR: mux_h2: Don't consume more payload than received for + skipped frames + - BUG/MEDIUM: checks: make sure the connection is ready before trying + to recv + + -- Vincent Bernat Fri, 27 Sep 2019 19:14:12 +0200 + +haproxy (2.0.6-2) unstable; urgency=medium + + * d/patches: fix regression with checks. + + -- Vincent Bernat Wed, 18 Sep 2019 08:02:53 +0200 + +haproxy (2.0.6-1) unstable; urgency=medium + + * New upstream release. 
+ - BUG/MAJOR: ssl: ssl_sock was not fully initialized. + + -- Vincent Bernat Fri, 13 Sep 2019 21:25:38 +0200 + +haproxy (2.0.5-1) unstable; urgency=medium + + * New upstream release. + - BUG/MEDIUM: mux_h1: Don't bother subscribing in recv if we're not + connected. + - BUG/MEDIUM: mux_pt: Don't call unsubscribe if we did not subscribe. + - BUG/MEDIUM: proxy: Don't forget the SF_HTX flag when upgrading + TCP=>H1+HTX. + - BUG/MEDIUM: proxy: Don't use cs_destroy() when freeing the + conn_stream. + - BUG/MEDIUM: stick-table: Wrong stick-table backends parsing. + + -- Vincent Bernat Fri, 16 Aug 2019 19:51:24 +0200 + +haproxy (2.0.4-1) unstable; urgency=medium + + * New upstream release. Upload to unstable. + - BUG/MAJOR: http/sample: use a static buffer for raw -> htx + conversion + - BUG/MAJOR: queue/threads: avoid an AB/BA locking issue in + process_srv_queue() + * d/haproxy.cfg: update default cipher lists to more secure defaults. + TLSv1.0 and TLSv1.1 are disabled, as well as TLS tickets (they are + breaking forward secrecy unless correctly rotated). + Closes: #932763. + + -- Vincent Bernat Fri, 09 Aug 2019 14:22:23 +0200 + +haproxy (2.0.3-1) experimental; urgency=medium + + * New upstream version. + - BUG/CRITICAL: http_ana: Fix parsing of malformed cookies which start by + a delimiter (CVE-2019-14241) + - BUG/MEDIUM: checks: Don't attempt to receive data if we already + subscribed. + - BUG/MEDIUM: http/htx: unbreak option http_proxy + - DOC: htx: Update comments in HTX files + - BUG/MEDIUM: mux-h1: Trim excess server data at the end of a transaction + - BUG/MEDIUM: tcp-checks: do not dereference inexisting conn_stream + * Bump Standards-Version to 4.4.0; no changes needed + + -- Apollon Oikonomopoulos Tue, 23 Jul 2019 13:31:31 -0300 + +haproxy (2.0.2-1) experimental; urgency=medium + + * New upstream version. 
+ - BUG/MAJOR: listener: fix thread safety in resume_listener() + + -- Vincent Bernat Wed, 17 Jul 2019 12:19:54 +0200 + +haproxy (2.0.1-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: sample: Wrong stick-table name parsing in "if/unless" ACL + condition. + - BUG/MAJOR: mux-h1: Don't crush trash chunk area when outgoing + message is formatted + * d/rules: fix crash during reload due to libgcc_s.so missing when + chrooted. + + -- Vincent Bernat Mon, 24 Jun 2019 19:28:26 +0200 + +haproxy (2.0.0-1) experimental; urgency=medium + + * New upstream version. + * d/watch: update to follow 2.0. + * d/gbp.conf: update for 2.0 and experimental. + * d/rules: update to use linux-glibc target. + * d/rules: enable prometheus exporter. + * d/patches: refresh patches. + * d/vim-haproxy.install: update path to vim syntax file. + * d/README.Debian: remove outdated information. + + -- Vincent Bernat Thu, 20 Jun 2019 11:40:19 +0200 + +haproxy (1.9.8-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: map/acl: real fix segfault during show map/acl on CLI + - BUG/MAJOR: mux-h2: do not add a stream twice to the send list + + -- Vincent Bernat Thu, 16 May 2019 01:50:10 +0200 + +haproxy (1.9.7-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: http_fetch: Get the channel depending on the keyword used + - BUG/MAJOR: lb/threads: fix AB/BA locking issue in round-robin LB + - BUG/MAJOR: lb/threads: fix insufficient locking on round-robin LB + - BUG/MAJOR: muxes: Use the HTX mode to find the best mux for HTTP + proxies only + - BUG/MAJOR: task: make sure never to delete a queued task + + -- Vincent Bernat Sun, 28 Apr 2019 17:37:04 +0200 + +haproxy (1.9.6-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: checks: segfault during tcpcheck_main + + -- Vincent Bernat Sat, 30 Mar 2019 12:43:33 +0100 + +haproxy (1.9.5-1) experimental; urgency=medium + + * New upstream version. 
+ - BUG/MAJOR: cache/htx: Set the start-line offset when a cached object + is served + - BUG/MAJOR: fd/threads, task/threads: ensure all spin locks are + unlocked + - BUG/MAJOR: listener: Make sure the listener exist before using it. + - BUG/MAJOR: mux-h2: fix race condition between close on both ends + - BUG/MAJOR: spoe: Don't try to get agent config during SPOP + healthcheck + - BUG/MAJOR: spoe: Fix initialization of thread-dependent fields + - BUG/MAJOR: stats: Fix how huge POST data are read from the channel + - BUG/MAJOR: stream: avoid double free on unique_id + - BUG/MAJOR: tasks: Use the TASK_GLOBAL flag to know if we're in the + global rq. + + -- Vincent Bernat Tue, 19 Mar 2019 20:13:48 +0100 + +haproxy (1.9.4-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: config: verify that targets of track-sc and stick rules + are present + - BUG/MAJOR: htx/backend: Make all tests on HTTP messages compatible + with HTX + - BUG/MAJOR: spoe: verify that backends used by SPOE cover all their + callers' processes + + -- Vincent Bernat Thu, 07 Feb 2019 12:48:42 +0100 + +haproxy (1.9.3-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: mux-h2: don't destroy the stream on failed allocation in + h2_snd_buf() + - BUG/MEDIUM: checks: fix recent regression on agent-check making it + crash + - BUG/MEDIUM: ssl: Fix handling of TLS 1.3 KeyUpdate messages + + -- Vincent Bernat Tue, 29 Jan 2019 12:59:10 +0100 + +haproxy (1.9.2-1) experimental; urgency=medium + + * New upstream version. + - BUG/MAJOR: cache: fix confusion between zero and uninitialized cache + key + - BUG/MEDIUM: checks: Avoid having an associated server for email + checks. + - BUG/MEDIUM: connection: properly unregister the mux on failed + initialization + - BUG/MEDIUM: h1: Get the h1m state when restarting the headers + parsing + - BUG/MEDIUM: h1: Make sure we destroy an inactive connectin that did + shutw. 
+ - BUG/MEDIUM: init: Initialize idle_orphan_conns for first server in + server-template + - BUG/MEDIUM: mux-h2: decode trailers in HEADERS frames + - BUG/MEDIUM: ssl: Disable anti-replay protection and set max data + with 0RTT. + - BUG/MEDIUM: ssl: missing allocation failure checks loading tls key + file + - BUG/MEDIUM: stats: Get the right scope pointer depending on HTX is + used or not + * d/patches: removal of CVE-2018-20615.patch (applied upstream) + + -- Vincent Bernat Thu, 17 Jan 2019 19:19:27 +0100 + +haproxy (1.9.0-2) experimental; urgency=medium + + * Fix out-of-bounds read in HTTP2 mux (CVE-2018-20615). + Possible crash in H2 HEADERS frame decoder when the PRIORITY flag + is present, due to a missing frame size check. + * Bump Standards-Version to 4.3.0; no changes needed. + + -- Apollon Oikonomopoulos Thu, 03 Jan 2019 12:41:02 +0200 + +haproxy (1.9.0-1) experimental; urgency=medium + + * New upstream version 1.9.0. + See https://www.haproxy.com/blog/haproxy-1-9-has-arrived/. + * d/watch: update to follow 1.9. + * d/gbp.conf: update for 1.9 and experimental. + * d/rules: do not override CFLAGS, hijack DEBUG_CFLAGS for this instead. + * d/patches: add regression fix for DNS. + + -- Vincent Bernat Fri, 21 Dec 2018 11:13:41 +0100 + +haproxy (1.8.15-1) unstable; urgency=high + + [ Vincent Bernat ] + * d/rules: switch to pcre2. Closes: #911933. 
+ + [ Apollon Oikonomopoulos ] + * New upstream version 1.8.15 + - BUG: dns: Fix off-by-one write in dns_validate_dns_response() ( + - BUG: dns: Fix out-of-bounds read via signedness error in + dns_validate_dns_response() + - BUG: dns: Prevent out-of-bounds read in dns_read_name() + - BUG: dns: Prevent out-of-bounds read in dns_validate_dns_response() + (CVE-2018-20102, closes: #916308) + - BUG: dns: Prevent stack-exhaustion via recursion loop in dns_read_name + (CVE-2018-20103, closes: #916307) + - BUG/MAJOR: http: http_txn_get_path() may deference an inexisting buffer + + -- Apollon Oikonomopoulos Fri, 14 Dec 2018 15:31:04 +0200 + +haproxy (1.8.14-1) unstable; urgency=medium + + * New upstream version. + - BUG/CRITICAL: hpack: fix improper sign check on the header index + value (already fixed in 1.8.13-2) + - BUG/MAJOR: kqueue: Don't reset the changes number by accident. + - BUG/MAJOR: thread: lua: Wrong SSL context initialization. + + -- Vincent Bernat Sun, 23 Sep 2018 12:25:03 +0200 + +haproxy (1.8.13-2) unstable; urgency=high + + * Fix improper sign check on the HPACK header index value (CVE-2018-14645) + * Bump Standards-Version to 4.2.1; no changes needed + + -- Apollon Oikonomopoulos Wed, 19 Sep 2018 22:46:58 +0300 + +haproxy (1.8.13-1) unstable; urgency=medium + + * New upstream version. 
+ - BUG/MEDIUM: h2: don't accept new streams if conn_streams are still + in excess + - BUG/MEDIUM: h2: make sure the last stream closes the connection + after a timeout + - BUG/MEDIUM: h2: never leave pending data in the output buffer on close + - BUG/MEDIUM: h2: prevent orphaned streams from blocking a connection + forever + - BUG/MEDIUM: stats: don't ask for more data as long as we're responding + - BUG/MEDIUM: stream-int: don't immediately enable reading when the + buffer was reportedly full + - BUG/MEDIUM: threads/sync: use sched_yield when available + - BUG/MEDIUM: threads: Fix the exit condition of the thread barrier + - BUG/MEDIUM: threads: properly fix nbthreads == MAX_THREADS + - BUG/MEDIUM: threads: unbreak "bind" referencing an incorrect thread + number + * d/patches: drop systemd exit status patch (applied upstream). + + -- Vincent Bernat Wed, 01 Aug 2018 11:36:20 +0200 + +haproxy (1.8.12-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: stick_table: Complete incomplete SEGV fix + + -- Vincent Bernat Wed, 27 Jun 2018 20:05:50 +0200 + +haproxy (1.8.11-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: Stick-tables crash with segfault when the key is not in + the stick-table + + -- Vincent Bernat Tue, 26 Jun 2018 18:26:05 +0200 + +haproxy (1.8.10-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: lua: Dead lock with sockets + - BUG/MAJOR: map: fix a segfault when using http-request set-map + - BUG/MAJOR: ssl: OpenSSL context is stored in non-reserved memory slot + - BUG/MAJOR: ssl: Random crash with cipherlist capture + - BUG/MEDIUM: cache: don't cache when an Authorization header is present + - BUG/MEDIUM: dns: Delay the attempt to run a DNS resolution on check + failure. + - BUG/MEDIUM: fd: Don't modify the update_mask in fd_dodelete(). + - BUG/MEDIUM: fd: Only check update_mask against all_threads_mask. 
+ - BUG/MEDIUM: servers: Add srv_addr default placeholder to the state file + - BUG/MEDIUM: stick-tables: Decrement ref_cnt in table_* converters + - BUG/MEDIUM: threads: Use the sync point to check active jobs and exit + - BUG/MEDIUM: threads: handle signal queue only in thread 0 + * Remove patch from CVE. Included upstream. + * d/patches: add a patch for clean stop with systemd. + + -- Vincent Bernat Fri, 22 Jun 2018 20:21:37 +0200 + +haproxy (1.8.9-2) unstable; urgency=high + + * d/patches: fix CVE-2018-11469: do not cache when an Authorization + header is present. Closes: #900084. + + -- Vincent Bernat Sat, 26 May 2018 16:05:07 +0200 + +haproxy (1.8.9-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: channel: Fix crash when trying to read from a closed socket + - BUG/MEDIUM: h2: implement missing support for chunked encoded uploads + - BUG/MEDIUM: http: don't always abort transfers on CF_SHUTR + - BUG/MEDIUM: lua: Fix segmentation fault if a Lua task exits + - BUG/MEDIUM: pollers: Use a global list for fd shared between threads + - BUG/MEDIUM: ssl: properly protect SSL cert generation + - BUG/MEDIUM: task: Don't free a task that is about to be run + - BUG/MEDIUM: threads: Fix the sync point for more than 32 threads + * d/rsyslog.conf: use modern syntax and statements, thanks to Guillem + Jover. Closes: #897914. + + -- Vincent Bernat Sat, 19 May 2018 15:00:17 +0200 + +haproxy (1.8.8-1) unstable; urgency=high + + * New upstream version. + - BUG/CRITICAL: h2: fix incorrect frame length check + + -- Vincent Bernat Thu, 19 Apr 2018 17:51:55 +0200 + +haproxy (1.8.7-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: cache: always initialize newly created objects + * d/control: switch maintainer address to tracker.debian.org. + + -- Vincent Bernat Sat, 07 Apr 2018 07:58:34 +0200 + +haproxy (1.8.6-1) unstable; urgency=medium + + * New upstream version. 
+ - BUG/MAJOR: cache: fix random crashes caused by incorrect delete() on + non-first blocks + - BUG/MAJOR: h2: remove orphaned streams from the send list before closing + - BUG/MEDIUM: h2/threads: never release the task outside of the task + handler + - BUG/MEDIUM: h2: always add a stream to the send or fctl list when + blocked + - BUG/MEDIUM: h2: don't consider pending data on detach if connection + is in error + + -- Vincent Bernat Thu, 05 Apr 2018 21:08:12 +0200 + +haproxy (1.8.5-1) unstable; urgency=medium + + * New upstream version. + - BUG/MAJOR: threads/queue: Fix thread-safety issues on the queues + management + - BUG/MEDIUM: buffer: Fix the wrapping case in bi_putblk + - BUG/MEDIUM: buffer: Fix the wrapping case in bo_putblk + - BUG/MEDIUM: fix a 100% cpu usage with cpu-map and nbthread/nbproc + - BUG/MEDIUM: h2: also arm the h2 timeout when sending + - BUG/MEDIUM: h2: always consume any trailing data after end of output + buffers + - BUG/MEDIUM: h2: properly account for DATA padding in flow control + - BUG/MEDIUM: http: Switch the HTTP response in tunnel mode as earlier + as possible + - BUG/MEDIUM: spoe: Remove idle applets from idle list when HAProxy is + stopping + - BUG/MEDIUM: ssl/sample: ssl_bc_* fetch keywords are broken. + - BUG/MEDIUM: ssl: Don't always treat SSL_ERROR_SYSCALL as + unrecovarable. + - BUG/MEDIUM: ssl: Shutdown the connection for reading on + SSL_ERROR_SYSCALL + - BUG/MEDIUM: tcp-check: single connect rule can't detect DOWN servers + - BUG/MEDIUM: threads/queue: wake up other threads upon dequeue + - BUG/MEDIUM: threads/unix: Fix a deadlock when a listener is + temporarily disabled + * Upload to unstable. + * d/control: update Vcs-* fields to salsa.debian.org. + + -- Vincent Bernat Sun, 25 Mar 2018 11:31:25 +0200 + +haproxy (1.8.4-1) experimental; urgency=medium + + * New upstream stable release. + * d/patches: document why dconv patch is not in series. + * d/docs: ship NOTICE file in haproxy-doc. 
+ + -- Vincent Bernat Sat, 10 Feb 2018 08:43:36 +0100 + +haproxy (1.8.3-1) experimental; urgency=medium + + * New upstream stable release. + * Change default configuration of stats socket to support hitless + reload. + + -- Vincent Bernat Tue, 02 Jan 2018 18:48:24 +0100 + +haproxy (1.8.2-1) experimental; urgency=medium + + * New upstream stable release + * Refresh patches + * Bump Standards-Version to 4.1.2; no changes needed + + -- Apollon Oikonomopoulos Sun, 24 Dec 2017 14:28:28 +0200 + +haproxy (1.8.1-1) experimental; urgency=medium + + * New upstream stable release. + * Enable PCRE JIT. + * systemd: replace Wants/After=syslog.service with After=rsyslog.service + (Closes: #882610) + + -- Apollon Oikonomopoulos Sun, 03 Dec 2017 23:59:03 +0200 + +haproxy (1.8.0-2) experimental; urgency=medium + + * Use libatomic on platforms without 64-bit atomics. Fixes FTBFS on armel, + mips, mipsel, powerpc, powerpcspe, sh4 and m68k. + * d/rules: use variables defined in architecture.mk and buildflags.mk + * d/rules: drop unreachable else case. + + -- Apollon Oikonomopoulos Wed, 29 Nov 2017 01:21:40 +0200 + +haproxy (1.8.0-1) experimental; urgency=medium + + * New upstream stable series. Notable new features include: + + HTTP/2 support + + Support for multiple worker threads to allow scalability across CPUs + (e.g. for SSL termination) + + Seamless reloads + + HTTP small object caching + + Dynamic backend server configuration + See https://www.haproxy.com/blog/whats-new-haproxy-1-8/ and + https://www.mail-archive.com/haproxy@formilux.org/msg28004.html for more + detailed descriptions of the new features. + * Upload to experimental + * Refresh all patches. + * d/watch: switch to the 1.8.x upstream stable series + * Bump Standards to 4.1.1 + + Switch haproxy-doc to Priority: optional from extra. 
+ * Bump compat to 10: + + B-D on debhelper (>= 10) + + Drop explicit dh-systemd dependency and invocation + + Replace --no-restart-on-upgrade with --no-restart-after-upgrade + --no-stop-on-upgrade to make up for DH 10 defaults. + * B-D on libsystemd-dev and enable sd_notify() support on Linux. + * B-D on python3-sphinx instead of python-sphinx. + * d/rules: do not call dpkg-parsechangelog directly. + * d/copyright: drop obsolete section. + * Drop obsolete lintian overrides. + * Do a full-service restart when upgrading from pre-1.8 versions and running + under systemd, to migrate to the new process model and service type. + + Document this in d/NEWS as well. + + -- Apollon Oikonomopoulos Tue, 28 Nov 2017 22:25:11 +0200 + +haproxy (1.7.10-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MAJOR: stream-int: don't re-arm recv if send fails + - BUG/MAJOR: stream: ensure analysers are always called upon close + - BUG/MEDIUM: compression: Fix check on txn in smp_fetch_res_comp_algo + - BUG/MEDIUM: connection: remove useless flag CO_FL_DATA_RD_SH + - BUG/MEDIUM: deinit: correctly deinitialize the proxy and global + listener tasks + - BUG/MEDIUM: deviceatlas: ignore not valuable HTTP request data + - BUG/MEDIUM: epoll: ensure we always consider HUP and ERR + - BUG/MEDIUM: http: Close streams for connections closed before a + redirect + - BUG/MEDIUM: http: Fix a regression bug when a HTTP response is in + TUNNEL mode + - BUG/MEDIUM: http: Return an error when url_dec sample converter + failed + - BUG/MEDIUM: http: don't automatically forward request close + - BUG/MEDIUM: http: don't disable lingering on requests with tunnelled + responses + - BUG/MEDIUM: kqueue: Don't bother closing the kqueue after fork. 
+ - BUG/MEDIUM: lua: HTTP services must take care of body-less status + codes + - BUG/MEDIUM: lua: fix crash when using bogus mode in + register_service() + - BUG/MEDIUM: peers: set NOLINGER on the outgoing stream interface + - BUG/MEDIUM: prevent buffers being overwritten during build_logline() + execution + - BUG/MEDIUM: ssl: fix OCSP expiry calculation + - BUG/MEDIUM: stream: don't ignore res.analyse_exp anymore + - BUG/MEDIUM: stream: properly set the required HTTP analysers on + use-service + - BUG/MEDIUM: tcp-check: don't call tcpcheck_main() from the I/O + handlers! + - BUG/MEDIUM: tcp-check: properly indicate polling state before + performing I/O + - BUG/MEDIUM: tcp/http: set-dst-port action broken + * Fix VERDATE build argument to really use changelog date. + * Bump compat to 10. + * d/control: B-D on python3-sphinx instead of python-sphinx. + * d/control: make haproxy-doc Priority: optional. + * d/rules: enable PCRE JIT. + * d/rules: use variables defined in *.mk. + * d/patches: refresh and replace Wants/After=syslog.service with + After=rsyslog.service. Closes: #882610. + + -- Vincent Bernat Wed, 03 Jan 2018 08:29:48 +0100 + +haproxy (1.7.9-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MAJOR: lua/socket: resources not destroyed when the socket is + aborted + - BUG/MEDIUM: lua: bad memory access + - BUG/MEDIUM: http: Switch HTTP responses in TUNNEL mode when body + length is undefined + + -- Vincent Bernat Sat, 19 Aug 2017 12:05:02 +0200 + +haproxy (1.7.8-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MAJOR: cli: fix custom io_release was crushed by NULL. + - BUG/MAJOR: compression: Be sure to release the compression state in + all cases + - BUG/MAJOR: map: fix segfault during 'show map/acl' on cli. + - BUG/MEDIUM: filters: Be sure to call flt_end_analyze for both + channels + - BUG/MEDIUM: map/acl: fix unwanted flags inheritance. + * Bump Standards-Version to 4.0.0. 
No changes needed. + * Update d/watch to use https. + + -- Vincent Bernat Sat, 08 Jul 2017 08:24:35 +0200 + +haproxy (1.7.7-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MEDIUM: http: Drop the connection establishment when a redirect + is performed + - BUG/MEDIUM: cfgparse: Check if tune.http.maxhdr is in the range + 1..32767 + + -- Vincent Bernat Mon, 26 Jun 2017 14:06:48 +0200 + +haproxy (1.7.6-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MAJOR: Use -fwrapv. + - BUG/MAJOR: http: call manage_client_side_cookies() before erasing + the buffer + - BUG/MAJOR: server: Segfault after parsing server state file. + - BUG/MEDIUM: acl: don't free unresolved args in prune_acl_expr() + - BUG/MEDIUM: acl: proprely release unused args in prune_acl_expr() + - BUG/MEDIUM: arg: ensure that we properly unlink unresolved arguments + on error + - BUG/MEDIUM: lua: memory leak + - BUG/MEDIUM: lua: segfault if a converter or a sample doesn't return + anything + - BUG/MEDIUM: peers: Peers CLOSE_WAIT issue. + - BUG/MEDIUM: unix: never unlink a unix socket from the file system + + -- Vincent Bernat Sun, 18 Jun 2017 12:34:40 +0200 + +haproxy (1.7.5-2) unstable; urgency=medium + + * Enable getaddrinfo() support, allowing resolution of hostnames to IPv6 + addresses (Closes: #862780). Thanks to Anton Eliasson + ! + + -- Apollon Oikonomopoulos Wed, 17 May 2017 13:01:45 +0300 + +haproxy (1.7.5-1) unstable; urgency=medium + + * New upstream version release (see CHANGELOG): + - BUG/MEDIUM: peers: fix buffer overflow control in intdecode. 
+ - BUG/MEDIUM: buffers: Fix how input/output data are injected into buffers + - BUG/MEDIUM: http: Fix blocked HTTP/1.0 responses when compression is + enabled + + -- Apollon Oikonomopoulos Tue, 04 Apr 2017 14:25:38 +0300 + +haproxy (1.7.4-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + - BUG/MAJOR: connection: update CO_FL_CONNECTED before calling the + data layer + - BUG/MAJOR: http: fix typo in http_apply_redirect_rule + - BUG/MAJOR: stream-int: do not depend on connection flags to detect + connection + - BUG/MEDIUM: cli: Prevent double free in CLI ACL lookup + - BUG/MEDIUM: connection: ensure to always report the end of handshakes + - BUG/MEDIUM: listener: do not try to rebind another process' socket + - BUG/MEDIUM: stream: fix client-fin/server-fin handling + - BUG/MEDIUM: tcp: don't require privileges to bind to device + + -- Vincent Bernat Fri, 31 Mar 2017 11:01:14 +0200 + +haproxy (1.7.3-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + - BUG/MAJOR: lua segmentation fault when the request is like 'GET + ?arg=val HTTP/1.1' + - BUG/MAJOR: dns: restart sockets after fork() + - BUG/MEDIUM: tcp: don't poll for write when connect() succeeds + - BUG/MEDIUM: http: prevent redirect from overwriting a buffer + - BUG/MEDIUM: filters: Do not truncate HTTP response when body length + is undefined + - BUG/MEDIUM: http: Prevent replace-header from overwriting a buffer + - BUG/MEDIUM: config: reject anything but "if" or "unless" after a + use-backend rule + + -- Vincent Bernat Wed, 01 Mar 2017 20:03:12 +0100 + +haproxy (1.7.2-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + Fix a regression whereby fragmented requests were randomly flagged as + bad requests depending on previous buffer contents; this was noticeable + under low load with authenticated requests. + + Fix dynamic address resolution for IPv6-only hosts. + + Make sure SSL sessions are not reused when the SNI changes. 
This makes + SNI and SSL health checks play nice together. + + Minor improvements: + - Add the ability to perform actions on multiple servers via the stats + page. + - Add the ability to specify a custom HTTP reason field in generated + responses. + - New sample fetch function, `fc_rcvd_proxy', indicating whether the + PROXY protocol was used on the frontend for a connection or not. + + -- Apollon Oikonomopoulos Fri, 13 Jan 2017 14:49:05 +0200 + +haproxy (1.7.1-1) unstable; urgency=medium + + * New upstream stable release. + * Upload to unstable. + * Notable new features since 1.6: + + SPOE (stream processing offload engine) : ability to delegate some + slow, unreliable or dangerous processing to external processes. + + More statistics in the CSV output. + + Support of directories for config files: if the argument to -f + is a directory, all files found there are loaded in alphabetical order. + + It is now possible to set/unset/preset environment variables directly in + the global section and query them through the CLI. + + The CLI makes it possible to change a server's address, port, maxconn, + check address and port at runtime, without reloading haproxy. + + Support for multiple certificates: different certificates for the same + domain so that the best one can be picked according to browser support. + The main use is to be able to deliver ECDSA certificates to clients + supporting them, without breaking compatibility with older clients. + + SO_REUSEPORT is now configurable and can be disabled. + + Updates to the Lua API, including new classes to access many internal + objects like listeners, servers, proxies etc. + + Support for a new type of maps consisting of regular expressions with + replacement values. + + -- Apollon Oikonomopoulos Tue, 13 Dec 2016 12:32:32 +0200 + +haproxy (1.7.0-1) experimental; urgency=medium + + * New upstream stable series. 
+ + -- Apollon Oikonomopoulos Fri, 25 Nov 2016 18:00:55 +0200 + +haproxy (1.7~dev6-1) experimental; urgency=medium + + * New upstream development release (Closes: #828337) + * Upload to experimental + * d/watch: look for 1.7 + * B-D on zlib1g-dev + * haproxy: Depend on lsb-base for the initscript + * Ship additional plain-text documentation + * haproxy-doc: ship HTML version of management.txt + * Update the default SSL cipher list and add a link to Mozilla's SSL + configuration generator (Closes: #840735) + * d/rules: use SUBVERS to pass the Debian revision to HAPROXY_VERSION + + -- Apollon Oikonomopoulos Thu, 10 Nov 2016 16:02:27 +0200 + +haproxy (1.6.10-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + Fix retransmits in proxy mode and rare cases of unkillable tasks. + + systemd wrapper: do not leave old processes behind when reloading too + fast. + + systemd wrapper: correctly set the status code. + + Fix two bugs in the peers' task management possibly causing some + CLOSE_WAIT connection after some rare race conditions. + + Make SO_REUSEPORT use configurable via the "-dR" command line switch + or the "noreuseport" config option in the global section. + * B-D on libssl1.0-dev (Closes: #828337); upstream does not currently + support OpenSSL 1.1 for the 1.6 series. + * haproxy: depend on lsb-base for the initscript's use of + /lib/lsb/init-functions. + + -- Apollon Oikonomopoulos Mon, 21 Nov 2016 11:46:16 +0200 + +haproxy (1.6.9-2) unstable; urgency=medium + + * Enable Linux namespace support. + * Pass the full Debian version and package release date from d/changelog to + the build system. + * initscript: reorder the reload command arguments to always parse EXTRAOPTS + properly. 
+ + -- Apollon Oikonomopoulos Wed, 28 Sep 2016 10:45:43 +0300 + +haproxy (1.6.9-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: stream: properly mark the server address as unset on + connect retry + + -- Vincent Bernat Wed, 31 Aug 2016 07:44:27 +0200 + +haproxy (1.6.8-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: compression: initialize avail_in/next_in even during + flush + + BUG/MAJOR: server: the "sni" directive could randomly cause trouble + + BUG/MAJOR: stick-counters: possible crash when using sc_trackers + with wrong table + + -- Vincent Bernat Sun, 14 Aug 2016 14:17:08 +0200 + +haproxy (1.6.7-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: fix use-after-free crash on start + + BUG/MEDIUM: dns: fix alignment issues in the DNS response parser + + -- Vincent Bernat Thu, 14 Jul 2016 08:29:43 +0200 + +haproxy (1.6.6-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: fix listening IP address storage for frontends + + BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes + + BUG/MEDIUM: stick-tables: fix breakage in table converters + + BUG/MEDIUM: dns: unbreak DNS resolver after header fix + + BUG/MEDIUM: stats: show servers state may show an servers from another + backend + + BUG/MEDIUM: fix risk of segfault with "show tls-keys" + + BUG/MEDIUM: sticktables: segfault in some configuration error cases + + BUG/MEDIUM: lua: converters doesn't work + + BUG/MEDIUM: http: add-header: buffer overwritten + + BUG/MEDIUM: external-checks: close all FDs right after the fork() + + BUG/MAJOR: external-checks: use asynchronous signal delivery + * Drop haproxy.service-check-config-before-reload.patch. Applied + upstream. + + -- Vincent Bernat Tue, 28 Jun 2016 10:13:33 +0200 + +haproxy (1.6.5-2) unstable; urgency=high + + * Add a patch to fix CVE-2016-5360. Closes: #826869. 
+ + BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes + + -- Vincent Bernat Sat, 11 Jun 2016 22:23:50 +0200 + +haproxy (1.6.5-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: channel: fix miscalculation of available buffer space + + BUG/MAJOR: Fix crash in http_get_fhdr with exactly MAX_HDR_HISTORY + headers + + BUG/MEDIUM: channel: don't allow to overwrite the reserve until + connected + + BUG/MEDIUM: channel: fix inconsistent handling of 4GB-1 transfers + + BUG/MEDIUM: channel: incorrect polling condition may delay event + delivery + + BUG/MEDIUM: dns: fix alignment issue when building DNS queries + + BUG/MEDIUM: fix maxaccept computation on per-process listeners + + BUG/MEDIUM: Fix RFC5077 resumption when more than TLS_TICKETS_NO are + present + + BUG/MEDIUM: http: fix risk of CPU spikes with pipelined requests from + dead client + + BUG/MEDIUM: log: fix risk of segfault when logging HTTP fields in TCP + mode + + BUG/MEDIUM: lua: protects the upper boundary of the argument list for + converters/fetches. + + BUG/MEDIUM: peers: fix incorrect age in frequency counters + + BUG/MEDIUM: sample: initialize the pointer before parse_binary call. + + BUG/MEDIUM: stats: show backend may show an empty or incomplete result + + BUG/MEDIUM: stats: show servers state may show an empty or incomplete + result + + BUG/MEDIUM: stick-tables: some sample-fetch doesn't work in the + connection state. + + BUG/MEDIUM: stream: ensure the SI_FL_DONT_WAKE flag is properly cleared + + BUG/MEDIUM: trace.c: rdtsc() is defined in two files + + MEDIUM: unblock signals on startup. + * Bump standards to 3.9.8; no changes needed. + + -- Apollon Oikonomopoulos Wed, 11 May 2016 11:07:24 +0300 + +haproxy (1.6.4-3) unstable; urgency=medium + + * d/init: remove support for dynamic script name. This enables haproxy to + be started on boot. 
+ + -- Vincent Bernat Thu, 24 Mar 2016 20:36:08 +0100 + +haproxy (1.6.4-2) unstable; urgency=medium + + * d/init: fix SysV init script w/ respect to handling EXTRAOPTS on check. + * d/control: add Pre-Depends for dpkg-maintscript-helper support of + dir_to_symlink. + + -- Vincent Bernat Sat, 19 Mar 2016 16:35:20 +0100 + +haproxy (1.6.4-1) unstable; urgency=medium + + * New upstream release (see CHANGELOG): + + BUG/MAJOR: http-reuse: fix risk of orphaned connections. + + BUG/MAJOR: lua: applets can't sleep. + + BUG/MAJOR: samples: check smp->strm before using it. + + BUG/MAJOR: servers state: server port is erased when dns resolution is + enabled on a server. + + BUG/MAJOR: vars: always retrieve the stream and session from the sample + + BUG/MEDIUM: buffers: do not round up buffer size during allocation + + BUG/MEDIUM: dns: no DNS resolution happens if no ports provided to the + nameserver + + BUG/MEDIUM: servers state: server port is used uninitialized + + BUG/MEDIUM: config: Adding validation to stick-table expire value. + + BUG/MEDIUM: sample: http_date() doesn't provide the right day of the + week + + BUG/MEDIUM: channel: fix miscalculation of available buffer space. + + BUG/MEDIUM: http-reuse: do not share private connections across backends + + BUG/MEDIUM: ssl: fix off-by-one in ALPN list allocation + + BUG/MEDIUM: ssl: fix off-by-one in NPN list allocation + + BUG/MEDIUM: stats: stats bind-process doesn't propagate the process mask + correctly + + BUG/MEDIUM: chunks: always reject negative-length chunks + + BUG/MEDIUM: cfgparse: wrong argument offset after parsing server "sni" + keyword + + [ Vincent Bernat ] + * haproxy.init: append ${EXTRAOPTS} when verifying configuration file. + * haproxy.init: move EXTRAOPTS after all other parameters. + * haproxy.init: management of multiple HAProxy instances with SysV + init.d script, courtesy of Ivan Savcic. 
+ + [ Apollon Oikonomopoulos ] + * Bump standards to 3.9.7: + + haproxy-doc: move the additional documentation from + /usr/share/doc/haproxy-doc to /usr/share/doc/haproxy, as per the + recommendation in Policy §12.3. + + Add compatibility symlinks from /usr/share/doc/haproxy-doc to + /usr/share/doc/haproxy. + * Enable all hardening flags. + * d/control: use HTTPS for Vcs-* + * Use www.haproxy.org as the project's homepage in d/control and + d/copyright. + * d/copyright: adjust debian/* years. + * Add basic DEP-8 tests. + * Drop the haproxy-dbg binary package in favor of ddebs. + * haproxy-doc: + + Use dpkg-maintscript-helper dir_to_symlink for the compatibility + symlinks. + + Add Lua documentation doc-base entry. + + -- Apollon Oikonomopoulos Tue, 15 Mar 2016 21:04:11 +0200 + +haproxy (1.6.3-1) unstable; urgency=medium + + [ Apollon Oikonomopoulos ] + * haproxy.init: use s-s-d's --pidfile option. + Thanks to Louis Bouchard (Closes: 804530) + + [ Vincent Bernat ] + * watch: fix d/watch to look for 1.6 version + * Imported Upstream version 1.6.3 + + -- Vincent Bernat Thu, 31 Dec 2015 08:10:10 +0100 + +haproxy (1.6.2-2) unstable; urgency=medium + + * Enable USE_REGPARM on amd64 as well. + + -- Vincent Bernat Tue, 03 Nov 2015 21:21:30 +0100 + +haproxy (1.6.2-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: dns: first DNS response packet not matching queried + hostname may lead to a loop + - BUG/MAJOR: http: don't requeue an idle connection that is already + queued + * Upload to unstable. + + -- Vincent Bernat Tue, 03 Nov 2015 13:36:22 +0100 + +haproxy (1.6.1-2) experimental; urgency=medium + + * Build the Lua manpage in -arch, fixes FTBFS in binary-only builds. + + -- Apollon Oikonomopoulos Thu, 22 Oct 2015 12:19:41 +0300 + +haproxy (1.6.1-1) experimental; urgency=medium + + [ Vincent Bernat ] + * New upstream release. 
+ - BUG/MAJOR: ssl: free the generated SSL_CTX if the LRU cache is + disabled + * Drop 0001-BUILD-install-only-relevant-and-existing-documentati.patch. + + [ Apollon Oikonomopoulos ] + * Ship and generate Lua API documentation. + + -- Vincent Bernat Thu, 22 Oct 2015 10:45:55 +0200 + +haproxy (1.6.0+ds1-1) experimental; urgency=medium + + * New upstream release! + * Add a patch to fix documentation installation: + + 0001-BUILD-install-only-relevant-and-existing-documentati.patch + * Update HAProxy documentation converter to a more recent version. + + -- Vincent Bernat Wed, 14 Oct 2015 17:29:19 +0200 + +haproxy (1.6~dev7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 06 Oct 2015 16:01:26 +0200 + +haproxy (1.6~dev5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 14 Sep 2015 15:50:28 +0200 + +haproxy (1.6~dev4-1) experimental; urgency=medium + + * New upstream release. + * Refresh debian/copyright. + + -- Vincent Bernat Sun, 30 Aug 2015 23:54:10 +0200 + +haproxy (1.6~dev3-1) experimental; urgency=medium + + * New upstream release. + * Enable Lua support. + + -- Vincent Bernat Sat, 15 Aug 2015 17:51:29 +0200 + +haproxy (1.5.15-1) unstable; urgency=medium + + * New upstream stable release including the following fix: + - BUG/MAJOR: http: don't call http_send_name_header() after an error + + -- Vincent Bernat Mon, 02 Nov 2015 07:34:19 +0100 + +haproxy (1.5.14-1) unstable; urgency=high + + * New upstream version. Fix an information leak (CVE-2015-3281): + - BUG/MAJOR: buffers: make the buffer_slow_realign() function + respect output data. + * Add $named as a dependency for init script. Closes: #790638. 
+ + -- Vincent Bernat Fri, 03 Jul 2015 19:49:02 +0200 + +haproxy (1.5.13-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + - MAJOR: peers: allow peers section to be used with nbproc > 1 + - BUG/MAJOR: checks: always check for end of list before proceeding + - MEDIUM: ssl: replace standards DH groups with custom ones + - BUG/MEDIUM: ssl: fix tune.ssl.default-dh-param value being overwritten + - BUG/MEDIUM: cfgparse: segfault when userlist is misused + - BUG/MEDIUM: stats: properly initialize the scope before dumping stats + - BUG/MEDIUM: http: don't forward client shutdown without NOLINGER + except for tunnels + - BUG/MEDIUM: checks: do not dereference head of a tcp-check at the end + - BUG/MEDIUM: checks: do not dereference a list as a tcpcheck struct + - BUG/MEDIUM: peers: apply a random reconnection timeout + - BUG/MEDIUM: config: properly compute the default number of processes + for a proxy + + -- Vincent Bernat Sat, 27 Jun 2015 20:52:07 +0200 + +haproxy (1.5.12-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: http: don't read past buffer's end in http_replace_value + - BUG/MAJOR: http: prevent risk of reading past end with balance + url_param + - BUG/MEDIUM: Do not consider an agent check as failed on L7 error + - BUG/MEDIUM: pattern: some entries are not deleted with case + insensitive match + - BUG/MEDIUM: buffer: one byte miss in buffer free space check + - BUG/MEDIUM: http: the function "(req|res)-replace-value" doesn't + respect the HTTP syntax + - BUG/MEDIUM: peers: correctly configure the client timeout + - BUG/MEDIUM: http: hdr_cnt would not count any header when called + without name + - BUG/MEDIUM: listener: don't report an error when resuming unbound + listeners + - BUG/MEDIUM: init: don't limit cpu-map to the first 32 processes only + - BUG/MEDIUM: stream-int: always reset si->ops when si->end is + nullified + - BUG/MEDIUM: http: remove 
content-length from chunked messages + - BUG/MEDIUM: http: do not restrict parsing of transfer-encoding to + HTTP/1.1 + - BUG/MEDIUM: http: incorrect transfer-coding in the request is a bad + request + - BUG/MEDIUM: http: remove content-length form responses with bad + transfer-encoding + - BUG/MEDIUM: http: wait for the exact amount of body bytes in + wait_for_request_body + + -- Vincent Bernat Sat, 02 May 2015 16:38:28 +0200 + +haproxy (1.5.11-2) unstable; urgency=medium + + * Upload to unstable. + + -- Vincent Bernat Sun, 26 Apr 2015 17:46:58 +0200 + +haproxy (1.5.11-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: log: don't try to emit a log if no logger is set + - BUG/MEDIUM: backend: correctly detect the domain when + use_domain_only is used + - BUG/MEDIUM: Do not set agent health to zero if server is disabled + in config + - BUG/MEDIUM: Only explicitly report "DOWN (agent)" if the agent health + is zero + - BUG/MEDIUM: http: fix header removal when previous header ends with + pure LF + - BUG/MEDIUM: channel: fix possible integer overflow on reserved size + computation + - BUG/MEDIUM: channel: don't schedule data in transit for leaving until + connected + - BUG/MEDIUM: http: make http-request set-header compute the string + before removal + * Upload to experimental. 
+ + -- Vincent Bernat Sun, 01 Feb 2015 09:22:27 +0100 + +haproxy (1.5.10-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: stream-int: properly check the memory allocation return + - BUG/MEDIUM: sample: fix random number upper-bound + - BUG/MEDIUM: patterns: previous fix was incomplete + - BUG/MEDIUM: payload: ensure that a request channel is available + - BUG/MEDIUM: tcp-check: don't rely on random memory contents + - BUG/MEDIUM: tcp-checks: disable quick-ack unless next rule is an expect + - BUG/MEDIUM: config: do not propagate processes between stopped + processes + - BUG/MEDIUM: memory: fix freeing logic in pool_gc2() + - BUG/MEDIUM: compression: correctly report zlib_mem + * Upload to experimental. + + -- Vincent Bernat Sun, 04 Jan 2015 13:17:56 +0100 + +haproxy (1.5.9-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: sessions: unlink session from list on out + of memory + - BUG/MEDIUM: pattern: don't load more than once a pattern + list. + - BUG/MEDIUM: connection: sanitize PPv2 header length before + parsing address information + - BUG/MAJOR: frontend: initialize capture pointers earlier + - BUG/MEDIUM: checks: fix conflicts between agent checks and + ssl healthchecks + - BUG/MEDIUM: ssl: force a full GC in case of memory shortage + - BUG/MEDIUM: ssl: fix bad ssl context init can cause + segfault in case of OOM. + * Upload to experimental. + + -- Vincent Bernat Sun, 07 Dec 2014 16:37:36 +0100 + +haproxy (1.5.8-3) unstable; urgency=medium + + * Remove RC4 from the default cipher string shipped in configuration. + + -- Vincent Bernat Fri, 27 Feb 2015 11:29:23 +0100 + +haproxy (1.5.8-2) unstable; urgency=medium + + * Cherry-pick the following patches from 1.5.9 release: + - 8a0b93bde77e BUG/MAJOR: sessions: unlink session from list on out + of memory + - bae03eaad40a BUG/MEDIUM: pattern: don't load more than once a pattern + list. 
+ - 93637b6e8503 BUG/MEDIUM: connection: sanitize PPv2 header length before + parsing address information + - 8ba50128832b BUG/MAJOR: frontend: initialize capture pointers earlier + - 1f96a87c4e14 BUG/MEDIUM: checks: fix conflicts between agent checks and + ssl healthchecks + - 9bcc01ae2598 BUG/MEDIUM: ssl: force a full GC in case of memory shortage + - 909514970089 BUG/MEDIUM: ssl: fix bad ssl context init can cause + segfault in case of OOM. + * Cherry-pick the following patches from future 1.5.10 release: + - 1e89acb6be9b BUG/MEDIUM: payload: ensure that a request channel is + available + - bad3c6f1b6d7 BUG/MEDIUM: patterns: previous fix was incomplete + + -- Vincent Bernat Sun, 07 Dec 2014 11:11:21 +0100 + +haproxy (1.5.8-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + + + BUG/MAJOR: buffer: check the space left is enough or not when input + data in a buffer is wrapped + + BUG/MINOR: ssl: correctly initialize ssl ctx for invalid certificates + + BUG/MEDIUM: tcp: don't use SO_ORIGINAL_DST on non-AF_INET sockets + + BUG/MEDIUM: regex: fix pcre_study error handling + + BUG/MEDIUM: tcp: fix outgoing polling based on proxy protocol + + BUG/MINOR: log: fix request flags when keep-alive is enabled + + BUG/MAJOR: cli: explicitly call cli_release_handler() upon error + + BUG/MEDIUM: http: don't dump debug headers on MSG_ERROR + * Also includes the following new features: + + MINOR: ssl: add statement to force some ssl options in global. + + MINOR: ssl: add fetchs 'ssl_c_der' and 'ssl_f_der' to return DER + formatted certs + * Disable SSLv3 in the default configuration file. 
+ + -- Vincent Bernat Fri, 31 Oct 2014 13:48:19 +0100 + +haproxy (1.5.6-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + + BUG/MEDIUM: systemd: set KillMode to 'mixed' + + MINOR: systemd: Check configuration before start + + BUG/MEDIUM: config: avoid skipping disabled proxies + + BUG/MINOR: config: do not accept more track-sc than configured + + BUG/MEDIUM: backend: fix URI hash when a query string is present + * Drop systemd patches: + + haproxy.service-also-check-on-start.patch + + haproxy.service-set-killmode-to-mixed.patch + * Refresh other patches. + + -- Vincent Bernat Mon, 20 Oct 2014 18:10:21 +0200 + +haproxy (1.5.5-1) unstable; urgency=medium + + [ Vincent Bernat ] + * initscript: use start-stop-daemon to reliably terminate all haproxy + processes. Also treat stopping a non-running haproxy as success. + (Closes: #762608, LP: #1038139) + + [ Apollon Oikonomopoulos ] + * New upstream stable release including the following fixes: + + DOC: Address issue where documentation is excluded due to a gitignore + rule. + + MEDIUM: Improve signal handling in systemd wrapper. + + BUG/MINOR: config: don't propagate process binding for dynamic + use_backend + + MINOR: Also accept SIGHUP/SIGTERM in systemd-wrapper + + DOC: clearly state that the "show sess" output format is not fixed + + MINOR: stats: fix minor typo fix in stats_dump_errors_to_buffer() + + DOC: indicate in the doc that track-sc* can wait if data are missing + + MEDIUM: http: enable header manipulation for 101 responses + + BUG/MEDIUM: config: propagate frontend to backend process binding again. 
+ + MEDIUM: config: properly propagate process binding between proxies + + MEDIUM: config: make the frontends automatically bind to the listeners' + processes + + MEDIUM: config: compute the exact bind-process before listener's + maxaccept + + MEDIUM: config: only warn if stats are attached to multi-process bind + directives + + MEDIUM: config: report it when tcp-request rules are misplaced + + MINOR: config: detect the case where a tcp-request content rule has no + inspect-delay + + MEDIUM: systemd-wrapper: support multiple executable versions and names + + BUG/MEDIUM: remove debugging code from systemd-wrapper + + BUG/MEDIUM: http: adjust close mode when switching to backend + + BUG/MINOR: config: don't propagate process binding on fatal errors. + + BUG/MEDIUM: check: rule-less tcp-check must detect connect failures + + BUG/MINOR: tcp-check: report the correct failed step in the status + + DOC: indicate that weight zero is reported as DRAIN + * Add a new patch (haproxy.service-set-killmode-to-mixed.patch) to fix the + systemctl stop action conflicting with the systemd wrapper now catching + SIGTERM. + * Bump standards to 3.9.6; no changes needed. + * haproxy-doc: link to tracker.debian.org instead of packages.qa.debian.org. + * d/copyright: move debian/dconv/* paragraph after debian/*, so that it + actually matches the files it is supposed to. + + -- Apollon Oikonomopoulos Wed, 08 Oct 2014 12:34:53 +0300 + +haproxy (1.5.4-1) unstable; urgency=high + + * New upstream version. + + Fix a critical bug that, under certain unlikely conditions, allows a + client to crash haproxy. + * Prefix rsyslog configuration file to ensure to log only to + /var/log/haproxy. Thanks to Paul Bourke for the patch. 
+ + -- Vincent Bernat Tue, 02 Sep 2014 19:14:38 +0200 + +haproxy (1.5.3-1) unstable; urgency=medium + + * New upstream stable release, fixing the following issues: + + Memory corruption when building a proxy protocol v2 header + + Memory leak in SSL DHE key exchange + + -- Apollon Oikonomopoulos Fri, 25 Jul 2014 10:41:36 +0300 + +haproxy (1.5.2-1) unstable; urgency=medium + + * New upstream stable release. Important fixes: + + A few sample fetch functions when combined in certain ways would return + malformed results, possibly crashing the HAProxy process. + + Hash-based load balancing and http-send-name-header would fail for + requests which contain a body which starts to be forwarded before the + data is used. + + -- Apollon Oikonomopoulos Mon, 14 Jul 2014 00:42:32 +0300 + +haproxy (1.5.1-1) unstable; urgency=medium + + * New upstream stable release: + + Fix a file descriptor leak for clients that disappear before connecting. + + Do not staple expired OCSP responses. + + -- Apollon Oikonomopoulos Tue, 24 Jun 2014 12:56:30 +0300 + +haproxy (1.5.0-1) unstable; urgency=medium + + * New upstream stable series. Notable changes since the 1.4 series: + + Native SSL support on both sides with SNI/NPN/ALPN and OCSP stapling. + + IPv6 and UNIX sockets are supported everywhere + + End-to-end HTTP keep-alive for better support of NTLM and improved + efficiency in static farms + + HTTP/1.1 response compression (deflate, gzip) to save bandwidth + + PROXY protocol versions 1 and 2 on both sides + + Data sampling on everything in request or response, including payload + + ACLs can use any matching method with any input sample + + Maps and dynamic ACLs updatable from the CLI + + Stick-tables support counters to track activity on any input sample + + Custom format for logs, unique-id, header rewriting, and redirects + + Improved health checks (SSL, scripted TCP, check agent, ...) 
+ + Much more scalable configuration supports hundreds of thousands of + backends and certificates without sweating + + * Upload to unstable, merge all 1.5 work from experimental. Most important + packaging changes since 1.4.25-1 include: + + systemd support. + + A more sane default config file. + + Zero-downtime upgrades between 1.5 releases by gracefully reloading + HAProxy during upgrades. + + HTML documentation shipped in the haproxy-doc package. + + kqueue support for kfreebsd. + + * Packaging changes since 1.5~dev26-2: + + Drop patches merged upstream: + o Fix-reference-location-in-manpage.patch + o 0001-BUILD-stats-workaround-stupid-and-bogus-Werror-forma.patch + + d/watch: look for stable 1.5 releases + + systemd: respect CONFIG and EXTRAOPTS when specified in + /etc/default/haproxy. + + initscript: test the configuration before start or reload. + + initscript: remove the ENABLED flag and logic. + + -- Apollon Oikonomopoulos Fri, 20 Jun 2014 11:05:17 +0300 + +haproxy (1.5~dev26-2) experimental; urgency=medium + + * initscript: start should not fail when haproxy is already running + + Fixes upgrades from post-1.5~dev24-1 installations + + -- Apollon Oikonomopoulos Wed, 04 Jun 2014 13:20:39 +0300 + +haproxy (1.5~dev26-1) experimental; urgency=medium + + * New upstream development version. + + Add a patch to fix compilation with -Werror=format-security + + -- Vincent Bernat Wed, 28 May 2014 20:32:10 +0200 + +haproxy (1.5~dev25-1) experimental; urgency=medium + + [ Vincent Bernat ] + * New upstream development version. + * Rename "contimeout", "clitimeout" and "srvtimeout" in the default + configuration file to "timeout connection", "timeout client" and + "timeout server". + + [ Apollon Oikonomopoulos ] + * Build on kfreebsd using the "freebsd" target; enables kqueue support. 
+ + -- Vincent Bernat Thu, 15 May 2014 00:20:11 +0200 + +haproxy (1.5~dev24-2) experimental; urgency=medium + + * New binary package: haproxy-doc + + Contains the HTML documentation built using a version of Cyril Bonté's + haproxy-dconv (https://github.com/cbonte/haproxy-dconv). + + Add Build-Depends-Indep on python and python-mako + + haproxy Suggests: haproxy-doc + * systemd: check config file for validity on reload. + * haproxy.cfg: + + Enable the stats socket by default and bind it to + /run/haproxy/admin.sock, which is accessible by the haproxy group. + /run/haproxy creation is handled by the initscript for sysv-rc and a + tmpfiles.d config for systemd. + + Set the default locations for CA and server certificates to + /etc/ssl/certs and /etc/ssl/private respectively. + + Set the default cipher list to be used on listening SSL sockets to + enable PFS, preferring ECDHE ciphers by default. + * Gracefully reload HAProxy on upgrade instead of performing a full restart. + * debian/rules: split build into binary-arch and binary-indep. + * Build-depend on debhelper >= 9, set compat to 9. + + -- Apollon Oikonomopoulos Sun, 27 Apr 2014 13:37:17 +0300 + +haproxy (1.5~dev24-1) experimental; urgency=medium + + * New upstream development version, fixes major regressions introduced in + 1.5~dev23: + + + Forwarding of a message body (request or response) would automatically + stop after the transfer timeout strikes, and with no error. + + Redirects failed to update the msg->next offset after consuming the + request, so if they were made with keep-alive enabled and starting with + a slash (relative location), then the buffer was shifted by a negative + amount of data, causing a crash. + + The code to standardize DH parameters caused an important performance + regression for, so it was temporarily reverted for the time needed to + understand the cause and to fix it. 
+ + For a complete release announcement, including other bugfixes and feature + enhancements, see http://deb.li/yBVA. + + -- Apollon Oikonomopoulos Sun, 27 Apr 2014 11:09:37 +0300 + +haproxy (1.5~dev23-1) experimental; urgency=medium + + * New upstream development version; notable changes since 1.5~dev22: + + SSL record size optimizations to speed up both, small and large + transfers. + + Dynamic backend name support in use_backend. + + Compressed chunked transfer encoding support. + + Dynamic ACL manipulation via the CLI. + + New "language" converter for extracting language preferences from + Accept-Language headers. + * Remove halog source and systemd unit files from + /usr/share/doc/haproxy/contrib, they are built and shipped in their + appropriate locations since 1.5~dev19-2. + + -- Apollon Oikonomopoulos Wed, 23 Apr 2014 11:12:34 +0300 + +haproxy (1.5~dev22-1) experimental; urgency=medium + + * New upstream development version + * watch: use the source page and not the main one + + -- Apollon Oikonomopoulos Mon, 03 Feb 2014 17:45:51 +0200 + +haproxy (1.5~dev21+20140118-1) experimental; urgency=medium + + * New upstream development snapshot, with the following fixes since + 1.5-dev21: + + 00b0fb9 BUG/MAJOR: ssl: fix breakage caused by recent fix abf08d9 + + 410f810 BUG/MEDIUM: map: segmentation fault with the stats's socket + command "set map ..." 
+ + abf08d9 BUG/MAJOR: connection: fix mismatch between rcv_buf's API and + usage + + 35249cb BUG/MINOR: pattern: pattern comparison executed twice + + c920096 BUG/MINOR: http: don't clear the SI_FL_DONT_WAKE flag between + requests + + b800623 BUG/MEDIUM: stats: fix HTTP/1.0 breakage introduced in previous + patch + + 61f7f0a BUG/MINOR: stream-int: do not clear the owner upon unregister + + 983eb31 BUG/MINOR: channel: CHN_INFINITE_FORWARD must be unsigned + + a3ae932 BUG/MEDIUM: stats: the web interface must check the tracked + servers before enabling + + e24d963 BUG/MEDIUM: checks: unchecked servers could not be enabled + anymore + + 7257550 BUG/MINOR: http: always disable compression on HTTP/1.0 + + 9f708ab BUG/MINOR: checks: successful check completion must not + re-enable MAINT servers + + ff605db BUG/MEDIUM: backend: do not re-initialize the connection's + context upon reuse + + ea90063 BUG/MEDIUM: stream-int: fix the keep-alive idle connection + handler + * Update debian/copyright to reflect the license of ebtree/ + (closes: #732614) + * Synchronize debian/copyright with source + * Add Documentation field to the systemd unit file + + -- Apollon Oikonomopoulos Mon, 20 Jan 2014 10:07:34 +0200 + +haproxy (1.5~dev21-1) experimental; urgency=low + + [ Prach Pongpanich ] + * Bump Standards-Version to 3.9.5 + + [ Thomas Bechtold ] + * debian/control: Add haproxy-dbg binary package for debug symbols. + + [ Apollon Oikonomopoulos ] + * New upstream development version. + * Require syslog to be operational before starting. Closes: #726323. + + -- Vincent Bernat Tue, 17 Dec 2013 01:38:04 +0700 + +haproxy (1.5~dev19-2) experimental; urgency=low + + [ Vincent Bernat ] + * Really enable systemd support by using dh-systemd helper. + * Don't use -L/usr/lib and rely on default search path. Closes: #722777. + + [ Apollon Oikonomopoulos ] + * Ship halog. 
+ + -- Vincent Bernat Thu, 12 Sep 2013 21:58:05 +0200 + +haproxy (1.5~dev19-1) experimental; urgency=high + + [ Vincent Bernat ] + * New upstream version. + + CVE-2013-2175: fix a possible crash when using negative header + occurrences. + + Drop 0002-Fix-typo-in-src-haproxy.patch: applied upstream. + * Enable gzip compression feature. + + [ Prach Pongpanich ] + * Drop bashism patch. It seems useless to maintain a patch to convert + example scripts from /bin/bash to /bin/sh. + * Fix reload/restart action of init script (LP: #1187469) + + -- Vincent Bernat Mon, 17 Jun 2013 22:03:58 +0200 + +haproxy (1.5~dev18-1) experimental; urgency=low + + [ Apollon Oikonomopoulos ] + * New upstream development version + + [ Vincent Bernat ] + * Add support for systemd. Currently, /etc/default/haproxy is not used + when using systemd. + + -- Vincent Bernat Sun, 26 May 2013 12:33:00 +0200 + +haproxy (1.4.25-1) unstable; urgency=medium + + [ Prach Pongpanich ] + * New upstream version. + * Update watch file to use the source page. + * Bump Standards-Version to 3.9.5. + + [ Thomas Bechtold ] + * debian/control: Add haproxy-dbg binary package for debug symbols. + + [ Apollon Oikonomopoulos ] + * Require syslog to be operational before starting. Closes: #726323. + * Document how to bind non-local IPv6 addresses. + * Add a reference to configuration.txt.gz to the manpage. + * debian/copyright: synchronize with source. + + -- Prach Pongpanich Fri, 28 Mar 2014 09:35:09 +0700 + +haproxy (1.4.24-2) unstable; urgency=low + + [ Apollon Oikonomopoulos ] + * Ship contrib/halog as /usr/bin/halog. + + [ Vincent Bernat ] + * Don't use -L/usr/lib and rely on default search path. Closes: #722777. + + -- Vincent Bernat Sun, 15 Sep 2013 14:36:27 +0200 + +haproxy (1.4.24-1) unstable; urgency=high + + [ Vincent Bernat ] + * New upstream version. + + CVE-2013-2175: fix a possible crash when using negative header + occurrences. + + [ Prach Pongpanich ] + * Drop bashism patch. 
It seems useless to maintain a patch to convert + example scripts from /bin/bash to /bin/sh. + * Fix reload/restart action of init script (LP: #1187469). + + -- Vincent Bernat Mon, 17 Jun 2013 21:56:26 +0200 + +haproxy (1.4.23-1) unstable; urgency=low + + [ Apollon Oikonomopoulos ] + * New upstream version (Closes: #643650, #678953) + + This fixes CVE-2012-2942 (Closes: #674447) + + This fixes CVE-2013-1912 (Closes: #704611) + * Ship vim addon as vim-haproxy (Closes: #702893) + * Check for the configuration file after sourcing /etc/default/haproxy + (Closes: #641762) + * Use /dev/log for logging by default (Closes: #649085) + + [ Vincent Bernat ] + * debian/control: + + add Vcs-* fields + + switch maintenance to Debian HAProxy team. (Closes: #706890) + + drop dependency to quilt: 3.0 (quilt) format is in use. + * debian/rules: + + don't explicitly call dh_installchangelog. + + use dh_installdirs to install directories. + + use dh_install to install error and configuration files. + + switch to `linux2628` Makefile target for Linux. + * debian/postrm: + + remove haproxy user and group on purge. + * Ship a more minimal haproxy.cfg file: no `listen` blocks but `global` + and `defaults` block with appropriate configuration to use chroot and + logging in the expected way. 
+ + [ Prach Pongpanich ] + * debian/copyright: + + add missing copyright holders + + update years of copyright + * debian/rules: + + build with -Wl,--as-needed to get rid of unnecessary depends + * Remove useless files in debian/haproxy.{docs,examples} + * Update debian/watch file, thanks to Bart Martens + + -- Vincent Bernat Mon, 06 May 2013 20:02:14 +0200 + +haproxy (1.4.15-1) unstable; urgency=low + + * New upstream release with critical bug fix (Closes: #631351) + + -- Christo Buschek Thu, 14 Jul 2011 18:17:05 +0200 + +haproxy (1.4.13-1) unstable; urgency=low + + * New maintainer upload (Closes: #615246) + * New upstream release + * Standards-version goes 3.9.1 (no change) + * Added patch bashism (Closes: #581109) + * Added a README.source file. + + -- Christo Buschek Thu, 11 Mar 2011 12:41:59 +0000 + +haproxy (1.4.8-1) unstable; urgency=low + + * New upstream release. + + -- Arnaud Cornet Fri, 18 Jun 2010 00:42:53 +0100 + +haproxy (1.4.4-1) unstable; urgency=low + + * New upstream release + * Add splice and tproxy support + * Add regparm optimization on i386 + * Switch to dpkg-source 3.0 (quilt) format + + -- Arnaud Cornet Thu, 15 Apr 2010 20:00:34 +0100 + +haproxy (1.4.2-1) unstable; urgency=low + + * New upstream release + * Remove debian/patches/haproxy.1-hyphen.patch gone upstream + * Tighten quilt build dep (Closes: #567087) + * standards-version goes 3.8.4 (no change) + * Add $remote_fs to init.d script required start and stop + + -- Arnaud Cornet Sat, 27 Mar 2010 15:19:48 +0000 + +haproxy (1.3.22-1) unstable; urgency=low + + * New upstream bugfix release + + -- Arnaud Cornet Mon, 19 Oct 2009 22:31:45 +0100 + +haproxy (1.3.21-1) unstable; urgency=low + + [ Michael Shuler ] + * New Upstream Version (Closes: #538992) + * Added override for example shell scripts in docs (Closes: #530096) + * Added upstream changelog to docs + * Added debian/watch + * Updated debian/copyright format + * Added haproxy.1-hyphen.patch, to fix hyphen in man page + * Upgrade 
Standards-Version to 3.8.3 (no change needed) + * Upgrade debian/compat to 7 (no change needed) + + [ Arnaud Cornet ] + * New upstream version. + * Merge Michael's work, few changelog fixes + * Add debian/README.source to point to quilt doc + * Depend on debhelper >= 7.0.50~ and use overrides in debian/rules + + -- Arnaud Cornet Sun, 18 Oct 2009 14:01:29 +0200 + +haproxy (1.3.18-1) unstable; urgency=low + + * New Upstream Version (Closes: #534583). + * Add contrib directory in docs + + -- Arnaud Cornet Fri, 26 Jun 2009 00:11:01 +0200 + +haproxy (1.3.15.7-2) unstable; urgency=low + + * Fix build without debian/patches directory (Closes: #515682) using + /usr/share/quilt/quilt.make. + + -- Arnaud Cornet Tue, 17 Feb 2009 08:55:12 +0100 + +haproxy (1.3.15.7-1) unstable; urgency=low + + * New Upstream Version. + * Remove upstream patches: + -use_backend-consider-unless.patch + -segfault-url_param+check_post.patch + -server-timeout.patch + -closed-fd-remove.patch + -connection-slot-during-retry.patch + -srv_dynamic_maxconn.patch + -do-not-pause-backends-on-reload.patch + -acl-in-default.patch + -cookie-capture-check.patch + -dead-servers-queue.patch + + -- Arnaud Cornet Mon, 16 Feb 2009 11:20:21 +0100 + +haproxy (1.3.15.2-2~lenny1) testing-proposed-updates; urgency=low + + * Rebuild for lenny to circumvent pcre3 shlibs bump. + + -- Arnaud Cornet Wed, 14 Jan 2009 11:28:36 +0100 + +haproxy (1.3.15.2-2) unstable; urgency=low + + * Add stable branch bug fixes from upstream (Closes: #510185). 
+ - use_backend-consider-unless.patch: consider "unless" in use_backend + - segfault-url_param+check_post.patch: fix segfault with url_param + + check_post + - server-timeout.patch: consider server timeout in all circumstances + - closed-fd-remove.patch: drop info about closed file descriptors + - connection-slot-during-retry.patch: do not release the connection slot + during a retry + - srv_dynamic_maxconn.patch: dynamic connection throttling api fix + - do-not-pause-backends-on-reload.patch: make reload reliable + - acl-in-default.patch: allow acl-related keywords in defaults sections + - cookie-capture-check.patch: cookie capture is declared in the frontend + but checked on the backend + - dead-servers-queue.patch: make dead servers not suck pending connections + * Add quilt build-dependency. Use quilt in debian/rules to apply + patches. + + -- Arnaud Cornet Wed, 31 Dec 2008 08:50:21 +0100 + +haproxy (1.3.15.2-1) unstable; urgency=low + + * New Upstream Version (Closes: #497186). + + -- Arnaud Cornet Sat, 30 Aug 2008 18:06:31 +0200 + +haproxy (1.3.15.1-1) unstable; urgency=low + + * New Upstream Version + * Upgrade standards version to 3.8.0 (no change needed). + * Build with TARGET=linux26 on linux, TARGET=generic on other systems. + + -- Arnaud Cornet Fri, 20 Jun 2008 00:38:50 +0200 + +haproxy (1.3.14.5-1) unstable; urgency=low + + * New Upstream Version (Closes: #484221) + * Use debhelper 7, drop CDBS. + + -- Arnaud Cornet Wed, 04 Jun 2008 19:21:56 +0200 + +haproxy (1.3.14.3-1) unstable; urgency=low + + * New Upstream Version + * Add status argument support to init-script to conform to LSB. + * Cleanup pidfile after stop in init script. Init script return code fixups. + + -- Arnaud Cornet Sun, 09 Mar 2008 21:30:29 +0100 + +haproxy (1.3.14.2-3) unstable; urgency=low + + * Add init script support for nbproc > 1 in configuration. That is, + multiple haproxy processes. + * Use 'option redispatch' instead of redispatch in debian default + config. 
+ + -- Arnaud Cornet Sun, 03 Feb 2008 18:22:28 +0100 + +haproxy (1.3.14.2-2) unstable; urgency=low + + * Fix init script's reload function to use -sf instead of -st (to wait for + active session to finish cleanly). Also support dash. Thanks to + Jean-Baptiste Quenot for noticing. + + -- Arnaud Cornet Thu, 24 Jan 2008 23:47:26 +0100 + +haproxy (1.3.14.2-1) unstable; urgency=low + + * New Upstream Version + * Simplify DEB_MAKE_INVOKE, as upstream now supports us overriding + CFLAGS. + * Move haproxy to usr/sbin. + + -- Arnaud Cornet Mon, 21 Jan 2008 22:42:51 +0100 + +haproxy (1.3.14.1-1) unstable; urgency=low + + * New upstream release. + * Drop dfsg list and hash code rewrite (merged upstream). + * Add a HAPROXY variable in init script. + * Drop makefile patch, fix debian/rules accordingly. Drop build-dependency + on quilt. + * Manpage now upstream. Ship upstream's and drop ours. + + -- Arnaud Cornet Tue, 01 Jan 2008 22:50:09 +0100 + +haproxy (1.3.12.dfsg2-1) unstable; urgency=low + + * New upstream bugfix release. + * Use new Homepage tag. + * Bump standards-version (no change needed). + * Add build-depend on quilt and add patch to allow proper CFLAGS passing to + make. + + -- Arnaud Cornet Tue, 25 Dec 2007 21:52:59 +0100 + +haproxy (1.3.12.dfsg-1) unstable; urgency=low + + * Initial release (Closes: #416397). + * The DFSG removes files with GPL-incompatible license and adds a + re-implementation by me. 
+ + -- Arnaud Cornet Fri, 17 Aug 2007 09:33:41 +0200 diff --git a/debian/clean b/debian/clean new file mode 100644 index 0000000..8d9fe01 --- /dev/null +++ b/debian/clean @@ -0,0 +1,3 @@ +doc/configuration.html +doc/intro.html +doc/management.html diff --git a/debian/control b/debian/control new file mode 100644 index 0000000..81528db --- /dev/null +++ b/debian/control @@ -0,0 +1,60 @@ +Source: haproxy +Section: net +Priority: optional +Maintainer: Debian HAProxy Maintainers +Uploaders: Apollon Oikonomopoulos , + Prach Pongpanich , + Vincent Bernat +Standards-Version: 4.4.1 +Build-Depends: debhelper-compat (= 12), + libpcre2-dev, + libssl-dev, + liblua5.3-dev, + libsystemd-dev [linux-any], + libjemalloc-dev, + python3-sphinx, + libopentracing-c-wrapper-dev, + pkg-config, + systemd-dev [linux-any] +Build-Depends-Indep: python3, python3-mako +Homepage: http://www.haproxy.org/ +Vcs-Git: https://salsa.debian.org/haproxy-team/haproxy.git +Vcs-Browser: https://salsa.debian.org/haproxy-team/haproxy + +Package: haproxy +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, lsb-base (>= 3.0-6) +Pre-Depends: dpkg (>= 1.17.14), ${misc:Pre-Depends} +Suggests: vim-haproxy, haproxy-doc +Description: fast and reliable load balancing reverse proxy + HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high + availability environments. It features connection persistence through HTTP + cookies, load balancing, header addition, modification, deletion both ways. It + has request blocking capabilities and provides interface to display server + status. + +Package: haproxy-doc +Section: doc +Architecture: all +Depends: ${misc:Depends}, libjs-bootstrap (<< 4), libjs-jquery, + ${sphinxdoc:Depends} +Pre-Depends: dpkg (>= 1.17.14) +Description: fast and reliable load balancing reverse proxy (HTML documentation) + HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high + availability environments. 
It features connection persistence through HTTP + cookies, load balancing, header addition, modification, deletion both ways. It + has request blocking capabilities and provides interface to display server + status. + . + This package contains the HTML documentation for haproxy. + +Package: vim-haproxy +Architecture: all +Depends: ${misc:Depends} +Recommends: vim-addon-manager +Description: syntax highlighting for HAProxy configuration files + The vim-haproxy package provides filetype detection and syntax highlighting + for HAProxy configuration files. + . + As per the Debian vim policy, installed addons are not activated + automatically, but the "vim-addon-manager" tool can be used for this purpose. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000..e33a528 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,933 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: haproxy +Upstream-Contact: Willy Tarreau +Source: http://www.haproxy.org/ + +Files: * +Copyright: Copyright 2000-2021 Willy Tarreau . +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: .github/matrix.py +Copyright: 2020, Tim Duesterhus + 2019, Ilya Shipitsin +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: addons/51degrees/dummy/* +Copyright: 51Degrees Mobile Experts Limited. / 2019, 51Degrees Mobile Experts Limited, 5 Charlotte Close +License: MPL-2.0 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: addons/ot/* +Copyright: 2017, 2020, HAProxy Technologies +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: addons/promex/service-prometheus.c +Copyright: 2018, 2019, Christopher Faulet +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: addons/wurfl/dummy/* +Copyright: ScientiaMobile, Inc. +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: addons/wurfl/dummy/Makefile +Copyright: Copyright 2000-2021 Willy Tarreau . +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: admin/halog/fgets2.c +Copyright: 2000-2012, Willy Tarreau +License: LGPL-2.1 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: admin/halog/halog.c +Copyright: 2000-2020, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: admin/iprange/* +Copyright: 2000-2021, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: admin/netsnmp-perl/* +Copyright: 2007-2010, Krzysztof Piotr Oledzki +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: admin/release-estimator/release-estimator.py +Copyright: 2020, HAProxy Technologies, Daniel Corbett +License: GPL-3+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: admin/wireshark-dissectors/peers/packet-happp.c +Copyright: 2016, Frédéric Lécaille + 1998, Gerald Combs +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: debian/* +Copyright: Copyright (C) 2007-2011, Arnaud Cornet + Copyright (C) 2011, Christo Buschek + Copyright (C) 2013, Prach Pongpanich + Copyright (C) 2013-2016, Apollon Oikonomopoulos + Copyright (C) 2013-2016, Vincent Bernat +License: GPL-2 + +Files: debian/dconv/* +Copyright: 2012, Cyril Bonté +License: Apache-2.0 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: debian/dconv/js/typeahead.bundle.js +Copyright: Copyright 2013-2015 Twitter, Inc. 
and other contributors +License: Expat + +Files: dev/base64/* +Copyright: 2009, 2010, Krzysztof Piotr Oledzki +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: dev/tcploop/tcploop.c +Copyright: 2012-2017, Willy Tarreau +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: include/* +Copyright: 2000-2020, Willy Tarreau - w@1wt.eu +License: LGPL-2.1 + +Files: include/haproxy/api-t.h + include/haproxy/api.h + include/haproxy/buf-t.h + include/haproxy/buf.h + include/haproxy/bug.h + include/haproxy/initcall.h + include/haproxy/istbuf.h +Copyright: 2000-2020, Willy Tarreau - w@1wt.eu +License: Expat + +Files: include/haproxy/arg-t.h + include/haproxy/arg.h + include/haproxy/protobuf-t.h + include/haproxy/protobuf.h +Copyright: 2012, Willy Tarreau +License: LGPL-2.1 + +Files: include/haproxy/atomic.h + include/haproxy/thread-t.h + include/haproxy/thread.h +Copyright: 2020, Willy Tarreau - w@1wt.eu + 2017, Christopher Faulet - cfaulet@haproxy.com +License: LGPL-2.1 + +Files: include/haproxy/auth-t.h + include/haproxy/auth.h +Copyright: 2009, 2010, Krzysztof Piotr Oledzki +License: GPL-2+ + +Files: include/haproxy/base64.h + include/haproxy/signal-t.h + include/haproxy/signal.h + include/haproxy/uri_auth-t.h + include/haproxy/uri_auth.h +Copyright: 2000-2020, Willy Tarreau +License: GPL-2+ + +Files: include/haproxy/check-t.h +Copyright: 2008, 2009, Krzysztof Piotr Oledzki + 2000-2020, Willy Tarreau - w@1wt.eu +License: GPL-2+ + +Files: include/haproxy/compression-t.h +Copyright: 2012, Exceliance, David Du Colombier +License: LGPL-2.1 + +Files: include/haproxy/compression.h +Copyright: 2012, (C) Exceliance, David Du Colombier +License: 
LGPL-2.1 + +Files: include/haproxy/counters-t.h +Copyright: 2011-2014, Willy Tarreau + 2008, 2009, Krzysztof Piotr Oledzki +License: LGPL-2.1 + +Files: include/haproxy/dgram-t.h + include/haproxy/dgram.h + include/haproxy/dns-t.h + include/haproxy/fix-t.h + include/haproxy/fix.h + include/haproxy/mqtt-t.h + include/haproxy/mqtt.h + include/haproxy/resolvers-t.h + include/haproxy/resolvers.h +Copyright: 2014, 2020, Baptiste Assmann +License: LGPL-2.1 + +Files: include/haproxy/dict-t.h + include/haproxy/dict.h +Copyright: 2019, Frédéric Lécaille +License: Expat + +Files: include/haproxy/dns.h +Copyright: 2020, HAProxy Technologies +License: LGPL-2.1 + +Files: include/haproxy/extcheck.h +Copyright: 2014, Horms Solutions Ltd, Simon Horman + 2000-2009, 2020, Willy Tarreau +License: LGPL-2.1 + +Files: include/haproxy/fcgi-app-t.h + include/haproxy/fcgi-app.h + include/haproxy/fcgi.h + include/haproxy/h1_htx.h + include/haproxy/http_htx-t.h + include/haproxy/http_htx.h + include/haproxy/htx-t.h + include/haproxy/htx.h + include/haproxy/spoe-t.h + include/haproxy/spoe.h +Copyright: 2017-2019, HAProxy Technologies, Christopher Faulet +License: LGPL-2.1 + +Files: include/haproxy/filters-t.h + include/haproxy/filters.h + include/haproxy/flt_http_comp.h +Copyright: 2015, Qualys Inc., Christopher Faulet +License: LGPL-2.1 + +Files: include/haproxy/h2.h +Copyright: 2017, HAProxy Technologies + 2000-2017, Willy Tarreau - w@1wt.eu +License: Expat + +Files: include/haproxy/hlua-t.h + include/haproxy/hlua.h + include/haproxy/hlua_fcn.h +Copyright: 2015, 2016, Thierry Fournier +License: LGPL-2.1 + +Files: include/haproxy/hpack-dec.h + include/haproxy/hpack-enc.h + include/haproxy/hpack-huff.h + include/haproxy/hpack-tbl-t.h + include/haproxy/hpack-tbl.h + include/haproxy/http-hdr-t.h + include/haproxy/http-hdr.h +Copyright: 2017, HAProxy Technologies + 2014-2020, Willy Tarreau +License: Expat + +Files: include/haproxy/mailers-t.h +Copyright: 2015, Horms Solutions Ltd., Simon Horman + 
2010, EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/haproxy/mailers.h +Copyright: 2020, Willy Tarreau + 2015, Horms Solutions Ltd., Simon Horman + 2010, EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/haproxy/mworker-t.h + include/haproxy/mworker.h +Copyright: HAProxy Technologies 2019, - William Lallemand +License: GPL-2+ + +Files: include/haproxy/namespace-t.h + include/haproxy/namespace.h +Copyright: 2015-2020, Willy Tarreau + 2014, Tamas Kovacs, Sarkozi Laszlo, Krisztian Kovacs +License: LGPL-2.1 + +Files: include/haproxy/net_helper.h +Copyright: 2017-2020, Willy Tarreau + 2017, Olivier Houchard +License: Expat + +Files: include/haproxy/openssl-compat.h +Copyright: Copyright 2000-2013 Willy Tarreau - w@1wt.eu +License: LGPL-2.1 + +Files: include/haproxy/peers-t.h + include/haproxy/peers.h + include/haproxy/ssl_sock-t.h + include/haproxy/ssl_sock.h +Copyright: 2010, 2012, EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/haproxy/proto_quic.h + include/haproxy/quic_sock.h +Copyright: 2020, Frédéric Lécaille +License: LGPL-2.1 + +Files: include/haproxy/proto_sockpair.h +Copyright: HAProxy Technologies - William Lallemand +License: LGPL-2.1 + +Files: include/haproxy/proto_udp.h + include/haproxy/quic_cc-t.h + include/haproxy/quic_cc.h + include/haproxy/quic_frame-t.h + include/haproxy/quic_frame.h + include/haproxy/quic_loss-t.h + include/haproxy/quic_loss.h + include/haproxy/xprt_quic-t.h + include/haproxy/xprt_quic.h +Copyright: 2019, 2020, HAProxy Technologies, Frédéric Lécaille +License: LGPL-2.1 + +Files: include/haproxy/qpack-tbl-t.h +Copyright: 2020, HAProxy Technologies, Frédéric Lécaille +License: Expat + +Files: include/haproxy/quic_tls-t.h + include/haproxy/quic_tls.h +Copyright: 2019, 2020, HAProxy Technologies, Frédéric Lécaille +License: GPL-2+ + +Files: include/haproxy/sample-t.h + include/haproxy/sample.h + include/haproxy/sample_data-t.h + include/haproxy/stick_table-t.h + include/haproxy/stick_table.h 
+Copyright: 2010, 2012, 2013, 2020, Willy Tarreau + 2009, 2010, EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/haproxy/shctx-t.h + include/haproxy/shctx.h +Copyright: 2011, 2012, EXCELIANCE +License: GPL-2+ + +Files: include/haproxy/ssl_ckch-t.h + include/haproxy/ssl_ckch.h + include/haproxy/ssl_crtlist-t.h + include/haproxy/ssl_crtlist.h +Copyright: 2020, HAProxy Technologies, William Lallemand +License: LGPL-2.1 + +Files: include/haproxy/ssl_utils.h +Copyright: 2020, HAProxy Technologies, William Lallemand + 2012, EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/haproxy/tcpcheck-t.h +Copyright: 2020, Gaetan Rivet + 2020, Christopher Faulet + 2013, Baptiste Assmann + 2007-2010, Krzysztof Piotr Oledzki + 2000-2009, 2020, Willy Tarreau +License: GPL-2+ + +Files: include/haproxy/tcpcheck.h +Copyright: 2020, Gaetan Rivet + 2020, Christopher Faulet + 2013, Baptiste Assmann + 2007-2010, Krzysztof Piotr Oledzki + 2000-2009, 2020, Willy Tarreau +License: LGPL-2.1 + +Files: include/haproxy/uri_normalizer-t.h + include/haproxy/uri_normalizer.h +Copyright: 2021, Tim Duesterhus +License: GPL-2+ + +Files: include/haproxy/vars-t.h + include/haproxy/vars.h +Copyright: 2015, Thierry FOURNIER +License: LGPL-2.1 + +Files: include/haproxy/xref-t.h + include/haproxy/xref.h +Copyright: 2020, Willy Tarreau - w@1wt.eu + 2017, Thierry Fournier +License: Expat + +Files: include/import/* +Copyright: 2002-2011, - Willy Tarreau +License: LGPL-2.1 + +Files: include/import/atomic-ops.h +Copyright: Copyright 2000-2013 Willy Tarreau - w@1wt.eu +License: LGPL-2.1 + +Files: include/import/eb32sctree.h +Copyright: 2002-2017, - Willy Tarreau +License: LGPL-2.1 + +Files: include/import/ist.h +Copyright: 2000-2020, Willy Tarreau - w@1wt.eu +License: Expat + +Files: include/import/lru.h + include/import/plock.h + include/import/slz.h +Copyright: 2012-2017, Willy Tarreau +License: Expat + +Files: include/import/mjson.h +Copyright: 2018-2020, Cesanta Software Limited +License: 
Expat + +Files: include/import/sha1.h +Copyright: 2009-2015, Linus Torvalds and others. +License: LGPL-2.1 + +Files: include/import/xxhash.h +Copyright: 2012-2020, Yann Collet +License: BSD-2-clause + +Files: src/* +Copyright: 2000-2020, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/action.c + src/fcgi-app.c + src/flt_spoe.c + src/h1_htx.c + src/http_ana.c + src/http_htx.c + src/htx.c + src/mux_fcgi.c + src/server_state.c +Copyright: 2016-2019, 2021, HAProxy Technologies, Christopher Faulet +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/auth.c +Copyright: 2009, 2010, Krzysztof Piotr Oledzki +License: GPL-2+ + +Files: src/base64.c + src/server.c + src/stats.c +Copyright: 2007-2010, Krzysztof Piotr Oledzki + 2000-2012, Willy Tarreau +License: GPL-2+ + +Files: src/cache.c +Copyright: 2017, 2020, HAProxy Technologies +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/cfgparse-global.c + src/cfgparse-listen.c + src/dict.c + src/namespace.c + src/vars.c + src/version.c +Copyright: Copyright 2000-2015 Willy Tarreau . +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: src/cfgparse-ssl.c + src/ssl_sample.c + src/ssl_utils.c +Copyright: 2020, HAProxy Technologies, William Lallemand + 2012, EXCELIANCE, Emeric Brun +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/check.c + src/cli.c +Copyright: 2007-2010, Krzysztof Piotr Oledzki + 2000-2012, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/compression.c +Copyright: 2012, Exceliance, David Du Colombier +License: GPL-2+ + +Files: src/debug.c + src/haproxy.c + src/wdt.c +Copyright: 2000-2021, Willy Tarreau . +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/dgram.c + src/fix.c + src/mqtt.c + src/resolvers.c +Copyright: 2014, 2020, Baptiste Assmann +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/dns.c +Copyright: 2017, 2020, HAProxy Technologies +License: GPL-2+ + +Files: src/eb32sctree.c + src/eb32tree.c + src/eb64tree.c + src/ebimtree.c + src/ebistree.c + src/ebmbtree.c + src/ebpttree.c + src/ebsttree.c + src/ebtree.c +Copyright: 2002-2011, - Willy Tarreau +License: LGPL-2.1 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/ev_evports.c +Copyright: 2018, Joyent, Inc. 
+License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/extcheck.c +Copyright: 2014, Horms Solutions Ltd, Simon Horman + 2000-2009, 2020, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/fcgi.c +Copyright: 2019, HAProxy Technologies, Christopher Faulet +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/filters.c + src/flt_http_comp.c + src/flt_trace.c +Copyright: 2015, Qualys Inc., Christopher Faulet +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/h2.c +Copyright: 2017, Willy Tarreau + 2017, HAProxy Technologies +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/hlua.c + src/hlua_fcn.c +Copyright: 2015, 2016, Thierry Fournier +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: src/hpack-dec.c + src/hpack-enc.c + src/hpack-huff.c + src/hpack-tbl.c +Copyright: 2017, HAProxy Technologies + 2014-2020, Willy Tarreau +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/lru.c +Copyright: 2012-2017, Willy Tarreau +License: Expat + +Files: src/mailers.c +Copyright: 2020, Willy Tarreau + 2015, Horms Solutions Ltd, Simon Horman +License: GPL-2+ + +Files: src/mjson.c +Copyright: 2018-2020, Cesanta Software Limited +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/mux_h1.c +Copyright: 2018, 2019, Christopher Faulet +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/mworker-prog.c + src/proto_sockpair.c +Copyright: HAProxy Technologies - William Lallemand +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/mworker.c +Copyright: HAProxy Technologies 2019, - William Lallemand +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: src/peers.c + src/ssl_sock.c +Copyright: 2010, 2012, EXCELIANCE, Emeric Brun +License: GPL-2+ + +Files: src/proto_quic.c +Copyright: 2020, Frédéric Lécaille +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/proto_udp.c + src/quic_frame.c + src/quic_sock.c + src/xprt_quic.c +Copyright: 2019, 2020, HAProxy Technologies, Frédéric Lécaille +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/proto_uxdg.c +Copyright: 2020, HAProxy Technologies, Emeric Brun +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/qpack-tbl.c +Copyright: 2020, HAProxy Technologies, Frédéric Lécaille +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/quic_cc.c + src/quic_cc_newreno.c +Copyright: 2019, 2020, HAProxy Technologies, Frédéric Lécaille +License: LGPL-2.1 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/ring.c + src/sink.c + src/trace.c +Copyright: 2000-2020, Willy Tarreau - w@1wt.eu +License: LGPL-2.1 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: src/sample.c + src/stick_table.c +Copyright: 2010, 2012, Willy Tarreau + 2009, 2010, EXCELIANCE, Emeric Brun +License: GPL-2+ + +Files: src/sha1.c +Copyright: 2009-2015, Linus Torvalds and others. +License: LGPL-2.1 +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/shctx.c +Copyright: 2011, 2012, EXCELIANCE +License: GPL-2+ + +Files: src/slz.c +Copyright: 2012-2017, Willy Tarreau +License: Expat +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/ssl_ckch.c + src/ssl_crtlist.c +Copyright: 2020, HAProxy Technologies, William Lallemand +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/tcpcheck.c +Copyright: 2020, Gaetan Rivet + 2020, Christopher Faulet + 2013, Baptiste Assmann + 2007-2010, Krzysztof Piotr Oledzki + 2000-2009, 2020, Willy Tarreau +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/thread.c +Copyright: 2017, Christopher Fauet - cfaulet@haproxy.com +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." 
+ +Files: src/uri_normalizer.c +Copyright: 2021, Tim Duesterhus +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: src/xprt_handshake.c +Copyright: 2019, HAProxy Technologies, Olivier Houchard +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +Files: tests/exp/filltab25.c +Copyright: 2007, willy tarreau. +License: GPL-2+ +Comment: + An OpenSSL exception is present in the LICENSE file: "This program is + released under the GPL with the additional exemption that compiling, + linking, and/or using OpenSSL is allowed." + +License: Apache-2.0 + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + . + http://www.apache.org/licenses/LICENSE-2.0 + . + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + . + On Debian systems, the full text of the Apache License version 2.0 can be + found in the file `/usr/share/common-licenses/Apache-2.0'. + +License: BSD-2-clause + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + . + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + . + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +License: Expat + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + . + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +License: GPL-2 + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License version 2 as + published by the Free Software Foundation. + . + On Debian systems, the complete text of the GNU General Public License, version + 2, can be found in /usr/share/common-licenses/GPL-2. + +License: GPL-2+ + This program is free software; you can redistribute it + and/or modify it under the terms of the GNU General Public + License as published by the Free Software Foundation; either + version 2 of the License, or (at your option) any later + version. + . + This program is distributed in the hope that it will be + useful, but WITHOUT ANY WARRANTY; without even the implied + warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the GNU General Public License for more + details. + . + You should have received a copy of the GNU General Public + License along with this package; if not, write to the Free + Software Foundation, Inc., 51 Franklin St, Fifth Floor, + Boston, MA 02110-1301 USA + . + On Debian systems, the full text of the GNU General Public + License version 2 can be found in the file + `/usr/share/common-licenses/GPL-2'. + +License: GPL-3+ + This software is Copyright (c) 2020 by X. Ample. + . + This is free software, licensed under: + . + The GNU General Public License, Version 3, June 2007 + . + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; version 3 dated June, 2007, or (at + your option) any later version. 
+ On Debian systems, the complete text of version 3 of the GNU General + Public License can be found in '/usr/share/common-licenses/GPL-3'. + +License: LGPL-2.1 + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + . + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + . + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + . + On Debian systems, the complete text of the GNU Lesser General Public License, + version 2.1, can be found in /usr/share/common-licenses/LGPL-2.1. + +License: MPL-2.0 + This software is Copyright (c) 2020 by X. Ample. + . + This is free software, licensed under: + . + Mozilla Public License Version 2.0 + . + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. + On Debian systems, the complete text of Mozilla Public License v 2.0 + can be found in '/usr/share/common-licenses/MPL-2.0'. diff --git a/debian/dconv/LICENSE b/debian/dconv/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/debian/dconv/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/debian/dconv/NOTICE b/debian/dconv/NOTICE new file mode 100644 index 0000000..c9575a7 --- /dev/null +++ b/debian/dconv/NOTICE @@ -0,0 +1,13 @@ +Copyright 2012 Cyril Bonté + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/debian/dconv/README.md b/debian/dconv/README.md new file mode 100644 index 0000000..4ca89b2 --- /dev/null +++ b/debian/dconv/README.md @@ -0,0 +1,21 @@ +# HAProxy Documentation Converter + +Made to convert the HAProxy documentation into HTML. + +More than HTML, the main goal is to provide easy navigation. + +## Documentations + +A bot periodically fetches last commits for HAProxy 1.4 and 1.5 to produce up-to-date documentations. + +Converted documentations are then stored online : +- HAProxy 1.4 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.4.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.4.html) +- HAProxy 1.5 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.5.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.5.html) +- HAProxy 1.6 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.6.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.6.html) + + +## Contribute + +The project now lives by itself, as it is sufficiently useable. But I'm sure we can do even better. +Feel free to report feature requests or to provide patches ! 
+ diff --git a/debian/dconv/css/check.png b/debian/dconv/css/check.png new file mode 100644 index 0000000..a7fab32 Binary files /dev/null and b/debian/dconv/css/check.png differ diff --git a/debian/dconv/css/cross.png b/debian/dconv/css/cross.png new file mode 100644 index 0000000..24f5064 Binary files /dev/null and b/debian/dconv/css/cross.png differ diff --git a/debian/dconv/css/page.css b/debian/dconv/css/page.css new file mode 100644 index 0000000..b48fdd2 --- /dev/null +++ b/debian/dconv/css/page.css @@ -0,0 +1,223 @@ +/* Global Styles */ + +body { + margin-top: 50px; + background: #eee; +} + +a.anchor { + display: block; position: relative; top: -50px; visibility: hidden; +} + +/* ------------------------------- */ + +/* Wrappers */ + +/* ------------------------------- */ + +#wrapper { + width: 100%; +} + +#page-wrapper { + padding: 0 15px 50px; + width: 740px; + background-color: #fff; + margin-left: 250px; +} + +#sidebar { + position: fixed; + width: 250px; + top: 50px; + bottom: 0; + padding: 15px; + background: #f5f5f5; + border-right: 1px solid #ccc; +} + + +/* ------------------------------- */ + +/* Twitter typeahead.js */ + +/* ------------------------------- */ + +.twitter-typeahead { + width: 100%; +} +.typeahead, +.tt-query, +.tt-hint { + width: 100%; + padding: 8px 12px; + border: 2px solid #ccc; + -webkit-border-radius: 8px; + -moz-border-radius: 8px; + border-radius: 8px; + outline: none; +} + +.typeahead { + background-color: #fff; +} + +.typeahead:focus { + border: 2px solid #0097cf; +} + +.tt-query { + -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); + -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); + box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); +} + +.tt-hint { + color: #999 +} + +.tt-menu { + width: 100%; + margin-top: 4px; + padding: 8px 0; + background-color: #fff; + border: 1px solid #ccc; + border: 1px solid rgba(0, 0, 0, 0.2); + -webkit-border-radius: 8px; + -moz-border-radius: 8px; + border-radius: 8px; + 
-webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2); + -moz-box-shadow: 0 5px 10px rgba(0,0,0,.2); + box-shadow: 0 5px 10px rgba(0,0,0,.2); +} + +.tt-suggestion { + padding: 3px 8px; + line-height: 24px; +} + +.tt-suggestion:hover { + cursor: pointer; + color: #fff; + background-color: #0097cf; +} + +.tt-suggestion.tt-cursor { + color: #fff; + background-color: #0097cf; + +} + +.tt-suggestion p { + margin: 0; +} + +#searchKeyword { + width: 100%; + margin: 0; +} + +#searchKeyword .tt-menu { + max-height: 300px; + overflow-y: auto; +} + +/* ------------------------------- */ + +/* Misc */ + +/* ------------------------------- */ + +.well-small ul { + padding: 0px; +} +.table th, +.table td.pagination-centered { + text-align: center; +} + +pre { + overflow: visible; /* Workaround for dropdown menus */ +} + +pre.text { + padding: 0; + font-size: 13px; + color: #000; + background: transparent; + border: none; + margin-bottom: 18px; +} +pre.arguments { + font-size: 13px; + color: #000; + background: transparent; +} + +.comment { + color: #888; +} +small, .small { + color: #888; +} +.level1 { + font-size: 125%; +} +.sublevels { + border-left: 1px solid #ccc; + padding-left: 10px; +} +.tab { + padding-left: 20px; +} +.keyword { + font-family: Menlo, Monaco, "Courier New", monospace; + white-space: pre; + background: #eee; + border-top: 1px solid #fff; + border-bottom: 1px solid #ccc; +} + +.label-see-also { + background-color: #999; +} +.label-disabled { + background-color: #ccc; +} +h5 { + text-decoration: underline; +} + +.example-desc { + border-bottom: 1px solid #ccc; + margin-bottom: 18px; +} +.noheight { + min-height: 0 !important; +} +.separator { + margin-bottom: 18px; +} + +div { + word-wrap: break-word; +} + +html, body { + width: 100%; + min-height: 100%: +} + +.dropdown-menu > li { + white-space: nowrap; +} +/* TEMPORARILY HACKS WHILE PRE TAGS ARE USED +-------------------------------------------------- */ + +h5, +.unpre, +.example-desc, +.dropdown-menu { + 
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + white-space: normal; +} diff --git a/debian/dconv/haproxy-dconv.py b/debian/dconv/haproxy-dconv.py new file mode 100755 index 0000000..ec800cf --- /dev/null +++ b/debian/dconv/haproxy-dconv.py @@ -0,0 +1,534 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright 2012 Cyril Bonté +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +''' +TODO : ability to split chapters into several files +TODO : manage keyword locality (server/proxy/global ; ex : maxconn) +TODO : Remove global variables where possible +''' +import os +import subprocess +import sys +import html +import re +import time +import datetime + +from optparse import OptionParser + +from mako.template import Template +from mako.lookup import TemplateLookup +from mako.exceptions import TopLevelLookupException + +from parser import PContext +from parser import remove_indent +from parser import * + +from urllib.parse import quote + +VERSION = "" +HAPROXY_GIT_VERSION = False + +def main(): + global VERSION, HAPROXY_GIT_VERSION + + usage="Usage: %prog --infile --outfile " + + optparser = OptionParser(description='Generate HTML Document from HAProxy configuation.txt', + version=VERSION, + usage=usage) + optparser.add_option('--infile', '-i', help='Input file mostly the configuration.txt') + optparser.add_option('--outfile','-o', help='Output file') + optparser.add_option('--base','-b', default = '', help='Base directory for relative links') + 
(option, args) = optparser.parse_args() + + if not (option.infile and option.outfile) or len(args) > 0: + optparser.print_help() + exit(1) + + option.infile = os.path.abspath(option.infile) + option.outfile = os.path.abspath(option.outfile) + + os.chdir(os.path.dirname(__file__)) + + VERSION = get_git_version() + if not VERSION: + sys.exit(1) + + HAPROXY_GIT_VERSION = get_haproxy_git_version(os.path.dirname(option.infile)) + + convert(option.infile, option.outfile, option.base) + + +# Temporarily determine the version from git to follow which commit generated +# the documentation +def get_git_version(): + if not os.path.isdir(".git"): + print("This does not appear to be a Git repository.", file=sys.stderr) + return + try: + p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError: + print("Unable to run git", file=sys.stderr) + return + version = p.communicate()[0] + if p.returncode != 0: + print("Unable to run git", file=sys.stderr) + return + + if len(version) < 2: + return + + version = version[1:].strip() + version = re.sub(r'-g.*', '', version) + return version + +def get_haproxy_git_version(path): + try: + p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError: + return False + version = p.communicate()[0] + + if p.returncode != 0: + return False + + if len(version) < 2: + return False + + version = version[1:].strip() + version = re.sub(r'-g.*', '', version) + return version + +def getTitleDetails(string): + array = string.split(".") + + title = array.pop().strip() + chapter = ".".join(array) + level = max(1, len(array)) + if array: + toplevel = array[0] + else: + toplevel = False + + return { + "title" : title, + "chapter" : chapter, + "level" : level, + "toplevel": toplevel + } + +# Parse the whole document to insert links on keywords +def createLinks(): + global document, 
keywords, keywordsCount, keyword_conflicts, chapters + + print("Generating keywords links...", file=sys.stderr) + + delimiters = [ + dict(start='"', end='"', multi=True ), + dict(start='- ' , end='\n' , multi=False), + ] + + for keyword in keywords: + keywordsCount[keyword] = 0 + for delimiter in delimiters: + keywordsCount[keyword] += document.count(delimiter['start'] + keyword + delimiter['end']) + if (keyword in keyword_conflicts) and (not keywordsCount[keyword]): + # The keyword is never used, we can remove it from the conflicts list + del keyword_conflicts[keyword] + + if keyword in keyword_conflicts: + chapter_list = "" + for chapter in keyword_conflicts[keyword]: + chapter_list += '
  • %s
  • ' % (quote("%s (%s)" % (keyword, chapters[chapter]['title'])), chapters[chapter]['title']) + for delimiter in delimiters: + if delimiter['multi']: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], + delimiter['start'] + '' + + '' + + keyword + + '' + + '' + + '' + + '' + delimiter['end']) + else: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '' + keyword + '' + delimiter['end']) + else: + for delimiter in delimiters: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '' + keyword + '' + delimiter['end']) + if keyword.startswith("option "): + shortKeyword = keyword[len("option "):] + keywordsCount[shortKeyword] = 0 + for delimiter in delimiters: + keywordsCount[keyword] += document.count(delimiter['start'] + shortKeyword + delimiter['end']) + if (shortKeyword in keyword_conflicts) and (not keywordsCount[shortKeyword]): + # The keyword is never used, we can remove it from the conflicts list + del keyword_conflicts[shortKeyword] + for delimiter in delimiters: + document = document.replace(delimiter['start'] + shortKeyword + delimiter['start'], delimiter['start'] + '' + shortKeyword + '' + delimiter['end']) + +def documentAppend(text, retline = True): + global document + document += text + if retline: + document += "\n" + +def init_parsers(pctxt): + return [ + underline.Parser(pctxt), + arguments.Parser(pctxt), + seealso.Parser(pctxt), + example.Parser(pctxt), + table.Parser(pctxt), + underline.Parser(pctxt), + keyword.Parser(pctxt), + ] + +# The parser itself +def convert(infile, outfile, base=''): + global document, keywords, keywordsCount, chapters, keyword_conflicts + + if len(base) > 0 and base[:-1] != '/': + base += '/' + + hasSummary = False + + data = [] + fd = open(infile,"r") + for line in fd: + line.replace("\t", " " * 8) + line = line.rstrip() + data.append(line) + fd.close() + + pctxt = PContext( + TemplateLookup( + 
directories=[ + 'templates' + ] + ) + ) + + parsers = init_parsers(pctxt) + + pctxt.context = { + 'headers': {}, + 'document': "", + 'base': base, + } + + sections = [] + currentSection = { + "details": getTitleDetails(""), + "content": "", + } + + chapters = {} + + keywords = {} + keywordsCount = {} + + specialSections = { + "default": { + "hasKeywords": True, + }, + "4.1": { + "hasKeywords": True, + }, + } + + pctxt.keywords = keywords + pctxt.keywordsCount = keywordsCount + pctxt.chapters = chapters + + print("Importing %s..." % infile, file=sys.stderr) + + nblines = len(data) + i = j = 0 + while i < nblines: + line = data[i].rstrip() + if i < nblines - 1: + next = data[i + 1].rstrip() + else: + next = "" + if (line == "Summary" or re.match("^[0-9].*", line)) and (len(next) > 0) and (next[0] == '-') \ + and ("-" * len(line)).startswith(next): # Fuzzy underline length detection + sections.append(currentSection) + currentSection = { + "details": getTitleDetails(line), + "content": "", + } + j = 0 + i += 1 # Skip underline + while not data[i + 1].rstrip(): + i += 1 # Skip empty lines + + else: + if len(line) > 80: + print("Line `%i' exceeds 80 columns" % (i + 1), file=sys.stderr) + + currentSection["content"] = currentSection["content"] + line + "\n" + j += 1 + if currentSection["details"]["title"] == "Summary" and line != "": + hasSummary = True + # Learn chapters from the summary + details = getTitleDetails(line) + if details["chapter"]: + chapters[details["chapter"]] = details + i += 1 + sections.append(currentSection) + + chapterIndexes = sorted(chapters.keys()) + + document = "" + + # Complete the summary + for section in sections: + details = section["details"] + title = details["title"] + if title: + fulltitle = title + if details["chapter"]: + #documentAppend("" % details["chapter"]) + fulltitle = details["chapter"] + ". 
" + title + if not details["chapter"] in chapters: + print("Adding '%s' to the summary" % details["title"], file=sys.stderr) + chapters[details["chapter"]] = details + chapterIndexes = sorted(chapters.keys()) + + for section in sections: + details = section["details"] + pctxt.details = details + level = details["level"] + title = details["title"] + content = section["content"].rstrip() + + print("Parsing chapter %s..." % title, file=sys.stderr) + + if (title == "Summary") or (title and not hasSummary): + summaryTemplate = pctxt.templates.get_template('summary.html') + documentAppend(summaryTemplate.render( + pctxt = pctxt, + chapters = chapters, + chapterIndexes = chapterIndexes, + )) + if title and not hasSummary: + hasSummary = True + else: + continue + + if title: + documentAppend('' % (details["chapter"], details["chapter"])) + if level == 1: + documentAppend("
    ", False) + documentAppend('%s. %s' % (level, details["chapter"], details["chapter"], details["chapter"], details["chapter"], html.escape(title, True), level)) + if level == 1: + documentAppend("
    ", False) + + if content: + if False and title: + # Display a navigation bar + documentAppend('
      ') + documentAppend('
    • Top
    • ', False) + index = chapterIndexes.index(details["chapter"]) + if index > 0: + documentAppend('' % chapterIndexes[index - 1], False) + if index < len(chapterIndexes) - 1: + documentAppend('' % chapterIndexes[index + 1], False) + documentAppend('
    ', False) + content = html.escape(content, True) + content = re.sub(r'section ([0-9]+(.[0-9]+)*)', r'section \1', content) + + pctxt.set_content(content) + + if not title: + lines = pctxt.get_lines() + pctxt.context['headers'] = { + 'title': '', + 'subtitle': '', + 'version': '', + 'author': '', + 'date': '' + } + if re.match("^-+$", pctxt.get_line().strip()): + # Try to analyze the header of the file, assuming it follows + # those rules : + # - it begins with a "separator line" (several '-' chars) + # - then the document title + # - an optional subtitle + # - a new separator line + # - the version + # - the author + # - the date + pctxt.next() + pctxt.context['headers']['title'] = pctxt.get_line().strip() + pctxt.next() + subtitle = "" + while not re.match("^-+$", pctxt.get_line().strip()): + subtitle += " " + pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['subtitle'] += subtitle.strip() + if not pctxt.context['headers']['subtitle']: + # No subtitle, try to guess one from the title if it + # starts with the word "HAProxy" + if pctxt.context['headers']['title'].startswith('HAProxy '): + pctxt.context['headers']['subtitle'] = pctxt.context['headers']['title'][8:] + pctxt.context['headers']['title'] = 'HAProxy' + pctxt.next() + pctxt.context['headers']['version'] = pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['author'] = pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['date'] = pctxt.get_line().strip() + pctxt.next() + if HAPROXY_GIT_VERSION: + pctxt.context['headers']['version'] = 'version ' + HAPROXY_GIT_VERSION + + # Skip header lines + pctxt.eat_lines() + pctxt.eat_empty_lines() + + documentAppend('
    ', False) + + delay = [] + while pctxt.has_more_lines(): + try: + specialSection = specialSections[details["chapter"]] + except: + specialSection = specialSections["default"] + + line = pctxt.get_line() + if i < nblines - 1: + nextline = pctxt.get_line(1) + else: + nextline = "" + + oldline = line + pctxt.stop = False + for parser in parsers: + line = parser.parse(line) + if pctxt.stop: + break + if oldline == line: + # nothing has changed, + # delays the rendering + if delay or line != "": + delay.append(line) + pctxt.next() + elif pctxt.stop: + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend(line, False) + else: + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend(line, True) + pctxt.next() + + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend('
    ') + + if not hasSummary: + summaryTemplate = pctxt.templates.get_template('summary.html') + print(chapters) + document = summaryTemplate.render( + pctxt = pctxt, + chapters = chapters, + chapterIndexes = chapterIndexes, + ) + document + + + # Log warnings for keywords defined in several chapters + keyword_conflicts = {} + for keyword in keywords: + keyword_chapters = list(keywords[keyword]) + keyword_chapters.sort() + if len(keyword_chapters) > 1: + print('Multi section keyword : "%s" in chapters %s' % (keyword, list(keyword_chapters)), file=sys.stderr) + keyword_conflicts[keyword] = keyword_chapters + + keywords = list(keywords) + keywords.sort() + + createLinks() + + # Add the keywords conflicts to the keywords list to make them available in the search form + # And remove the original keyword which is now useless + for keyword in keyword_conflicts: + sections = keyword_conflicts[keyword] + offset = keywords.index(keyword) + for section in sections: + keywords.insert(offset, "%s (%s)" % (keyword, chapters[section]['title'])) + offset += 1 + keywords.remove(keyword) + + print("Exporting to %s..." 
% outfile, file=sys.stderr) + + template = pctxt.templates.get_template('template.html') + try: + footerTemplate = pctxt.templates.get_template('footer.html') + footer = footerTemplate.render( + pctxt = pctxt, + headers = pctxt.context['headers'], + document = document, + chapters = chapters, + chapterIndexes = chapterIndexes, + keywords = keywords, + keywordsCount = keywordsCount, + keyword_conflicts = keyword_conflicts, + version = VERSION, + date = datetime.datetime.now().strftime("%Y/%m/%d"), + ) + except TopLevelLookupException: + footer = "" + + fd = open(outfile,'w') + + print(template.render( + pctxt = pctxt, + headers = pctxt.context['headers'], + base = base, + document = document, + chapters = chapters, + chapterIndexes = chapterIndexes, + keywords = keywords, + keywordsCount = keywordsCount, + keyword_conflicts = keyword_conflicts, + version = VERSION, + date = datetime.datetime.now().strftime("%Y/%m/%d"), + footer = footer + ), file=fd) + fd.close() + +if __name__ == '__main__': + main() diff --git a/debian/dconv/img/logo-med.png b/debian/dconv/img/logo-med.png new file mode 100644 index 0000000..1be03b2 Binary files /dev/null and b/debian/dconv/img/logo-med.png differ diff --git a/debian/dconv/js/typeahead.bundle.js b/debian/dconv/js/typeahead.bundle.js new file mode 100644 index 0000000..bb0c8ae --- /dev/null +++ b/debian/dconv/js/typeahead.bundle.js @@ -0,0 +1,2451 @@ +/*! + * typeahead.js 0.11.1 + * https://github.com/twitter/typeahead.js + * Copyright 2013-2015 Twitter, Inc. 
and other contributors; Licensed MIT + */ + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define("bloodhound", [ "jquery" ], function(a0) { + return root["Bloodhound"] = factory(a0); + }); + } else if (typeof exports === "object") { + module.exports = factory(require("jquery")); + } else { + root["Bloodhound"] = factory(jQuery); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? 
"" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + 
timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? val : JSON.stringify(val); + }, + noop: function() {} + }; + }(); + var VERSION = "0.11.1"; + var tokenizers = function() { + "use strict"; + return { + nonword: nonword, + whitespace: whitespace, + obj: { + nonword: getObjTokenizer(nonword), + whitespace: getObjTokenizer(whitespace) + } + }; + function whitespace(str) { + str = _.toStr(str); + return str ? str.split(/\s+/) : []; + } + function nonword(str) { + str = _.toStr(str); + return str ? str.split(/\W+/) : []; + } + function getObjTokenizer(tokenizer) { + return function setKey(keys) { + keys = _.isArray(keys) ? keys : [].slice.call(arguments, 0); + return function tokenize(o) { + var tokens = []; + _.each(keys, function(k) { + tokens = tokens.concat(tokenizer(_.toStr(o[k]))); + }); + return tokens; + }; + }; + } + }(); + var LruCache = function() { + "use strict"; + function LruCache(maxSize) { + this.maxSize = _.isNumber(maxSize) ? 
maxSize : 100; + this.reset(); + if (this.maxSize <= 0) { + this.set = this.get = $.noop; + } + } + _.mixin(LruCache.prototype, { + set: function set(key, val) { + var tailItem = this.list.tail, node; + if (this.size >= this.maxSize) { + this.list.remove(tailItem); + delete this.hash[tailItem.key]; + this.size--; + } + if (node = this.hash[key]) { + node.val = val; + this.list.moveToFront(node); + } else { + node = new Node(key, val); + this.list.add(node); + this.hash[key] = node; + this.size++; + } + }, + get: function get(key) { + var node = this.hash[key]; + if (node) { + this.list.moveToFront(node); + return node.val; + } + }, + reset: function reset() { + this.size = 0; + this.hash = {}; + this.list = new List(); + } + }); + function List() { + this.head = this.tail = null; + } + _.mixin(List.prototype, { + add: function add(node) { + if (this.head) { + node.next = this.head; + this.head.prev = node; + } + this.head = node; + this.tail = this.tail || node; + }, + remove: function remove(node) { + node.prev ? node.prev.next = node.next : this.head = node.next; + node.next ? 
node.next.prev = node.prev : this.tail = node.prev; + }, + moveToFront: function(node) { + this.remove(node); + this.add(node); + } + }); + function Node(key, val) { + this.key = key; + this.val = val; + this.prev = this.next = null; + } + return LruCache; + }(); + var PersistentStorage = function() { + "use strict"; + var LOCAL_STORAGE; + try { + LOCAL_STORAGE = window.localStorage; + LOCAL_STORAGE.setItem("~~~", "!"); + LOCAL_STORAGE.removeItem("~~~"); + } catch (err) { + LOCAL_STORAGE = null; + } + function PersistentStorage(namespace, override) { + this.prefix = [ "__", namespace, "__" ].join(""); + this.ttlKey = "__ttl__"; + this.keyMatcher = new RegExp("^" + _.escapeRegExChars(this.prefix)); + this.ls = override || LOCAL_STORAGE; + !this.ls && this._noop(); + } + _.mixin(PersistentStorage.prototype, { + _prefix: function(key) { + return this.prefix + key; + }, + _ttlKey: function(key) { + return this._prefix(key) + this.ttlKey; + }, + _noop: function() { + this.get = this.set = this.remove = this.clear = this.isExpired = _.noop; + }, + _safeSet: function(key, val) { + try { + this.ls.setItem(key, val); + } catch (err) { + if (err.name === "QuotaExceededError") { + this.clear(); + this._noop(); + } + } + }, + get: function(key) { + if (this.isExpired(key)) { + this.remove(key); + } + return decode(this.ls.getItem(this._prefix(key))); + }, + set: function(key, val, ttl) { + if (_.isNumber(ttl)) { + this._safeSet(this._ttlKey(key), encode(now() + ttl)); + } else { + this.ls.removeItem(this._ttlKey(key)); + } + return this._safeSet(this._prefix(key), encode(val)); + }, + remove: function(key) { + this.ls.removeItem(this._ttlKey(key)); + this.ls.removeItem(this._prefix(key)); + return this; + }, + clear: function() { + var i, keys = gatherMatchingKeys(this.keyMatcher); + for (i = keys.length; i--; ) { + this.remove(keys[i]); + } + return this; + }, + isExpired: function(key) { + var ttl = decode(this.ls.getItem(this._ttlKey(key))); + return _.isNumber(ttl) && 
now() > ttl ? true : false; + } + }); + return PersistentStorage; + function now() { + return new Date().getTime(); + } + function encode(val) { + return JSON.stringify(_.isUndefined(val) ? null : val); + } + function decode(val) { + return $.parseJSON(val); + } + function gatherMatchingKeys(keyMatcher) { + var i, key, keys = [], len = LOCAL_STORAGE.length; + for (i = 0; i < len; i++) { + if ((key = LOCAL_STORAGE.key(i)).match(keyMatcher)) { + keys.push(key.replace(keyMatcher, "")); + } + } + return keys; + } + }(); + var Transport = function() { + "use strict"; + var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10); + function Transport(o) { + o = o || {}; + this.cancelled = false; + this.lastReq = null; + this._send = o.transport; + this._get = o.limiter ? o.limiter(this._get) : this._get; + this._cache = o.cache === false ? new LruCache(0) : sharedCache; + } + Transport.setMaxPendingRequests = function setMaxPendingRequests(num) { + maxPendingRequests = num; + }; + Transport.resetCache = function resetCache() { + sharedCache.reset(); + }; + _.mixin(Transport.prototype, { + _fingerprint: function fingerprint(o) { + o = o || {}; + return o.url + o.type + $.param(o.data || {}); + }, + _get: function(o, cb) { + var that = this, fingerprint, jqXhr; + fingerprint = this._fingerprint(o); + if (this.cancelled || fingerprint !== this.lastReq) { + return; + } + if (jqXhr = pendingRequests[fingerprint]) { + jqXhr.done(done).fail(fail); + } else if (pendingRequestsCount < maxPendingRequests) { + pendingRequestsCount++; + pendingRequests[fingerprint] = this._send(o).done(done).fail(fail).always(always); + } else { + this.onDeckRequestArgs = [].slice.call(arguments, 0); + } + function done(resp) { + cb(null, resp); + that._cache.set(fingerprint, resp); + } + function fail() { + cb(true); + } + function always() { + pendingRequestsCount--; + delete pendingRequests[fingerprint]; + if (that.onDeckRequestArgs) { + 
that._get.apply(that, that.onDeckRequestArgs); + that.onDeckRequestArgs = null; + } + } + }, + get: function(o, cb) { + var resp, fingerprint; + cb = cb || $.noop; + o = _.isString(o) ? { + url: o + } : o || {}; + fingerprint = this._fingerprint(o); + this.cancelled = false; + this.lastReq = fingerprint; + if (resp = this._cache.get(fingerprint)) { + cb(null, resp); + } else { + this._get(o, cb); + } + }, + cancel: function() { + this.cancelled = true; + } + }); + return Transport; + }(); + var SearchIndex = window.SearchIndex = function() { + "use strict"; + var CHILDREN = "c", IDS = "i"; + function SearchIndex(o) { + o = o || {}; + if (!o.datumTokenizer || !o.queryTokenizer) { + $.error("datumTokenizer and queryTokenizer are both required"); + } + this.identify = o.identify || _.stringify; + this.datumTokenizer = o.datumTokenizer; + this.queryTokenizer = o.queryTokenizer; + this.reset(); + } + _.mixin(SearchIndex.prototype, { + bootstrap: function bootstrap(o) { + this.datums = o.datums; + this.trie = o.trie; + }, + add: function(data) { + var that = this; + data = _.isArray(data) ? 
data : [ data ]; + _.each(data, function(datum) { + var id, tokens; + that.datums[id = that.identify(datum)] = datum; + tokens = normalizeTokens(that.datumTokenizer(datum)); + _.each(tokens, function(token) { + var node, chars, ch; + node = that.trie; + chars = token.split(""); + while (ch = chars.shift()) { + node = node[CHILDREN][ch] || (node[CHILDREN][ch] = newNode()); + node[IDS].push(id); + } + }); + }); + }, + get: function get(ids) { + var that = this; + return _.map(ids, function(id) { + return that.datums[id]; + }); + }, + search: function search(query) { + var that = this, tokens, matches; + tokens = normalizeTokens(this.queryTokenizer(query)); + _.each(tokens, function(token) { + var node, chars, ch, ids; + if (matches && matches.length === 0) { + return false; + } + node = that.trie; + chars = token.split(""); + while (node && (ch = chars.shift())) { + node = node[CHILDREN][ch]; + } + if (node && chars.length === 0) { + ids = node[IDS].slice(0); + matches = matches ? getIntersection(matches, ids) : ids; + } else { + matches = []; + return false; + } + }); + return matches ? 
_.map(unique(matches), function(id) { + return that.datums[id]; + }) : []; + }, + all: function all() { + var values = []; + for (var key in this.datums) { + values.push(this.datums[key]); + } + return values; + }, + reset: function reset() { + this.datums = {}; + this.trie = newNode(); + }, + serialize: function serialize() { + return { + datums: this.datums, + trie: this.trie + }; + } + }); + return SearchIndex; + function normalizeTokens(tokens) { + tokens = _.filter(tokens, function(token) { + return !!token; + }); + tokens = _.map(tokens, function(token) { + return token.toLowerCase(); + }); + return tokens; + } + function newNode() { + var node = {}; + node[IDS] = []; + node[CHILDREN] = {}; + return node; + } + function unique(array) { + var seen = {}, uniques = []; + for (var i = 0, len = array.length; i < len; i++) { + if (!seen[array[i]]) { + seen[array[i]] = true; + uniques.push(array[i]); + } + } + return uniques; + } + function getIntersection(arrayA, arrayB) { + var ai = 0, bi = 0, intersection = []; + arrayA = arrayA.sort(); + arrayB = arrayB.sort(); + var lenArrayA = arrayA.length, lenArrayB = arrayB.length; + while (ai < lenArrayA && bi < lenArrayB) { + if (arrayA[ai] < arrayB[bi]) { + ai++; + } else if (arrayA[ai] > arrayB[bi]) { + bi++; + } else { + intersection.push(arrayA[ai]); + ai++; + bi++; + } + } + return intersection; + } + }(); + var Prefetch = function() { + "use strict"; + var keys; + keys = { + data: "data", + protocol: "protocol", + thumbprint: "thumbprint" + }; + function Prefetch(o) { + this.url = o.url; + this.ttl = o.ttl; + this.cache = o.cache; + this.prepare = o.prepare; + this.transform = o.transform; + this.transport = o.transport; + this.thumbprint = o.thumbprint; + this.storage = new PersistentStorage(o.cacheKey); + } + _.mixin(Prefetch.prototype, { + _settings: function settings() { + return { + url: this.url, + type: "GET", + dataType: "json" + }; + }, + store: function store(data) { + if (!this.cache) { + return; + } + 
this.storage.set(keys.data, data, this.ttl); + this.storage.set(keys.protocol, location.protocol, this.ttl); + this.storage.set(keys.thumbprint, this.thumbprint, this.ttl); + }, + fromCache: function fromCache() { + var stored = {}, isExpired; + if (!this.cache) { + return null; + } + stored.data = this.storage.get(keys.data); + stored.protocol = this.storage.get(keys.protocol); + stored.thumbprint = this.storage.get(keys.thumbprint); + isExpired = stored.thumbprint !== this.thumbprint || stored.protocol !== location.protocol; + return stored.data && !isExpired ? stored.data : null; + }, + fromNetwork: function(cb) { + var that = this, settings; + if (!cb) { + return; + } + settings = this.prepare(this._settings()); + this.transport(settings).fail(onError).done(onResponse); + function onError() { + cb(true); + } + function onResponse(resp) { + cb(null, that.transform(resp)); + } + }, + clear: function clear() { + this.storage.clear(); + return this; + } + }); + return Prefetch; + }(); + var Remote = function() { + "use strict"; + function Remote(o) { + this.url = o.url; + this.prepare = o.prepare; + this.transform = o.transform; + this.transport = new Transport({ + cache: o.cache, + limiter: o.limiter, + transport: o.transport + }); + } + _.mixin(Remote.prototype, { + _settings: function settings() { + return { + url: this.url, + type: "GET", + dataType: "json" + }; + }, + get: function get(query, cb) { + var that = this, settings; + if (!cb) { + return; + } + query = query || ""; + settings = this.prepare(query, this._settings()); + return this.transport.get(settings, onResponse); + function onResponse(err, resp) { + err ? 
cb([]) : cb(that.transform(resp)); + } + }, + cancelLastRequest: function cancelLastRequest() { + this.transport.cancel(); + } + }); + return Remote; + }(); + var oParser = function() { + "use strict"; + return function parse(o) { + var defaults, sorter; + defaults = { + initialize: true, + identify: _.stringify, + datumTokenizer: null, + queryTokenizer: null, + sufficient: 5, + sorter: null, + local: [], + prefetch: null, + remote: null + }; + o = _.mixin(defaults, o || {}); + !o.datumTokenizer && $.error("datumTokenizer is required"); + !o.queryTokenizer && $.error("queryTokenizer is required"); + sorter = o.sorter; + o.sorter = sorter ? function(x) { + return x.sort(sorter); + } : _.identity; + o.local = _.isFunction(o.local) ? o.local() : o.local; + o.prefetch = parsePrefetch(o.prefetch); + o.remote = parseRemote(o.remote); + return o; + }; + function parsePrefetch(o) { + var defaults; + if (!o) { + return null; + } + defaults = { + url: null, + ttl: 24 * 60 * 60 * 1e3, + cache: true, + cacheKey: null, + thumbprint: "", + prepare: _.identity, + transform: _.identity, + transport: null + }; + o = _.isString(o) ? { + url: o + } : o; + o = _.mixin(defaults, o); + !o.url && $.error("prefetch requires url to be set"); + o.transform = o.filter || o.transform; + o.cacheKey = o.cacheKey || o.url; + o.thumbprint = VERSION + o.thumbprint; + o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax; + return o; + } + function parseRemote(o) { + var defaults; + if (!o) { + return; + } + defaults = { + url: null, + cache: true, + prepare: null, + replace: null, + wildcard: null, + limiter: null, + rateLimitBy: "debounce", + rateLimitWait: 300, + transform: _.identity, + transport: null + }; + o = _.isString(o) ? { + url: o + } : o; + o = _.mixin(defaults, o); + !o.url && $.error("remote requires url to be set"); + o.transform = o.filter || o.transform; + o.prepare = toRemotePrepare(o); + o.limiter = toLimiter(o); + o.transport = o.transport ? 
callbackToDeferred(o.transport) : $.ajax; + delete o.replace; + delete o.wildcard; + delete o.rateLimitBy; + delete o.rateLimitWait; + return o; + } + function toRemotePrepare(o) { + var prepare, replace, wildcard; + prepare = o.prepare; + replace = o.replace; + wildcard = o.wildcard; + if (prepare) { + return prepare; + } + if (replace) { + prepare = prepareByReplace; + } else if (o.wildcard) { + prepare = prepareByWildcard; + } else { + prepare = idenityPrepare; + } + return prepare; + function prepareByReplace(query, settings) { + settings.url = replace(settings.url, query); + return settings; + } + function prepareByWildcard(query, settings) { + settings.url = settings.url.replace(wildcard, encodeURIComponent(query)); + return settings; + } + function idenityPrepare(query, settings) { + return settings; + } + } + function toLimiter(o) { + var limiter, method, wait; + limiter = o.limiter; + method = o.rateLimitBy; + wait = o.rateLimitWait; + if (!limiter) { + limiter = /^throttle$/i.test(method) ? throttle(wait) : debounce(wait); + } + return limiter; + function debounce(wait) { + return function debounce(fn) { + return _.debounce(fn, wait); + }; + } + function throttle(wait) { + return function throttle(fn) { + return _.throttle(fn, wait); + }; + } + } + function callbackToDeferred(fn) { + return function wrapper(o) { + var deferred = $.Deferred(); + fn(o, onSuccess, onError); + return deferred; + function onSuccess(resp) { + _.defer(function() { + deferred.resolve(resp); + }); + } + function onError(err) { + _.defer(function() { + deferred.reject(err); + }); + } + }; + } + }(); + var Bloodhound = function() { + "use strict"; + var old; + old = window && window.Bloodhound; + function Bloodhound(o) { + o = oParser(o); + this.sorter = o.sorter; + this.identify = o.identify; + this.sufficient = o.sufficient; + this.local = o.local; + this.remote = o.remote ? new Remote(o.remote) : null; + this.prefetch = o.prefetch ? 
new Prefetch(o.prefetch) : null; + this.index = new SearchIndex({ + identify: this.identify, + datumTokenizer: o.datumTokenizer, + queryTokenizer: o.queryTokenizer + }); + o.initialize !== false && this.initialize(); + } + Bloodhound.noConflict = function noConflict() { + window && (window.Bloodhound = old); + return Bloodhound; + }; + Bloodhound.tokenizers = tokenizers; + _.mixin(Bloodhound.prototype, { + __ttAdapter: function ttAdapter() { + var that = this; + return this.remote ? withAsync : withoutAsync; + function withAsync(query, sync, async) { + return that.search(query, sync, async); + } + function withoutAsync(query, sync) { + return that.search(query, sync); + } + }, + _loadPrefetch: function loadPrefetch() { + var that = this, deferred, serialized; + deferred = $.Deferred(); + if (!this.prefetch) { + deferred.resolve(); + } else if (serialized = this.prefetch.fromCache()) { + this.index.bootstrap(serialized); + deferred.resolve(); + } else { + this.prefetch.fromNetwork(done); + } + return deferred.promise(); + function done(err, data) { + if (err) { + return deferred.reject(); + } + that.add(data); + that.prefetch.store(that.index.serialize()); + deferred.resolve(); + } + }, + _initialize: function initialize() { + var that = this, deferred; + this.clear(); + (this.initPromise = this._loadPrefetch()).done(addLocalToIndex); + return this.initPromise; + function addLocalToIndex() { + that.add(that.local); + } + }, + initialize: function initialize(force) { + return !this.initPromise || force ? this._initialize() : this.initPromise; + }, + add: function add(data) { + this.index.add(data); + return this; + }, + get: function get(ids) { + ids = _.isArray(ids) ? ids : [].slice.call(arguments); + return this.index.get(ids); + }, + search: function search(query, sync, async) { + var that = this, local; + local = this.sorter(this.index.search(query)); + sync(this.remote ? 
local.slice() : local); + if (this.remote && local.length < this.sufficient) { + this.remote.get(query, processRemote); + } else if (this.remote) { + this.remote.cancelLastRequest(); + } + return this; + function processRemote(remote) { + var nonDuplicates = []; + _.each(remote, function(r) { + !_.some(local, function(l) { + return that.identify(r) === that.identify(l); + }) && nonDuplicates.push(r); + }); + async && async(nonDuplicates); + } + }, + all: function all() { + return this.index.all(); + }, + clear: function clear() { + this.index.reset(); + return this; + }, + clearPrefetchCache: function clearPrefetchCache() { + this.prefetch && this.prefetch.clear(); + return this; + }, + clearRemoteCache: function clearRemoteCache() { + Transport.resetCache(); + return this; + }, + ttAdapter: function ttAdapter() { + return this.__ttAdapter(); + } + }); + return Bloodhound; + }(); + return Bloodhound; +}); + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define("typeahead.js", [ "jquery" ], function(a0) { + return factory(a0); + }); + } else if (typeof exports === "object") { + module.exports = factory(require("jquery")); + } else { + factory(jQuery); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? 
navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? "" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? 
obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? 
val : JSON.stringify(val); + }, + noop: function() {} + }; + }(); + var WWW = function() { + "use strict"; + var defaultClassNames = { + wrapper: "twitter-typeahead", + input: "tt-input", + hint: "tt-hint", + menu: "tt-menu", + dataset: "tt-dataset", + suggestion: "tt-suggestion", + selectable: "tt-selectable", + empty: "tt-empty", + open: "tt-open", + cursor: "tt-cursor", + highlight: "tt-highlight" + }; + return build; + function build(o) { + var www, classes; + classes = _.mixin({}, defaultClassNames, o); + www = { + css: buildCss(), + classes: classes, + html: buildHtml(classes), + selectors: buildSelectors(classes) + }; + return { + css: www.css, + html: www.html, + classes: www.classes, + selectors: www.selectors, + mixin: function(o) { + _.mixin(o, www); + } + }; + } + function buildHtml(c) { + return { + wrapper: '', + menu: '
    ' + }; + } + function buildSelectors(classes) { + var selectors = {}; + _.each(classes, function(v, k) { + selectors[k] = "." + v; + }); + return selectors; + } + function buildCss() { + var css = { + wrapper: { + position: "relative", + display: "inline-block" + }, + hint: { + position: "absolute", + top: "0", + left: "0", + borderColor: "transparent", + boxShadow: "none", + opacity: "1" + }, + input: { + position: "relative", + verticalAlign: "top", + backgroundColor: "transparent" + }, + inputWithNoHint: { + position: "relative", + verticalAlign: "top" + }, + menu: { + position: "absolute", + top: "100%", + left: "0", + zIndex: "100", + display: "none" + }, + ltr: { + left: "0", + right: "auto" + }, + rtl: { + left: "auto", + right: " 0" + } + }; + if (_.isMsie()) { + _.mixin(css.input, { + backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)" + }); + } + return css; + } + }(); + var EventBus = function() { + "use strict"; + var namespace, deprecationMap; + namespace = "typeahead:"; + deprecationMap = { + render: "rendered", + cursorchange: "cursorchanged", + select: "selected", + autocomplete: "autocompleted" + }; + function EventBus(o) { + if (!o || !o.el) { + $.error("EventBus initialized without el"); + } + this.$el = $(o.el); + } + _.mixin(EventBus.prototype, { + _trigger: function(type, args) { + var $e; + $e = $.Event(namespace + type); + (args = args || []).unshift($e); + this.$el.trigger.apply(this.$el, args); + return $e; + }, + before: function(type) { + var args, $e; + args = [].slice.call(arguments, 1); + $e = this._trigger("before" + type, args); + return $e.isDefaultPrevented(); + }, + trigger: function(type) { + var deprecatedType; + this._trigger(type, [].slice.call(arguments, 1)); + if (deprecatedType = deprecationMap[type]) { + this._trigger(deprecatedType, [].slice.call(arguments, 1)); + } + } + }); + return EventBus; + }(); + var EventEmitter = function() { + "use strict"; + var splitter = 
/\s+/, nextTick = getNextTick(); + return { + onSync: onSync, + onAsync: onAsync, + off: off, + trigger: trigger + }; + function on(method, types, cb, context) { + var type; + if (!cb) { + return this; + } + types = types.split(splitter); + cb = context ? bindContext(cb, context) : cb; + this._callbacks = this._callbacks || {}; + while (type = types.shift()) { + this._callbacks[type] = this._callbacks[type] || { + sync: [], + async: [] + }; + this._callbacks[type][method].push(cb); + } + return this; + } + function onAsync(types, cb, context) { + return on.call(this, "async", types, cb, context); + } + function onSync(types, cb, context) { + return on.call(this, "sync", types, cb, context); + } + function off(types) { + var type; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + while (type = types.shift()) { + delete this._callbacks[type]; + } + return this; + } + function trigger(types) { + var type, callbacks, args, syncFlush, asyncFlush; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + args = [].slice.call(arguments, 1); + while ((type = types.shift()) && (callbacks = this._callbacks[type])) { + syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args)); + asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args)); + syncFlush() && nextTick(asyncFlush); + } + return this; + } + function getFlush(callbacks, context, args) { + return flush; + function flush() { + var cancelled; + for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) { + cancelled = callbacks[i].apply(context, args) === false; + } + return !cancelled; + } + } + function getNextTick() { + var nextTickFn; + if (window.setImmediate) { + nextTickFn = function nextTickSetImmediate(fn) { + setImmediate(function() { + fn(); + }); + }; + } else { + nextTickFn = function nextTickSetTimeout(fn) { + setTimeout(function() { + fn(); + }, 0); + }; + } + return nextTickFn; + } + function bindContext(fn, context) { + 
return fn.bind ? fn.bind(context) : function() { + fn.apply(context, [].slice.call(arguments, 0)); + }; + } + }(); + var highlight = function(doc) { + "use strict"; + var defaults = { + node: null, + pattern: null, + tagName: "strong", + className: null, + wordsOnly: false, + caseSensitive: false + }; + return function hightlight(o) { + var regex; + o = _.mixin({}, defaults, o); + if (!o.node || !o.pattern) { + return; + } + o.pattern = _.isArray(o.pattern) ? o.pattern : [ o.pattern ]; + regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly); + traverse(o.node, hightlightTextNode); + function hightlightTextNode(textNode) { + var match, patternNode, wrapperNode; + if (match = regex.exec(textNode.data)) { + wrapperNode = doc.createElement(o.tagName); + o.className && (wrapperNode.className = o.className); + patternNode = textNode.splitText(match.index); + patternNode.splitText(match[0].length); + wrapperNode.appendChild(patternNode.cloneNode(true)); + textNode.parentNode.replaceChild(wrapperNode, patternNode); + } + return !!match; + } + function traverse(el, hightlightTextNode) { + var childNode, TEXT_NODE_TYPE = 3; + for (var i = 0; i < el.childNodes.length; i++) { + childNode = el.childNodes[i]; + if (childNode.nodeType === TEXT_NODE_TYPE) { + i += hightlightTextNode(childNode) ? 1 : 0; + } else { + traverse(childNode, hightlightTextNode); + } + } + } + }; + function getRegex(patterns, caseSensitive, wordsOnly) { + var escapedPatterns = [], regexStr; + for (var i = 0, len = patterns.length; i < len; i++) { + escapedPatterns.push(_.escapeRegExChars(patterns[i])); + } + regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")"; + return caseSensitive ? 
new RegExp(regexStr) : new RegExp(regexStr, "i"); + } + }(window.document); + var Input = function() { + "use strict"; + var specialKeyCodeMap; + specialKeyCodeMap = { + 9: "tab", + 27: "esc", + 37: "left", + 39: "right", + 13: "enter", + 38: "up", + 40: "down" + }; + function Input(o, www) { + o = o || {}; + if (!o.input) { + $.error("input is missing"); + } + www.mixin(this); + this.$hint = $(o.hint); + this.$input = $(o.input); + this.query = this.$input.val(); + this.queryWhenFocused = this.hasFocus() ? this.query : null; + this.$overflowHelper = buildOverflowHelper(this.$input); + this._checkLanguageDirection(); + if (this.$hint.length === 0) { + this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop; + } + } + Input.normalizeQuery = function(str) { + return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " "); + }; + _.mixin(Input.prototype, EventEmitter, { + _onBlur: function onBlur() { + this.resetInputValue(); + this.trigger("blurred"); + }, + _onFocus: function onFocus() { + this.queryWhenFocused = this.query; + this.trigger("focused"); + }, + _onKeydown: function onKeydown($e) { + var keyName = specialKeyCodeMap[$e.which || $e.keyCode]; + this._managePreventDefault(keyName, $e); + if (keyName && this._shouldTrigger(keyName, $e)) { + this.trigger(keyName + "Keyed", $e); + } + }, + _onInput: function onInput() { + this._setQuery(this.getInputValue()); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + _managePreventDefault: function managePreventDefault(keyName, $e) { + var preventDefault; + switch (keyName) { + case "up": + case "down": + preventDefault = !withModifier($e); + break; + + default: + preventDefault = false; + } + preventDefault && $e.preventDefault(); + }, + _shouldTrigger: function shouldTrigger(keyName, $e) { + var trigger; + switch (keyName) { + case "tab": + trigger = !withModifier($e); + break; + + default: + trigger = true; + } + return trigger; + }, + _checkLanguageDirection: function 
checkLanguageDirection() { + var dir = (this.$input.css("direction") || "ltr").toLowerCase(); + if (this.dir !== dir) { + this.dir = dir; + this.$hint.attr("dir", dir); + this.trigger("langDirChanged", dir); + } + }, + _setQuery: function setQuery(val, silent) { + var areEquivalent, hasDifferentWhitespace; + areEquivalent = areQueriesEquivalent(val, this.query); + hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false; + this.query = val; + if (!silent && !areEquivalent) { + this.trigger("queryChanged", this.query); + } else if (!silent && hasDifferentWhitespace) { + this.trigger("whitespaceChanged", this.query); + } + }, + bind: function() { + var that = this, onBlur, onFocus, onKeydown, onInput; + onBlur = _.bind(this._onBlur, this); + onFocus = _.bind(this._onFocus, this); + onKeydown = _.bind(this._onKeydown, this); + onInput = _.bind(this._onInput, this); + this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown); + if (!_.isMsie() || _.isMsie() > 9) { + this.$input.on("input.tt", onInput); + } else { + this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) { + if (specialKeyCodeMap[$e.which || $e.keyCode]) { + return; + } + _.defer(_.bind(that._onInput, that, $e)); + }); + } + return this; + }, + focus: function focus() { + this.$input.focus(); + }, + blur: function blur() { + this.$input.blur(); + }, + getLangDir: function getLangDir() { + return this.dir; + }, + getQuery: function getQuery() { + return this.query || ""; + }, + setQuery: function setQuery(val, silent) { + this.setInputValue(val); + this._setQuery(val, silent); + }, + hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() { + return this.query !== this.queryWhenFocused; + }, + getInputValue: function getInputValue() { + return this.$input.val(); + }, + setInputValue: function setInputValue(value) { + this.$input.val(value); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + 
resetInputValue: function resetInputValue() { + this.setInputValue(this.query); + }, + getHint: function getHint() { + return this.$hint.val(); + }, + setHint: function setHint(value) { + this.$hint.val(value); + }, + clearHint: function clearHint() { + this.setHint(""); + }, + clearHintIfInvalid: function clearHintIfInvalid() { + var val, hint, valIsPrefixOfHint, isValid; + val = this.getInputValue(); + hint = this.getHint(); + valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0; + isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow(); + !isValid && this.clearHint(); + }, + hasFocus: function hasFocus() { + return this.$input.is(":focus"); + }, + hasOverflow: function hasOverflow() { + var constraint = this.$input.width() - 2; + this.$overflowHelper.text(this.getInputValue()); + return this.$overflowHelper.width() >= constraint; + }, + isCursorAtEnd: function() { + var valueLength, selectionStart, range; + valueLength = this.$input.val().length; + selectionStart = this.$input[0].selectionStart; + if (_.isNumber(selectionStart)) { + return selectionStart === valueLength; + } else if (document.selection) { + range = document.selection.createRange(); + range.moveStart("character", -valueLength); + return valueLength === range.text.length; + } + return true; + }, + destroy: function destroy() { + this.$hint.off(".tt"); + this.$input.off(".tt"); + this.$overflowHelper.remove(); + this.$hint = this.$input = this.$overflowHelper = $("
    "); + } + }); + return Input; + function buildOverflowHelper($input) { + return $('').css({ + position: "absolute", + visibility: "hidden", + whiteSpace: "pre", + fontFamily: $input.css("font-family"), + fontSize: $input.css("font-size"), + fontStyle: $input.css("font-style"), + fontVariant: $input.css("font-variant"), + fontWeight: $input.css("font-weight"), + wordSpacing: $input.css("word-spacing"), + letterSpacing: $input.css("letter-spacing"), + textIndent: $input.css("text-indent"), + textRendering: $input.css("text-rendering"), + textTransform: $input.css("text-transform") + }).insertAfter($input); + } + function areQueriesEquivalent(a, b) { + return Input.normalizeQuery(a) === Input.normalizeQuery(b); + } + function withModifier($e) { + return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey; + } + }(); + var Dataset = function() { + "use strict"; + var keys, nameGenerator; + keys = { + val: "tt-selectable-display", + obj: "tt-selectable-object" + }; + nameGenerator = _.getIdGenerator(); + function Dataset(o, www) { + o = o || {}; + o.templates = o.templates || {}; + o.templates.notFound = o.templates.notFound || o.templates.empty; + if (!o.source) { + $.error("missing source"); + } + if (!o.node) { + $.error("missing node"); + } + if (o.name && !isValidName(o.name)) { + $.error("invalid dataset name: " + o.name); + } + www.mixin(this); + this.highlight = !!o.highlight; + this.name = o.name || nameGenerator(); + this.limit = o.limit || 5; + this.displayFn = getDisplayFn(o.display || o.displayKey); + this.templates = getTemplates(o.templates, this.displayFn); + this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source; + this.async = _.isUndefined(o.async) ? 
this.source.length > 2 : !!o.async; + this._resetLastSuggestion(); + this.$el = $(o.node).addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name); + } + Dataset.extractData = function extractData(el) { + var $el = $(el); + if ($el.data(keys.obj)) { + return { + val: $el.data(keys.val) || "", + obj: $el.data(keys.obj) || null + }; + } + return null; + }; + _.mixin(Dataset.prototype, EventEmitter, { + _overwrite: function overwrite(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (this.async && this.templates.pending) { + this._renderPending(query); + } else if (!this.async && this.templates.notFound) { + this._renderNotFound(query); + } else { + this._empty(); + } + this.trigger("rendered", this.name, suggestions, false); + }, + _append: function append(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length && this.$lastSuggestion.length) { + this._appendSuggestions(query, suggestions); + } else if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (!this.$lastSuggestion.length && this.templates.notFound) { + this._renderNotFound(query); + } + this.trigger("rendered", this.name, suggestions, true); + }, + _renderSuggestions: function renderSuggestions(query, suggestions) { + var $fragment; + $fragment = this._getSuggestionsFragment(query, suggestions); + this.$lastSuggestion = $fragment.children().last(); + this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions)); + }, + _appendSuggestions: function appendSuggestions(query, suggestions) { + var $fragment, $lastSuggestion; + $fragment = this._getSuggestionsFragment(query, suggestions); + $lastSuggestion = $fragment.children().last(); + this.$lastSuggestion.after($fragment); + this.$lastSuggestion = $lastSuggestion; + }, + _renderPending: function renderPending(query) { + var template = 
this.templates.pending; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _renderNotFound: function renderNotFound(query) { + var template = this.templates.notFound; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _empty: function empty() { + this.$el.empty(); + this._resetLastSuggestion(); + }, + _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) { + var that = this, fragment; + fragment = document.createDocumentFragment(); + _.each(suggestions, function getSuggestionNode(suggestion) { + var $el, context; + context = that._injectQuery(query, suggestion); + $el = $(that.templates.suggestion(context)).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable); + fragment.appendChild($el[0]); + }); + this.highlight && highlight({ + className: this.classes.highlight, + node: fragment, + pattern: query + }); + return $(fragment); + }, + _getFooter: function getFooter(query, suggestions) { + return this.templates.footer ? this.templates.footer({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _getHeader: function getHeader(query, suggestions) { + return this.templates.header ? this.templates.header({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _resetLastSuggestion: function resetLastSuggestion() { + this.$lastSuggestion = $(); + }, + _injectQuery: function injectQuery(query, obj) { + return _.isObject(obj) ? 
_.mixin({ + _query: query + }, obj) : obj; + }, + update: function update(query) { + var that = this, canceled = false, syncCalled = false, rendered = 0; + this.cancel(); + this.cancel = function cancel() { + canceled = true; + that.cancel = $.noop; + that.async && that.trigger("asyncCanceled", query); + }; + this.source(query, sync, async); + !syncCalled && sync([]); + function sync(suggestions) { + if (syncCalled) { + return; + } + syncCalled = true; + suggestions = (suggestions || []).slice(0, that.limit); + rendered = suggestions.length; + that._overwrite(query, suggestions); + if (rendered < that.limit && that.async) { + that.trigger("asyncRequested", query); + } + } + function async(suggestions) { + suggestions = suggestions || []; + if (!canceled && rendered < that.limit) { + that.cancel = $.noop; + rendered += suggestions.length; + that._append(query, suggestions.slice(0, that.limit - rendered)); + that.async && that.trigger("asyncReceived", query); + } + } + }, + cancel: $.noop, + clear: function clear() { + this._empty(); + this.cancel(); + this.trigger("cleared"); + }, + isEmpty: function isEmpty() { + return this.$el.is(":empty"); + }, + destroy: function destroy() { + this.$el = $("
    "); + } + }); + return Dataset; + function getDisplayFn(display) { + display = display || _.stringify; + return _.isFunction(display) ? display : displayFn; + function displayFn(obj) { + return obj[display]; + } + } + function getTemplates(templates, displayFn) { + return { + notFound: templates.notFound && _.templatify(templates.notFound), + pending: templates.pending && _.templatify(templates.pending), + header: templates.header && _.templatify(templates.header), + footer: templates.footer && _.templatify(templates.footer), + suggestion: templates.suggestion || suggestionTemplate + }; + function suggestionTemplate(context) { + return $("
    ").text(displayFn(context)); + } + } + function isValidName(str) { + return /^[_a-zA-Z0-9-]+$/.test(str); + } + }(); + var Menu = function() { + "use strict"; + function Menu(o, www) { + var that = this; + o = o || {}; + if (!o.node) { + $.error("node is required"); + } + www.mixin(this); + this.$node = $(o.node); + this.query = null; + this.datasets = _.map(o.datasets, initializeDataset); + function initializeDataset(oDataset) { + var node = that.$node.find(oDataset.node).first(); + oDataset.node = node.length ? node : $("
    ").appendTo(that.$node); + return new Dataset(oDataset, www); + } + } + _.mixin(Menu.prototype, EventEmitter, { + _onSelectableClick: function onSelectableClick($e) { + this.trigger("selectableClicked", $($e.currentTarget)); + }, + _onRendered: function onRendered(type, dataset, suggestions, async) { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetRendered", dataset, suggestions, async); + }, + _onCleared: function onCleared() { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetCleared"); + }, + _propagate: function propagate() { + this.trigger.apply(this, arguments); + }, + _allDatasetsEmpty: function allDatasetsEmpty() { + return _.every(this.datasets, isDatasetEmpty); + function isDatasetEmpty(dataset) { + return dataset.isEmpty(); + } + }, + _getSelectables: function getSelectables() { + return this.$node.find(this.selectors.selectable); + }, + _removeCursor: function _removeCursor() { + var $selectable = this.getActiveSelectable(); + $selectable && $selectable.removeClass(this.classes.cursor); + }, + _ensureVisible: function ensureVisible($el) { + var elTop, elBottom, nodeScrollTop, nodeHeight; + elTop = $el.position().top; + elBottom = elTop + $el.outerHeight(true); + nodeScrollTop = this.$node.scrollTop(); + nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10); + if (elTop < 0) { + this.$node.scrollTop(nodeScrollTop + elTop); + } else if (nodeHeight < elBottom) { + this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight)); + } + }, + bind: function() { + var that = this, onSelectableClick; + onSelectableClick = _.bind(this._onSelectableClick, this); + this.$node.on("click.tt", this.selectors.selectable, onSelectableClick); + _.each(this.datasets, function(dataset) { + dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, 
that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that); + }); + return this; + }, + isOpen: function isOpen() { + return this.$node.hasClass(this.classes.open); + }, + open: function open() { + this.$node.addClass(this.classes.open); + }, + close: function close() { + this.$node.removeClass(this.classes.open); + this._removeCursor(); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.attr("dir", dir); + }, + selectableRelativeToCursor: function selectableRelativeToCursor(delta) { + var $selectables, $oldCursor, oldIndex, newIndex; + $oldCursor = this.getActiveSelectable(); + $selectables = this._getSelectables(); + oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1; + newIndex = oldIndex + delta; + newIndex = (newIndex + 1) % ($selectables.length + 1) - 1; + newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex; + return newIndex === -1 ? null : $selectables.eq(newIndex); + }, + setCursor: function setCursor($selectable) { + this._removeCursor(); + if ($selectable = $selectable && $selectable.first()) { + $selectable.addClass(this.classes.cursor); + this._ensureVisible($selectable); + } + }, + getSelectableData: function getSelectableData($el) { + return $el && $el.length ? Dataset.extractData($el) : null; + }, + getActiveSelectable: function getActiveSelectable() { + var $selectable = this._getSelectables().filter(this.selectors.cursor).first(); + return $selectable.length ? $selectable : null; + }, + getTopSelectable: function getTopSelectable() { + var $selectable = this._getSelectables().first(); + return $selectable.length ? 
$selectable : null; + }, + update: function update(query) { + var isValidUpdate = query !== this.query; + if (isValidUpdate) { + this.query = query; + _.each(this.datasets, updateDataset); + } + return isValidUpdate; + function updateDataset(dataset) { + dataset.update(query); + } + }, + empty: function empty() { + _.each(this.datasets, clearDataset); + this.query = null; + this.$node.addClass(this.classes.empty); + function clearDataset(dataset) { + dataset.clear(); + } + }, + destroy: function destroy() { + this.$node.off(".tt"); + this.$node = $("
    "); + _.each(this.datasets, destroyDataset); + function destroyDataset(dataset) { + dataset.destroy(); + } + } + }); + return Menu; + }(); + var DefaultMenu = function() { + "use strict"; + var s = Menu.prototype; + function DefaultMenu() { + Menu.apply(this, [].slice.call(arguments, 0)); + } + _.mixin(DefaultMenu.prototype, Menu.prototype, { + open: function open() { + !this._allDatasetsEmpty() && this._show(); + return s.open.apply(this, [].slice.call(arguments, 0)); + }, + close: function close() { + this._hide(); + return s.close.apply(this, [].slice.call(arguments, 0)); + }, + _onRendered: function onRendered() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onRendered.apply(this, [].slice.call(arguments, 0)); + }, + _onCleared: function onCleared() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onCleared.apply(this, [].slice.call(arguments, 0)); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl); + return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0)); + }, + _hide: function hide() { + this.$node.hide(); + }, + _show: function show() { + this.$node.css("display", "block"); + } + }); + return DefaultMenu; + }(); + var Typeahead = function() { + "use strict"; + function Typeahead(o, www) { + var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged; + o = o || {}; + if (!o.input) { + $.error("missing input"); + } + if (!o.menu) { + $.error("missing menu"); + } + if (!o.eventBus) { + $.error("missing event bus"); + } + www.mixin(this); + this.eventBus = o.eventBus; + this.minLength = _.isNumber(o.minLength) ? 
o.minLength : 1; + this.input = o.input; + this.menu = o.menu; + this.enabled = true; + this.active = false; + this.input.hasFocus() && this.activate(); + this.dir = this.input.getLangDir(); + this._hacks(); + this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this); + onFocused = c(this, "activate", "open", "_onFocused"); + onBlurred = c(this, "deactivate", "_onBlurred"); + onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed"); + onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed"); + onEscKeyed = c(this, "isActive", "_onEscKeyed"); + onUpKeyed = c(this, "isActive", "open", "_onUpKeyed"); + onDownKeyed = c(this, "isActive", "open", "_onDownKeyed"); + onLeftKeyed = c(this, "isActive", "isOpen", "_onLeftKeyed"); + onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed"); + onQueryChanged = c(this, "_openIfActive", "_onQueryChanged"); + onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged"); + this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this); + } + _.mixin(Typeahead.prototype, { + _hacks: function hacks() { + var $input, $menu; + $input = this.input.$input || $("
    "); + $menu = this.menu.$node || $("
    "); + $input.on("blur.tt", function($e) { + var active, isActive, hasActive; + active = document.activeElement; + isActive = $menu.is(active); + hasActive = $menu.has(active).length > 0; + if (_.isMsie() && (isActive || hasActive)) { + $e.preventDefault(); + $e.stopImmediatePropagation(); + _.defer(function() { + $input.focus(); + }); + } + }); + $menu.on("mousedown.tt", function($e) { + $e.preventDefault(); + }); + }, + _onSelectableClicked: function onSelectableClicked(type, $el) { + this.select($el); + }, + _onDatasetCleared: function onDatasetCleared() { + this._updateHint(); + }, + _onDatasetRendered: function onDatasetRendered(type, dataset, suggestions, async) { + this._updateHint(); + this.eventBus.trigger("render", suggestions, async, dataset); + }, + _onAsyncRequested: function onAsyncRequested(type, dataset, query) { + this.eventBus.trigger("asyncrequest", query, dataset); + }, + _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) { + this.eventBus.trigger("asynccancel", query, dataset); + }, + _onAsyncReceived: function onAsyncReceived(type, dataset, query) { + this.eventBus.trigger("asyncreceive", query, dataset); + }, + _onFocused: function onFocused() { + this._minLengthMet() && this.menu.update(this.input.getQuery()); + }, + _onBlurred: function onBlurred() { + if (this.input.hasQueryChangedSinceLastFocus()) { + this.eventBus.trigger("change", this.input.getQuery()); + } + }, + _onEnterKeyed: function onEnterKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } + }, + _onTabKeyed: function onTabKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } else if ($selectable = this.menu.getTopSelectable()) { + this.autocomplete($selectable) && $e.preventDefault(); + } + }, + _onEscKeyed: function onEscKeyed() { + this.close(); + }, + _onUpKeyed: function 
onUpKeyed() { + this.moveCursor(-1); + }, + _onDownKeyed: function onDownKeyed() { + this.moveCursor(+1); + }, + _onLeftKeyed: function onLeftKeyed() { + if (this.dir === "rtl" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getTopSelectable()); + } + }, + _onRightKeyed: function onRightKeyed() { + if (this.dir === "ltr" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getTopSelectable()); + } + }, + _onQueryChanged: function onQueryChanged(e, query) { + this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty(); + }, + _onWhitespaceChanged: function onWhitespaceChanged() { + this._updateHint(); + }, + _onLangDirChanged: function onLangDirChanged(e, dir) { + if (this.dir !== dir) { + this.dir = dir; + this.menu.setLanguageDirection(dir); + } + }, + _openIfActive: function openIfActive() { + this.isActive() && this.open(); + }, + _minLengthMet: function minLengthMet(query) { + query = _.isString(query) ? query : this.input.getQuery() || ""; + return query.length >= this.minLength; + }, + _updateHint: function updateHint() { + var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match; + $selectable = this.menu.getTopSelectable(); + data = this.menu.getSelectableData($selectable); + val = this.input.getInputValue(); + if (data && !_.isBlankString(val) && !this.input.hasOverflow()) { + query = Input.normalizeQuery(val); + escapedQuery = _.escapeRegExChars(query); + frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i"); + match = frontMatchRegEx.exec(data.val); + match && this.input.setHint(val + match[1]); + } else { + this.input.clearHint(); + } + }, + isEnabled: function isEnabled() { + return this.enabled; + }, + enable: function enable() { + this.enabled = true; + }, + disable: function disable() { + this.enabled = false; + }, + isActive: function isActive() { + return this.active; + }, + activate: function activate() { + if (this.isActive()) { + return true; + } else if (!this.isEnabled() || 
this.eventBus.before("active")) { + return false; + } else { + this.active = true; + this.eventBus.trigger("active"); + return true; + } + }, + deactivate: function deactivate() { + if (!this.isActive()) { + return true; + } else if (this.eventBus.before("idle")) { + return false; + } else { + this.active = false; + this.close(); + this.eventBus.trigger("idle"); + return true; + } + }, + isOpen: function isOpen() { + return this.menu.isOpen(); + }, + open: function open() { + if (!this.isOpen() && !this.eventBus.before("open")) { + this.menu.open(); + this._updateHint(); + this.eventBus.trigger("open"); + } + return this.isOpen(); + }, + close: function close() { + if (this.isOpen() && !this.eventBus.before("close")) { + this.menu.close(); + this.input.clearHint(); + this.input.resetInputValue(); + this.eventBus.trigger("close"); + } + return !this.isOpen(); + }, + setVal: function setVal(val) { + this.input.setQuery(_.toStr(val)); + }, + getVal: function getVal() { + return this.input.getQuery(); + }, + select: function select($selectable) { + var data = this.menu.getSelectableData($selectable); + if (data && !this.eventBus.before("select", data.obj)) { + this.input.setQuery(data.val, true); + this.eventBus.trigger("select", data.obj); + this.close(); + return true; + } + return false; + }, + autocomplete: function autocomplete($selectable) { + var query, data, isValid; + query = this.input.getQuery(); + data = this.menu.getSelectableData($selectable); + isValid = data && query !== data.val; + if (isValid && !this.eventBus.before("autocomplete", data.obj)) { + this.input.setQuery(data.val); + this.eventBus.trigger("autocomplete", data.obj); + return true; + } + return false; + }, + moveCursor: function moveCursor(delta) { + var query, $candidate, data, payload, cancelMove; + query = this.input.getQuery(); + $candidate = this.menu.selectableRelativeToCursor(delta); + data = this.menu.getSelectableData($candidate); + payload = data ? 
data.obj : null; + cancelMove = this._minLengthMet() && this.menu.update(query); + if (!cancelMove && !this.eventBus.before("cursorchange", payload)) { + this.menu.setCursor($candidate); + if (data) { + this.input.setInputValue(data.val); + } else { + this.input.resetInputValue(); + this._updateHint(); + } + this.eventBus.trigger("cursorchange", payload); + return true; + } + return false; + }, + destroy: function destroy() { + this.input.destroy(); + this.menu.destroy(); + } + }); + return Typeahead; + function c(ctx) { + var methods = [].slice.call(arguments, 1); + return function() { + var args = [].slice.call(arguments); + _.each(methods, function(method) { + return ctx[method].apply(ctx, args); + }); + }; + } + }(); + (function() { + "use strict"; + var old, keys, methods; + old = $.fn.typeahead; + keys = { + www: "tt-www", + attrs: "tt-attrs", + typeahead: "tt-typeahead" + }; + methods = { + initialize: function initialize(o, datasets) { + var www; + datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1); + o = o || {}; + www = WWW(o.classNames); + return this.each(attach); + function attach() { + var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, typeahead, MenuConstructor; + _.each(datasets, function(d) { + d.highlight = !!o.highlight; + }); + $input = $(this); + $wrapper = $(www.html.wrapper); + $hint = $elOrNull(o.hint); + $menu = $elOrNull(o.menu); + defaultHint = o.hint !== false && !$hint; + defaultMenu = o.menu !== false && !$menu; + defaultHint && ($hint = buildHintFromInput($input, www)); + defaultMenu && ($menu = $(www.html.menu).css(www.css.menu)); + $hint && $hint.val(""); + $input = prepInput($input, www); + if (defaultHint || defaultMenu) { + $wrapper.css(www.css.wrapper); + $input.css(defaultHint ? www.css.input : www.css.inputWithNoHint); + $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null); + } + MenuConstructor = defaultMenu ? 
DefaultMenu : Menu; + eventBus = new EventBus({ + el: $input + }); + input = new Input({ + hint: $hint, + input: $input + }, www); + menu = new MenuConstructor({ + node: $menu, + datasets: datasets + }, www); + typeahead = new Typeahead({ + input: input, + menu: menu, + eventBus: eventBus, + minLength: o.minLength + }, www); + $input.data(keys.www, www); + $input.data(keys.typeahead, typeahead); + } + }, + isEnabled: function isEnabled() { + var enabled; + ttEach(this.first(), function(t) { + enabled = t.isEnabled(); + }); + return enabled; + }, + enable: function enable() { + ttEach(this, function(t) { + t.enable(); + }); + return this; + }, + disable: function disable() { + ttEach(this, function(t) { + t.disable(); + }); + return this; + }, + isActive: function isActive() { + var active; + ttEach(this.first(), function(t) { + active = t.isActive(); + }); + return active; + }, + activate: function activate() { + ttEach(this, function(t) { + t.activate(); + }); + return this; + }, + deactivate: function deactivate() { + ttEach(this, function(t) { + t.deactivate(); + }); + return this; + }, + isOpen: function isOpen() { + var open; + ttEach(this.first(), function(t) { + open = t.isOpen(); + }); + return open; + }, + open: function open() { + ttEach(this, function(t) { + t.open(); + }); + return this; + }, + close: function close() { + ttEach(this, function(t) { + t.close(); + }); + return this; + }, + select: function select(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.select($el); + }); + return success; + }, + autocomplete: function autocomplete(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.autocomplete($el); + }); + return success; + }, + moveCursor: function moveCursoe(delta) { + var success = false; + ttEach(this.first(), function(t) { + success = t.moveCursor(delta); + }); + return success; + }, + val: function val(newVal) { + var query; + if (!arguments.length) { + 
ttEach(this.first(), function(t) { + query = t.getVal(); + }); + return query; + } else { + ttEach(this, function(t) { + t.setVal(newVal); + }); + return this; + } + }, + destroy: function destroy() { + ttEach(this, function(typeahead, $input) { + revert($input); + typeahead.destroy(); + }); + return this; + } + }; + $.fn.typeahead = function(method) { + if (methods[method]) { + return methods[method].apply(this, [].slice.call(arguments, 1)); + } else { + return methods.initialize.apply(this, arguments); + } + }; + $.fn.typeahead.noConflict = function noConflict() { + $.fn.typeahead = old; + return this; + }; + function ttEach($els, fn) { + $els.each(function() { + var $input = $(this), typeahead; + (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input); + }); + } + function buildHintFromInput($input, www) { + return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop("readonly", true).removeAttr("id name placeholder required").attr({ + autocomplete: "off", + spellcheck: "false", + tabindex: -1 + }); + } + function prepInput($input, www) { + $input.data(keys.attrs, { + dir: $input.attr("dir"), + autocomplete: $input.attr("autocomplete"), + spellcheck: $input.attr("spellcheck"), + style: $input.attr("style") + }); + $input.addClass(www.classes.input).attr({ + autocomplete: "off", + spellcheck: false + }); + try { + !$input.attr("dir") && $input.attr("dir", "auto"); + } catch (e) {} + return $input; + } + function getBackgroundStyles($el) { + return { + backgroundAttachment: $el.css("background-attachment"), + backgroundClip: $el.css("background-clip"), + backgroundColor: $el.css("background-color"), + backgroundImage: $el.css("background-image"), + backgroundOrigin: $el.css("background-origin"), + backgroundPosition: $el.css("background-position"), + backgroundRepeat: $el.css("background-repeat"), + backgroundSize: $el.css("background-size") + }; + } + function revert($input) { + var www, 
$wrapper; + www = $input.data(keys.www); + $wrapper = $input.parent().filter(www.selectors.wrapper); + _.each($input.data(keys.attrs), function(val, key) { + _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val); + }); + $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attr).removeClass(www.classes.input); + if ($wrapper.length) { + $input.detach().insertAfter($wrapper); + $wrapper.remove(); + } + } + function $elOrNull(obj) { + var isValid, $el; + isValid = _.isJQuery(obj) || _.isElement(obj); + $el = isValid ? $(obj).first() : []; + return $el.length ? $el : null; + } + })(); +}); \ No newline at end of file diff --git a/debian/dconv/parser/__init__.py b/debian/dconv/parser/__init__.py new file mode 100644 index 0000000..82b8522 --- /dev/null +++ b/debian/dconv/parser/__init__.py @@ -0,0 +1,81 @@ +__all__ = [ + 'arguments', + 'example', + 'keyword', + 'seealso', + 'table', + 'underline' +] + + +class Parser: + def __init__(self, pctxt): + self.pctxt = pctxt + + def parse(self, line): + return line + +class PContext: + def __init__(self, templates = None): + self.set_content_list([]) + self.templates = templates + + def set_content(self, content): + self.set_content_list(content.split("\n")) + + def set_content_list(self, content): + self.lines = content + self.nblines = len(self.lines) + self.i = 0 + self.stop = False + + def get_lines(self): + return self.lines + + def eat_lines(self): + count = 0 + while self.has_more_lines() and self.lines[self.i].strip(): + count += 1 + self.next() + return count + + def eat_empty_lines(self): + count = 0 + while self.has_more_lines() and not self.lines[self.i].strip(): + count += 1 + self.next() + return count + + def next(self, count=1): + self.i += count + + def has_more_lines(self, offset=0): + return self.i + offset < self.nblines + + def get_line(self, offset=0): + return self.lines[self.i + offset].rstrip() + + +# Get the indentation of a line +def get_indent(line): + indent = 0 + 
length = len(line) + while indent < length and line[indent] == ' ': + indent += 1 + return indent + + +# Remove unneeded indentation +def remove_indent(list): + # Detect the minimum indentation in the list + min_indent = -1 + for line in list: + if not line.strip(): + continue + indent = get_indent(line) + if min_indent < 0 or indent < min_indent: + min_indent = indent + # Realign the list content to remove the minimum indentation + if min_indent > 0: + for index, line in enumerate(list): + list[index] = line[min_indent:] diff --git a/debian/dconv/parser/arguments.py b/debian/dconv/parser/arguments.py new file mode 100644 index 0000000..096b269 --- /dev/null +++ b/debian/dconv/parser/arguments.py @@ -0,0 +1,132 @@ +import sys +import re +import parser + +''' +TODO: Allow inner data parsing (this will allow to parse the examples provided in an arguments block) +''' +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + #template = pctxt.templates.get_template("parser/arguments.tpl") + #self.replace = template.render().strip() + + def parse(self, line): + #return re.sub(r'(Arguments *:)', self.replace, line) + pctxt = self.pctxt + + result = re.search(r'(Arguments? *:)', line) + if result: + label = result.group(0) + content = [] + + desc_indent = False + desc = re.sub(r'.*Arguments? 
*:', '', line).strip() + + indent = parser.get_indent(line) + + pctxt.next() + pctxt.eat_empty_lines() + + arglines = [] + if desc != "none": + add_empty_lines = 0 + while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) > indent): + for j in range(0, add_empty_lines): + arglines.append("") + arglines.append(pctxt.get_line()) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + ''' + print line + + if parser.get_indent(line) == arg_indent: + argument = re.sub(r' *([^ ]+).*', r'\1', line) + if argument: + #content.append("%s" % argument) + arg_desc = [line.replace(argument, " " * len(self.unescape(argument)), 1)] + #arg_desc = re.sub(r'( *)([^ ]+)(.*)', r'\1\2\3', line) + arg_desc_indent = parser.get_indent(arg_desc[0]) + arg_desc[0] = arg_desc[0][arg_indent:] + pctxt.next() + add_empty_lines = 0 + while pctxt.has_more_lines and parser.get_indent(pctxt.get_line()) >= arg_indent: + for i in range(0, add_empty_lines): + arg_desc.append("") + arg_desc.append(pctxt.get_line()[arg_indent:]) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + # TODO : reduce space at the beginnning + content.append({ + 'name': argument, + 'desc': arg_desc + }) + ''' + + if arglines: + new_arglines = [] + #content = self.parse_args(arglines) + parser.remove_indent(arglines) + ''' + pctxt2 = parser.PContext(pctxt.templates) + pctxt2.set_content_list(arglines) + while pctxt2.has_more_lines(): + new_arglines.append(parser.example.Parser(pctxt2).parse(pctxt2.get_line())) + pctxt2.next() + arglines = new_arglines + ''' + + pctxt.stop = True + + template = pctxt.templates.get_template("parser/arguments.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + content=arglines + #content=content + ) + return line + + return line + +''' + def parse_args(self, data): + args = [] + + pctxt = parser.PContext() + pctxt.set_content_list(data) + + while pctxt.has_more_lines(): + line = pctxt.get_line() + arg_indent = parser.get_indent(line) + argument = 
re.sub(r' *([^ ]+).*', r'\1', line) + if True or argument: + arg_desc = [] + trailing_desc = line.replace(argument, " " * len(self.unescape(argument)), 1)[arg_indent:] + if trailing_desc.strip(): + arg_desc.append(trailing_desc) + pctxt.next() + add_empty_lines = 0 + while pctxt.has_more_lines() and parser.get_indent(pctxt.get_line()) > arg_indent: + for i in range(0, add_empty_lines): + arg_desc.append("") + arg_desc.append(pctxt.get_line()[arg_indent:]) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + + parser.remove_indent(arg_desc) + + args.append({ + 'name': argument, + 'desc': arg_desc + }) + return args + + def unescape(self, s): + s = s.replace("<", "<") + s = s.replace(">", ">") + # this has to be last: + s = s.replace("&", "&") + return s +''' diff --git a/debian/dconv/parser/example.py b/debian/dconv/parser/example.py new file mode 100644 index 0000000..3958992 --- /dev/null +++ b/debian/dconv/parser/example.py @@ -0,0 +1,77 @@ +import re +import parser + +# Detect examples blocks +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + template = pctxt.templates.get_template("parser/example/comment.tpl") + self.comment = template.render(pctxt=pctxt).strip() + + + def parse(self, line): + pctxt = self.pctxt + + result = re.search(r'^ *(Examples? 
*:)(.*)', line) + if result: + label = result.group(1) + + desc_indent = False + desc = result.group(2).strip() + + # Some examples have a description + if desc: + desc_indent = len(line) - len(desc) + + indent = parser.get_indent(line) + + if desc: + # And some description are on multiple lines + while pctxt.get_line(1) and parser.get_indent(pctxt.get_line(1)) == desc_indent: + desc += " " + pctxt.get_line(1).strip() + pctxt.next() + + pctxt.next() + add_empty_line = pctxt.eat_empty_lines() + + content = [] + + if parser.get_indent(pctxt.get_line()) > indent: + if desc: + desc = desc[0].upper() + desc[1:] + add_empty_line = 0 + while pctxt.has_more_lines() and ((not pctxt.get_line()) or (parser.get_indent(pctxt.get_line()) > indent)): + if pctxt.get_line(): + for j in range(0, add_empty_line): + content.append("") + + content.append(re.sub(r'(#.*)$', self.comment, pctxt.get_line())) + add_empty_line = 0 + else: + add_empty_line += 1 + pctxt.next() + elif parser.get_indent(pctxt.get_line()) == indent: + # Simple example that can't have empty lines + if add_empty_line and desc: + # This means that the example was on the same line as the 'Example' tag + # and was not a description + content.append(" " * indent + desc) + desc = False + else: + while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) >= indent): + content.append(pctxt.get_line()) + pctxt.next() + pctxt.eat_empty_lines() # Skip empty remaining lines + + pctxt.stop = True + + parser.remove_indent(content) + + template = pctxt.templates.get_template("parser/example.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + content=content + ) + return line diff --git a/debian/dconv/parser/keyword.py b/debian/dconv/parser/keyword.py new file mode 100644 index 0000000..f20944f --- /dev/null +++ b/debian/dconv/parser/keyword.py @@ -0,0 +1,142 @@ +import re +import parser +from urllib.parse import quote + +class Parser(parser.Parser): + def __init__(self, pctxt): + 
parser.Parser.__init__(self, pctxt) + self.keywordPattern = re.compile(r'^(%s%s)(%s)' % ( + '([a-z][a-z0-9\-\+_\.]*[a-z0-9\-\+_)])', # keyword + '( [a-z0-9\-_]+)*', # subkeywords + '(\([^ ]*\))?', # arg (ex: (), (/), (,[,]) ... + )) + + def parse(self, line): + pctxt = self.pctxt + keywords = pctxt.keywords + keywordsCount = pctxt.keywordsCount + chapters = pctxt.chapters + + res = "" + + if line != "" and not re.match(r'^ ', line): + parsed = self.keywordPattern.match(line) + if parsed != None: + keyword = parsed.group(1) + arg = parsed.group(4) + parameters = line[len(keyword) + len(arg):] + if (parameters != "" and not re.match("^ +((<|\[|\{|/).*|(: [a-z +]+))?(\(deprecated\))?$", parameters)): + # Dirty hack + # - parameters should only start with the characer "<", "[", "{", "/" + # - or a column (":") followed by a alpha keywords to identify fetching samples (optionally separated by the character "+") + # - or the string "(deprecated)" at the end + keyword = False + else: + splitKeyword = keyword.split(" ") + + parameters = arg + parameters + else: + keyword = False + + if keyword and (len(splitKeyword) <= 5): + toplevel = pctxt.details["toplevel"] + for j in range(0, len(splitKeyword)): + subKeyword = " ".join(splitKeyword[0:j + 1]) + if subKeyword != "no": + if not subKeyword in keywords: + keywords[subKeyword] = set() + keywords[subKeyword].add(pctxt.details["chapter"]) + res += '' % subKeyword + res += '' % (toplevel, subKeyword) + res += '' % (pctxt.details["chapter"], subKeyword) + res += '' % (subKeyword, chapters[toplevel]['title']) + res += '' % (subKeyword, chapters[pctxt.details["chapter"]]['title']) + + deprecated = parameters.find("(deprecated)") + if deprecated != -1: + prefix = "" + suffix = "" + parameters = parameters.replace("(deprecated)", '(deprecated)') + else: + prefix = "" + suffix = "" + + nextline = pctxt.get_line(1) + + while nextline.startswith(" "): + # Found parameters on the next line + parameters += "\n" + nextline + pctxt.next() 
+ if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + + parameters = self.colorize(parameters) + res += '
    %s%s%s%s
    ' % (prefix, keyword, quote("%s-%s" % (pctxt.details["chapter"], keyword)), keyword, parameters, suffix) + pctxt.next() + pctxt.stop = True + elif line.startswith("/*"): + # Skip comments in the documentation + while not pctxt.get_line().endswith("*/"): + pctxt.next() + pctxt.next() + else: + # This is probably not a keyword but a text, ignore it + res += line + else: + res += line + + return res + + # Used to colorize keywords parameters + # TODO : use CSS styling + def colorize(self, text): + colorized = "" + tags = [ + [ "[" , "]" , "#008" ], + [ "{" , "}" , "#800" ], + [ "<", ">", "#080" ], + ] + heap = [] + pos = 0 + while pos < len(text): + substring = text[pos:] + found = False + for tag in tags: + if substring.startswith(tag[0]): + # Opening tag + heap.append(tag) + colorized += '%s' % (tag[2], substring[0:len(tag[0])]) + pos += len(tag[0]) + found = True + break + elif substring.startswith(tag[1]): + # Closing tag + + # pop opening tags until the corresponding one is found + openingTag = False + while heap and openingTag != tag: + openingTag = heap.pop() + if openingTag != tag: + colorized += '' + # all intermediate tags are now closed, we can display the tag + colorized += substring[0:len(tag[1])] + # and the close it if it was previously opened + if openingTag == tag: + colorized += '' + pos += len(tag[1]) + found = True + break + if not found: + colorized += substring[0] + pos += 1 + # close all unterminated tags + while heap: + tag = heap.pop() + colorized += '' + + return colorized + + diff --git a/debian/dconv/parser/seealso.py b/debian/dconv/parser/seealso.py new file mode 100644 index 0000000..bbb53f9 --- /dev/null +++ b/debian/dconv/parser/seealso.py @@ -0,0 +1,32 @@ +import re +import parser + +class Parser(parser.Parser): + def parse(self, line): + pctxt = self.pctxt + + result = re.search(r'(See also *:)', line) + if result: + label = result.group(0) + + desc = re.sub(r'.*See also *:', '', line).strip() + + indent = parser.get_indent(line) 
+ + # Some descriptions are on multiple lines + while pctxt.has_more_lines(1) and parser.get_indent(pctxt.get_line(1)) >= indent: + desc += " " + pctxt.get_line(1).strip() + pctxt.next() + + pctxt.eat_empty_lines() + pctxt.next() + pctxt.stop = True + + template = pctxt.templates.get_template("parser/seealso.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + ) + + return line diff --git a/debian/dconv/parser/table.py b/debian/dconv/parser/table.py new file mode 100644 index 0000000..e2575b1 --- /dev/null +++ b/debian/dconv/parser/table.py @@ -0,0 +1,244 @@ +import re +import sys +import parser + +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + self.table1Pattern = re.compile(r'^ *(-+\+)+-+') + self.table2Pattern = re.compile(r'^ *\+(-+\+)+') + + def parse(self, line): + global document, keywords, keywordsCount, chapters, keyword_conflicts + + pctxt = self.pctxt + + if pctxt.context['headers']['subtitle'] != 'Configuration Manual': + # Quick exit + return line + elif pctxt.details['chapter'] == "4": + # BUG: the matrix in chapter 4. Proxies is not well displayed, we skip this chapter + return line + + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + if self.table1Pattern.match(nextline): + # activate table rendering only for the Configuration Manual + lineSeparator = nextline + nbColumns = nextline.count("+") + 1 + extraColumns = 0 + print("Entering table mode (%d columns)" % nbColumns, file=sys.stderr) + table = [] + if line.find("|") != -1: + row = [] + while pctxt.has_more_lines(): + line = pctxt.get_line() + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + if line == lineSeparator: + # New row + table.append(row) + row = [] + if nextline.find("|") == -1: + break # End of table + else: + # Data + columns = line.split("|") + for j in range(0, len(columns)): + try: + if row[j]: + row[j] += "
    " + row[j] += columns[j].strip() + except: + row.append(columns[j].strip()) + pctxt.next() + else: + row = [] + headers = nextline + while pctxt.has_more_lines(): + line = pctxt.get_line() + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + if nextline == "": + if row: table.append(row) + break # End of table + + if (line != lineSeparator) and (line[0] != "-"): + start = 0 + + if row and not line.startswith(" "): + # Row is complete, parse a new one + table.append(row) + row = [] + + tmprow = [] + while start != -1: + end = headers.find("+", start) + if end == -1: + end = len(headers) + + realend = end + if realend == len(headers): + realend = len(line) + else: + while realend < len(line) and line[realend] != " ": + realend += 1 + end += 1 + + tmprow.append(line[start:realend]) + + start = end + 1 + if start >= len(headers): + start = -1 + for j in range(0, nbColumns): + try: + row[j] += tmprow[j].strip() + except: + row.append(tmprow[j].strip()) + + deprecated = row[0].endswith("(deprecated)") + if deprecated: + row[0] = row[0][: -len("(deprecated)")].rstrip() + + nooption = row[1].startswith("(*)") + if nooption: + row[1] = row[1][len("(*)"):].strip() + + if deprecated or nooption: + extraColumns = 1 + extra = "" + if deprecated: + extra += '(deprecated)' + if nooption: + extra += '(*)' + row.append(extra) + + pctxt.next() + print("Leaving table mode", file=sys.stderr) + pctxt.next() # skip useless next line + pctxt.stop = True + + return self.renderTable(table, nbColumns, pctxt.details["toplevel"]) + # elif self.table2Pattern.match(line): + # return self.parse_table_format2() + elif line.find("May be used in sections") != -1: + nextline = pctxt.get_line(1) + rows = [] + headers = line.split(":") + rows.append(headers[1].split("|")) + rows.append(nextline.split("|")) + table = { + "rows": rows, + "title": headers[0] + } + pctxt.next(2) # skip this previous table + pctxt.stop = True + + return self.renderTable(table) + + 
return line + + + def parse_table_format2(self): + pctxt = self.pctxt + + linesep = pctxt.get_line() + rows = [] + + pctxt.next() + maxcols = 0 + while pctxt.get_line().strip().startswith("|"): + row = pctxt.get_line().strip()[1:-1].split("|") + rows.append(row) + maxcols = max(maxcols, len(row)) + pctxt.next() + if pctxt.get_line() == linesep: + # TODO : find a way to define a special style for next row + pctxt.next() + pctxt.stop = True + + return self.renderTable(rows, maxcols) + + # Render tables detected by the conversion parser + def renderTable(self, table, maxColumns = 0, toplevel = None): + pctxt = self.pctxt + template = pctxt.templates.get_template("parser/table.tpl") + + res = "" + + title = None + if isinstance(table, dict): + title = table["title"] + table = table["rows"] + + if not maxColumns: + maxColumns = len(table[0]) + + rows = [] + + mode = "th" + headerLine = "" + hasKeywords = False + i = 0 + for row in table: + line = "" + + if i == 0: + row_template = pctxt.templates.get_template("parser/table/header.tpl") + else: + row_template = pctxt.templates.get_template("parser/table/row.tpl") + + if i > 1 and (i - 1) % 20 == 0 and len(table) > 50: + # Repeat headers periodically for long tables + rows.append(headerLine) + + j = 0 + cols = [] + for column in row: + if j >= maxColumns: + break + + tplcol = {} + + data = column.strip() + keyword = column + if j == 0 and i == 0 and keyword == 'keyword': + hasKeywords = True + if j == 0 and i != 0 and hasKeywords: + if keyword.startswith("[no] "): + keyword = keyword[len("[no] "):] + tplcol['toplevel'] = toplevel + tplcol['keyword'] = keyword + tplcol['extra'] = [] + if j == 0 and len(row) > maxColumns: + for k in range(maxColumns, len(row)): + tplcol['extra'].append(row[k]) + tplcol['data'] = data + cols.append(tplcol) + j += 1 + mode = "td" + + line = row_template.render( + pctxt=pctxt, + columns=cols + ).strip() + if i == 0: + headerLine = line + + rows.append(line) + + i += 1 + + return 
template.render( + pctxt=pctxt, + title=title, + rows=rows, + ) diff --git a/debian/dconv/parser/underline.py b/debian/dconv/parser/underline.py new file mode 100644 index 0000000..3a2350c --- /dev/null +++ b/debian/dconv/parser/underline.py @@ -0,0 +1,16 @@ +import parser + +class Parser(parser.Parser): + # Detect underlines + def parse(self, line): + pctxt = self.pctxt + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + if (len(line) > 0) and (len(nextline) > 0) and (nextline[0] == '-') and ("-" * len(line) == nextline): + template = pctxt.templates.get_template("parser/underline.tpl") + line = template.render(pctxt=pctxt, data=line).strip() + pctxt.next(2) + pctxt.eat_empty_lines() + pctxt.stop = True + + return line diff --git a/debian/dconv/templates/parser/arguments.tpl b/debian/dconv/templates/parser/arguments.tpl new file mode 100644 index 0000000..b5f91e9 --- /dev/null +++ b/debian/dconv/templates/parser/arguments.tpl @@ -0,0 +1,9 @@ +
    +${label}\ +% if desc: + ${desc} +% endif +% if content: +
    ${"\n".join(content)}
    +% endif +
    diff --git a/debian/dconv/templates/parser/example.tpl b/debian/dconv/templates/parser/example.tpl new file mode 100644 index 0000000..184b6dd --- /dev/null +++ b/debian/dconv/templates/parser/example.tpl @@ -0,0 +1,12 @@ +
    +${label} +
    +% if desc:
    +
    ${desc}
    \ +% endif +\ +% for line in content: +${line} +% endfor +
    +
    \ No newline at end of file diff --git a/debian/dconv/templates/parser/example/comment.tpl b/debian/dconv/templates/parser/example/comment.tpl new file mode 100644 index 0000000..b51ec2d --- /dev/null +++ b/debian/dconv/templates/parser/example/comment.tpl @@ -0,0 +1 @@ +\1 \ No newline at end of file diff --git a/debian/dconv/templates/parser/seealso.tpl b/debian/dconv/templates/parser/seealso.tpl new file mode 100644 index 0000000..72cf5f9 --- /dev/null +++ b/debian/dconv/templates/parser/seealso.tpl @@ -0,0 +1 @@ + diff --git a/debian/dconv/templates/parser/table.tpl b/debian/dconv/templates/parser/table.tpl new file mode 100644 index 0000000..0119176 --- /dev/null +++ b/debian/dconv/templates/parser/table.tpl @@ -0,0 +1,11 @@ +% if title: +

    ${title} :

    \ +% endif + +% for row in rows: +${row} +% endfor +
    \ +% if title: +
    +% endif \ No newline at end of file diff --git a/debian/dconv/templates/parser/table/header.tpl b/debian/dconv/templates/parser/table/header.tpl new file mode 100644 index 0000000..e84b47f --- /dev/null +++ b/debian/dconv/templates/parser/table/header.tpl @@ -0,0 +1,6 @@ +\ +% for col in columns: +<% data = col['data'] %>\ +${data}\ +% endfor + diff --git a/debian/dconv/templates/parser/table/row.tpl b/debian/dconv/templates/parser/table/row.tpl new file mode 100644 index 0000000..e4f2bef --- /dev/null +++ b/debian/dconv/templates/parser/table/row.tpl @@ -0,0 +1,36 @@ +<% from urllib.parse import quote %> +<% base = pctxt.context['base'] %> +\ +% for col in columns: +<% data = col['data'] %>\ +<% + if data in ['yes']: + style = "class=\"alert-success pagination-centered\"" + data = 'yes
    yes' % base + elif data in ['no']: + style = "class=\"alert-error pagination-centered\"" + data = 'no
    no' % base + elif data in ['X']: + style = "class=\"pagination-centered\"" + data = 'X' % base + elif data in ['-']: + style = "class=\"pagination-centered\"" + data = ' ' + elif data in ['*']: + style = "class=\"pagination-centered\"" + else: + style = None +%>\ +\ +% if "keyword" in col: +\ +% for extra in col['extra']: +${extra}\ +% endfor +${data}\ +% else: +${data}\ +% endif +\ +% endfor + diff --git a/debian/dconv/templates/parser/underline.tpl b/debian/dconv/templates/parser/underline.tpl new file mode 100644 index 0000000..4f35f7e --- /dev/null +++ b/debian/dconv/templates/parser/underline.tpl @@ -0,0 +1 @@ +
    ${data}
    diff --git a/debian/dconv/templates/summary.html b/debian/dconv/templates/summary.html new file mode 100644 index 0000000..87c6414 --- /dev/null +++ b/debian/dconv/templates/summary.html @@ -0,0 +1,43 @@ + + +
    +
    + <% previousLevel = None %> + % for k in chapterIndexes: + <% chapter = chapters[k] %> + % if chapter['title']: + <% + if chapter['level'] == 1: + otag = "" + etag = "" + else: + otag = etag = "" + %> + % if chapter['chapter'] == '7': + ## Quick and dirty hack to split the summary in 2 columns + ## TODO : implement a generic way split the summary +
    + <% previousLevel = None %> + % endif + % if otag and previousLevel: +
    + % endif +
    +
    ${otag}${chapter['chapter']}.${etag}
    +
    + % for tab in range(1, chapter['level']): +
    + % endfor + ${otag}${chapter['title']}${etag} + % for tab in range(1, chapter['level']): +
    + % endfor +
    +
    + <% previousLevel = chapter['level'] %> + % endif + % endfor +
    +
    diff --git a/debian/dconv/templates/template.html b/debian/dconv/templates/template.html new file mode 100644 index 0000000..c72b355 --- /dev/null +++ b/debian/dconv/templates/template.html @@ -0,0 +1,238 @@ + + + + + ${headers['title']} ${headers['version']} - ${headers['subtitle']} + + + + + + + +
    + + + + +
    +
    +
    +
    +

    ${headers['title']}

    +

    ${headers['subtitle']}

    +

    ${headers['version']}

    +

    +
    + ${headers['author']}
    + ${headers['date']} +

    +
    + + ${document} +
    +
    +
    + ${headers['title']} ${headers['version'].replace("version ", "")} – ${headers['subtitle']}
    + ${headers['date']}, ${headers['author']} +
    +
    + +
    + +
    +
      + + +
    +
    +
    + + +
    + + + + + + + ${footer} + + + diff --git a/debian/dconv/tools/generate-docs.sh b/debian/dconv/tools/generate-docs.sh new file mode 100755 index 0000000..36fdf1b --- /dev/null +++ b/debian/dconv/tools/generate-docs.sh @@ -0,0 +1,177 @@ +#!/bin/bash + +PROJECT_HOME=$(dirname $(readlink -f $0)) +cd $PROJECT_HOME || exit 1 + +WORK_DIR=$PROJECT_HOME/work + +function on_exit() +{ + echo "-- END $(date)" +} + +function init() +{ + trap on_exit EXIT + + echo + echo "-- START $(date)" + echo "PROJECT_HOME = $PROJECT_HOME" + + echo "Preparing work directories..." + mkdir -p $WORK_DIR || exit 1 + mkdir -p $WORK_DIR/haproxy || exit 1 + mkdir -p $WORK_DIR/haproxy-dconv || exit 1 + + UPDATED=0 + PUSH=0 + +} + +# Needed as "git -C" is only available since git 1.8.5 +function git-C() +{ + _gitpath=$1 + shift + echo "git --git-dir=$_gitpath/.git --work-tree=$_gitpath $@" >&2 + git --git-dir=$_gitpath/.git --work-tree=$_gitpath "$@" +} + +function fetch_haproxy_dconv() +{ + echo "Fetching latest haproxy-dconv public version..." + if [ ! -e $WORK_DIR/haproxy-dconv/master ]; + then + git clone -v git://github.com/cbonte/haproxy-dconv.git $WORK_DIR/haproxy-dconv/master || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/master" + + OLD_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + $GIT checkout master && $GIT pull -v + version=$($GIT describe --tags) + version=${version%-g*} + NEW_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + if [ "$OLD_MD5" != "$NEW_MD5" ]; + then + UPDATED=1 + fi + + echo "Fetching last haproxy-dconv public pages version..." + if [ ! -e $WORK_DIR/haproxy-dconv/gh-pages ]; + then + cp -a $WORK_DIR/haproxy-dconv/master $WORK_DIR/haproxy-dconv/gh-pages || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/gh-pages" + + $GIT checkout gh-pages && $GIT pull -v +} + +function fetch_haproxy() +{ + url=$1 + path=$2 + + echo "Fetching HAProxy 1.4 repository..." + if [ ! 
-e $path ]; + then + git clone -v $url $path || exit 1 + fi + GIT="git-C $path" + + $GIT checkout master && $GIT pull -v +} + +function _generate_file() +{ + infile=$1 + destfile=$2 + git_version=$3 + state=$4 + + $GIT checkout $git_version + + if [ -e $gitpath/doc/$infile ]; + then + + git_version_simple=${git_version%-g*} + doc_version=$(tail -n1 $destfile 2>/dev/null | grep " git:" | sed 's/.* git:\([^ ]*\).*/\1/') + if [ $UPDATED -eq 1 -o "$git_version" != "$doc_version" ]; + then + HTAG="VERSION-$(basename $gitpath | sed 's/[.]/\\&/g')" + if [ "$state" == "snapshot" ]; + then + base=".." + HTAG="$HTAG-SNAPSHOT" + else + base="." + fi + + + $WORK_DIR/haproxy-dconv/master/haproxy-dconv.py -i $gitpath/doc/$infile -o $destfile --base=$base && + echo "" >> $destfile && + sed -i "s/\(<\!-- $HTAG -->\)\(.*\)\(<\!-- \/$HTAG -->\)/\1${git_version_simple}\3/" $docroot/index.html + + else + echo "Already up to date." + fi + + if [ "$doc_version" != "" -a "$git_version" != "$doc_version" ]; + then + changelog=$($GIT log --oneline $doc_version..$git_version $gitpath/doc/$infile) + else + changelog="" + fi + + GITDOC="git-C $docroot" + if [ "$($GITDOC status -s $destfile)" != "" ]; + then + $GITDOC add $destfile && + $GITDOC commit -m "Updating HAProxy $state $infile ${git_version_simple} generated by haproxy-dconv $version" -m "$changelog" $destfile $docroot/index.html && + PUSH=1 + fi + fi +} + +function generate_docs() +{ + url=$1 + gitpath=$2 + docroot=$3 + infile=$4 + outfile=$5 + + fetch_haproxy $url $gitpath + + GIT="git-C $gitpath" + + $GIT checkout master + git_version=$($GIT describe --tags --match 'v*') + git_version_stable=${git_version%-*-g*} + + echo "Generating snapshot version $git_version..." + _generate_file $infile $docroot/snapshot/$outfile $git_version snapshot + + echo "Generating stable version $git_version..." 
+ _generate_file $infile $docroot/$outfile $git_version_stable stable +} + +function push() +{ + docroot=$1 + GITDOC="git-C $docroot" + + if [ $PUSH -eq 1 ]; + then + $GITDOC push origin gh-pages + fi + +} + + +init +fetch_haproxy_dconv +generate_docs http://git.1wt.eu/git/haproxy-1.4.git/ $WORK_DIR/haproxy/1.4 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.4.html +generate_docs http://git.1wt.eu/git/haproxy-1.5.git/ $WORK_DIR/haproxy/1.5 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.5.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.6.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages intro.txt intro-1.6.html +push $WORK_DIR/haproxy-dconv/gh-pages diff --git a/debian/gbp.conf b/debian/gbp.conf new file mode 100644 index 0000000..33ef1db --- /dev/null +++ b/debian/gbp.conf @@ -0,0 +1,3 @@ +[DEFAULT] +pristine-tar = True +upstream-branch = upstream-2.9 diff --git a/debian/halog.1 b/debian/halog.1 new file mode 100644 index 0000000..f5dd19f --- /dev/null +++ b/debian/halog.1 @@ -0,0 +1,108 @@ +.TH HALOG "1" "July 2013" "halog" "User Commands" +.SH NAME +halog \- HAProxy log statistics reporter +.SH SYNOPSIS +.B halog +[\fI-h|--help\fR] +.br +.B halog +[\fIoptions\fR] +Only match response times larger|smaller than
    + + +@@ -24,31 +24,16 @@ + +@@ -72,7 +57,7 @@ + The feature is automatically disabled when the search field is focused. +

    +

    +- Converted with haproxy-dconv v${version} on ${date} ++ Converted with haproxy-dconv +

    +
    + +@@ -83,7 +68,7 @@ +
    +

    ${headers['title']}

    +

    ${headers['subtitle']}

    +-

    ${headers['version']}

    ++

    ${headers['version']} (Debian)

    +

    +
    + ${headers['author']}
    +@@ -114,9 +99,9 @@ +

    + + +- +- +- ++ ++ ++ +