From 8daa83a594a2e98f39d764422bfbdbc62c9efd44 Mon Sep 17 00:00:00 2001
From: Daniel Baumann <daniel.baumann@progress-linux.org>
Date: Fri, 19 Apr 2024 19:20:00 +0200
Subject: Adding upstream version 2:4.20.0+dfsg.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
---
 third_party/waf/update.sh                          |   82 ++
 third_party/waf/waflib/Build.py                    | 1514 ++++++++++++++++++++
 third_party/waf/waflib/ConfigSet.py                |  361 +++++
 third_party/waf/waflib/Configure.py                |  656 +++++++++
 third_party/waf/waflib/Context.py                  |  747 ++++++++++
 third_party/waf/waflib/Errors.py                   |   68 +
 third_party/waf/waflib/Logs.py                     |  382 +++++
 third_party/waf/waflib/Node.py                     |  969 +++++++++++++
 third_party/waf/waflib/Options.py                  |  359 +++++
 third_party/waf/waflib/Runner.py                   |  622 ++++++++
 third_party/waf/waflib/Scripting.py                |  631 ++++++++
 third_party/waf/waflib/Task.py                     | 1406 ++++++++++++++++++
 third_party/waf/waflib/TaskGen.py                  |  913 ++++++++++++
 third_party/waf/waflib/Tools/__init__.py           |    3 +
 third_party/waf/waflib/Tools/ar.py                 |   24 +
 third_party/waf/waflib/Tools/asm.py                |  107 ++
 third_party/waf/waflib/Tools/bison.py              |   49 +
 third_party/waf/waflib/Tools/c.py                  |   39 +
 third_party/waf/waflib/Tools/c_aliases.py          |  146 ++
 third_party/waf/waflib/Tools/c_config.py           | 1370 ++++++++++++++++++
 third_party/waf/waflib/Tools/c_osx.py              |  193 +++
 third_party/waf/waflib/Tools/c_preproc.py          | 1091 ++++++++++++++
 third_party/waf/waflib/Tools/c_tests.py            |  237 +++
 third_party/waf/waflib/Tools/ccroot.py             |  792 ++++++++++
 third_party/waf/waflib/Tools/clang.py              |   29 +
 third_party/waf/waflib/Tools/clangxx.py            |   30 +
 third_party/waf/waflib/Tools/compiler_c.py         |  111 ++
 third_party/waf/waflib/Tools/compiler_cxx.py       |  112 ++
 third_party/waf/waflib/Tools/compiler_d.py         |   85 ++
 third_party/waf/waflib/Tools/compiler_fc.py        |   73 +
 third_party/waf/waflib/Tools/cs.py                 |  211 +++
 third_party/waf/waflib/Tools/cxx.py                |   40 +
 third_party/waf/waflib/Tools/d.py                  |   97 ++
 third_party/waf/waflib/Tools/d_config.py           |   64 +
 third_party/waf/waflib/Tools/d_scan.py             |  211 +++
 third_party/waf/waflib/Tools/dbus.py               |   70 +
 third_party/waf/waflib/Tools/dmd.py                |   80 ++
 third_party/waf/waflib/Tools/errcheck.py           |  237 +++
 third_party/waf/waflib/Tools/fc.py                 |  203 +++
 third_party/waf/waflib/Tools/fc_config.py          |  488 +++++++
 third_party/waf/waflib/Tools/fc_scan.py            |  120 ++
 third_party/waf/waflib/Tools/flex.py               |   62 +
 third_party/waf/waflib/Tools/g95.py                |   66 +
 third_party/waf/waflib/Tools/gas.py                |   19 +
 third_party/waf/waflib/Tools/gcc.py                |  156 ++
 third_party/waf/waflib/Tools/gdc.py                |   55 +
 third_party/waf/waflib/Tools/gfortran.py           |   93 ++
 third_party/waf/waflib/Tools/glib2.py              |  489 +++++++
 third_party/waf/waflib/Tools/gnu_dirs.py           |  131 ++
 third_party/waf/waflib/Tools/gxx.py                |  157 ++
 third_party/waf/waflib/Tools/icc.py                |   30 +
 third_party/waf/waflib/Tools/icpc.py               |   30 +
 third_party/waf/waflib/Tools/ifort.py              |  413 ++++++
 third_party/waf/waflib/Tools/intltool.py           |  231 +++
 third_party/waf/waflib/Tools/irixcc.py             |   54 +
 third_party/waf/waflib/Tools/javaw.py              |  593 ++++++++
 third_party/waf/waflib/Tools/ldc2.py               |   56 +
 third_party/waf/waflib/Tools/lua.py                |   38 +
 third_party/waf/waflib/Tools/md5_tstamp.py         |   41 +
 third_party/waf/waflib/Tools/msvc.py               | 1041 ++++++++++++++
 third_party/waf/waflib/Tools/nasm.py               |   31 +
 third_party/waf/waflib/Tools/nobuild.py            |   24 +
 third_party/waf/waflib/Tools/perl.py               |  156 ++
 third_party/waf/waflib/Tools/python.py             |  657 +++++++++
 third_party/waf/waflib/Tools/qt5.py                |  890 ++++++++++++
 third_party/waf/waflib/Tools/ruby.py               |  186 +++
 third_party/waf/waflib/Tools/suncc.py              |   67 +
 third_party/waf/waflib/Tools/suncxx.py             |   67 +
 third_party/waf/waflib/Tools/tex.py                |  544 +++++++
 third_party/waf/waflib/Tools/vala.py               |  355 +++++
 third_party/waf/waflib/Tools/waf_unit_test.py      |  302 ++++
 third_party/waf/waflib/Tools/winres.py             |  113 ++
 third_party/waf/waflib/Tools/xlc.py                |   65 +
 third_party/waf/waflib/Tools/xlcxx.py              |   65 +
 third_party/waf/waflib/Utils.py                    | 1053 ++++++++++++++
 third_party/waf/waflib/__init__.py                 |    3 +
 third_party/waf/waflib/ansiterm.py                 |  342 +++++
 third_party/waf/waflib/extras/__init__.py          |    3 +
 third_party/waf/waflib/extras/batched_cc.py        |  173 +++
 third_party/waf/waflib/extras/biber.py             |   58 +
 third_party/waf/waflib/extras/bjam.py              |  128 ++
 third_party/waf/waflib/extras/blender.py           |  108 ++
 third_party/waf/waflib/extras/boo.py               |   81 ++
 third_party/waf/waflib/extras/boost.py             |  526 +++++++
 .../waf/waflib/extras/build_file_tracker.py        |   28 +
 third_party/waf/waflib/extras/build_logs.py        |  110 ++
 third_party/waf/waflib/extras/buildcopy.py         |   85 ++
 third_party/waf/waflib/extras/c_bgxlc.py           |   32 +
 third_party/waf/waflib/extras/c_dumbpreproc.py     |   72 +
 third_party/waf/waflib/extras/c_emscripten.py      |   87 ++
 third_party/waf/waflib/extras/c_nec.py             |   74 +
 third_party/waf/waflib/extras/cabal.py             |  152 ++
 third_party/waf/waflib/extras/cfg_altoptions.py    |  110 ++
 .../waflib/extras/clang_compilation_database.py    |  137 ++
 third_party/waf/waflib/extras/clang_cross.py       |   92 ++
 .../waf/waflib/extras/clang_cross_common.py        |  113 ++
 third_party/waf/waflib/extras/clangxx_cross.py     |  106 ++
 third_party/waf/waflib/extras/classic_runner.py    |   68 +
 third_party/waf/waflib/extras/codelite.py          |  875 +++++++++++
 third_party/waf/waflib/extras/color_gcc.py         |   39 +
 third_party/waf/waflib/extras/color_msvc.py        |   59 +
 third_party/waf/waflib/extras/color_rvct.py        |   51 +
 third_party/waf/waflib/extras/compat15.py          |  406 ++++++
 third_party/waf/waflib/extras/cppcheck.py          |  591 ++++++++
 third_party/waf/waflib/extras/cpplint.py           |  209 +++
 third_party/waf/waflib/extras/cross_gnu.py         |  227 +++
 third_party/waf/waflib/extras/cython.py            |  147 ++
 third_party/waf/waflib/extras/dcc.py               |   72 +
 third_party/waf/waflib/extras/distnet.py           |  432 ++++++
 third_party/waf/waflib/extras/doxygen.py           |  236 +++
 third_party/waf/waflib/extras/dpapi.py             |   87 ++
 third_party/waf/waflib/extras/eclipse.py           |  501 +++++++
 third_party/waf/waflib/extras/erlang.py            |  110 ++
 third_party/waf/waflib/extras/fast_partial.py      |  531 +++++++
 third_party/waf/waflib/extras/fc_bgxlf.py          |   32 +
 third_party/waf/waflib/extras/fc_cray.py           |   51 +
 third_party/waf/waflib/extras/fc_fujitsu.py        |   52 +
 third_party/waf/waflib/extras/fc_nag.py            |   61 +
 third_party/waf/waflib/extras/fc_nec.py            |   60 +
 third_party/waf/waflib/extras/fc_nfort.py          |   52 +
 third_party/waf/waflib/extras/fc_open64.py         |   58 +
 third_party/waf/waflib/extras/fc_pgfortran.py      |   68 +
 third_party/waf/waflib/extras/fc_solstudio.py      |   62 +
 third_party/waf/waflib/extras/fc_xlf.py            |   63 +
 third_party/waf/waflib/extras/file_to_object.py    |  142 ++
 third_party/waf/waflib/extras/fluid.py             |   30 +
 third_party/waf/waflib/extras/freeimage.py         |   74 +
 third_party/waf/waflib/extras/fsb.py               |   31 +
 third_party/waf/waflib/extras/fsc.py               |   64 +
 third_party/waf/waflib/extras/gccdeps.py           |  244 ++++
 third_party/waf/waflib/extras/gdbus.py             |   87 ++
 third_party/waf/waflib/extras/genpybind.py         |  194 +++
 third_party/waf/waflib/extras/gob2.py              |   17 +
 third_party/waf/waflib/extras/halide.py            |  151 ++
 third_party/waf/waflib/extras/haxe.py              |  154 ++
 third_party/waf/waflib/extras/javatest.py          |  237 +++
 third_party/waf/waflib/extras/kde4.py              |   93 ++
 third_party/waf/waflib/extras/local_rpath.py       |   21 +
 third_party/waf/waflib/extras/make.py              |  142 ++
 third_party/waf/waflib/extras/midl.py              |   69 +
 third_party/waf/waflib/extras/msvc_pdb.py          |   46 +
 third_party/waf/waflib/extras/msvcdeps.py          |  294 ++++
 third_party/waf/waflib/extras/msvs.py              | 1052 ++++++++++++++
 third_party/waf/waflib/extras/netcache_client.py   |  390 +++++
 third_party/waf/waflib/extras/objcopy.py           |   53 +
 third_party/waf/waflib/extras/ocaml.py             |  348 +++++
 third_party/waf/waflib/extras/package.py           |   76 +
 third_party/waf/waflib/extras/parallel_debug.py    |  462 ++++++
 third_party/waf/waflib/extras/pch.py               |  148 ++
 third_party/waf/waflib/extras/pep8.py              |  106 ++
 third_party/waf/waflib/extras/pgicc.py             |   75 +
 third_party/waf/waflib/extras/pgicxx.py            |   20 +
 third_party/waf/waflib/extras/proc.py              |   54 +
 third_party/waf/waflib/extras/protoc.py            |  224 +++
 third_party/waf/waflib/extras/pyqt5.py             |  246 ++++
 third_party/waf/waflib/extras/pytest.py            |  240 ++++
 third_party/waf/waflib/extras/qnxnto.py            |   72 +
 third_party/waf/waflib/extras/qt4.py               |  695 +++++++++
 third_party/waf/waflib/extras/relocation.py        |   85 ++
 third_party/waf/waflib/extras/remote.py            |  327 +++++
 third_party/waf/waflib/extras/resx.py              |   35 +
 third_party/waf/waflib/extras/review.py            |  325 +++++
 third_party/waf/waflib/extras/rst.py               |  260 ++++
 third_party/waf/waflib/extras/run_do_script.py     |  139 ++
 third_party/waf/waflib/extras/run_m_script.py      |   88 ++
 third_party/waf/waflib/extras/run_py_script.py     |  104 ++
 third_party/waf/waflib/extras/run_r_script.py      |   86 ++
 third_party/waf/waflib/extras/sas.py               |   71 +
 .../waf/waflib/extras/satellite_assembly.py        |   57 +
 third_party/waf/waflib/extras/scala.py             |  128 ++
 third_party/waf/waflib/extras/slow_qt4.py          |   96 ++
 third_party/waf/waflib/extras/softlink_libs.py     |   76 +
 third_party/waf/waflib/extras/sphinx.py            |  120 ++
 third_party/waf/waflib/extras/stale.py             |   98 ++
 third_party/waf/waflib/extras/stracedeps.py        |  174 +++
 third_party/waf/waflib/extras/swig.py              |  237 +++
 third_party/waf/waflib/extras/syms.py              |   84 ++
 third_party/waf/waflib/extras/ticgt.py             |  300 ++++
 third_party/waf/waflib/extras/unity.py             |  108 ++
 third_party/waf/waflib/extras/use_config.py        |  185 +++
 third_party/waf/waflib/extras/valadoc.py           |  140 ++
 third_party/waf/waflib/extras/waf_xattr.py         |  150 ++
 third_party/waf/waflib/extras/wafcache.py          |  648 +++++++++
 third_party/waf/waflib/extras/why.py               |   78 +
 third_party/waf/waflib/extras/win32_opts.py        |  170 +++
 third_party/waf/waflib/extras/wix.py               |   87 ++
 third_party/waf/waflib/extras/xcode6.py            |  727 ++++++++++
 third_party/waf/waflib/fixpy2.py                   |   64 +
 third_party/waf/waflib/processor.py                |   68 +
 189 files changed, 42838 insertions(+)
 create mode 100755 third_party/waf/update.sh
 create mode 100644 third_party/waf/waflib/Build.py
 create mode 100644 third_party/waf/waflib/ConfigSet.py
 create mode 100644 third_party/waf/waflib/Configure.py
 create mode 100644 third_party/waf/waflib/Context.py
 create mode 100644 third_party/waf/waflib/Errors.py
 create mode 100644 third_party/waf/waflib/Logs.py
 create mode 100644 third_party/waf/waflib/Node.py
 create mode 100644 third_party/waf/waflib/Options.py
 create mode 100644 third_party/waf/waflib/Runner.py
 create mode 100644 third_party/waf/waflib/Scripting.py
 create mode 100644 third_party/waf/waflib/Task.py
 create mode 100644 third_party/waf/waflib/TaskGen.py
 create mode 100644 third_party/waf/waflib/Tools/__init__.py
 create mode 100644 third_party/waf/waflib/Tools/ar.py
 create mode 100644 third_party/waf/waflib/Tools/asm.py
 create mode 100644 third_party/waf/waflib/Tools/bison.py
 create mode 100644 third_party/waf/waflib/Tools/c.py
 create mode 100644 third_party/waf/waflib/Tools/c_aliases.py
 create mode 100644 third_party/waf/waflib/Tools/c_config.py
 create mode 100644 third_party/waf/waflib/Tools/c_osx.py
 create mode 100644 third_party/waf/waflib/Tools/c_preproc.py
 create mode 100644 third_party/waf/waflib/Tools/c_tests.py
 create mode 100644 third_party/waf/waflib/Tools/ccroot.py
 create mode 100644 third_party/waf/waflib/Tools/clang.py
 create mode 100644 third_party/waf/waflib/Tools/clangxx.py
 create mode 100644 third_party/waf/waflib/Tools/compiler_c.py
 create mode 100644 third_party/waf/waflib/Tools/compiler_cxx.py
 create mode 100644 third_party/waf/waflib/Tools/compiler_d.py
 create mode 100644 third_party/waf/waflib/Tools/compiler_fc.py
 create mode 100644 third_party/waf/waflib/Tools/cs.py
 create mode 100644 third_party/waf/waflib/Tools/cxx.py
 create mode 100644 third_party/waf/waflib/Tools/d.py
 create mode 100644 third_party/waf/waflib/Tools/d_config.py
 create mode 100644 third_party/waf/waflib/Tools/d_scan.py
 create mode 100644 third_party/waf/waflib/Tools/dbus.py
 create mode 100644 third_party/waf/waflib/Tools/dmd.py
 create mode 100644 third_party/waf/waflib/Tools/errcheck.py
 create mode 100644 third_party/waf/waflib/Tools/fc.py
 create mode 100644 third_party/waf/waflib/Tools/fc_config.py
 create mode 100644 third_party/waf/waflib/Tools/fc_scan.py
 create mode 100644 third_party/waf/waflib/Tools/flex.py
 create mode 100644 third_party/waf/waflib/Tools/g95.py
 create mode 100644 third_party/waf/waflib/Tools/gas.py
 create mode 100644 third_party/waf/waflib/Tools/gcc.py
 create mode 100644 third_party/waf/waflib/Tools/gdc.py
 create mode 100644 third_party/waf/waflib/Tools/gfortran.py
 create mode 100644 third_party/waf/waflib/Tools/glib2.py
 create mode 100644 third_party/waf/waflib/Tools/gnu_dirs.py
 create mode 100644 third_party/waf/waflib/Tools/gxx.py
 create mode 100644 third_party/waf/waflib/Tools/icc.py
 create mode 100644 third_party/waf/waflib/Tools/icpc.py
 create mode 100644 third_party/waf/waflib/Tools/ifort.py
 create mode 100644 third_party/waf/waflib/Tools/intltool.py
 create mode 100644 third_party/waf/waflib/Tools/irixcc.py
 create mode 100644 third_party/waf/waflib/Tools/javaw.py
 create mode 100644 third_party/waf/waflib/Tools/ldc2.py
 create mode 100644 third_party/waf/waflib/Tools/lua.py
 create mode 100644 third_party/waf/waflib/Tools/md5_tstamp.py
 create mode 100644 third_party/waf/waflib/Tools/msvc.py
 create mode 100644 third_party/waf/waflib/Tools/nasm.py
 create mode 100644 third_party/waf/waflib/Tools/nobuild.py
 create mode 100644 third_party/waf/waflib/Tools/perl.py
 create mode 100644 third_party/waf/waflib/Tools/python.py
 create mode 100644 third_party/waf/waflib/Tools/qt5.py
 create mode 100644 third_party/waf/waflib/Tools/ruby.py
 create mode 100644 third_party/waf/waflib/Tools/suncc.py
 create mode 100644 third_party/waf/waflib/Tools/suncxx.py
 create mode 100644 third_party/waf/waflib/Tools/tex.py
 create mode 100644 third_party/waf/waflib/Tools/vala.py
 create mode 100644 third_party/waf/waflib/Tools/waf_unit_test.py
 create mode 100644 third_party/waf/waflib/Tools/winres.py
 create mode 100644 third_party/waf/waflib/Tools/xlc.py
 create mode 100644 third_party/waf/waflib/Tools/xlcxx.py
 create mode 100644 third_party/waf/waflib/Utils.py
 create mode 100644 third_party/waf/waflib/__init__.py
 create mode 100644 third_party/waf/waflib/ansiterm.py
 create mode 100644 third_party/waf/waflib/extras/__init__.py
 create mode 100644 third_party/waf/waflib/extras/batched_cc.py
 create mode 100644 third_party/waf/waflib/extras/biber.py
 create mode 100644 third_party/waf/waflib/extras/bjam.py
 create mode 100644 third_party/waf/waflib/extras/blender.py
 create mode 100644 third_party/waf/waflib/extras/boo.py
 create mode 100644 third_party/waf/waflib/extras/boost.py
 create mode 100644 third_party/waf/waflib/extras/build_file_tracker.py
 create mode 100644 third_party/waf/waflib/extras/build_logs.py
 create mode 100644 third_party/waf/waflib/extras/buildcopy.py
 create mode 100644 third_party/waf/waflib/extras/c_bgxlc.py
 create mode 100644 third_party/waf/waflib/extras/c_dumbpreproc.py
 create mode 100644 third_party/waf/waflib/extras/c_emscripten.py
 create mode 100644 third_party/waf/waflib/extras/c_nec.py
 create mode 100644 third_party/waf/waflib/extras/cabal.py
 create mode 100644 third_party/waf/waflib/extras/cfg_altoptions.py
 create mode 100644 third_party/waf/waflib/extras/clang_compilation_database.py
 create mode 100644 third_party/waf/waflib/extras/clang_cross.py
 create mode 100644 third_party/waf/waflib/extras/clang_cross_common.py
 create mode 100644 third_party/waf/waflib/extras/clangxx_cross.py
 create mode 100644 third_party/waf/waflib/extras/classic_runner.py
 create mode 100644 third_party/waf/waflib/extras/codelite.py
 create mode 100644 third_party/waf/waflib/extras/color_gcc.py
 create mode 100644 third_party/waf/waflib/extras/color_msvc.py
 create mode 100644 third_party/waf/waflib/extras/color_rvct.py
 create mode 100644 third_party/waf/waflib/extras/compat15.py
 create mode 100644 third_party/waf/waflib/extras/cppcheck.py
 create mode 100644 third_party/waf/waflib/extras/cpplint.py
 create mode 100644 third_party/waf/waflib/extras/cross_gnu.py
 create mode 100644 third_party/waf/waflib/extras/cython.py
 create mode 100644 third_party/waf/waflib/extras/dcc.py
 create mode 100644 third_party/waf/waflib/extras/distnet.py
 create mode 100644 third_party/waf/waflib/extras/doxygen.py
 create mode 100644 third_party/waf/waflib/extras/dpapi.py
 create mode 100644 third_party/waf/waflib/extras/eclipse.py
 create mode 100644 third_party/waf/waflib/extras/erlang.py
 create mode 100644 third_party/waf/waflib/extras/fast_partial.py
 create mode 100644 third_party/waf/waflib/extras/fc_bgxlf.py
 create mode 100644 third_party/waf/waflib/extras/fc_cray.py
 create mode 100644 third_party/waf/waflib/extras/fc_fujitsu.py
 create mode 100644 third_party/waf/waflib/extras/fc_nag.py
 create mode 100644 third_party/waf/waflib/extras/fc_nec.py
 create mode 100644 third_party/waf/waflib/extras/fc_nfort.py
 create mode 100644 third_party/waf/waflib/extras/fc_open64.py
 create mode 100644 third_party/waf/waflib/extras/fc_pgfortran.py
 create mode 100644 third_party/waf/waflib/extras/fc_solstudio.py
 create mode 100644 third_party/waf/waflib/extras/fc_xlf.py
 create mode 100644 third_party/waf/waflib/extras/file_to_object.py
 create mode 100644 third_party/waf/waflib/extras/fluid.py
 create mode 100644 third_party/waf/waflib/extras/freeimage.py
 create mode 100644 third_party/waf/waflib/extras/fsb.py
 create mode 100644 third_party/waf/waflib/extras/fsc.py
 create mode 100644 third_party/waf/waflib/extras/gccdeps.py
 create mode 100644 third_party/waf/waflib/extras/gdbus.py
 create mode 100644 third_party/waf/waflib/extras/genpybind.py
 create mode 100644 third_party/waf/waflib/extras/gob2.py
 create mode 100644 third_party/waf/waflib/extras/halide.py
 create mode 100644 third_party/waf/waflib/extras/haxe.py
 create mode 100755 third_party/waf/waflib/extras/javatest.py
 create mode 100644 third_party/waf/waflib/extras/kde4.py
 create mode 100644 third_party/waf/waflib/extras/local_rpath.py
 create mode 100644 third_party/waf/waflib/extras/make.py
 create mode 100644 third_party/waf/waflib/extras/midl.py
 create mode 100644 third_party/waf/waflib/extras/msvc_pdb.py
 create mode 100644 third_party/waf/waflib/extras/msvcdeps.py
 create mode 100644 third_party/waf/waflib/extras/msvs.py
 create mode 100644 third_party/waf/waflib/extras/netcache_client.py
 create mode 100644 third_party/waf/waflib/extras/objcopy.py
 create mode 100644 third_party/waf/waflib/extras/ocaml.py
 create mode 100644 third_party/waf/waflib/extras/package.py
 create mode 100644 third_party/waf/waflib/extras/parallel_debug.py
 create mode 100644 third_party/waf/waflib/extras/pch.py
 create mode 100644 third_party/waf/waflib/extras/pep8.py
 create mode 100644 third_party/waf/waflib/extras/pgicc.py
 create mode 100644 third_party/waf/waflib/extras/pgicxx.py
 create mode 100644 third_party/waf/waflib/extras/proc.py
 create mode 100644 third_party/waf/waflib/extras/protoc.py
 create mode 100644 third_party/waf/waflib/extras/pyqt5.py
 create mode 100644 third_party/waf/waflib/extras/pytest.py
 create mode 100644 third_party/waf/waflib/extras/qnxnto.py
 create mode 100644 third_party/waf/waflib/extras/qt4.py
 create mode 100644 third_party/waf/waflib/extras/relocation.py
 create mode 100644 third_party/waf/waflib/extras/remote.py
 create mode 100644 third_party/waf/waflib/extras/resx.py
 create mode 100644 third_party/waf/waflib/extras/review.py
 create mode 100644 third_party/waf/waflib/extras/rst.py
 create mode 100644 third_party/waf/waflib/extras/run_do_script.py
 create mode 100644 third_party/waf/waflib/extras/run_m_script.py
 create mode 100644 third_party/waf/waflib/extras/run_py_script.py
 create mode 100644 third_party/waf/waflib/extras/run_r_script.py
 create mode 100644 third_party/waf/waflib/extras/sas.py
 create mode 100644 third_party/waf/waflib/extras/satellite_assembly.py
 create mode 100644 third_party/waf/waflib/extras/scala.py
 create mode 100644 third_party/waf/waflib/extras/slow_qt4.py
 create mode 100644 third_party/waf/waflib/extras/softlink_libs.py
 create mode 100644 third_party/waf/waflib/extras/sphinx.py
 create mode 100644 third_party/waf/waflib/extras/stale.py
 create mode 100644 third_party/waf/waflib/extras/stracedeps.py
 create mode 100644 third_party/waf/waflib/extras/swig.py
 create mode 100644 third_party/waf/waflib/extras/syms.py
 create mode 100644 third_party/waf/waflib/extras/ticgt.py
 create mode 100644 third_party/waf/waflib/extras/unity.py
 create mode 100644 third_party/waf/waflib/extras/use_config.py
 create mode 100644 third_party/waf/waflib/extras/valadoc.py
 create mode 100644 third_party/waf/waflib/extras/waf_xattr.py
 create mode 100644 third_party/waf/waflib/extras/wafcache.py
 create mode 100644 third_party/waf/waflib/extras/why.py
 create mode 100644 third_party/waf/waflib/extras/win32_opts.py
 create mode 100644 third_party/waf/waflib/extras/wix.py
 create mode 100644 third_party/waf/waflib/extras/xcode6.py
 create mode 100644 third_party/waf/waflib/fixpy2.py
 create mode 100755 third_party/waf/waflib/processor.py

diff --git a/third_party/waf/update.sh b/third_party/waf/update.sh
new file mode 100755
index 0000000..45fbeec
--- /dev/null
+++ b/third_party/waf/update.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+
+if [[ $# -lt 1 ]]; then
+	echo "Usage: update.sh VERSION"
+	exit 1
+fi
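+
+# Example invocation (the version number is illustrative):
+#   ./update.sh 2.0.26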
+
+WAF_VERSION="${1}"
+WAF_GIT="https://gitlab.com/ita1024/waf.git"
+WAF_UPDATE_SCRIPT="$(readlink -f "$0")"
+WAF_SAMBA_DIR="$(dirname "${WAF_UPDATE_SCRIPT}")"
+WAF_TMPDIR=$(mktemp --tmpdir -d waf-XXXXXXXX)
+
+echo "VERSION:       ${WAF_VERSION}"
+echo "GIT URL:       ${WAF_GIT}"
+echo "WAF SAMBA DIR: ${WAF_SAMBA_DIR}"
+echo "WAF TMP DIR:    ${WAF_TMPDIR}"
+
+cleanup_tmpdir()
+{
+	popd 2>/dev/null || true
+	rm -rf "$WAF_TMPDIR"
+}
+trap cleanup_tmpdir SIGINT
+
+cleanup_and_exit()
+{
+	cleanup_tmpdir
+	if test "$1" = 0 -o -z "$1"; then
+		exit 0
+	else
+		exit "$1"
+	fi
+}
+
+# Checkout the git tree
+mkdir -p "${WAF_TMPDIR}"
+pushd "${WAF_TMPDIR}" || cleanup_and_exit 1
+
+git clone "${WAF_GIT}"
+ret=$?
+if [ $ret -ne 0 ]; then
+	echo "ERROR: Failed to clone repository"
+	cleanup_and_exit 1
+fi
+
+pushd waf || cleanup_and_exit 1
+git checkout -b "waf-${WAF_VERSION}" "waf-${WAF_VERSION}"
+ret=$?
+if [ $ret -ne 0 ]; then
+	echo "ERROR: Failed to checkout waf-${WAF_VERSION} repository"
+	cleanup_and_exit 1
+fi
+popd || cleanup_and_exit 1
+
+popd || cleanup_and_exit 1
+
+# Update waflib
+pushd "${WAF_SAMBA_DIR}" || cleanup_and_exit 1
+pwd
+
+rm -rf waflib/
+rsync -av "${WAF_TMPDIR}/waf/waflib" .
+ret=$?
+if [ $ret -ne 0 ]; then
+	echo "ERROR: Failed copy waflib"
+	cleanup_and_exit 1
+fi
+chmod -x waflib/Context.py
+
+git add waflib
+
+popd || cleanup_and_exit 1
+
+echo
+echo "Now please change VERSION in buildtools/bin/waf and"
+echo "Context.HEXVERSION in buildtools/wafsamba/wafsamba.py"
+grep WAFVERSION "${WAF_SAMBA_DIR}/waflib/Context.py"
+grep HEXVERSION "${WAF_SAMBA_DIR}/waflib/Context.py"
+echo
+
+cleanup_and_exit 0
diff --git a/third_party/waf/waflib/Build.py b/third_party/waf/waflib/Build.py
new file mode 100644
index 0000000..b49dd83
--- /dev/null
+++ b/third_party/waf/waflib/Build.py
@@ -0,0 +1,1514 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Classes related to the build phase (build, clean, install, step, etc)
+"""
+
+import os, sys, errno, re, shutil, stat
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors
+
+CACHE_DIR = 'c4che'
+"""Name of the cache directory"""
+
+CACHE_SUFFIX = '_cache.py'
+"""ConfigSet cache files for variants are written under :py:attr:´waflib.Build.CACHE_DIR´ in the form ´variant_name´_cache.py"""
+
+INSTALL = 1337
+"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+UNINSTALL = -1337
+"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`"""
+
+SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+"""Build class members to save between the runs; these should be all dicts
+except for `root` which represents a :py:class:`waflib.Node.Node` instance
+"""
+
+CFG_FILES = 'cfg_files'
+"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)"""
+
+POST_AT_ONCE = 0
+"""Post mode: all task generators are posted before any task executed"""
+
+POST_LAZY = 1
+"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done"""
+
+PROTOCOL = -1
+if sys.platform == 'cli':
+	PROTOCOL = 0
+
+class BuildContext(Context.Context):
+	'''executes the build'''
+
+	cmd = 'build'
+	variant = ''
+
+	def __init__(self, **kw):
+		super(BuildContext, self).__init__(**kw)
+
+		self.is_install = 0
+		"""Non-zero value when installing or uninstalling file"""
+
+		self.top_dir = kw.get('top_dir', Context.top_dir)
+		"""See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`"""
+
+		self.out_dir = kw.get('out_dir', Context.out_dir)
+		"""See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`"""
+
+		self.run_dir = kw.get('run_dir', Context.run_dir)
+		"""See :py:attr:`waflib.Context.run_dir`"""
+
+		self.launch_dir = Context.launch_dir
+		"""See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`"""
+
+		self.post_mode = POST_LAZY
+		"""Whether to post the task generators at once or group-by-group (default is group-by-group)"""
+
+		self.cache_dir = kw.get('cache_dir')
+		if not self.cache_dir:
+			self.cache_dir = os.path.join(self.out_dir, CACHE_DIR)
+
+		self.all_envs = {}
+		"""Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment"""
+
+		# ======================================= #
+		# cache variables
+
+		self.node_sigs = {}
+		"""Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)"""
+
+		self.task_sigs = {}
+		"""Dict mapping task identifiers (uid) to task signatures (persists across builds)"""
+
+		self.imp_sigs = {}
+		"""Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)"""
+
+		self.node_deps = {}
+		"""Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
+
+		self.raw_deps = {}
+		"""Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
+
+		self.task_gen_cache_names = {}
+
+		self.jobs = Options.options.jobs
+		"""Amount of jobs to run in parallel"""
+
+		self.targets = Options.options.targets
+		"""List of targets to build (default: \\*)"""
+
+		self.keep = Options.options.keep
+		"""Whether the build should continue past errors"""
+
+		self.progress_bar = Options.options.progress_bar
+		"""
+		Level of progress status:
+
+		0. normal output
+		1. progress bar
+		2. IDE output
+		3. no output at all
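+
+		For example, running ``waf -p`` selects the progress bar (level 1).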
+		"""
+
+		# Manual dependencies.
+		self.deps_man = Utils.defaultdict(list)
+		"""Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`"""
+
+		# just the structure here
+		self.current_group = 0
+		"""
+		Current build group
+		"""
+
+		self.groups = []
+		"""
+		List containing lists of task generators
+		"""
+
+		self.group_names = {}
+		"""
+		Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group`
+		"""
+
+		for v in SAVED_ATTRS:
+			if not hasattr(self, v):
+				setattr(self, v, {})
+
+	def get_variant_dir(self):
+		"""Getter for the variant_dir attribute"""
+		if not self.variant:
+			return self.out_dir
+		return os.path.join(self.out_dir, os.path.normpath(self.variant))
+	variant_dir = property(get_variant_dir, None)
+
+	def __call__(self, *k, **kw):
+		"""
+		Create a task generator and add it to the current build group. The following forms are equivalent::
+
+			def build(bld):
+				tg = bld(a=1, b=2)
+
+			def build(bld):
+				tg = bld()
+				tg.a = 1
+				tg.b = 2
+
+			def build(bld):
+				tg = TaskGen.task_gen(a=1, b=2)
+				bld.add_to_group(tg, None)
+
+		:param group: group name to add the task generator to
+		:type group: string
+		"""
+		kw['bld'] = self
+		ret = TaskGen.task_gen(*k, **kw)
+		self.task_gen_cache_names = {} # reset the cache, each time
+		self.add_to_group(ret, group=kw.get('group'))
+		return ret
+
+	def __copy__(self):
+		"""
+		Build contexts cannot be copied
+
+		:raises: :py:class:`waflib.Errors.WafError`
+		"""
+		raise Errors.WafError('build contexts cannot be copied')
+
+	def load_envs(self):
+		"""
+		The configuration command creates files of the form ``build/c4che/NAME_cache.py``. This method
+		creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those
+		files and stores them in :py:attr:`waflib.Build.BuildContext.all_envs`.
+		"""
+		node = self.root.find_node(self.cache_dir)
+		if not node:
+			raise Errors.WafError('The project was not configured: run "waf configure" first!')
+		lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
+
+		if not lst:
+			raise Errors.WafError('The cache directory is empty: reconfigure the project')
+
+		for x in lst:
+			name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
+			env = ConfigSet.ConfigSet(x.abspath())
+			self.all_envs[name] = env
+			for f in env[CFG_FILES]:
+				newnode = self.root.find_resource(f)
+				if not newnode or not newnode.exists():
+					raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f)
+
+	def init_dirs(self):
+		"""
+		Initialize the project directory and the build directory by creating the nodes
+		:py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode`
+		corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is
+		created if necessary.
+		"""
+		if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+			raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+		self.path = self.srcnode = self.root.find_dir(self.top_dir)
+		self.bldnode = self.root.make_node(self.variant_dir)
+		self.bldnode.mkdir()
+
+	def execute(self):
+		"""
+		Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`.
+		Overrides from :py:func:`waflib.Context.Context.execute`
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.execute_build()
+
+	def execute_build(self):
+		"""
+		Execute the build by:
+
+		* reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
+		* calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
+		* calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
+		* calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
+		"""
+
+		Logs.info("Waf: Entering directory `%s'", self.variant_dir)
+		self.recurse([self.run_dir])
+		self.pre_build()
+
+		# display the time elapsed in the progress bar
+		self.timer = Utils.Timer()
+
+		try:
+			self.compile()
+		finally:
+			if self.progress_bar == 1 and sys.stderr.isatty():
+				c = self.producer.processed or 1
+				m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
+				Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on})
+			Logs.info("Waf: Leaving directory `%s'", self.variant_dir)
+		try:
+			self.producer.bld = None
+			del self.producer
+		except AttributeError:
+			pass
+		self.post_build()
+
+	def restore(self):
+		"""
+		Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
+		"""
+		try:
+			env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
+		except EnvironmentError:
+			pass
+		else:
+			if env.version < Context.HEXVERSION:
+				raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it')
+
+			for t in env.tools:
+				self.setup(**t)
+
+		dbfn = os.path.join(self.variant_dir, Context.DBFILE)
+		try:
+			data = Utils.readf(dbfn, 'rb')
+		except (EnvironmentError, EOFError):
+			# handle missing file/empty file
+			Logs.debug('build: Could not load the build cache %s (missing)', dbfn)
+		else:
+			try:
+				Node.pickle_lock.acquire()
+				Node.Nod3 = self.node_class
+				try:
+					data = cPickle.loads(data)
+				except Exception as e:
+					Logs.debug('build: Could not unpickle the build cache %s: %r', dbfn, e)
+				else:
+					for x in SAVED_ATTRS:
+						setattr(self, x, data.get(x, {}))
+			finally:
+				Node.pickle_lock.release()
+
+		self.init_dirs()
+
+	def store(self):
+		"""
+		Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
+		file to avoid problems on ctrl+c.
+		"""
+		data = {}
+		for x in SAVED_ATTRS:
+			data[x] = getattr(self, x)
+		db = os.path.join(self.variant_dir, Context.DBFILE)
+
+		try:
+			Node.pickle_lock.acquire()
+			Node.Nod3 = self.node_class
+			x = cPickle.dumps(data, PROTOCOL)
+		finally:
+			Node.pickle_lock.release()
+
+		Utils.writef(db + '.tmp', x, m='wb')
+
+		try:
+			st = os.stat(db)
+			os.remove(db)
+			if not Utils.is_win32: # win32 has no chown but we're paranoid
+				os.chown(db + '.tmp', st.st_uid, st.st_gid)
+		except (AttributeError, OSError):
+			pass
+
+		# do not use shutil.move (copy is not thread-safe)
+		os.rename(db + '.tmp', db)
+
+	def compile(self):
+		"""
+		Runs the build by creating an instance of :py:class:`waflib.Runner.Parallel`.
+		The cache file is written when at least one task has been executed.
+
+		:raises: :py:class:`waflib.Errors.BuildError` in case the build fails
+		"""
+		Logs.debug('build: compile()')
+
+		# delegate the producer-consumer logic to another object to reduce the complexity
+		self.producer = Runner.Parallel(self, self.jobs)
+		self.producer.biter = self.get_build_iterator()
+		try:
+			self.producer.start()
+		except KeyboardInterrupt:
+			if self.is_dirty():
+				self.store()
+			raise
+		else:
+			if self.is_dirty():
+				self.store()
+
+		if self.producer.error:
+			raise Errors.BuildError(self.producer.error)
+
+	def is_dirty(self):
+		return self.producer.dirty
+
+	def setup(self, tool, tooldir=None, funs=None):
+		"""
+		Import waf tools defined during the configuration::
+
+			def configure(conf):
+				conf.load('glib2')
+
+			def build(bld):
+				pass # glib2 is imported implicitly
+
+		:param tool: tool name or list of tool names
+		:type tool: string or list of string
+		:param tooldir: optional tool directory (sys.path)
+		:type tooldir: list of string
+		:param funs: unused variable
+		"""
+		if isinstance(tool, list):
+			for i in tool:
+				self.setup(i, tooldir)
+			return
+
+		module = Context.load_tool(tool, tooldir)
+		if hasattr(module, "setup"):
+			module.setup(self)
+
+	def get_env(self):
+		"""Getter for the env property"""
+		try:
+			return self.all_envs[self.variant]
+		except KeyError:
+			return self.all_envs['']
+	def set_env(self, val):
+		"""Setter for the env property"""
+		self.all_envs[self.variant] = val
+
+	env = property(get_env, set_env)
+
+	def add_manual_dependency(self, path, value):
+		"""
+		Adds a dependency from a node object to a value::
+
+			def build(bld):
+				bld.add_manual_dependency(
+					bld.path.find_resource('wscript'),
+					bld.root.find_resource('/etc/fstab'))
+
+		:param path: file path
+		:type path: string or :py:class:`waflib.Node.Node`
+		:param value: value to depend on
+		:type value: :py:class:`waflib.Node.Node`, byte object, or function returning a byte object
+		"""
+		if not path:
+			raise ValueError('Invalid input path %r' % path)
+
+		if isinstance(path, Node.Node):
+			node = path
+		elif os.path.isabs(path):
+			node = self.root.find_resource(path)
+		else:
+			node = self.path.find_resource(path)
+		if not node:
+			raise ValueError('Could not find the path %r' % path)
+
+		if isinstance(value, list):
+			self.deps_man[node].extend(value)
+		else:
+			self.deps_man[node].append(value)
+
+	def launch_node(self):
+		"""Returns the launch directory as a :py:class:`waflib.Node.Node` object (cached)"""
+		try:
+			# private cache
+			return self.p_ln
+		except AttributeError:
+			self.p_ln = self.root.find_dir(self.launch_dir)
+			return self.p_ln
+
+	def hash_env_vars(self, env, vars_lst):
+		"""
+		Hashes configuration set variables::
+
+			def build(bld):
+				bld.hash_env_vars(bld.env, ['CXX', 'CC'])
+
+		This method uses an internal cache.
+
+		:param env: Configuration Set
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param vars_lst: list of variables
+		:type vars_lst: list of string
+		"""
+
+		if not env.table:
+			env = env.parent
+			if not env:
+				return Utils.SIG_NIL
+
+		idx = str(id(env)) + str(vars_lst)
+		try:
+			cache = self.cache_env
+		except AttributeError:
+			cache = self.cache_env = {}
+		else:
+			try:
+				return self.cache_env[idx]
+			except KeyError:
+				pass
+
+		lst = [env[a] for a in vars_lst]
+		cache[idx] = ret = Utils.h_list(lst)
+		Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
+		return ret
+
+	def get_tgen_by_name(self, name):
+		"""
+		Fetches a task generator by its name or its target attribute;
+		the name must be unique in a build::
+
+			def build(bld):
+				tg = bld(name='foo')
+				tg == bld.get_tgen_by_name('foo')
+
+		This method uses a private internal cache.
+
+		:param name: Task generator name
+		:raises: :py:class:`waflib.Errors.WafError` in case there is no task generator by that name
+		"""
+		cache = self.task_gen_cache_names
+		if not cache:
+			# create the index lazily
+			for g in self.groups:
+				for tg in g:
+					try:
+						cache[tg.name] = tg
+					except AttributeError:
+						# raised if not a task generator, which should be uncommon
+						pass
+		try:
+			return cache[name]
+		except KeyError:
+			raise Errors.WafError('Could not find a task generator for the name %r' % name)
+
+	def progress_line(self, idx, total, col1, col2):
+		"""
+		Computes a progress bar line displayed when running ``waf -p``
+
+		:returns: progress bar line
+		:rtype: string
+		"""
+		if not sys.stderr.isatty():
+			return ''
+
+		n = len(str(total))
+
+		Utils.rot_idx += 1
+		ind = Utils.rot_chr[Utils.rot_idx % 4]
+
+		pc = (100. * idx)/total
+		fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind)
+		left = fs % (idx, total, col1, pc, col2)
+		right = '][%s%s%s]' % (col1, self.timer, col2)
+
+		cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
+		if cols < 7:
+			cols = 7
+
+		ratio = ((cols * idx)//total) - 1
+
+		bar = ('='*ratio+'>').ljust(cols)
+		msg = Logs.indicator % (left, bar, right)
+
+		return msg
+
+	def declare_chain(self, *k, **kw):
+		"""
+		Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience
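+
+		An illustrative sketch (the rule and extensions here are arbitrary)::
+
+			def build(bld):
+				bld.declare_chain(name='copy',
+					rule='cp ${SRC} ${TGT}',
+					ext_in='.txt', ext_out='.out')
+				bld(source='hello.txt') # hello.txt -> hello.out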
+		"""
+		return TaskGen.declare_chain(*k, **kw)
+
+	def pre_build(self):
+		"""Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`"""
+		for m in getattr(self, 'pre_funs', []):
+			m(self)
+
+	def post_build(self):
+		"""Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`"""
+		for m in getattr(self, 'post_funs', []):
+			m(self)
+
+	def add_pre_fun(self, meth):
+		"""
+		Binds a callback method to execute after the scripts are read and before the build starts::
+
+			def mycallback(bld):
+				print("Hello, world!")
+
+			def build(bld):
+				bld.add_pre_fun(mycallback)
+		"""
+		try:
+			self.pre_funs.append(meth)
+		except AttributeError:
+			self.pre_funs = [meth]
+
+	def add_post_fun(self, meth):
+		"""
+		Binds a callback method to execute immediately after the build is successful::
+
+			def call_ldconfig(bld):
+				bld.exec_command('/sbin/ldconfig')
+
+			def build(bld):
+				if bld.cmd == 'install':
+					bld.add_post_fun(call_ldconfig)
+		"""
+		try:
+			self.post_funs.append(meth)
+		except AttributeError:
+			self.post_funs = [meth]
+
+	def get_group(self, x):
+		"""
+		Returns the build group named `x`, or the current group if `x` is None
+
+		:param x: name or number or None
+		:type x: string, int or None
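+
+		Illustrative use (the group name is arbitrary)::
+
+			def build(bld):
+				bld.add_group('tools')
+				grp = bld.get_group('tools') # by name
+				grp == bld.get_group(0) # by index, if 'tools' is the first group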
+		"""
+		if not self.groups:
+			self.add_group()
+		if x is None:
+			return self.groups[self.current_group]
+		if x in self.group_names:
+			return self.group_names[x]
+		return self.groups[x]
+
+	def add_to_group(self, tgen, group=None):
+		"""Adds a task or a task generator to the build; there is no attempt to remove it if it was already added."""
+		assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task))
+		tgen.bld = self
+		self.get_group(group).append(tgen)
+
+	def get_group_name(self, g):
+		"""
+		Returns the name of the input build group
+
+		:param g: build group object or build group index
+		:type g: integer or list
+		:return: name
+		:rtype: string
+		"""
+		if not isinstance(g, list):
+			g = self.groups[g]
+		for x in self.group_names:
+			if id(self.group_names[x]) == id(g):
+				return x
+		return ''
+
+	def get_group_idx(self, tg):
+		"""
+		Returns the index of the group containing the task generator given as argument::
+
+			def build(bld):
+				tg = bld(name='nada')
+				0 == bld.get_group_idx(tg)
+
+		:param tg: Task generator object
+		:type tg: :py:class:`waflib.TaskGen.task_gen`
+		:rtype: int
+		"""
+		se = id(tg)
+		for i, tmp in enumerate(self.groups):
+			for t in tmp:
+				if id(t) == se:
+					return i
+		return None
+
+	def add_group(self, name=None, move=True):
+		"""
+		Adds a new group of tasks/task generators. By default the new group becomes
+		the default group for new task generators (make sure to create build groups in order).
+
+		:param name: name for this group
+		:type name: string
+		:param move: set this new group as default group (True by default)
+		:type move: bool
+		:raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists
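+
+		Illustrative use (names and rules are arbitrary)::
+
+			def build(bld):
+				bld(rule='touch ${TGT}', target='first.txt') # default group
+				bld.add_group('second')
+				bld(rule='touch ${TGT}', target='second.txt') # added to the new group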
+		"""
+		if name and name in self.group_names:
+			raise Errors.WafError('add_group: name %s already present' % name)
+		g = []
+		self.group_names[name] = g
+		self.groups.append(g)
+		if move:
+			self.current_group = len(self.groups) - 1
+
+	def set_group(self, idx):
+		"""
+		Sets the build group at position idx as current so that newly added
+		task generators are added to this one by default::
+
+			def build(bld):
+				bld(rule='touch ${TGT}', target='foo.txt')
+				bld.add_group() # now the current group is 1
+				bld(rule='touch ${TGT}', target='bar.txt')
+				bld.set_group(0) # now the current group is 0
+				bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt
+
+		:param idx: group name or group index
+		:type idx: string or int
+		"""
+		if isinstance(idx, str):
+			g = self.group_names[idx]
+			for i, tmp in enumerate(self.groups):
+				if id(g) == id(tmp):
+					self.current_group = i
+					break
+		else:
+			self.current_group = idx
+
+	def total(self):
+		"""
+		Approximate task count: this value may be inaccurate if task generators
+		are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`).
+		The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution.
+
+		:rtype: int
+		"""
+		total = 0
+		for group in self.groups:
+			for tg in group:
+				try:
+					total += len(tg.tasks)
+				except AttributeError:
+					total += 1
+		return total
+
+	def get_targets(self):
+		"""
+		This method returns a pair containing the index of the last build group to post,
+		and the list of task generator objects corresponding to the target names.
+
+		This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+		to perform partial builds::
+
+			$ waf --targets=myprogram,myshlib
+
+		:return: the index of the last build group to post, and the list of task generators in it
+		:rtype: tuple
+		"""
+		to_post = []
+		min_grp = 0
+		for name in self.targets.split(','):
+			tg = self.get_tgen_by_name(name)
+			m = self.get_group_idx(tg)
+			if m > min_grp:
+				min_grp = m
+				to_post = [tg]
+			elif m == min_grp:
+				to_post.append(tg)
+		return (min_grp, to_post)
+
+	def get_all_task_gen(self):
+		"""
+		Returns a list of all task generators for troubleshooting purposes.
+		"""
+		lst = []
+		for g in self.groups:
+			lst.extend(g)
+		return lst
+
+	def post_group(self):
+		"""
+		Post task generators from the group indexed by self.current_group; used internally
+		by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+		"""
+		def tgpost(tg):
+			try:
+				f = tg.post
+			except AttributeError:
+				pass
+			else:
+				f()
+
+		if self.targets == '*':
+			for tg in self.groups[self.current_group]:
+				tgpost(tg)
+		elif self.targets:
+			if self.current_group < self._min_grp:
+				for tg in self.groups[self.current_group]:
+					tgpost(tg)
+			else:
+				for tg in self._exact_tg:
+					tg.post()
+		else:
+			ln = self.launch_node()
+			if ln.is_child_of(self.bldnode):
+				if Logs.verbose > 1:
+					Logs.warn('Building from the build directory, forcing --targets=*')
+				ln = self.srcnode
+			elif not ln.is_child_of(self.srcnode):
+				if Logs.verbose > 1:
+					Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath())
+				ln = self.srcnode
+
+			def is_post(tg, ln):
+				try:
+					p = tg.path
+				except AttributeError:
+					pass
+				else:
+					if p.is_child_of(ln):
+						return True
+
+			def is_post_group():
+				for i, g in enumerate(self.groups):
+					if i > self.current_group:
+						for tg in g:
+							if is_post(tg, ln):
+								return True
+
+			if self.post_mode == POST_LAZY and ln != self.srcnode:
+				# partial folder builds require all targets from a previous build group
+				if is_post_group():
+					ln = self.srcnode
+
+			for tg in self.groups[self.current_group]:
+				if is_post(tg, ln):
+					tgpost(tg)
+
+	def get_tasks_group(self, idx):
+		"""
+		Returns all task instances for the build group at position idx,
+		used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
+
+		:rtype: list of :py:class:`waflib.Task.Task`
+		"""
+		tasks = []
+		for tg in self.groups[idx]:
+			try:
+				tasks.extend(tg.tasks)
+			except AttributeError: # not a task generator
+				tasks.append(tg)
+		return tasks
+
+	def get_build_iterator(self):
+		"""
+		Creates a Python generator object that returns lists of tasks that may be processed in parallel.
+
+		:return: tasks which can be executed immediately
+		:rtype: generator returning lists of :py:class:`waflib.Task.Task`
+		"""
+		if self.targets and self.targets != '*':
+			(self._min_grp, self._exact_tg) = self.get_targets()
+
+		if self.post_mode != POST_LAZY:
+			for self.current_group, _ in enumerate(self.groups):
+				self.post_group()
+
+		for self.current_group, _ in enumerate(self.groups):
+			# first post the task generators for the group
+			if self.post_mode != POST_AT_ONCE:
+				self.post_group()
+
+			# then extract the tasks
+			tasks = self.get_tasks_group(self.current_group)
+
+			# if the constraints are set properly (ext_in/ext_out, before/after)
+			# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
+			# (but leave set_file_constraints for the installation step)
+			#
+			# if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
+			#
+			Task.set_file_constraints(tasks)
+			Task.set_precedence_constraints(tasks)
+
+			self.cur_tasks = tasks
+			if tasks:
+				yield tasks
+
+		while 1:
+			# the build stops once there are no tasks to process
+			yield []
+
+	def install_files(self, dest, files, **kw):
+		"""
+		Creates a task generator to install files on the system::
+
+			def build(bld):
+				bld.install_files('${DATADIR}', bld.path.find_resource('wscript'))
+
+		:param dest: path representing the destination directory
+		:type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+		:param files: input files
+		:type files: list of strings or list of :py:class:`waflib.Node.Node`
+		:param env: configuration set to expand *dest*
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param relative_trick: preserve the folder hierarchy when installing whole folders
+		:type relative_trick: bool
+		:param cwd: parent node for searching *files*, when *files* are not instances of :py:class:`waflib.Node.Node`
+		:type cwd: :py:class:`waflib.Node.Node`
+		:param postpone: when set to False, the installation task is executed immediately (default: True)
+		:type postpone: bool
+		"""
+		assert(dest)
+		tg = self(features='install_task', install_to=dest, install_from=files, **kw)
+		tg.dest = tg.install_to
+		tg.type = 'install_files'
+		if not kw.get('postpone', True):
+			tg.post()
+		return tg
+
+	def install_as(self, dest, srcfile, **kw):
+		"""
+		Creates a task generator to install a file on the system with a different name::
+
+			def build(bld):
+				bld.install_as('${PREFIX}/bin/myapp', 'myapp', chmod=Utils.O755)
+
+		:param dest: destination file
+		:type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+		:param srcfile: input file
+		:type srcfile: string or :py:class:`waflib.Node.Node`
+		:param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node`
+		:type cwd: :py:class:`waflib.Node.Node`
+		:param env: configuration set for performing substitutions in dest
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param postpone: when set to False, the installation task is executed immediately (default: True)
+		:type postpone: bool
+		"""
+		assert(dest)
+		tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw)
+		tg.dest = tg.install_to
+		tg.type = 'install_as'
+		if not kw.get('postpone', True):
+			tg.post()
+		return tg
+
+	def symlink_as(self, dest, src, **kw):
+		"""
+		Creates a task generator to install a symlink::
+
+			def build(bld):
+				bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')
+
+		:param dest: absolute path of the symlink
+		:type dest: :py:class:`waflib.Node.Node` or string (absolute path)
+		:param src: link contents, a relative or absolute path that may or may not exist
+		:type src: string
+		:param env: configuration set for performing substitutions in dest
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
+		:type add: bool
+		:param postpone: when set to False, the installation task is executed immediately (default: True)
+		:type postpone: bool
+		:param relative_trick: make the symlink relative (default: ``False``)
+		:type relative_trick: bool
+		"""
+		assert(dest)
+		tg = self(features='install_task', install_to=dest, install_from=src, **kw)
+		tg.dest = tg.install_to
+		tg.type = 'symlink_as'
+		tg.link = src
+		# TODO if add: self.add_to_group(tsk)
+		if not kw.get('postpone', True):
+			tg.post()
+		return tg
+
+@TaskGen.feature('install_task')
+@TaskGen.before_method('process_rule', 'process_source')
+def process_install_task(self):
+	"""Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally."""
+	self.add_install_task(**self.__dict__)
+
+@TaskGen.taskgen_method
+def add_install_task(self, **kw):
+	"""
+	Creates the installation task for the current task generator, and executes it immediately if necessary
+
+	:returns: An installation task
+	:rtype: :py:class:`waflib.Build.inst`
+	"""
+	if not self.bld.is_install:
+		return
+	if not kw['install_to']:
+		return
+
+	if kw['type'] == 'symlink_as' and Utils.is_win32:
+		if kw.get('win32_install'):
+			kw['type'] = 'install_as'
+		else:
+			# just exit
+			return
+
+	tsk = self.install_task = self.create_task('inst')
+	tsk.chmod = kw.get('chmod', Utils.O644)
+	tsk.link = kw.get('link', '') or kw.get('install_from', '')
+	tsk.relative_trick = kw.get('relative_trick', False)
+	tsk.type = kw['type']
+	tsk.install_to = tsk.dest = kw['install_to']
+	tsk.install_from = kw['install_from']
+	tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path)
+	tsk.install_user = kw.get('install_user')
+	tsk.install_group = kw.get('install_group')
+	tsk.init_files()
+	if not kw.get('postpone', True):
+		tsk.run_now()
+	return tsk
+
+@TaskGen.taskgen_method
+def add_install_files(self, **kw):
+	"""
+	Creates an installation task for files
+
+	:returns: An installation task
+	:rtype: :py:class:`waflib.Build.inst`
+	"""
+	kw['type'] = 'install_files'
+	return self.add_install_task(**kw)
+
+@TaskGen.taskgen_method
+def add_install_as(self, **kw):
+	"""
+	Creates an installation task for a single file
+
+	:returns: An installation task
+	:rtype: :py:class:`waflib.Build.inst`
+	"""
+	kw['type'] = 'install_as'
+	return self.add_install_task(**kw)
+
+@TaskGen.taskgen_method
+def add_symlink_as(self, **kw):
+	"""
+	Creates an installation task for a symbolic link
+
+	:returns: An installation task
+	:rtype: :py:class:`waflib.Build.inst`
+	"""
+	kw['type'] = 'symlink_as'
+	return self.add_install_task(**kw)
+
+class inst(Task.Task):
+	"""Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and :py:class:`waflib.Build.UnInstallContext`"""
+	def __str__(self):
+		"""Returns an empty string to disable the standard task display"""
+		return ''
+
+	def uid(self):
+		"""Returns a unique identifier for the task"""
+		lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()]
+		return Utils.h_list(lst)
+
+	def init_files(self):
+		"""
+		Initializes the task input and output nodes
+		"""
+		if self.type == 'symlink_as':
+			inputs = []
+		else:
+			inputs = self.generator.to_nodes(self.install_from)
+			if self.type == 'install_as':
+				assert len(inputs) == 1
+		self.set_inputs(inputs)
+
+		dest = self.get_install_path()
+		outputs = []
+		if self.type == 'symlink_as':
+			if self.relative_trick:
+				self.link = os.path.relpath(self.link, os.path.dirname(dest))
+			outputs.append(self.generator.bld.root.make_node(dest))
+		elif self.type == 'install_as':
+			outputs.append(self.generator.bld.root.make_node(dest))
+		else:
+			for y in inputs:
+				if self.relative_trick:
+					destfile = os.path.join(dest, y.path_from(self.relative_base))
+				else:
+					destfile = os.path.join(dest, y.name)
+				outputs.append(self.generator.bld.root.make_node(destfile))
+		self.set_outputs(outputs)
+
+	def runnable_status(self):
+		"""
+		Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`.
+		"""
+		ret = super(inst, self).runnable_status()
+		if ret == Task.SKIP_ME and self.generator.bld.is_install:
+			return Task.RUN_ME
+		return ret
+
+	def post_run(self):
+		"""
+		Disables any post-run operations
+		"""
+		pass
+
+	def get_install_path(self, destdir=True):
+		"""
+		Returns the destination path where files will be installed, prepending `destdir`.
+
+		Relative paths will be interpreted relative to `PREFIX` if no `destdir` is given.
+
+		:rtype: string
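+
+		For example, with ``PREFIX=/usr/local`` and ``--destdir=/tmp/stage`` (illustrative values),
+		``${PREFIX}/bin`` resolves to ``/tmp/stage/usr/local/bin``.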
+		"""
+		if isinstance(self.install_to, Node.Node):
+			dest = self.install_to.abspath()
+		else:
+			dest = os.path.normpath(Utils.subst_vars(self.install_to, self.env))
+		if not os.path.isabs(dest):
+			dest = os.path.join(self.env.PREFIX, dest)
+		if destdir and Options.options.destdir:
+			dest = Options.options.destdir.rstrip(os.sep) + os.sep + os.path.splitdrive(dest)[1].lstrip(os.sep)
+		return dest
+
+	def copy_fun(self, src, tgt):
+		"""
+		Copies a file from src to tgt, preserving permissions and trying to work
+		around path limitations on Windows platforms. On Unix-like platforms,
+		the owner/group of the target file may be set through install_user/install_group
+
+		:param src: absolute path
+		:type src: string
+		:param tgt: absolute path
+		:type tgt: string
+		"""
+		# override this if you want to strip executables
+		# kw['tsk'].source is the task that created the files in the build
+		if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'):
+			tgt = '\\\\?\\' + tgt
+		shutil.copy2(src, tgt)
+		self.fix_perms(tgt)
+
+	def rm_empty_dirs(self, tgt):
+		"""
+		Removes empty folders recursively when uninstalling.
+
+		:param tgt: absolute path
+		:type tgt: string
+		"""
+		while tgt:
+			tgt = os.path.dirname(tgt)
+			try:
+				os.rmdir(tgt)
+			except OSError:
+				break
+
+	def run(self):
+		"""
+		Performs file or symlink installation
+		"""
+		is_install = self.generator.bld.is_install
+		if not is_install: # unnecessary?
+			return
+
+		for x in self.outputs:
+			if is_install == INSTALL:
+				x.parent.mkdir()
+		if self.type == 'symlink_as':
+			fun = is_install == INSTALL and self.do_link or self.do_unlink
+			fun(self.link, self.outputs[0].abspath())
+		else:
+			fun = is_install == INSTALL and self.do_install or self.do_uninstall
+			launch_node = self.generator.bld.launch_node()
+			for x, y in zip(self.inputs, self.outputs):
+				fun(x.abspath(), y.abspath(), x.path_from(launch_node))
+
+	def run_now(self):
+		"""
+		Try executing the installation task right now
+
+		:raises: :py:class:`waflib.Errors.TaskNotReady`
+		"""
+		status = self.runnable_status()
+		if status not in (Task.RUN_ME, Task.SKIP_ME):
+			raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status))
+		self.run()
+		self.hasrun = Task.SUCCESS
+
+	def do_install(self, src, tgt, lbl, **kw):
+		"""
+		Copies a file from src to tgt with given file permissions. The actual copy is only performed
+		if the source and target file sizes or timestamps differ. When the copy occurs,
+		the file is always first removed and then copied so as to prevent stale inodes.
+
+		:param src: file name as absolute path
+		:type src: string
+		:param tgt: file destination, as absolute path
+		:type tgt: string
+		:param lbl: file source description
+		:type lbl: string
+		:param chmod: installation mode
+		:type chmod: int
+		:raises: :py:class:`waflib.Errors.WafError` if the file cannot be written
+		"""
+		if not Options.options.force:
+			# check if the file is already there to avoid a copy
+			try:
+				st1 = os.stat(tgt)
+				st2 = os.stat(src)
+			except OSError:
+				pass
+			else:
+				# same size and close enough timestamps (2-second tolerance) -> make no copy
+				if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
+					if not self.generator.bld.progress_bar:
+						c1 = Logs.colors.NORMAL
+						c2 = Logs.colors.BLUE
+						Logs.info('%s- install %s%s%s (from %s)', c1, c2, tgt, c1, lbl)
+					return False
+
+		if not self.generator.bld.progress_bar:
+			c1 = Logs.colors.NORMAL
+			c2 = Logs.colors.BLUE
+			Logs.info('%s+ install %s%s%s (from %s)', c1, c2, tgt, c1, lbl)
+
+		# Give best attempt at making destination overwritable,
+		# like the 'install' utility used by 'make install' does.
+		try:
+			os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
+		except EnvironmentError:
+			pass
+
+		# following is for shared libs and stale inodes (-_-)
+		try:
+			os.remove(tgt)
+		except OSError:
+			pass
+
+		try:
+			self.copy_fun(src, tgt)
+		except EnvironmentError as e:
+			if not os.path.exists(src):
+				Logs.error('File %r does not exist', src)
+			elif not os.path.isfile(src):
+				Logs.error('Input %r is not a file', src)
+			raise Errors.WafError('Could not install the file %r' % tgt, e)
+
+	def fix_perms(self, tgt):
+		"""
+		Changes the ownership of the file/folder/link pointed to by the given path.
+		This looks up the `install_user` and `install_group` attributes
+		on the task or on the task generator::
+
+			def build(bld):
+				bld.install_as('${PREFIX}/wscript',
+					'wscript',
+					install_user='nobody', install_group='nogroup')
+				bld.symlink_as('${PREFIX}/wscript_link',
+					Utils.subst_vars('${PREFIX}/wscript', bld.env),
+					install_user='nobody', install_group='nogroup')
+		"""
+		if not Utils.is_win32:
+			user = getattr(self, 'install_user', None) or getattr(self.generator, 'install_user', None)
+			group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None)
+			if user or group:
+				Utils.lchown(tgt, user or -1, group or -1)
+		if not os.path.islink(tgt):
+			os.chmod(tgt, self.chmod)
+
+	def do_link(self, src, tgt, **kw):
+		"""
+		Creates a symlink from tgt to src.
+
+		:param src: file name as absolute path
+		:type src: string
+		:param tgt: file destination, as absolute path
+		:type tgt: string
+		"""
+		if os.path.islink(tgt) and os.readlink(tgt) == src:
+			if not self.generator.bld.progress_bar:
+				c1 = Logs.colors.NORMAL
+				c2 = Logs.colors.BLUE
+				Logs.info('%s- symlink %s%s%s (to %s)', c1, c2, tgt, c1, src)
+		else:
+			try:
+				os.remove(tgt)
+			except OSError:
+				pass
+			if not self.generator.bld.progress_bar:
+				c1 = Logs.colors.NORMAL
+				c2 = Logs.colors.BLUE
+				Logs.info('%s+ symlink %s%s%s (to %s)', c1, c2, tgt, c1, src)
+			os.symlink(src, tgt)
+			self.fix_perms(tgt)
+
+	def do_uninstall(self, src, tgt, lbl, **kw):
+		"""
+		See :py:meth:`waflib.Build.inst.do_install`
+		"""
+		if not self.generator.bld.progress_bar:
+			c1 = Logs.colors.NORMAL
+			c2 = Logs.colors.BLUE
+			Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1)
+
+		#self.uninstall.append(tgt)
+		try:
+			os.remove(tgt)
+		except OSError as e:
+			if e.errno != errno.ENOENT:
+				if not getattr(self, 'uninstall_error', None):
+					self.uninstall_error = True
+					Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+				if Logs.verbose > 1:
+					Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno)
+		self.rm_empty_dirs(tgt)
+
+	def do_unlink(self, src, tgt, **kw):
+		"""
+		See :py:meth:`waflib.Build.inst.do_link`
+		"""
+		try:
+			if not self.generator.bld.progress_bar:
+				c1 = Logs.colors.NORMAL
+				c2 = Logs.colors.BLUE
+				Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1)
+			os.remove(tgt)
+		except OSError:
+			pass
+		self.rm_empty_dirs(tgt)
+
+class InstallContext(BuildContext):
+	'''installs the targets on the system'''
+	cmd = 'install'
+
+	def __init__(self, **kw):
+		super(InstallContext, self).__init__(**kw)
+		self.is_install = INSTALL
+
+class UninstallContext(InstallContext):
+	'''removes the targets installed'''
+	cmd = 'uninstall'
+
+	def __init__(self, **kw):
+		super(UninstallContext, self).__init__(**kw)
+		self.is_install = UNINSTALL
+
+class CleanContext(BuildContext):
+	'''cleans the project'''
+	cmd = 'clean'
+	def execute(self):
+		"""
+		See :py:func:`waflib.Build.BuildContext.execute`.
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+
+		self.recurse([self.run_dir])
+		try:
+			self.clean()
+		finally:
+			self.store()
+
+	def clean(self):
+		"""
+		Remove most files from the build directory, and reset all caches.
+
+		Custom lists of files to clean can be declared as `bld.clean_files`.
+		For example, exclude `build/program/myprogram` from getting removed::
+
+			def build(bld):
+				bld.clean_files = bld.bldnode.ant_glob('**',
+					excl='.lock* config.log c4che/* config.h program/myprogram',
+					quiet=True, generator=True)
+		"""
+		Logs.debug('build: clean called')
+
+		if hasattr(self, 'clean_files'):
+			for n in self.clean_files:
+				n.delete()
+		elif self.bldnode != self.srcnode:
+			# would lead to a disaster if top == out
+			lst = []
+			for env in self.all_envs.values():
+				lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES])
+			excluded_dirs = '.lock* *conf_check_*/** config.log %s/*' % CACHE_DIR
+			for n in self.bldnode.ant_glob('**/*', excl=excluded_dirs, quiet=True):
+				if n in lst:
+					continue
+				n.delete()
+		self.root.children = {}
+
+		for v in SAVED_ATTRS:
+			if v == 'root':
+				continue
+			setattr(self, v, {})
+
+class ListContext(BuildContext):
+	'''lists the targets to execute'''
+	cmd = 'list'
+
+	def execute(self):
+		"""
+		In addition to printing the name of each build target,
+		a description column will include text for each task
+		generator which has a "description" field set.
+
+		See :py:func:`waflib.Build.BuildContext.execute`.
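+
+		A sketch of a task generator carrying a description (names are illustrative)::
+
+			def build(bld):
+				bld(rule='touch ${TGT}', target='foo', name='foo',
+					description='builds the foo artifact')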
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+
+		self.recurse([self.run_dir])
+		self.pre_build()
+
+		# display the time elapsed in the progress bar
+		self.timer = Utils.Timer()
+
+		for g in self.groups:
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+		try:
+			# force the cache initialization
+			self.get_tgen_by_name('')
+		except Errors.WafError:
+			pass
+
+		targets = sorted(self.task_gen_cache_names)
+
+		# figure out how much to left-justify, for largest target name
+		line_just = max(len(t) for t in targets) if targets else 0
+
+		for target in targets:
+			tgen = self.task_gen_cache_names[target]
+
+			# Support displaying the description for the target
+			# if it was set on the tgen
+			descript = getattr(tgen, 'description', '')
+			if descript:
+				target = target.ljust(line_just)
+				descript = ': %s' % descript
+
+			Logs.pprint('GREEN', target, label=descript)
+
+class StepContext(BuildContext):
+	'''executes tasks in a step-by-step fashion, for debugging'''
+	cmd = 'step'
+
+	def __init__(self, **kw):
+		super(StepContext, self).__init__(**kw)
+		self.files = Options.options.files
+
+	def compile(self):
+		"""
+		Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build
+		on tasks matching the input/output pattern given (regular expression matching)::
+
+			$ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o
+			$ waf step --files=in:foo.cpp.1.o # link task only
+
+		"""
+		if not self.files:
+			Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
+			BuildContext.compile(self)
+			return
+
+		targets = []
+		if self.targets and self.targets != '*':
+			targets = self.targets.split(',')
+
+		for g in self.groups:
+			for tg in g:
+				if targets and tg.name not in targets:
+					continue
+
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+			for pat in self.files.split(','):
+				matcher = self.get_matcher(pat)
+				for tg in g:
+					if isinstance(tg, Task.Task):
+						lst = [tg]
+					else:
+						lst = tg.tasks
+					for tsk in lst:
+						do_exec = False
+						for node in tsk.inputs:
+							if matcher(node, output=False):
+								do_exec = True
+								break
+						for node in tsk.outputs:
+							if matcher(node, output=True):
+								do_exec = True
+								break
+						if do_exec:
+							ret = tsk.run()
+							Logs.info('%s -> exit %r', tsk, ret)
+
+	def get_matcher(self, pat):
+		"""
+		Converts a step pattern into a function
+
+		:param pat: pattern of the form in:truc.c,out:bar.o
+		:returns: Python function that uses Node objects as inputs and returns matches
+		:rtype: function
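+
+		For example, ``in:main.c`` produces a matcher that accepts only task inputs
+		whose absolute path matches ``main.c`` at the end (regular expression semantics).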
+		"""
+		# this returns a function
+		inn = True
+		out = True
+		if pat.startswith('in:'):
+			out = False
+			pat = pat.replace('in:', '')
+		elif pat.startswith('out:'):
+			inn = False
+			pat = pat.replace('out:', '')
+
+		anode = self.root.find_node(pat)
+		pattern = None
+		if not anode:
+			if not pat.startswith('^'):
+				pat = '^.+?%s' % pat
+			if not pat.endswith('$'):
+				pat = '%s$' % pat
+			pattern = re.compile(pat)
+
+		def match(node, output):
+			if output and not out:
+				return False
+			if not output and not inn:
+				return False
+
+			if anode:
+				return anode == node
+			else:
+				return pattern.match(node.abspath())
+		return match
+
+class EnvContext(BuildContext):
+	"""Subclass EnvContext to create commands that require configuration data in 'env'"""
+	fun = cmd = None
+	def execute(self):
+		"""
+		See :py:func:`waflib.Build.BuildContext.execute`.
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
+
diff --git a/third_party/waf/waflib/ConfigSet.py b/third_party/waf/waflib/ConfigSet.py
new file mode 100644
index 0000000..901fba6
--- /dev/null
+++ b/third_party/waf/waflib/ConfigSet.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+
+ConfigSet: a special dict
+
+The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
+"""
+
+import copy, re, os
+from waflib import Logs, Utils
+re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
+
+class ConfigSet(object):
+	"""
+	A copy-on-write dict with a human-readable (Python-like) serialization format
+	based on eval() and repr(). Prefer pickle for high performance.
+	Do not store functions, as they are not serializable.
+
+	The values can be accessed by attributes or by keys::
+
+		from waflib.ConfigSet import ConfigSet
+		env = ConfigSet()
+		env.FOO = 'test'
+		env['FOO'] = 'test'
+	"""
+	__slots__ = ('table', 'parent')
+	def __init__(self, filename=None):
+		self.table = {}
+		"""
+		Internal dict holding the object values
+		"""
+		#self.parent = None
+
+		if filename:
+			self.load(filename)
+
+	def __contains__(self, key):
+		"""
+		Enables the *in* syntax::
+
+			if 'foo' in env:
+				print(env['foo'])
+		"""
+		if key in self.table:
+			return True
+		try:
+			return self.parent.__contains__(key)
+		except AttributeError:
+			return False # parent may not exist
+
+	def keys(self):
+		"""Dict interface"""
+		keys = set()
+		cur = self
+		while cur:
+			keys.update(cur.table.keys())
+			cur = getattr(cur, 'parent', None)
+		keys = list(keys)
+		keys.sort()
+		return keys
+
+	def __iter__(self):
+		return iter(self.keys())
+
+	def __str__(self):
+		"""Text representation of the ConfigSet (for debugging purposes)"""
+		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
+
+	def __getitem__(self, key):
+		"""
+		Dictionary interface: get value from key::
+
+			def configure(conf):
+				conf.env['foo'] = {}
+				print(conf.env['foo'])
+		"""
+		try:
+			while 1:
+				x = self.table.get(key)
+				if x is not None:
+					return x
+				self = self.parent
+		except AttributeError:
+			return []
+
+	def __setitem__(self, key, value):
+		"""
+		Dictionary interface: set value from key
+		"""
+		self.table[key] = value
+
+	def __delitem__(self, key):
+		"""
+		Dictionary interface: mark the value as missing
+		"""
+		self[key] = []
+
+	def __getattr__(self, name):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				conf.env.value
+				conf.env['value']
+		"""
+		if name in self.__slots__:
+			return object.__getattribute__(self, name)
+		else:
+			return self[name]
+
+	def __setattr__(self, name, value):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				conf.env.value = x
+				conf.env['value'] = x
+		"""
+		if name in self.__slots__:
+			object.__setattr__(self, name, value)
+		else:
+			self[name] = value
+
+	def __delattr__(self, name):
+		"""
+		Attribute access provided for convenience. The following forms are equivalent::
+
+			def configure(conf):
+				del conf.env.value
+				del conf.env['value']
+		"""
+		if name in self.__slots__:
+			object.__delattr__(self, name)
+		else:
+			del self[name]
+
+	def derive(self):
+		"""
+		Returns a new ConfigSet deriving from self. The copy returned
+		will be a shallow copy::
+
+			from waflib.ConfigSet import ConfigSet
+			env = ConfigSet()
+			env.append_value('CFLAGS', ['-O2'])
+			child = env.derive()
+			child.CFLAGS.append('test') # warning! this will modify 'env'
+			child.CFLAGS = ['-O3'] # new list, ok
+			child.append_value('CFLAGS', ['-O3']) # ok
+
+		Use :py:func:`ConfigSet.detach` to detach the child from the parent.
+		"""
+		newenv = ConfigSet()
+		newenv.parent = self
+		return newenv
+
+	def detach(self):
+		"""
+		Detaches this instance from its parent (if present)
+
+		Modifying the parent :py:class:`ConfigSet` will not change the current object;
+		modifying this :py:class:`ConfigSet` will not modify the parent one.
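+
+		A short sketch of the intended usage::
+
+			child = env.derive()
+			child.detach()
+			child.CFLAGS.append('-g') # 'env' is not modified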
+		"""
+		tbl = self.get_merged_dict()
+		try:
+			delattr(self, 'parent')
+		except AttributeError:
+			pass
+		else:
+			keys = tbl.keys()
+			for x in keys:
+				tbl[x] = copy.deepcopy(tbl[x])
+			self.table = tbl
+		return self
+
+	def get_flat(self, key):
+		"""
+		Returns a value as a string. If the input is a list, the value returned is space-separated.
+
+		:param key: key to use
+		:type key: string
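+
+		For example, ``env.get_flat('CFLAGS')`` returns ``'-O2 -g'`` when the stored value is ``['-O2', '-g']``.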
+		"""
+		s = self[key]
+		if isinstance(s, str):
+			return s
+		return ' '.join(s)
+
+	def _get_list_value_for_modification(self, key):
+		"""
+		Returns a list value for further modification.
+
+		The list may be modified in place, so there is no need to do this afterwards::
+
+			self.table[var] = value
+		"""
+		try:
+			value = self.table[key]
+		except KeyError:
+			try:
+				value = self.parent[key]
+			except AttributeError:
+				value = []
+			else:
+				if isinstance(value, list):
+					# force a copy
+					value = value[:]
+				else:
+					value = [value]
+			self.table[key] = value
+		else:
+			if not isinstance(value, list):
+				self.table[key] = value = [value]
+		return value
+
+	def append_value(self, var, val):
+		"""
+		Appends a value to the specified config key::
+
+			def build(bld):
+				bld.env.append_value('CFLAGS', ['-O2'])
+
+		The value must be a list or a tuple
+		"""
+		if isinstance(val, str): # if there were string everywhere we could optimize this
+			val = [val]
+		current_value = self._get_list_value_for_modification(var)
+		current_value.extend(val)
+
+	def prepend_value(self, var, val):
+		"""
+		Prepends a value to the specified item::
+
+			def configure(conf):
+				conf.env.prepend_value('CFLAGS', ['-O2'])
+
+		The value must be a list or a tuple
+		"""
+		if isinstance(val, str):
+			val = [val]
+		self.table[var] = val + self._get_list_value_for_modification(var)
+
+	def append_unique(self, var, val):
+		"""
+		Appends a value to the specified item only if it's not already present::
+
+			def build(bld):
+				bld.env.append_unique('CFLAGS', ['-O2', '-g'])
+
+		The value must be a list or a tuple
+		"""
+		if isinstance(val, str):
+			val = [val]
+		current_value = self._get_list_value_for_modification(var)
+
+		for x in val:
+			if x not in current_value:
+				current_value.append(x)
+
+	def get_merged_dict(self):
+		"""
+		Computes the merged dictionary from the fusion of self and all its parents
+
+		:rtype: dict
+		"""
+		table_list = []
+		env = self
+		while 1:
+			table_list.insert(0, env.table)
+			try:
+				env = env.parent
+			except AttributeError:
+				break
+		merged_table = {}
+		for table in table_list:
+			merged_table.update(table)
+		return merged_table
+
+	def store(self, filename):
+		"""
+		Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.
+
+		:param filename: file to use
+		:type filename: string
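+
+		A round-trip sketch (the file name is illustrative)::
+
+			env = ConfigSet()
+			env.FOO = 'bar'
+			env.store('/tmp/cache.txt')
+			assert ConfigSet('/tmp/cache.txt').FOO == 'bar'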
+		"""
+		try:
+			os.makedirs(os.path.split(filename)[0])
+		except OSError:
+			pass
+
+		buf = []
+		merged_table = self.get_merged_dict()
+		keys = list(merged_table.keys())
+		keys.sort()
+
+		try:
+			fun = ascii
+		except NameError:
+			fun = repr
+
+		for k in keys:
+			if k != 'undo_stack':
+				buf.append('%s = %s\n' % (k, fun(merged_table[k])))
+		Utils.writef(filename, ''.join(buf))
+
+	def load(self, filename):
+		"""
+		Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.
+
+		:param filename: file to use
+		:type filename: string
+		"""
+		tbl = self.table
+		code = Utils.readf(filename, m='r')
+		for m in re_imp.finditer(code):
+			g = m.group
+			tbl[g(2)] = eval(g(3))
+		Logs.debug('env: %s', self.table)
+
+	def update(self, d):
+		"""
+		Dictionary interface: replace values with the ones from another dict
+
+		:param d: object to use the value from
+		:type d: dict-like object
+		"""
+		self.table.update(d)
+
+	def stash(self):
+		"""
+		Stores the object state to provide transactionality semantics::
+
+			env = ConfigSet()
+			env.stash()
+			try:
+				env.append_value('CFLAGS', '-O3')
+				call_some_method(env)
+			finally:
+				env.revert()
+
+		The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
+		"""
+		orig = self.table
+		tbl = self.table = self.table.copy()
+		for x in tbl.keys():
+			tbl[x] = copy.deepcopy(tbl[x])
+		self.undo_stack = self.undo_stack + [orig]
+
+	def commit(self):
+		"""
+		Commits transactional changes. See :py:meth:`ConfigSet.stash`
+		"""
+		self.undo_stack.pop(-1)
+
+	def revert(self):
+		"""
+		Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
+		"""
+		self.table = self.undo_stack.pop(-1)
+
diff --git a/third_party/waf/waflib/Configure.py b/third_party/waf/waflib/Configure.py
new file mode 100644
index 0000000..f6fdc4e
--- /dev/null
+++ b/third_party/waf/waflib/Configure.py
@@ -0,0 +1,656 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Configuration system
+
+A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to:
+
+* create data dictionaries (ConfigSet instances)
+* store the list of modules to import
+* hold configuration routines such as ``find_program``, etc
+"""
+
+import os, re, shlex, shutil, sys, time, traceback
+from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
+
+WAF_CONFIG_LOG = 'config.log'
+"""Name of the configuration log file"""
+
+autoconfig = False
+"""Execute the configuration automatically"""
+
+conf_template = '''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#'''
+
+class ConfigurationContext(Context.Context):
+	'''configures the project'''
+
+	cmd = 'configure'
+
+	error_handlers = []
+	"""
+	Additional functions to handle configuration errors
+	"""
+
+	def __init__(self, **kw):
+		super(ConfigurationContext, self).__init__(**kw)
+		self.environ = dict(os.environ)
+		self.all_envs = {}
+
+		self.top_dir = None
+		self.out_dir = None
+
+		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
+
+		self.hash = 0
+		self.files = []
+
+		self.tool_cache = []
+
+		self.setenv('')
+
+	def setenv(self, name, env=None):
+		"""
+		Set a new config set for conf.env. If a config set of that name already exists,
+		recall it without modification.
+
+		The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
+		is also used as *variants* by the build commands.
+		Though related to variants, the config set may hold any kind of data::
+
+			def configure(cfg):
+				cfg.env.ONE = 1
+				cfg.setenv('foo')
+				cfg.env.ONE = 2
+
+			def build(bld):
+				2 == bld.env_of_name('foo').ONE
+
+		:param name: name of the configuration set
+		:type name: string
+		:param env: a ConfigSet to derive from; when omitted, an empty ConfigSet is created
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		"""
+		if name not in self.all_envs or env:
+			if not env:
+				env = ConfigSet.ConfigSet()
+				self.prepare_env(env)
+			else:
+				env = env.derive()
+			self.all_envs[name] = env
+		self.variant = name
+
+	def get_env(self):
+		"""Getter for the env property"""
+		return self.all_envs[self.variant]
+	def set_env(self, val):
+		"""Setter for the env property"""
+		self.all_envs[self.variant] = val
+
+	env = property(get_env, set_env)
+
+	def init_dirs(self):
+		"""
+		Initialize the project directory and the build directory
+		"""
+
+		top = self.top_dir
+		if not top:
+			top = Options.options.top
+		if not top:
+			top = getattr(Context.g_module, Context.TOP, None)
+		if not top:
+			top = self.path.abspath()
+		top = os.path.abspath(top)
+
+		self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
+		assert(self.srcnode)
+
+		out = self.out_dir
+		if not out:
+			out = Options.options.out
+		if not out:
+			out = getattr(Context.g_module, Context.OUT, None)
+		if not out:
+			out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
+
+		# someone can be messing with symlinks
+		out = os.path.realpath(out)
+
+		self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
+		self.bldnode.mkdir()
+
+		if not os.path.isdir(self.bldnode.abspath()):
+			self.fatal('Could not create the build directory %s' % self.bldnode.abspath())
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.init_dirs()
+
+		self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
+		self.cachedir.mkdir()
+
+		path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
+		self.logger = Logs.make_logger(path, 'cfg')
+
+		app = getattr(Context.g_module, 'APPNAME', '')
+		if app:
+			ver = getattr(Context.g_module, 'VERSION', '')
+			if ver:
+				app = "%s (%s)" % (app, ver)
+
+		params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
+		self.to_log(conf_template % params)
+		self.msg('Setting top to', self.srcnode.abspath())
+		self.msg('Setting out to', self.bldnode.abspath())
+
+		if id(self.srcnode) == id(self.bldnode):
+			Logs.warn('Setting top == out')
+		elif id(self.path) != id(self.srcnode):
+			if self.srcnode.is_child_of(self.path):
+				Logs.warn('Are you certain that you do not want to set top="." ?')
+
+		super(ConfigurationContext, self).execute()
+
+		self.store()
+
+		Context.top_dir = self.srcnode.abspath()
+		Context.out_dir = self.bldnode.abspath()
+
+		# this will write a configure lock so that subsequent builds will
+		# consider the current path as the root directory (see prepare_impl).
+		# to remove: use 'waf distclean'
+		env = ConfigSet.ConfigSet()
+		env.argv = sys.argv
+		env.options = Options.options.__dict__
+		env.config_cmd = self.cmd
+
+		env.run_dir = Context.run_dir
+		env.top_dir = Context.top_dir
+		env.out_dir = Context.out_dir
+
+		# conf.hash & conf.files hold wscript files paths and hash
+		# (used only by Configure.autoconfig)
+		env.hash = self.hash
+		env.files = self.files
+		env.environ = dict(self.environ)
+		env.launch_dir = Context.launch_dir
+
+		if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
+			env.store(os.path.join(Context.run_dir, Options.lockfile))
+		if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
+			env.store(os.path.join(Context.top_dir, Options.lockfile))
+		if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
+			env.store(os.path.join(Context.out_dir, Options.lockfile))
+
+	def prepare_env(self, env):
+		"""
+		Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``
+
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:param env: a ConfigSet, usually ``conf.env``
+		"""
+		if not env.PREFIX:
+			if Options.options.prefix or Utils.is_win32:
+				env.PREFIX = Options.options.prefix
+			else:
+				env.PREFIX = '/'
+		if not env.BINDIR:
+			if Options.options.bindir:
+				env.BINDIR = Options.options.bindir
+			else:
+				env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
+		if not env.LIBDIR:
+			if Options.options.libdir:
+				env.LIBDIR = Options.options.libdir
+			else:
+				env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
+
+	def store(self):
+		"""Save the config results into the cache file"""
+		n = self.cachedir.make_node('build.config.py')
+		n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))
+
+		if not self.all_envs:
+			self.fatal('nothing to store in the configuration context!')
+
+		for key in self.all_envs:
+			tmpenv = self.all_envs[key]
+			tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
+
+	def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
+		"""
+		Load Waf tools, which will be imported whenever a build is started.
+
+		:param tool_list: waf tools to import
+		:type tool_list: list of string
+		:param tooldir: paths for the imports
+		:type tooldir: list of string
+		:param funs: functions to execute from the waf tools
+		:type funs: list of string
+		:param cache: whether to prevent the tool from running twice
+		:type cache: bool
+		"""
+
+		tools = Utils.to_list(tool_list)
+		if tooldir:
+			tooldir = Utils.to_list(tooldir)
+		for tool in tools:
+			# avoid loading the same tool more than once with the same functions
+			# used by composite projects
+
+			if cache:
+				mag = (tool, id(self.env), tooldir, funs)
+				if mag in self.tool_cache:
+					self.to_log('(tool %s is already loaded, skipping)' % tool)
+					continue
+				self.tool_cache.append(mag)
+
+			module = None
+			try:
+				module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
+			except ImportError as e:
+				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
+			except Exception as e:
+				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
+				self.to_log(traceback.format_exc())
+				raise
+
+			if funs is not None:
+				self.eval_rules(funs)
+			else:
+				func = getattr(module, 'configure', None)
+				if func:
+					if type(func) is type(Utils.readf):
+						func(self)
+					else:
+						self.eval_rules(func)
+
+			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
+
+	def post_recurse(self, node):
+		"""
+		Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		super(ConfigurationContext, self).post_recurse(node)
+		self.hash = Utils.h_list((self.hash, node.read('rb')))
+		self.files.append(node.abspath())
+
+	def eval_rules(self, rules):
+		"""
+		Execute configuration tests provided as list of functions to run
+
+		:param rules: list of configuration method names
+		:type rules: list of string
+		"""
+		self.rules = Utils.to_list(rules)
+		for x in self.rules:
+			f = getattr(self, x)
+			if not f:
+				self.fatal('No such configuration function %r' % x)
+			f()
+
+def conf(f):
+	"""
+	Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
+	:py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
+	named 'mandatory' to disable the configuration errors::
+
+		def configure(conf):
+			conf.find_program('abc', mandatory=False)
+
+	:param f: method to bind
+	:type f: function
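+
+	A sketch of registering a custom configuration check (names are illustrative)::
+
+		from waflib.Configure import conf
+
+		@conf
+		def check_answer(self):
+			if self.env.ANSWER != 42:
+				self.fatal('unexpected answer')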
+	"""
+	def fun(*k, **kw):
+		mandatory = kw.pop('mandatory', True)
+		try:
+			return f(*k, **kw)
+		except Errors.ConfigurationError:
+			if mandatory:
+				raise
+
+	fun.__name__ = f.__name__
+	setattr(ConfigurationContext, f.__name__, fun)
+	setattr(Build.BuildContext, f.__name__, fun)
+	return f
+
+@conf
+def add_os_flags(self, var, dest=None, dup=False):
+	"""
+	Import operating system environment values into ``conf.env`` dict::
+
+		def configure(conf):
+			conf.add_os_flags('CFLAGS')
+
+	:param var: variable to use
+	:type var: string
+	:param dest: destination variable, by default the same as var
+	:type dest: string
+	:param dup: add the same set of flags again
+	:type dup: bool
+	"""
+	try:
+		flags = shlex.split(self.environ[var])
+	except KeyError:
+		return
+	if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
+		self.env.append_value(dest or var, flags)
+
+@conf
+def cmd_to_list(self, cmd):
+	"""
+	Detects if a command is written in pseudo-shell form such as ``ccache g++`` and returns it as a list.
+
+	:param cmd: command
+	:type cmd: a string or a list of string
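+
+	For example, ``conf.cmd_to_list('ccache g++')`` returns ``['ccache', 'g++']`` on POSIX platforms.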
+	"""
+	if isinstance(cmd, str):
+		if os.path.isfile(cmd):
+			# do not take any risk
+			return [cmd]
+		if os.sep == '/':
+			return shlex.split(cmd)
+		else:
+			try:
+				return shlex.split(cmd, posix=False)
+			except TypeError:
+				# Python 2.5 on windows?
+				return shlex.split(cmd)
+	return cmd
+
+@conf
+def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
+	"""
+	Raise a configuration error if the Waf version is not within the given bounds::
+
+		conf.check_waf_version(mini='1.9.99', maxi='2.1.0')
+
+	:type  mini: number, tuple or string
+	:param mini: Minimum required version
+	:type  maxi: number, tuple or string
+	:param maxi: Maximum allowed version
+	"""
+	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
+	ver = Context.HEXVERSION
+	if Utils.num2ver(mini) > ver:
+		self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver))
+	if Utils.num2ver(maxi) < ver:
+		self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver))
+	self.end_msg('ok', **kw)
+
+@conf
+def find_file(self, filename, path_list=[]):
+	"""
+	Find a file in a list of paths
+
+	:param filename: name of the file to search for
+	:param path_list: list of directories to search
+	:return: the first matching filename; else a configuration exception is raised
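+
+	For example::
+
+		def configure(conf):
+			p = conf.find_file('stdio.h', ['/usr/include', '/usr/local/include'])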
+	"""
+	for n in Utils.to_list(filename):
+		for d in Utils.to_list(path_list):
+			p = os.path.expanduser(os.path.join(d, n))
+			if os.path.exists(p):
+				return p
+	self.fatal('Could not find %r' % filename)
+
+@conf
+def find_program(self, filename, **kw):
+	"""
+	Search for a program on the operating system
+
+	When var is used, you may set os.environ[var] to help find a specific program version, for example::
+
+		$ CC='ccache gcc' waf configure
+
+	:param path_list: paths to use for searching
+	:type path_list: list of string
+	:param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
+	:type var: string
+	:param value: obtain the program from the value passed exclusively
+	:type value: list or string (list is preferred)
+	:param exts: list of extensions for the binary (do not add an extension for portability)
+	:type exts: list of string
+	:param msg: name to display in the log, by default filename is used
+	:type msg: string
+	:param interpreter: interpreter for the program
+	:type interpreter: ConfigSet variable key
+	:raises: :py:class:`waflib.Errors.ConfigurationError`
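+
+	A typical configuration sketch (the program names are illustrative)::
+
+		def configure(conf):
+			conf.find_program('gcc', var='CC')
+			conf.find_program(['python3', 'python'], var='PYTHON', mandatory=False)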
+	"""
+
+	exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
+
+	environ = kw.get('environ', getattr(self, 'environ', os.environ))
+
+	ret = ''
+
+	filename = Utils.to_list(filename)
+	msg = kw.get('msg', ', '.join(filename))
+
+	var = kw.get('var', '')
+	if not var:
+		var = re.sub(r'\W', '_', filename[0].upper())
+
+	path_list = kw.get('path_list', '')
+	if path_list:
+		path_list = Utils.to_list(path_list)
+	else:
+		path_list = environ.get('PATH', '').split(os.pathsep)
+
+	if kw.get('value'):
+		# user-provided in command-line options and passed to find_program
+		ret = self.cmd_to_list(kw['value'])
+	elif environ.get(var):
+		# user-provided in the os environment
+		ret = self.cmd_to_list(environ[var])
+	elif self.env[var]:
+		# a default option in the wscript file
+		ret = self.cmd_to_list(self.env[var])
+	else:
+		if not ret:
+			ret = self.find_binary(filename, exts.split(','), path_list)
+		if not ret and Utils.winreg:
+			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
+		if not ret and Utils.winreg:
+			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
+		ret = self.cmd_to_list(ret)
+
+	if ret:
+		if len(ret) == 1:
+			retmsg = ret[0]
+		else:
+			retmsg = ret
+	else:
+		retmsg = False
+
+	self.msg('Checking for program %r' % msg, retmsg, **kw)
+	if not kw.get('quiet'):
+		self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
+
+	if not ret:
+		self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)
+
+	interpreter = kw.get('interpreter')
+	if interpreter is None:
+		if not Utils.check_exe(ret[0], env=environ):
+			self.fatal('Program %r is not executable' % ret)
+		self.env[var] = ret
+	else:
+		self.env[var] = self.env[interpreter] + ret
+
+	return ret
+
+@conf
+def find_binary(self, filenames, exts, paths):
+	for f in filenames:
+		for ext in exts:
+			exe_name = f + ext
+			if os.path.isabs(exe_name):
+				if os.path.isfile(exe_name):
+					return exe_name
+			else:
+				for path in paths:
+					x = os.path.expanduser(os.path.join(path, exe_name))
+					if os.path.isfile(x):
+						return x
+	return None
+
+@conf
+def run_build(self, *k, **kw):
+	"""
+	Create a temporary build context to execute a build. A temporary reference to that build
+	context is kept on self.test_bld for debugging purposes.
+	The arguments to this function are passed to a single task generator for that build.
+	Only three parameters are mandatory:
+
+	:param features: features to pass to a task generator created in the build
+	:type features: list of string
+	:param compile_filename: file to create for the compilation (default: *test.c*)
+	:type compile_filename: string
+	:param code: input file contents
+	:type code: string
+
+	Though this function returns *0* by default, the build may bind an attribute named *retval* on the
+	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for an example.
+
+	The temporary build creates a temporary folder; the name of that folder is calculated
+	by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet`
+	objects which are used for both reading and writing values.
+
+	This function also features a cache which is disabled by default; that cache relies
+	on the hash value calculated as indicated above::
+
+		def options(opt):
+			opt.add_option('--confcache', dest='confcache', default=0,
+				action='count', help='Use a configuration cache')
+
+	And execute the configuration with the following command-line::
+
+		$ waf configure --confcache
+
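+	A minimal direct invocation sketch; in practice this function is usually reached
+	through configuration tests such as those in :py:mod:`waflib.Tools.c_config`, and
+	the build function below is illustrative::
+
+		def build_fun(bld):
+			bld(rule='touch ${TGT}', target='out.txt')
+
+		retval = conf.run_build(build_fun=build_fun, env=conf.env.derive())
+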
+	"""
+	buf = []
+	for key in sorted(kw.keys()):
+		v = kw[key]
+		if isinstance(v, ConfigSet.ConfigSet):
+			# values are being written to, so they are excluded from contributing to the hash
+			continue
+		elif hasattr(v, '__call__'):
+			buf.append(Utils.h_fun(v))
+		else:
+			buf.append(str(v))
+	h = Utils.h_list(buf)
+	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
+
+	cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None))
+
+	if not cachemode and os.path.exists(dir):
+		shutil.rmtree(dir)
+
+	try:
+		os.makedirs(dir)
+	except OSError:
+		pass
+
+	try:
+		os.stat(dir)
+	except OSError:
+		self.fatal('cannot use the configuration test folder %r' % dir)
+
+	if cachemode == 1:
+		try:
+			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
+		except EnvironmentError:
+			pass
+		else:
+			ret = proj['cache_run_build']
+			if isinstance(ret, str) and ret.startswith('Test does not build'):
+				self.fatal(ret)
+			return ret
+
+	bdir = os.path.join(dir, 'testbuild')
+
+	if not os.path.exists(bdir):
+		os.makedirs(bdir)
+
+	cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
+	self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
+	bld.init_dirs()
+	bld.progress_bar = 0
+	bld.targets = '*'
+
+	bld.logger = self.logger
+	bld.all_envs.update(self.all_envs) # not really necessary
+	bld.env = kw['env']
+
+	bld.kw = kw
+	bld.conf = self
+	kw['build_fun'](bld)
+	ret = -1
+	try:
+		try:
+			bld.compile()
+		except Errors.WafError:
+			ret = 'Test does not build: %s' % traceback.format_exc()
+			self.fatal(ret)
+		else:
+			ret = getattr(bld, 'retval', 0)
+	finally:
+		if cachemode:
+			# cache the results each time
+			proj = ConfigSet.ConfigSet()
+			proj['cache_run_build'] = ret
+			proj.store(os.path.join(dir, 'cache_run_build'))
+		else:
+			shutil.rmtree(dir)
+	return ret
+
+@conf
+def ret_msg(self, msg, args):
+	if isinstance(msg, str):
+		return msg
+	return msg(args)
+
+@conf
+def test(self, *k, **kw):
+
+	if 'env' not in kw:
+		kw['env'] = self.env.derive()
+
+	# validate_c for example
+	if kw.get('validate'):
+		kw['validate'](kw)
+
+	self.start_msg(kw['msg'], **kw)
+	ret = None
+	try:
+		ret = self.run_build(*k, **kw)
+	except self.errors.ConfigurationError:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		if Logs.verbose > 1:
+			raise
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success'] = ret
+
+	if kw.get('post_check'):
+		ret = kw['post_check'](kw)
+
+	if ret:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		self.fatal('The configuration failed %r' % ret)
+	else:
+		self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
+	return ret
+
diff --git a/third_party/waf/waflib/Context.py b/third_party/waf/waflib/Context.py
new file mode 100644
index 0000000..3696648
--- /dev/null
+++ b/third_party/waf/waflib/Context.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2018 (ita)
+
+"""
+Classes and functions enabling the command system
+"""
+
+import os, re, sys
+from waflib import Utils, Errors, Logs
+import waflib.Node
+
+if sys.hexversion > 0x3040000:
+	import types
+	class imp(object):
+		new_module = lambda x: types.ModuleType(x)
+else:
+	import imp
+
+# the following 3 constants are updated on each new release (do not touch)
+HEXVERSION=0x2001a00
+"""Constant updated on new releases"""
+
+WAFVERSION="2.0.26"
+"""Constant updated on new releases"""
+
+WAFREVISION="0fb985ce1932c6f3e7533f435e4ee209d673776e"
+"""Git revision when the waf version is updated"""
+
+WAFNAME="waf"
+"""Application name displayed on --help"""
+
+ABI = 20
+"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
+
+DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
+"""Name of the pickle file for storing the build data"""
+
+APPNAME = 'APPNAME'
+"""Default application name (used by ``waf dist``)"""
+
+VERSION = 'VERSION'
+"""Default application version (used by ``waf dist``)"""
+
+TOP  = 'top'
+"""The variable name for the top-level directory in wscript files"""
+
+OUT  = 'out'
+"""The variable name for the output directory in wscript files"""
+
+WSCRIPT_FILE = 'wscript'
+"""Name of the waf script files"""
+
+launch_dir = ''
+"""Directory from which waf has been called"""
+run_dir = ''
+"""Location of the wscript file to use as the entry point"""
+top_dir = ''
+"""Location of the project directory (top), if the project was configured"""
+out_dir = ''
+"""Location of the build directory (out), if the project was configured"""
+waf_dir = ''
+"""Directory containing the waf modules"""
+
+default_encoding = Utils.console_encoding()
+"""Encoding to use when reading outputs from other processes"""
+
+g_module = None
+"""
+Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
+"""
+
+STDOUT = 1
+STDERR = -1
+BOTH   = 0
+
+classes = []
+"""
+List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
+are added automatically by a metaclass.
+"""
+
+def create_context(cmd_name, *k, **kw):
+	"""
+	Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
+	Used in particular by :py:func:`waflib.Scripting.run_command`
+
+	:param cmd_name: command name
+	:type cmd_name: string
+	:param k: arguments to give to the context class initializer
+	:type k: list
+	:param kw: keyword arguments to give to the context class initializer
+	:type kw: dict
+	:return: Context object
+	:rtype: :py:class:`waflib.Context.Context`
+	"""
+	for x in classes:
+		if x.cmd == cmd_name:
+			return x(*k, **kw)
+	ctx = Context(*k, **kw)
+	ctx.fun = cmd_name
+	return ctx
+
+class store_context(type):
+	"""
+	Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
+	Context classes must provide an attribute 'cmd' representing the command name; the
+	attribute 'fun', naming the wscript function the command uses, defaults to 'cmd'.
+	"""
+	def __init__(cls, name, bases, dct):
+		super(store_context, cls).__init__(name, bases, dct)
+		name = cls.__name__
+
+		if name in ('ctx', 'Context'):
+			return
+
+		try:
+			cls.cmd
+		except AttributeError:
+			raise Errors.WafError('Missing command for the context class %r (cmd)' % name)
+
+		if not getattr(cls, 'fun', None):
+			cls.fun = cls.cmd
+
+		classes.insert(0, cls)
+
+ctx = store_context('ctx', (object,), {})
+"""Base class for all :py:class:`waflib.Context.Context` classes"""
+
+class Context(ctx):
+	"""
+	Default context for waf commands, and base class for new command contexts.
+
+	Context objects are passed to top-level functions::
+
+		def foo(ctx):
+			print(ctx.__class__.__name__) # waflib.Context.Context
+
+	Subclasses must define the class attributes 'cmd' and 'fun':
+
+	:param cmd: command to execute as in ``waf cmd``
+	:type cmd: string
+	:param fun: function name to execute when the command is called
+	:type fun: string
+
+	.. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
+	   :top-classes: waflib.Context.Context
+	"""
+
+	errors = Errors
+	"""
+	Shortcut to :py:mod:`waflib.Errors` provided for convenience
+	"""
+
+	tools = {}
+	"""
+	A module cache for wscript files; see :py:meth:`Context.Context.load`
+	"""
+
+	def __init__(self, **kw):
+		try:
+			rd = kw['run_dir']
+		except KeyError:
+			rd = run_dir
+
+		# binds the context to the nodes in use to avoid a context singleton
+		self.node_class = type('Nod3', (waflib.Node.Node,), {})
+		self.node_class.__module__ = 'waflib.Node'
+		self.node_class.ctx = self
+
+		self.root = self.node_class('', None)
+		self.cur_script = None
+		self.path = self.root.find_dir(rd)
+
+		self.stack_path = []
+		self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
+		self.logger = None
+
+	def finalize(self):
+		"""
+		Called to free resources such as logger files
+		"""
+		try:
+			logger = self.logger
+		except AttributeError:
+			pass
+		else:
+			Logs.free_logger(logger)
+			delattr(self, 'logger')
+
+	def load(self, tool_list, *k, **kw):
+		"""
+		Loads a Waf tool as a module, and tries to call the function named :py:const:`waflib.Context.Context.fun`
+		from it. A ``tooldir`` argument may be provided as a list of module paths.
+
+		:param tool_list: list of Waf tool names to load
+		:type tool_list: list of string or space-separated string
+		"""
+		tools = Utils.to_list(tool_list)
+		path = Utils.to_list(kw.get('tooldir', ''))
+		with_sys_path = kw.get('with_sys_path', True)
+
+		for t in tools:
+			module = load_tool(t, path, with_sys_path=with_sys_path)
+			fun = getattr(module, kw.get('name', self.fun), None)
+			if fun:
+				fun(self)
+
+	def execute(self):
+		"""
+		Calls the function named by ``self.fun`` in the top-level wscript file. Most subclasses
+		redefine this method to provide additional functionality.
+		"""
+		self.recurse([os.path.dirname(g_module.root_path)])
+
+	def pre_recurse(self, node):
+		"""
+		Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
+		The current script is bound as a Node object on ``self.cur_script``, and the current path
+		is bound to ``self.path``
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.stack_path.append(self.cur_script)
+
+		self.cur_script = node
+		self.path = node.parent
+
+	def post_recurse(self, node):
+		"""
+		Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
+
+		:param node: script
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.cur_script = self.stack_path.pop()
+		if self.cur_script:
+			self.path = self.cur_script.parent
+
+	def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
+		"""
+		Runs user-provided functions from the supplied list of directories.
+		The directories can be either absolute, or relative to the directory
+		of the wscript file.
+
+		The methods :py:meth:`waflib.Context.Context.pre_recurse` and
+		:py:meth:`waflib.Context.Context.post_recurse` are called immediately before
+		and after a script has been executed.
+
+		:param dirs: List of directories to visit
+		:type dirs: list of string or space-separated string
+		:param name: Name of function to invoke from the wscript
+		:type  name: string
+		:param mandatory: whether sub wscript files are required to exist
+		:type  mandatory: bool
+		:param once: read the script file once for a particular context
+		:type once: bool
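+
+		For example::
+
+			def build(bld):
+				bld.recurse(['src', 'tests'])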
+		"""
+		try:
+			cache = self.recurse_cache
+		except AttributeError:
+			cache = self.recurse_cache = {}
+
+		for d in Utils.to_list(dirs):
+
+			if not os.path.isabs(d):
+				# absolute paths only
+				d = os.path.join(self.path.abspath(), d)
+
+			WSCRIPT     = os.path.join(d, WSCRIPT_FILE)
+			WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)
+
+			node = self.root.find_node(WSCRIPT_FUN)
+			if node and (not once or node not in cache):
+				cache[node] = True
+				self.pre_recurse(node)
+				try:
+					function_code = node.read('r', encoding)
+					exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
+				finally:
+					self.post_recurse(node)
+			elif not node:
+				node = self.root.find_node(WSCRIPT)
+				tup = (node, name or self.fun)
+				if node and (not once or tup not in cache):
+					cache[tup] = True
+					self.pre_recurse(node)
+					try:
+						wscript_module = load_module(node.abspath(), encoding=encoding)
+						user_function = getattr(wscript_module, (name or self.fun), None)
+						if not user_function:
+							if not mandatory:
+								continue
+							raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
+						user_function(self)
+					finally:
+						self.post_recurse(node)
+				elif not node:
+					if not mandatory:
+						continue
+					try:
+						os.listdir(d)
+					except OSError:
+						raise Errors.WafError('Cannot read the folder %r' % d)
+					raise Errors.WafError('No wscript file in directory %s' % d)
+
+	def log_command(self, cmd, kw):
+		if Logs.verbose:
+			fmt = os.environ.get('WAF_CMD_FORMAT')
+			if fmt == 'string':
+				if not isinstance(cmd, str):
+					cmd = Utils.shell_escape(cmd)
+			Logs.debug('runner: %r', cmd)
+			Logs.debug('runner_env: kw=%s', kw)
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Runs an external process and returns the exit status::
+
+			def run(tsk):
+				ret = tsk.generator.bld.exec_command('touch foo.txt')
+				return ret
+
+		If the context has the attribute 'logger', the process stdout/stderr is captured and logged.
+		Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
+		stdout/stderr values captured.
+
+		:param cmd: command argument for subprocess.Popen
+		:type cmd: string or list
+		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+		:type kw: dict
+		:returns: process exit status
+		:rtype: integer
+		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
+		"""
+		subprocess = Utils.subprocess
+		kw['shell'] = isinstance(cmd, str)
+		self.log_command(cmd, kw)
+
+		if self.logger:
+			self.logger.info(cmd)
+
+		if 'stdout' not in kw:
+			kw['stdout'] = subprocess.PIPE
+		if 'stderr' not in kw:
+			kw['stderr'] = subprocess.PIPE
+
+		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+			raise Errors.WafError('Program %s not found!' % cmd[0])
+
+		cargs = {}
+		if 'timeout' in kw:
+			if sys.hexversion >= 0x3030000:
+				cargs['timeout'] = kw['timeout']
+				if not 'start_new_session' in kw:
+					kw['start_new_session'] = True
+			del kw['timeout']
+		if 'input' in kw:
+			if kw['input']:
+				cargs['input'] = kw['input']
+				kw['stdin'] = subprocess.PIPE
+			del kw['input']
+
+		if 'cwd' in kw:
+			if not isinstance(kw['cwd'], str):
+				kw['cwd'] = kw['cwd'].abspath()
+
+		encoding = kw.pop('decode_as', default_encoding)
+
+		try:
+			ret, out, err = Utils.run_process(cmd, kw, cargs)
+		except Exception as e:
+			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+		if out:
+			if not isinstance(out, str):
+				out = out.decode(encoding, errors='replace')
+			if self.logger:
+				self.logger.debug('out: %s', out)
+			else:
+				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+		if err:
+			if not isinstance(err, str):
+				err = err.decode(encoding, errors='replace')
+			if self.logger:
+				self.logger.error('err: %s' % err)
+			else:
+				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+		return ret
+
+	def cmd_and_log(self, cmd, **kw):
+		"""
+		Executes a process and returns stdout/stderr if the execution is successful.
+		An exception is raised when the exit status is non-zero. In that case, both stderr and stdout
+		will be bound to the WafError object (configuration tests)::
+
+			def configure(conf):
+				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
+				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
+				(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
+				try:
+					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
+				except Errors.WafError as e:
+					print(e.stdout, e.stderr)
+
+		:param cmd: args for subprocess.Popen
+		:type cmd: list or string
+		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+		:type kw: dict
+		:returns: the contents of stdout, of stderr, or a tuple of both, depending on the ``output`` parameter
+		:rtype: string or tuple of strings
+		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
+		"""
+		subprocess = Utils.subprocess
+		kw['shell'] = isinstance(cmd, str)
+		self.log_command(cmd, kw)
+
+		quiet = kw.pop('quiet', None)
+		to_ret = kw.pop('output', STDOUT)
+
+		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+			raise Errors.WafError('Program %r not found!' % cmd[0])
+
+		kw['stdout'] = kw['stderr'] = subprocess.PIPE
+		if quiet is None:
+			self.to_log(cmd)
+
+		cargs = {}
+		if 'timeout' in kw:
+			if sys.hexversion >= 0x3030000:
+				cargs['timeout'] = kw['timeout']
+				if not 'start_new_session' in kw:
+					kw['start_new_session'] = True
+			del kw['timeout']
+		if 'input' in kw:
+			if kw['input']:
+				cargs['input'] = kw['input']
+				kw['stdin'] = subprocess.PIPE
+			del kw['input']
+
+		if 'cwd' in kw:
+			if not isinstance(kw['cwd'], str):
+				kw['cwd'] = kw['cwd'].abspath()
+
+		encoding = kw.pop('decode_as', default_encoding)
+
+		try:
+			ret, out, err = Utils.run_process(cmd, kw, cargs)
+		except Exception as e:
+			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+		if not isinstance(out, str):
+			out = out.decode(encoding, errors='replace')
+		if not isinstance(err, str):
+			err = err.decode(encoding, errors='replace')
+
+		if out and quiet != STDOUT and quiet != BOTH:
+			self.to_log('out: %s' % out)
+		if err and quiet != STDERR and quiet != BOTH:
+			self.to_log('err: %s' % err)
+
+		if ret:
+			e = Errors.WafError('Command %r returned %r' % (cmd, ret))
+			e.returncode = ret
+			e.stderr = err
+			e.stdout = out
+			raise e
+
+		if to_ret == BOTH:
+			return (out, err)
+		elif to_ret == STDERR:
+			return err
+		return out
+
+	def fatal(self, msg, ex=None):
+		"""
+		Prints an error message in red and stops command execution; this is
+		usually used in the configuration section::
+
+			def configure(conf):
+				conf.fatal('a requirement is missing')
+
+		:param msg: message to display
+		:type msg: string
+		:param ex: optional exception object
+		:type ex: exception
+		:raises: :py:class:`waflib.Errors.ConfigurationError`
+		"""
+		if self.logger:
+			self.logger.info('from %s: %s' % (self.path.abspath(), msg))
+		try:
+			logfile = self.logger.handlers[0].baseFilename
+		except AttributeError:
+			pass
+		else:
+			if os.environ.get('WAF_PRINT_FAILURE_LOG'):
+				# see #1930
+				msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
+			else:
+				msg = '%s\n(complete log in %s)' % (msg, logfile)
+		raise self.errors.ConfigurationError(msg, ex=ex)
+
+	def to_log(self, msg):
+		"""
+		Logs information to the logger (if present), or to stderr.
+		Empty messages are not printed::
+
+			def build(bld):
+				bld.to_log('starting the build')
+
+		Provide a logger on the context class or override this method if necessary.
+
+		:param msg: message
+		:type msg: string
+		"""
+		if not msg:
+			return
+		if self.logger:
+			self.logger.info(msg)
+		else:
+			sys.stderr.write(str(msg))
+			sys.stderr.flush()
+
+
+	def msg(self, *k, **kw):
+		"""
+		Prints a configuration message of the form ``msg: result``.
+		The second part of the message will be in colors. The output
+		can be disabled easily by setting ``in_msg`` to a positive value::
+
+			def configure(conf):
+				conf.in_msg = 1
+				conf.msg('Checking for library foo', 'ok')
+				# no output
+
+		:param msg: message to display to the user
+		:type msg: string
+		:param result: result to display
+		:type result: string or boolean
+		:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
+		:type color: string
+		"""
+		try:
+			msg = kw['msg']
+		except KeyError:
+			msg = k[0]
+
+		self.start_msg(msg, **kw)
+
+		try:
+			result = kw['result']
+		except KeyError:
+			result = k[1]
+
+		color = kw.get('color')
+		if not isinstance(color, str):
+			color = result and 'GREEN' or 'YELLOW'
+
+		self.end_msg(result, color, **kw)
+
+	def start_msg(self, *k, **kw):
+		"""
+		Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
+		"""
+		if kw.get('quiet'):
+			return
+
+		msg = kw.get('msg') or k[0]
+		try:
+			if self.in_msg:
+				self.in_msg += 1
+				return
+		except AttributeError:
+			self.in_msg = 0
+		self.in_msg += 1
+
+		try:
+			self.line_just = max(self.line_just, len(msg))
+		except AttributeError:
+			self.line_just = max(40, len(msg))
+		for x in (self.line_just * '-', msg):
+			self.to_log(x)
+		Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
+
+	def end_msg(self, *k, **kw):
+		"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
+		if kw.get('quiet'):
+			return
+		self.in_msg -= 1
+		if self.in_msg:
+			return
+
+		result = kw.get('result') or k[0]
+
+		defcolor = 'GREEN'
+		if result is True:
+			msg = 'ok'
+		elif not result:
+			msg = 'not found'
+			defcolor = 'YELLOW'
+		else:
+			msg = str(result)
+
+		self.to_log(msg)
+		try:
+			color = kw['color']
+		except KeyError:
+			if len(k) > 1 and k[1] in Logs.colors_lst:
+				# compatibility waf 1.7
+				color = k[1]
+			else:
+				color = defcolor
+		Logs.pprint(color, msg)
+
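The two methods are typically paired when a check needs intermediate work between the prompt and the result; a sketch, in which the version-detection logic is only a placeholder::

	def configure(conf):
		conf.start_msg('Checking for the frobnicator version')
		version = '1.2.3' # stands for some real detection logic
		if version:
			conf.end_msg(version) # printed in green
		else:
			conf.end_msg(False)   # prints 'not found' in yellow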
+	def load_special_tools(self, var, ban=[]):
+		"""
+		Loads third-party extensions modules for certain programming languages
+		by trying to list certain files in the extras/ directory. This method
+		is typically called once for a programming language group, see for
+		example :py:mod:`waflib.Tools.compiler_c`
+
+		:param var: glob expression, for example 'cxx\\_\\*.py'
+		:type var: string
+		:param ban: list of exact file names to exclude
+		:type ban: list of string
+		"""
+		if os.path.isdir(waf_dir):
+			lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+			for x in lst:
+				if not x.name in ban:
+					load_tool(x.name.replace('.py', ''))
+		else:
+			from zipfile import PyZipFile
+			waflibs = PyZipFile(waf_dir)
+			lst = waflibs.namelist()
+			for x in lst:
+				if not re.match('waflib/extras/%s' % var.replace('*', '.*'), x):
+					continue
+				f = os.path.basename(x)
+				doban = False
+				for b in ban:
+					r = b.replace('*', '.*')
+					if re.match(r, f):
+						doban = True
+				if not doban:
+					f = f.replace('.py', '')
+					load_tool(f)
+
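For illustration, a compiler-detection tool would typically end its configuration with a call of this form; the glob pattern and ban list below are indicative only::

	def configure(conf):
		conf.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])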
+cache_modules = {}
+"""
+Dictionary holding already loaded modules (wscript), indexed by their absolute path.
+The modules are added automatically by :py:func:`waflib.Context.load_module`
+"""
+
+def load_module(path, encoding=None):
+	"""
+	Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`
+
+	:param path: file path
+	:type path: string
+	:return: Loaded Python module
+	:rtype: module
+	"""
+	try:
+		return cache_modules[path]
+	except KeyError:
+		pass
+
+	module = imp.new_module(WSCRIPT_FILE)
+	try:
+		code = Utils.readf(path, m='r', encoding=encoding)
+	except EnvironmentError:
+		raise Errors.WafError('Could not read the file %r' % path)
+
+	module_dir = os.path.dirname(path)
+	sys.path.insert(0, module_dir)
+	try:
+		exec(compile(code, path, 'exec'), module.__dict__)
+	finally:
+		sys.path.remove(module_dir)
+
+	cache_modules[path] = module
+	return module
+
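Direct use is rare, as :py:meth:`waflib.Context.Context.recurse` calls this function internally; a sketch with a placeholder path::

	from waflib import Context
	module = Context.load_module('/path/to/project/wscript')
	# the functions defined in the wscript are now attributes of the module
	build_fun = getattr(module, 'build', None)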
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
+	"""
+	Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`
+
+	:type  tool: string
+	:param tool: Name of the tool
+	:type  tooldir: list
+	:param tooldir: List of directories to search for the tool module
+	:type  with_sys_path: boolean
+	:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
+	"""
+	if tool == 'java':
+		tool = 'javaw' # jython
+	else:
+		tool = tool.replace('++', 'xx')
+
+	if not with_sys_path:
+		back_path = sys.path
+		sys.path = []
+	try:
+		if tooldir:
+			assert isinstance(tooldir, list)
+			sys.path = tooldir + sys.path
+			try:
+				__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path = list(sys.path)
+				raise
+			finally:
+				for d in tooldir:
+					sys.path.remove(d)
+			ret = sys.modules[tool]
+			Context.tools[tool] = ret
+			return ret
+		else:
+			if not with_sys_path:
+				sys.path.insert(0, waf_dir)
+			try:
+				for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
+					try:
+						__import__(x % tool)
+						break
+					except ImportError:
+						x = None
+				else: # raise an exception
+					__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path = list(sys.path)
+				raise
+			finally:
+				if not with_sys_path:
+					sys.path.remove(waf_dir)
+			ret = sys.modules[x % tool]
+			Context.tools[tool] = ret
+			return ret
+	finally:
+		if not with_sys_path:
+			sys.path += back_path
+
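Build scripts normally load tools through the configuration context (``conf.load('gcc')``), but the function can also be called directly; a minimal sketch::

	from waflib import Context
	mod = Context.load_tool('gcc')
	# modules are cached through sys.modules and Context.tools,
	# so a second call returns the same module object
	assert Context.load_tool('gcc') is mod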
diff --git a/third_party/waf/waflib/Errors.py b/third_party/waf/waflib/Errors.py
new file mode 100644
index 0000000..bf75c1b
--- /dev/null
+++ b/third_party/waf/waflib/Errors.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2018 (ita)
+
+"""
+Exceptions used in the Waf code
+"""
+
+import traceback, sys
+
+class WafError(Exception):
+	"""Base class for all Waf errors"""
+	def __init__(self, msg='', ex=None):
+		"""
+		:param msg: error message
+		:type msg: string
+		:param ex: exception causing this error (optional)
+		:type ex: exception
+		"""
+		Exception.__init__(self)
+		self.msg = msg
+		assert not isinstance(msg, Exception)
+
+		self.stack = []
+		if ex:
+			if not msg:
+				self.msg = str(ex)
+			if isinstance(ex, WafError):
+				self.stack = ex.stack
+			else:
+				self.stack = traceback.extract_tb(sys.exc_info()[2])
+		self.stack += traceback.extract_stack()[:-1]
+		self.verbose_msg = ''.join(traceback.format_list(self.stack))
+
+	def __str__(self):
+		return str(self.msg)
+
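A sketch of the exception-chaining behaviour; the error texts are arbitrary::

	from waflib import Errors
	try:
		raise OSError('disk full')
	except OSError as e:
		err = Errors.WafError('Could not write the cache file', ex=e)
		# str(err) == 'Could not write the cache file'
		# err.verbose_msg includes the traceback of the original OSError
		raise err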
+class BuildError(WafError):
+	"""Error raised during the build and install phases"""
+	def __init__(self, error_tasks=[]):
+		"""
+		:param error_tasks: tasks that could not complete normally
+		:type error_tasks: list of task objects
+		"""
+		self.tasks = error_tasks
+		WafError.__init__(self, self.format_error())
+
+	def format_error(self):
+		"""Formats the error messages from the tasks that failed"""
+		lst = ['Build failed']
+		for tsk in self.tasks:
+			txt = tsk.format_error()
+			if txt:
+				lst.append(txt)
+		return '\n'.join(lst)
+
+class ConfigurationError(WafError):
+	"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
+	pass
+
+class TaskRescan(WafError):
+	"""Task-specific exception type signalling required signature recalculations"""
+	pass
+
+class TaskNotReady(WafError):
+	"""Task-specific exception type signalling that task signatures cannot be computed"""
+	pass
+
diff --git a/third_party/waf/waflib/Logs.py b/third_party/waf/waflib/Logs.py
new file mode 100644
index 0000000..298411d
--- /dev/null
+++ b/third_party/waf/waflib/Logs.py
@@ -0,0 +1,382 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+logging, colors, terminal width and pretty-print
+"""
+
+import os, re, traceback, sys
+from waflib import Utils, ansiterm
+
+if not os.environ.get('NOSYNC', False):
+	# synchronized output is nearly mandatory to prevent garbled output
+	if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
+		sys.stdout = ansiterm.AnsiTerm(sys.stdout)
+	if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
+		sys.stderr = ansiterm.AnsiTerm(sys.stderr)
+
+# import the logging module only after the streams are wrapped above, since
+# it keeps a reference to sys.stderr in case someone uses the root logger
+import logging
+
+LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
+
+zones = []
+"""
+See :py:class:`waflib.Logs.log_filter`
+"""
+
+verbose = 0
+"""
+Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
+"""
+
+colors_lst = {
+'USE' : True,
+'BOLD'  :'\x1b[01;1m',
+'RED'   :'\x1b[01;31m',
+'GREEN' :'\x1b[32m',
+'YELLOW':'\x1b[33m',
+'PINK'  :'\x1b[35m',
+'BLUE'  :'\x1b[01;34m',
+'CYAN'  :'\x1b[36m',
+'GREY'  :'\x1b[37m',
+'NORMAL':'\x1b[0m',
+'cursor_on'  :'\x1b[?25h',
+'cursor_off' :'\x1b[?25l',
+}
+
+indicator = '\r\x1b[K%s%s%s'
+
+try:
+	unicode
+except NameError:
+	unicode = None
+
+def enable_colors(use):
+	"""
+	If *1* is given, then the system will perform a few verifications
+	before enabling colors, such as checking whether the interpreter
+	is running in a terminal. A value of zero will disable colors,
+	and a value above *1* will force colors.
+
+	:param use: whether to enable colors or not
+	:type use: integer
+	"""
+	if use == 1:
+		if not (sys.stderr.isatty() or sys.stdout.isatty()):
+			use = 0
+		if Utils.is_win32 and os.name != 'java':
+			term = os.environ.get('TERM', '') # has ansiterm
+		else:
+			term = os.environ.get('TERM', 'dumb')
+
+		if term in ('dumb', 'emacs'):
+			use = 0
+
+	if use >= 1:
+		os.environ['TERM'] = 'vt100'
+
+	colors_lst['USE'] = use
+
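The three input values map directly onto waf's ``--color=yes/auto/no`` option (see :py:meth:`waflib.Options.OptionsContext.init_logs`); a sketch::

	from waflib import Logs
	Logs.enable_colors(2) # force colors, even when the output is redirected
	Logs.enable_colors(1) # auto-detect; disabled for pipes and dumb terminals
	Logs.enable_colors(0) # never use colors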
+# Use the terminal-width detection from ansiterm when available; otherwise
+# fall back to a fixed 80-column dummy implementation
+try:
+	get_term_cols = ansiterm.get_term_cols
+except AttributeError:
+	def get_term_cols():
+		return 80
+
+get_term_cols.__doc__ = """
+	Returns the console width in characters.
+
+	:return: the number of characters per line
+	:rtype: int
+	"""
+
+def get_color(cl):
+	"""
+	Returns the ansi sequence corresponding to the given color name.
+	An empty string is returned when coloring is globally disabled.
+
+	:param cl: color name in capital letters
+	:type cl: string
+	"""
+	if colors_lst['USE']:
+		return colors_lst.get(cl, '')
+	return ''
+
+class color_dict(object):
+	"""attribute-based color access, eg: colors.PINK"""
+	def __getattr__(self, a):
+		return get_color(a)
+	def __call__(self, a):
+		return get_color(a)
+
+colors = color_dict()
+
+re_log = re.compile(r'(\w+): (.*)', re.M)
+class log_filter(logging.Filter):
+	"""
+	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
+	For example, the following::
+
+		from waflib import Logs
+		Logs.debug('test: here is a message')
+
+	Will be displayed only when executing::
+
+		$ waf --zones=test
+	"""
+	def __init__(self, name=''):
+		logging.Filter.__init__(self, name)
+
+	def filter(self, rec):
+		"""
+		Filters log records by zone and by logging level
+
+		:param rec: log entry
+		"""
+		rec.zone = rec.module
+		if rec.levelno >= logging.INFO:
+			return True
+
+		m = re_log.match(rec.msg)
+		if m:
+			rec.zone = m.group(1)
+			rec.msg = m.group(2)
+
+		if zones:
+			return getattr(rec, 'zone', '') in zones or '*' in zones
+		elif not verbose > 2:
+			return False
+		return True
+
+class log_handler(logging.StreamHandler):
+	"""Dispatches messages to stderr/stdout depending on the severity level"""
+	def emit(self, record):
+		"""
+		Delegates the functionality to :py:meth:`waflib.Logs.log_handler.emit_override`
+		"""
+		# default implementation
+		try:
+			try:
+				self.stream = record.stream
+			except AttributeError:
+				if record.levelno >= logging.WARNING:
+					record.stream = self.stream = sys.stderr
+				else:
+					record.stream = self.stream = sys.stdout
+			self.emit_override(record)
+			self.flush()
+		except (KeyboardInterrupt, SystemExit):
+			raise
+		except: # from the python library -_-
+			self.handleError(record)
+
+	def emit_override(self, record, **kw):
+		"""
+		Writes the log record to the desired stream (stderr/stdout)
+		"""
+		self.terminator = getattr(record, 'terminator', '\n')
+		stream = self.stream
+		if unicode:
+			# python2
+			msg = self.formatter.format(record)
+			fs = '%s' + self.terminator
+			try:
+				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
+					fs = fs.decode(stream.encoding)
+					try:
+						stream.write(fs % msg)
+					except UnicodeEncodeError:
+						stream.write((fs % msg).encode(stream.encoding))
+				else:
+					stream.write(fs % msg)
+			except UnicodeError:
+				stream.write((fs % msg).encode('utf-8'))
+		else:
+			logging.StreamHandler.emit(self, record)
+
+class formatter(logging.Formatter):
+	"""Simple log formatter which handles colors"""
+	def __init__(self):
+		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
+
+	def format(self, rec):
+		"""
+		Formats records and adds colors as needed. The records do not get
+		a leading hour format if the logging level is above *INFO*.
+		"""
+		try:
+			msg = rec.msg.decode('utf-8')
+		except Exception:
+			msg = rec.msg
+
+		use = colors_lst['USE']
+		if (use == 1 and rec.stream.isatty()) or use == 2:
+
+			c1 = getattr(rec, 'c1', None)
+			if c1 is None:
+				c1 = ''
+				if rec.levelno >= logging.ERROR:
+					c1 = colors.RED
+				elif rec.levelno >= logging.WARNING:
+					c1 = colors.YELLOW
+				elif rec.levelno >= logging.INFO:
+					c1 = colors.GREEN
+			c2 = getattr(rec, 'c2', colors.NORMAL)
+			msg = '%s%s%s' % (c1, msg, c2)
+		else:
+			# remove single \r that make long lines in text files
+			# and other terminal commands
+			msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)
+
+		if rec.levelno >= logging.INFO:
+			# format INFO-and-above records without the leading hour/zone prefix
+			if rec.args:
+				try:
+					return msg % rec.args
+				except UnicodeDecodeError:
+					return msg.encode('utf-8') % rec.args
+			return msg
+
+		rec.msg = msg
+		rec.c1 = colors.PINK
+		rec.c2 = colors.NORMAL
+		return logging.Formatter.format(self, rec)
+
+log = None
+"""global logger for Logs.debug, Logs.error, etc"""
+
+def debug(*k, **kw):
+	"""
+	Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
+	"""
+	if verbose:
+		k = list(k)
+		k[0] = k[0].replace('\n', ' ')
+		log.debug(*k, **kw)
+
+def error(*k, **kw):
+	"""
+	Wraps logging.error and adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` is above 2
+	"""
+	log.error(*k, **kw)
+	if verbose > 2:
+		st = traceback.extract_stack()
+		if st:
+			st = st[:-1]
+			buf = []
+			for filename, lineno, name, line in st:
+				buf.append('  File %r, line %d, in %s' % (filename, lineno, name))
+				if line:
+					buf.append('	%s' % line.strip())
+			if buf:
+				log.error('\n'.join(buf))
+
+def warn(*k, **kw):
+	"""
+	Wraps logging.warning
+	"""
+	log.warning(*k, **kw)
+
+def info(*k, **kw):
+	"""
+	Wraps logging.info
+	"""
+	log.info(*k, **kw)
+
+def init_log():
+	"""
+	Initializes the logger :py:attr:`waflib.Logs.log`
+	"""
+	global log
+	log = logging.getLogger('waflib')
+	log.handlers = []
+	log.filters = []
+	hdlr = log_handler()
+	hdlr.setFormatter(formatter())
+	log.addHandler(hdlr)
+	log.addFilter(log_filter())
+	log.setLevel(logging.DEBUG)
+
+def make_logger(path, name):
+	"""
+	Creates a simple logger, which is often used to redirect the context command output::
+
+		from waflib import Logs
+		bld.logger = Logs.make_logger('test.log', 'build')
+		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)
+
+		# have the file closed immediately
+		Logs.free_logger(bld.logger)
+
+		# stop logging
+		bld.logger = None
+
+	The method finalize() of the command will try to free the logger, if any
+
+	:param path: file name to write the log output to
+	:type path: string
+	:param name: logger name (loggers are reused)
+	:type name: string
+	"""
+	logger = logging.getLogger(name)
+	if sys.hexversion > 0x3000000:
+		encoding = sys.stdout.encoding
+	else:
+		encoding = None
+	hdlr = logging.FileHandler(path, 'w', encoding=encoding)
+	formatter = logging.Formatter('%(message)s')
+	hdlr.setFormatter(formatter)
+	logger.addHandler(hdlr)
+	logger.setLevel(logging.DEBUG)
+	return logger
+
+def make_mem_logger(name, to_log, size=8192):
+	"""
+	Creates a memory logger to avoid writing concurrently to the main logger
+	"""
+	from logging.handlers import MemoryHandler
+	logger = logging.getLogger(name)
+	hdlr = MemoryHandler(size, target=to_log)
+	formatter = logging.Formatter('%(message)s')
+	hdlr.setFormatter(formatter)
+	logger.addHandler(hdlr)
+	logger.memhandler = hdlr
+	logger.setLevel(logging.DEBUG)
+	return logger
+
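A sketch of buffering records in memory and forwarding them to a file logger afterwards; the file and logger names are placeholders::

	from waflib import Logs
	target = Logs.make_logger('build.log', 'build')
	mem = Logs.make_mem_logger('one_task', target)
	mem.info('output produced by a single task')
	mem.memhandler.flush() # forward the buffered records to build.log
	Logs.free_logger(mem)
	Logs.free_logger(target)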
+def free_logger(logger):
+	"""
+	Frees the resources held by the loggers created through make_logger or make_mem_logger.
+	This is used for file cleanup and for handler removal (logger objects are re-used).
+	"""
+	try:
+		for x in logger.handlers:
+			x.close()
+			logger.removeHandler(x)
+	except Exception:
+		pass
+
+def pprint(col, msg, label='', sep='\n'):
+	"""
+	Prints messages in color immediately on the terminal::
+
+		from waflib import Logs
+		Logs.pprint('RED', 'Something bad just happened')
+
+	:param col: color name to use in :py:const:`Logs.colors_lst`
+	:type col: string
+	:param msg: message to display
+	:type msg: string or a value that can be printed by %s
+	:param label: a message to add after the colored output
+	:type label: string
+	:param sep: a string to append at the end (line separator)
+	:type sep: string
+	"""
+	info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
+
diff --git a/third_party/waf/waflib/Node.py b/third_party/waf/waflib/Node.py
new file mode 100644
index 0000000..2ad1846
--- /dev/null
+++ b/third_party/waf/waflib/Node.py
@@ -0,0 +1,969 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Node: filesystem structure
+
+#. Each file/folder is represented by exactly one node.
+
+#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc.
+   Unused class members can increase the `.wafpickle` file size noticeably.
+
+#. Node objects should never be created directly, use
+   the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations
+
+#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` and :py:func:`Node.find_or_declare` must
+   be used when a build context is present
+
+#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization.
+   (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context
+   owning a node is held as *self.ctx*
+"""
+
+import os, re, sys, shutil
+from waflib import Utils, Errors
+
+exclude_regs = '''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/*.swp
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/BitKeeper
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzrignore
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/.arch-ids
+**/{arch}
+**/_darcs
+**/_darcs/**
+**/.intlcache
+**/.DS_Store'''
+"""
+Ant patterns for files and folders to exclude while doing the
+recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
+"""
+
+def ant_matcher(s, ignorecase):
+	reflags = re.I if ignorecase else 0
+	ret = []
+	for x in Utils.to_list(s):
+		x = x.replace('\\', '/').replace('//', '/')
+		if x.endswith('/'):
+			x += '**'
+		accu = []
+		for k in x.split('/'):
+			if k == '**':
+				accu.append(k)
+			else:
+				k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+')
+				k = '^%s$' % k
+				try:
+					exp = re.compile(k, flags=reflags)
+				except Exception as e:
+					raise Errors.WafError('Invalid pattern: %s' % k, e)
+				else:
+					accu.append(exp)
+		ret.append(accu)
+	return ret
+
+def ant_sub_filter(name, nn):
+	ret = []
+	for lst in nn:
+		if not lst:
+			pass
+		elif lst[0] == '**':
+			ret.append(lst)
+			if len(lst) > 1:
+				if lst[1].match(name):
+					ret.append(lst[2:])
+			else:
+				ret.append([])
+		elif lst[0].match(name):
+			ret.append(lst[1:])
+	return ret
+
+def ant_sub_matcher(name, pats):
+	nacc = ant_sub_filter(name, pats[0])
+	nrej = ant_sub_filter(name, pats[1])
+	if [] in nrej:
+		nacc = []
+	return [nacc, nrej]
+
+class Node(object):
+	"""
+	This class is organized in two parts:
+
+	* The basic methods meant for filesystem access (compute paths, create folders, etc)
+	* The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)
+	"""
+
+	dict_class = dict
+	"""
+	Subclasses can provide a dict class to enable case insensitivity for example.
+	"""
+
+	__slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir')
+	def __init__(self, name, parent):
+		"""
+		.. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor
+		"""
+		self.name = name
+		self.parent = parent
+		if parent:
+			if name in parent.children:
+				raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
+			parent.children[name] = self
+
+	def __setstate__(self, data):
+		"Deserializes node information, used for persistence"
+		self.name = data[0]
+		self.parent = data[1]
+		if data[2] is not None:
+			# Issue 1480
+			self.children = self.dict_class(data[2])
+
+	def __getstate__(self):
+		"Serializes node information, used for persistence"
+		return (self.name, self.parent, getattr(self, 'children', None))
+
+	def __str__(self):
+		"""
+		String representation (abspath), for debugging purposes
+
+		:rtype: string
+		"""
+		return self.abspath()
+
+	def __repr__(self):
+		"""
+		String representation (abspath), for debugging purposes
+
+		:rtype: string
+		"""
+		return self.abspath()
+
+	def __copy__(self):
+		"""
+		Provided to prevent nodes from being copied
+
+		:raises: :py:class:`waflib.Errors.WafError`
+		"""
+		raise Errors.WafError('nodes are not supposed to be copied')
+
+	def read(self, flags='r', encoding='latin-1'):
+		"""
+		Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
+
+			def build(bld):
+				bld.path.find_node('wscript').read()
+
+		:param flags: Open mode
+		:type  flags: string
+		:param encoding: encoding value for Python3
+		:type encoding: string
+		:rtype: string or bytes
+		:return: File contents
+		"""
+		return Utils.readf(self.abspath(), flags, encoding)
+
+	def write(self, data, flags='w', encoding='latin-1'):
+		"""
+		Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
+
+			def build(bld):
+				bld.path.make_node('foo.txt').write('Hello, world!')
+
+		:param data: data to write
+		:type  data: string
+		:param flags: Write mode
+		:type  flags: string
+		:param encoding: encoding value for Python3
+		:type encoding: string
+		"""
+		Utils.writef(self.abspath(), data, flags, encoding)
+
+	def read_json(self, convert=True, encoding='utf-8'):
+		"""
+		Reads and parses the contents of this node as JSON (Python ≥ 2.6)::
+
+			def build(bld):
+				bld.path.find_node('abc.json').read_json()
+
+		Note that, unlike the standard Python JSON module, this method converts unicode strings to plain strings on Python2 by default.
+
+		:type  convert: boolean
+		:param convert: Converts unicode values to plain strings on Python2 (set to False to keep them as unicode)
+		:type  encoding: string
+		:param encoding: The encoding of the file to read. This default to UTF8 as per the JSON standard
+		:rtype: object
+		:return: Parsed file contents
+		"""
+		import json # Python 2.6 and up
+		object_pairs_hook = None
+		if convert and sys.hexversion < 0x3000000:
+			try:
+				_type = unicode
+			except NameError:
+				_type = str
+
+			def convert(value):
+				if isinstance(value, list):
+					return [convert(element) for element in value]
+				elif isinstance(value, _type):
+					return str(value)
+				else:
+					return value
+
+			def object_pairs(pairs):
+				return dict((str(pair[0]), convert(pair[1])) for pair in pairs)
+
+			object_pairs_hook = object_pairs
+
+		return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)
+
+	def write_json(self, data, pretty=True):
+		"""
+		Writes a python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard)::
+
+			def build(bld):
+				bld.path.find_node('xyz.json').write_json(199)
+
+		:type  data: object
+		:param data: The data to write to disk
+		:type  pretty: boolean
+		:param pretty: Determines if the JSON will be nicely space separated
+		"""
+		import json # Python 2.6 and up
+		indent = 2
+		separators = (',', ': ')
+		sort_keys = pretty
+		newline = os.linesep
+		if not pretty:
+			indent = None
+			separators = (',', ':')
+			newline = ''
+		output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
+		self.write(output, encoding='utf-8')
+
+	def exists(self):
+		"""
+		Returns whether the Node is present on the filesystem
+
+		:rtype: bool
+		"""
+		return os.path.exists(self.abspath())
+
+	def isdir(self):
+		"""
+		Returns whether the Node represents a folder
+
+		:rtype: bool
+		"""
+		return os.path.isdir(self.abspath())
+
+	def chmod(self, val):
+		"""
+		Changes the file/dir permissions::
+
+			def build(bld):
+				bld.path.chmod(493) # 0755
+		"""
+		os.chmod(self.abspath(), val)
+
+	def delete(self, evict=True):
+		"""
+		Removes the file/folder from the filesystem (equivalent to `rm -rf`), and removes this object from the Node tree.
+		Do not use this object after calling this method.
+		"""
+		try:
+			try:
+				if os.path.isdir(self.abspath()):
+					shutil.rmtree(self.abspath())
+				else:
+					os.remove(self.abspath())
+			except OSError:
+				if os.path.exists(self.abspath()):
+					raise
+		finally:
+			if evict:
+				self.evict()
+
+	def evict(self):
+		"""
+		Removes this node from the Node tree
+		"""
+		del self.parent.children[self.name]
+
+	def suffix(self):
+		"""
+		Returns the file's rightmost extension, for example `a.b.c.d` → `.d`
+
+		:rtype: string
+		"""
+		k = max(0, self.name.rfind('.'))
+		return self.name[k:]
+
+	def height(self):
+		"""
+		Returns the depth in the folder hierarchy from the filesystem root (or from the drive roots on Windows)
+
+		:returns: filesystem depth
+		:rtype: integer
+		"""
+		d = self
+		val = -1
+		while d:
+			d = d.parent
+			val += 1
+		return val
+
+	def listdir(self):
+		"""
+		Lists the folder contents
+
+		:returns: list of file/folder names ordered alphabetically
+		:rtype: list of string
+		"""
+		lst = Utils.listdir(self.abspath())
+		lst.sort()
+		return lst
+
+	def mkdir(self):
+		"""
+		Creates the folder represented by this node. Intermediate folders are created as needed.
+
+		:raises: :py:class:`waflib.Errors.WafError` when the folder cannot be created
+		"""
+		if self.isdir():
+			return
+
+		try:
+			self.parent.mkdir()
+		except OSError:
+			pass
+
+		if self.name:
+			try:
+				os.makedirs(self.abspath())
+			except OSError:
+				pass
+
+			if not self.isdir():
+				raise Errors.WafError('Could not create the directory %r' % self)
+
+			try:
+				self.children
+			except AttributeError:
+				self.children = self.dict_class()
+
+	def find_node(self, lst):
+		"""
+		Finds a node on the file system (file or folder), creating the corresponding Node objects along the way if it exists
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: The corresponding Node object or None if no entry was found on the filesystem
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		if lst and lst[0].startswith('\\\\') and not self.parent:
+			node = self.ctx.root.make_node(lst[0])
+			node.cache_isdir = True
+			return node.find_node(lst[1:])
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+				continue
+
+			try:
+				ch = cur.children
+			except AttributeError:
+				cur.children = self.dict_class()
+			else:
+				try:
+					cur = ch[x]
+					continue
+				except KeyError:
+					pass
+
+			# optimistic: create the node first then look if it was correct to do so
+			cur = self.__class__(x, cur)
+			if not cur.exists():
+				cur.evict()
+				return None
+
+		if not cur.exists():
+			cur.evict()
+			return None
+
+		return cur
+
+	def make_node(self, lst):
+		"""
+		Returns or creates a Node object corresponding to the input path without considering the filesystem.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+				continue
+
+			try:
+				cur = cur.children[x]
+			except AttributeError:
+				cur.children = self.dict_class()
+			except KeyError:
+				pass
+			else:
+				continue
+			cur = self.__class__(x, cur)
+		return cur
+
+	def search_node(self, lst):
+		"""
+		Returns a Node previously defined in the data structure. The filesystem is not considered.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:rtype: :py:class:`waflib.Node.Node` or None if there is no entry in the Node data structure
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		cur = self
+		for x in lst:
+			if x == '..':
+				cur = cur.parent or cur
+			else:
+				try:
+					cur = cur.children[x]
+				except (AttributeError, KeyError):
+					return None
+		return cur
+
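The three lookup methods differ only in how they consult the filesystem and the node cache; a sketch, assuming a project with a top-level wscript::

	def build(bld):
		a = bld.path.find_node('wscript')      # checks the filesystem, None if absent
		b = bld.path.make_node('generated.h')  # always returns a node, no filesystem check
		c = bld.path.search_node('wscript')    # cache-only lookup; here it returns the
		                                       # node created by the find_node call above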
+	def path_from(self, node):
+		"""
+		Path of this node seen from the other::
+
+			def build(bld):
+				n1 = bld.path.find_node('foo/bar/xyz.txt')
+				n2 = bld.path.find_node('foo/stuff/')
+				n1.path_from(n2) # '../bar/xyz.txt'
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		:returns: a relative path or an absolute one if that is better
+		:rtype: string
+		"""
+		c1 = self
+		c2 = node
+
+		c1h = c1.height()
+		c2h = c2.height()
+
+		lst = []
+		up = 0
+
+		while c1h > c2h:
+			lst.append(c1.name)
+			c1 = c1.parent
+			c1h -= 1
+
+		while c2h > c1h:
+			up += 1
+			c2 = c2.parent
+			c2h -= 1
+
+		while not c1 is c2:
+			lst.append(c1.name)
+			up += 1
+
+			c1 = c1.parent
+			c2 = c2.parent
+
+		if c1.parent:
+			lst.extend(['..'] * up)
+			lst.reverse()
+			return os.sep.join(lst) or '.'
+		else:
+			return self.abspath()
+
+	def abspath(self):
+		"""
+		Returns the absolute path; the result is cached on the node as ``cache_abspath``
+
+		:rtype: string
+		"""
+		try:
+			return self.cache_abspath
+		except AttributeError:
+			pass
+		# think twice before touching this (performance + complexity + correctness)
+
+		if not self.parent:
+			val = os.sep
+		elif not self.parent.name:
+			val = os.sep + self.name
+		else:
+			val = self.parent.abspath() + os.sep + self.name
+		self.cache_abspath = val
+		return val
+
+	if Utils.is_win32:
+		def abspath(self):
+			try:
+				return self.cache_abspath
+			except AttributeError:
+				pass
+			if not self.parent:
+				val = ''
+			elif not self.parent.name:
+				val = self.name + os.sep
+			else:
+				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
+			self.cache_abspath = val
+			return val
+
+	def is_child_of(self, node):
+		"""
+		Returns whether the object belongs to a subtree of the input node::
+
+			def build(bld):
+				node = bld.path.find_node('wscript')
+				node.is_child_of(bld.path) # True
+
+		:param node: path to use as a reference
+		:type node: :py:class:`waflib.Node.Node`
+		:rtype: bool
+		"""
+		p = self
+		diff = self.height() - node.height()
+		while diff > 0:
+			diff -= 1
+			p = p.parent
+		return p is node
+
+	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
+		"""
+		Recursive method used by :py:meth:`waflib.Node.ant_glob`.
+
+		:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
+		:type accept: function
+		:param maxdepth: maximum depth in the filesystem (25)
+		:type maxdepth: int
+		:param pats: list of patterns to accept and list of patterns to exclude
+		:type pats: tuple
+		:param dir: return folders too (False by default)
+		:type dir: bool
+		:param src: return files (True by default)
+		:type src: bool
+		:param remove: remove files/folders that do not exist (True by default)
+		:type remove: bool
+		:param quiet: disable build directory traversal warnings (verbose mode)
+		:type quiet: bool
+		:returns: A generator object to iterate from
+		:rtype: iterator
+		"""
+		dircont = self.listdir()
+
+		try:
+			lst = set(self.children.keys())
+		except AttributeError:
+			self.children = self.dict_class()
+		else:
+			if remove:
+				for x in lst - set(dircont):
+					self.children[x].evict()
+
+		for name in dircont:
+			npats = accept(name, pats)
+			if npats and npats[0]:
+				accepted = [] in npats[0]
+
+				node = self.make_node([name])
+
+				isdir = node.isdir()
+				if accepted:
+					if isdir:
+						if dir:
+							yield node
+					elif src:
+						yield node
+
+				if isdir:
+					node.cache_isdir = True
+					if maxdepth:
+						for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet):
+							yield k
+
+	def ant_glob(self, *k, **kw):
+		"""
+		Finds files across folders and returns Node objects:
+
+		* ``**/*`` find all files recursively
+		* ``**/*.class`` find all files ending by .class
+		* ``..`` find files having two dot characters
+
+		For example::
+
+			def configure(cfg):
+				# find all .cpp files
+				cfg.path.ant_glob('**/*.cpp')
+				# find particular files from the root filesystem (can be slow)
+				cfg.root.ant_glob('etc/*.txt')
+				# simple exclusion rule example
+				cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False)
+
+		For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html
+		Please remember that the '..' sequence does not represent the parent directory::
+
+			def configure(cfg):
+				cfg.path.ant_glob('../*.h') # incorrect
+				cfg.path.parent.ant_glob('*.h') # correct
+
+		The Node structure is itself a filesystem cache, so certain precautions must
+		be taken while matching files in the build or installation phases.
+		Node objects that do not have a corresponding file or folder are garbage-collected by default.
+		This garbage collection is usually required to prevent returning files that do not
+		exist anymore. Yet, it may also remove Node objects for files that have not been built yet.
+
+		This typically happens when trying to match files in the build directory,
+		but there are also cases when files are created in the source directory.
+		Run ``waf -v`` to display any warnings, and consider passing ``remove=False``
+		when matching files in the build directory.
+
+		Since ant_glob can traverse both source and build folders, it is a best practice
+		to call this method only from the most specific build node::
+
+			def build(bld):
+				# traverses the build directory, may need ``remove=False``:
+				bld.path.ant_glob('project/dir/**/*.h')
+				# better, no accidental build directory traversal:
+				bld.path.find_node('project/dir').ant_glob('**/*.h') # best
+
+		In addition, files and folders are listed immediately. When matching files in the
+		build folders, consider passing ``generator=True`` so that the generator object
+		returned can defer computation to a later stage. For example::
+
+			def build(bld):
+				bld(rule='tar xvf ${SRC}', source='arch.tar')
+				bld.add_group()
+				gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True)
+				# files will be listed only after the arch.tar is unpacked
+				bld(rule='ls ${SRC}', source=gen, name='XYZ')
+
+
+		:param incl: ant patterns or list of patterns to include
+		:type incl: string or list of strings
+		:param excl: ant patterns or list of patterns to exclude
+		:type excl: string or list of strings
+		:param dir: return folders too (False by default)
+		:type dir: bool
+		:param src: return files (True by default)
+		:type src: bool
+		:param maxdepth: maximum depth of recursion
+		:type maxdepth: int
+		:param ignorecase: ignore case while matching (False by default)
+		:type ignorecase: bool
+		:param generator: Whether to evaluate the Nodes lazily
+		:type generator: bool
+		:param remove: remove files/folders that do not exist (True by default)
+		:type remove: bool
+		:param quiet: disable build directory traversal warnings (verbose mode)
+		:type quiet: bool
+		:returns: The corresponding Node objects as a list or as a generator object (generator=True)
+		:rtype: by default, list of :py:class:`waflib.Node.Node` instances
+		"""
+		src = kw.get('src', True)
+		dir = kw.get('dir')
+		excl = kw.get('excl', exclude_regs)
+		incl = k and k[0] or kw.get('incl', '**')
+		remove = kw.get('remove', True)
+		maxdepth = kw.get('maxdepth', 25)
+		ignorecase = kw.get('ignorecase', False)
+		quiet = kw.get('quiet', False)
+		pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))
+
+		if kw.get('generator'):
+			return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))
+
+		it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
+		if kw.get('flat'):
+			# returns relative paths as a space-delimited string
+			# prefer Node objects whenever possible
+			return ' '.join(x.path_from(self) for x in it)
+		return list(it)
+
+	# ----------------------------------------------------------------------------
+	# the methods below require the source/build folders (bld.srcnode/bld.bldnode)
+
+	def is_src(self):
+		"""
+		Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()``
+
+		:rtype: bool
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		while cur.parent:
+			if cur is y:
+				return False
+			if cur is x:
+				return True
+			cur = cur.parent
+		return False
+
+	def is_bld(self):
+		"""
+		Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()``
+
+		:rtype: bool
+		"""
+		cur = self
+		y = self.ctx.bldnode
+		while cur.parent:
+			if cur is y:
+				return True
+			cur = cur.parent
+		return False
+
+	def get_src(self):
+		"""
+		Returns the corresponding Node object in the source directory (or self if already
+		under the source directory). Use this method only if the purpose is to create
+		a Node object (this is common with folders but not with files, see ticket 1937)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		lst = []
+		while cur.parent:
+			if cur is y:
+				lst.reverse()
+				return x.make_node(lst)
+			if cur is x:
+				return self
+			lst.append(cur.name)
+			cur = cur.parent
+		return self
+
+	def get_bld(self):
+		"""
+		Returns the corresponding Node object in the build directory (or self if already
+		under the build directory). Use this method only if the purpose is to create
+		a Node object (this is common with folders but not with files, see ticket 1937)
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		cur = self
+		x = self.ctx.srcnode
+		y = self.ctx.bldnode
+		lst = []
+		while cur.parent:
+			if cur is y:
+				return self
+			if cur is x:
+				lst.reverse()
+				return self.ctx.bldnode.make_node(lst)
+			lst.append(cur.name)
+			cur = cur.parent
+		# the file is external to the current project, make a fake root in the current build directory
+		lst.reverse()
+		if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
+			lst[0] = lst[0][0]
+		return self.ctx.bldnode.make_node(['__root__'] + lst)
+
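The two methods mirror a path from one tree into the other; a sketch from a top-level wscript, assuming ``src/main.c`` exists in the source tree::

	def build(bld):
		src = bld.path.find_node('src/main.c') # node in the source tree
		out = src.get_bld()                    # corresponding node under the build directory
		assert out.get_src() is src            # mapping back yields the same node object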
+	def find_resource(self, lst):
+		"""
+		Use this method in the build phase to find source files corresponding to the relative path given.
+
+		First it looks up the Node data structure to find any declared Node object in the build directory.
+		If None is found, it then considers the filesystem in the source directory.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: the corresponding Node object or None
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		node = self.get_bld().search_node(lst)
+		if not node:
+			node = self.get_src().find_node(lst)
+		if node and node.isdir():
+			return None
+		return node
+
+	def find_or_declare(self, lst):
+		"""
+		Use this method in the build phase to declare output files which
+		are meant to be written in the build directory.
+
+		This method creates the Node object and its parent folder
+		as needed.
+
+		:param lst: relative path
+		:type lst: string or list of string
+		"""
+		if isinstance(lst, str) and os.path.isabs(lst):
+			node = self.ctx.root.make_node(lst)
+		else:
+			node = self.get_bld().make_node(lst)
+		node.parent.mkdir()
+		return node
+
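Outputs are usually declared this way from rules or task generator methods; a minimal sketch::

	def build(bld):
		# declares <build>/hello.txt and creates the parent folder if needed
		out = bld.path.find_or_declare('hello.txt')
		bld(rule='echo hello > ${TGT}', target=out)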
+	def find_dir(self, lst):
+		"""
+		Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)
+
+		:param lst: relative path
+		:type lst: string or list of string
+		:returns: The corresponding Node object or None if there is no such folder
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		node = self.find_node(lst)
+		if node and not node.isdir():
+			return None
+		return node
+
+	# helpers for building things
+	def change_ext(self, ext, ext_in=None):
+		"""
+		Declares a build node with a distinct extension; this uses :py:meth:`waflib.Node.Node.find_or_declare`
+
+		:return: A build node of the same path, but with a different extension
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		name = self.name
+		if ext_in is None:
+			k = name.rfind('.')
+			if k >= 0:
+				name = name[:k] + ext
+			else:
+				name = name + ext
+		else:
+			name = name[:- len(ext_in)] + ext
+
+		return self.parent.find_or_declare([name])
+
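A sketch of the usual pattern in task generator methods, assuming ``main.c`` exists in the source tree::

	def build(bld):
		src = bld.path.find_resource('main.c')
		obj = src.change_ext('.o')              # build node named main.o
		dep = src.change_ext('.d', ext_in='.c') # build node named main.d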
+	def bldpath(self):
+		"""
+		Returns the relative path seen from the build directory ``src/foo.cpp``
+
+		:rtype: string
+		"""
+		return self.path_from(self.ctx.bldnode)
+
+	def srcpath(self):
+		"""
+		Returns the relative path seen from the source directory ``../src/foo.cpp``
+
+		:rtype: string
+		"""
+		return self.path_from(self.ctx.srcnode)
+
+	def relpath(self):
+		"""
+		If the file is below the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
+		else returns :py:meth:`waflib.Node.Node.srcpath`
+
+		:rtype: string
+		"""
+		cur = self
+		x = self.ctx.bldnode
+		while cur.parent:
+			if cur is x:
+				return self.bldpath()
+			cur = cur.parent
+		return self.srcpath()
+
+	def bld_dir(self):
+		"""
+		Equivalent to self.parent.bldpath()
+
+		:rtype: string
+		"""
+		return self.parent.bldpath()
+
+	def h_file(self):
+		"""
+		See :py:func:`waflib.Utils.h_file`
+
+		:return: a hash representing the file contents
+		:rtype: string or bytes
+		"""
+		return Utils.h_file(self.abspath())
+
+	def get_bld_sig(self):
+		"""
+		Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
+		of build dependency calculation. This method uses a per-context cache.
+
+		:return: a hash representing the object contents
+		:rtype: string or bytes
+		"""
+		# previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
+		try:
+			cache = self.ctx.cache_sig
+		except AttributeError:
+			cache = self.ctx.cache_sig = {}
+		try:
+			ret = cache[self]
+		except KeyError:
+			p = self.abspath()
+			try:
+				ret = cache[self] = self.h_file()
+			except EnvironmentError:
+				if self.isdir():
+					# allow folders as build nodes, do not use the creation time
+					st = os.stat(p)
+					ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
+					return ret
+				raise
+		return ret
+
+pickle_lock = Utils.threading.Lock()
+"""Lock mandatory for thread-safe node serialization"""
+
+class Nod3(Node):
+	"""Mandatory subclass for thread-safe node serialization"""
+	pass # do not remove
+
+
diff --git a/third_party/waf/waflib/Options.py b/third_party/waf/waflib/Options.py
new file mode 100644
index 0000000..d410491
--- /dev/null
+++ b/third_party/waf/waflib/Options.py
@@ -0,0 +1,359 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Scott Newton, 2005 (scottn)
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+Support for waf command-line options
+
+Provides default and command-line options, as well as the command
+that executes the ``options`` wscript function.
+"""
+
+import os, tempfile, optparse, sys, re
+from waflib import Logs, Utils, Context, Errors
+
+options = optparse.Values()
+"""
+A global ``optparse.Values`` object holding the user-provided command-line options::
+
+	$ waf --foo=bar
+"""
+
+commands = []
+"""
+List of commands to execute extracted from the command-line. This list
+is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
+"""
+
+envvars = []
+"""
+List of environment variable declarations placed after the Waf executable name.
+These are detected by searching for "=" in the remaining arguments.
+You probably do not want to use this.
+"""
+
+lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
+"""
+Name of the lock file that marks a project as configured
+"""
+
+class opt_parser(optparse.OptionParser):
+	"""
+	Command-line options parser.
+	"""
+	def __init__(self, ctx, allow_unknown=False):
+		optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
+			version='%s %s (%s)' % (Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION))
+		self.formatter.width = Logs.get_term_cols()
+		self.ctx = ctx
+		self.allow_unknown = allow_unknown
+
+	def _process_args(self, largs, rargs, values):
+		"""
+		Custom _process_args to allow unknown options according to the allow_unknown status
+		"""
+		while rargs:
+			try:
+				optparse.OptionParser._process_args(self,largs,rargs,values)
+			except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
+				if self.allow_unknown:
+					largs.append(e.opt_str)
+				else:
+					self.error(str(e))
+
+	def _process_long_opt(self, rargs, values):
+		# --custom-option=-ftxyz is interpreted as -f -t... see #2280
+		if self.allow_unknown:
+			back = [] + rargs
+			try:
+				optparse.OptionParser._process_long_opt(self, rargs, values)
+			except optparse.BadOptionError:
+				while rargs:
+					rargs.pop()
+				rargs.extend(back)
+				rargs.pop(0)
+				raise
+		else:
+			optparse.OptionParser._process_long_opt(self, rargs, values)
+
+	def print_usage(self, file=None):
+		return self.print_help(file)
+
+	def get_usage(self):
+		"""
+		Builds the message to print on ``waf --help``
+
+		:rtype: string
+		"""
+		cmds_str = {}
+		for cls in Context.classes:
+			if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
+				continue
+
+			s = cls.__doc__ or ''
+			cmds_str[cls.cmd] = s
+
+		if Context.g_module:
+			for (k, v) in Context.g_module.__dict__.items():
+				if k in ('options', 'init', 'shutdown'):
+					continue
+
+				if type(v) is type(Context.create_context):
+					if v.__doc__ and not k.startswith('_'):
+						cmds_str[k] = v.__doc__
+
+		just = 0
+		for k in cmds_str:
+			just = max(just, len(k))
+
+		lst = ['  %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
+		lst.sort()
+		ret = '\n'.join(lst)
+
+		return '''%s [commands] [options]
+
+Main commands (example: ./%s build -j4)
+%s
+''' % (Context.WAFNAME, Context.WAFNAME, ret)
+
+
+class OptionsContext(Context.Context):
+	"""
+	Collects custom options from wscript files and parses the command line.
+	Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
+	"""
+	cmd = 'options'
+	fun = 'options'
+
+	def __init__(self, **kw):
+		super(OptionsContext, self).__init__(**kw)
+
+		self.parser = opt_parser(self)
+		"""Instance of :py:class:`waflib.Options.opt_parser`"""
+
+		self.option_groups = {}
+
+		jobs = self.jobs()
+		p = self.add_option
+		color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
+		if os.environ.get('CLICOLOR', '') == '0':
+			color = 'no'
+		elif os.environ.get('CLICOLOR_FORCE', '') == '1':
+			color = 'yes'
+		p('-c', '--color',    dest='colors',  default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
+		p('-j', '--jobs',     dest='jobs',    default=jobs,  type='int', help='amount of parallel jobs (%r)' % jobs)
+		p('-k', '--keep',     dest='keep',    default=0,     action='count', help='continue despite errors (-kk to try harder)')
+		p('-v', '--verbose',  dest='verbose', default=0,     action='count', help='verbosity level -v -vv or -vvv [default: 0]')
+		p('--zones',          dest='zones',   default='',    action='store', help='debugging zones (task_gen, deps, tasks, etc)')
+		p('--profile',        dest='profile', default=0,     action='store_true', help=optparse.SUPPRESS_HELP)
+		p('--pdb',            dest='pdb',     default=0,     action='store_true', help=optparse.SUPPRESS_HELP)
+		p('-h', '--help',     dest='whelp',   default=0,     action='store_true', help="show this help message and exit")
+
+		gr = self.add_option_group('Configuration options')
+		self.option_groups['configure options'] = gr
+
+		gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
+		gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
+
+		gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
+		gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
+		gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
+
+		default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
+		if not default_prefix:
+			if Utils.unversioned_sys_platform() == 'win32':
+				d = tempfile.gettempdir()
+				default_prefix = d[0].upper() + d[1:]
+				# win32 preserves the case, but gettempdir does not
+			else:
+				default_prefix = '/usr/local/'
+		gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
+		gr.add_option('--bindir', dest='bindir', help='bindir')
+		gr.add_option('--libdir', dest='libdir', help='libdir')
+
+		gr = self.add_option_group('Build and installation options')
+		self.option_groups['build and install options'] = gr
+		gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
+		gr.add_option('--targets',        dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
+
+		gr = self.add_option_group('Step options')
+		self.option_groups['step options'] = gr
+		gr.add_option('--files',          dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+
+		default_destdir = os.environ.get('DESTDIR', '')
+
+		gr = self.add_option_group('Installation and uninstallation options')
+		self.option_groups['install/uninstall options'] = gr
+		gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
+		gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
+		gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')
+
+	def jobs(self):
+		"""
+		Finds the optimal number of CPU cores to use for parallel jobs.
+		At runtime the options can be obtained from :py:const:`waflib.Options.options` ::
+
+			from waflib.Options import options
+			njobs = options.jobs
+
+		:return: the number of CPU cores
+		:rtype: int
+		"""
+		count = int(os.environ.get('JOBS', 0))
+		if count < 1:
+			if 'NUMBER_OF_PROCESSORS' in os.environ:
+				# on Windows, use the NUMBER_OF_PROCESSORS environment variable
+				count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
+			else:
+				# on everything else, first try the POSIX sysconf values
+				if hasattr(os, 'sysconf_names'):
+					if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+						count = int(os.sysconf('SC_NPROCESSORS_ONLN'))
+					elif 'SC_NPROCESSORS_CONF' in os.sysconf_names:
+						count = int(os.sysconf('SC_NPROCESSORS_CONF'))
+				if not count and os.name not in ('nt', 'java'):
+					try:
+						tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
+					except Errors.WafError:
+						pass
+					else:
+						if re.match('^[0-9]+$', tmp):
+							count = int(tmp)
+		if count < 1:
+			count = 1
+		elif count > 1024:
+			count = 1024
+		return count
+
+	def add_option(self, *k, **kw):
+		"""
+		Wraps ``optparse.add_option``::
+
+			def options(ctx):
+				ctx.add_option('-u', '--use', dest='use', default=False,
+					action='store_true', help='a boolean option')
+
+		:rtype: optparse option object
+		"""
+		return self.parser.add_option(*k, **kw)
+
+	def add_option_group(self, *k, **kw):
+		"""
+		Wraps ``optparse.add_option_group``::
+
+			def options(ctx):
+				gr = ctx.add_option_group('some options')
+				gr.add_option('-u', '--use', dest='use', default=False, action='store_true')
+
+		:rtype: optparse option group object
+		"""
+		try:
+			gr = self.option_groups[k[0]]
+		except KeyError:
+			gr = self.parser.add_option_group(*k, **kw)
+		self.option_groups[k[0]] = gr
+		return gr
+
+	def get_option_group(self, opt_str):
+		"""
+		Wraps ``optparse.get_option_group``::
+
+			def options(ctx):
+				gr = ctx.get_option_group('configure options')
+				gr.add_option('-o', '--out', action='store', default='',
+					help='build dir for the project', dest='out')
+
+		:rtype: optparse option group object
+		"""
+		try:
+			return self.option_groups[opt_str]
+		except KeyError:
+			for group in self.parser.option_groups:
+				if group.title == opt_str:
+					return group
+			return None
+
+	def sanitize_path(self, path, cwd=None):
+		if not cwd:
+			cwd = Context.launch_dir
+		p = os.path.expanduser(path)
+		p = os.path.join(cwd, p)
+		p = os.path.normpath(p)
+		p = os.path.abspath(p)
+		return p
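+
+	# Expansion sketch (paths are hypothetical), with Context.launch_dir
+	# set to '/home/user/project':
+	#   self.sanitize_path('~/src')          -> '/home/user/src'
+	#   self.sanitize_path('build', '/tmp')  -> '/tmp/build'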
+
+	def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
+		"""
+		Parses the arguments and returns a tuple ``(options, commands, envvars)``
+		"""
+		self.parser.allow_unknown = allow_unknown
+		(options, leftover_args) = self.parser.parse_args(args=_args)
+		envvars = []
+		commands = []
+		for arg in leftover_args:
+			if '=' in arg:
+				envvars.append(arg)
+			elif arg != 'options':
+				commands.append(arg)
+
+		if options.jobs < 1:
+			options.jobs = 1
+		for name in 'top out destdir prefix bindir libdir'.split():
+			# those paths are usually expanded from Context.launch_dir
+			if getattr(options, name, None):
+				path = self.sanitize_path(getattr(options, name), cwd)
+				setattr(options, name, path)
+		return options, commands, envvars
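+
+	# Splitting sketch (command line is hypothetical): for
+	#   waf configure build CC=gcc
+	# the leftover arguments are split into
+	#   commands == ['configure', 'build']
+	#   envvars  == ['CC=gcc']
+	# since any leftover argument containing '=' counts as an environment variable.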
+
+	def init_module_vars(self, arg_options, arg_commands, arg_envvars):
+		options.__dict__.clear()
+		del commands[:]
+		del envvars[:]
+
+		options.__dict__.update(arg_options.__dict__)
+		commands.extend(arg_commands)
+		envvars.extend(arg_envvars)
+
+		for var in envvars:
+			(name, value) = var.split('=', 1)
+			os.environ[name.strip()] = value
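+
+	# Note (illustrative): each envvar entry is split on the first '=' only,
+	# so a hypothetical 'DEFS=A=B' yields os.environ['DEFS'] == 'A=B'.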
+
+	def init_logs(self, options, commands, envvars):
+		Logs.verbose = options.verbose
+		if options.verbose >= 1:
+			self.load('errcheck')
+
+		colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
+		Logs.enable_colors(colors)
+
+		if options.zones:
+			Logs.zones = options.zones.split(',')
+			if not Logs.verbose:
+				Logs.verbose = 1
+		elif Logs.verbose > 0:
+			Logs.zones = ['runner']
+		if Logs.verbose > 2:
+			Logs.zones = ['*']
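+
+	# Behaviour sketch (flags are hypothetical): `waf --zones=task_gen`
+	# enables the 'task_gen' debugging zone and raises the verbosity to 1,
+	# while `waf -vvv` without --zones enables all zones ('*').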
+
+	def parse_args(self, _args=None):
+		"""
+		Parses arguments from a list which is not necessarily the command-line.
+		Initializes the module variables options, commands and envvars.
+		If help is requested, prints it and exits the application.
+
+		:param _args: arguments
+		:type _args: list of strings
+		"""
+		options, commands, envvars = self.parse_cmd_args()
+		self.init_logs(options, commands, envvars)
+		self.init_module_vars(options, commands, envvars)
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		super(OptionsContext, self).execute()
+		self.parse_args()
+		Utils.alloc_process_pool(options.jobs)
+
diff --git a/third_party/waf/waflib/Runner.py b/third_party/waf/waflib/Runner.py
new file mode 100644
index 0000000..350c86a
--- /dev/null
+++ b/third_party/waf/waflib/Runner.py
@@ -0,0 +1,622 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Runner.py: Task scheduling and execution
+"""
+
+import heapq, traceback
+try:
+	from queue import Queue, PriorityQueue
+except ImportError:
+	from Queue import Queue
+	try:
+		from Queue import PriorityQueue
+	except ImportError:
+		class PriorityQueue(Queue):
+			def _init(self, maxsize):
+				self.maxsize = maxsize
+				self.queue = []
+			def _put(self, item):
+				heapq.heappush(self.queue, item)
+			def _get(self):
+				return heapq.heappop(self.queue)
+
+from waflib import Utils, Task, Errors, Logs
+
+GAP = 5
+"""
+Do not enqueue more tasks while more than ``GAP * njobs`` tasks are waiting to be executed
+"""
+
+class PriorityTasks(object):
+	def __init__(self):
+		self.lst = []
+	def __len__(self):
+		return len(self.lst)
+	def __iter__(self):
+		return iter(self.lst)
+	def __str__(self):
+		return 'PriorityTasks: [%s]' % '\n  '.join(str(x) for x in self.lst)
+	def clear(self):
+		self.lst = []
+	def append(self, task):
+		heapq.heappush(self.lst, task)
+	def appendleft(self, task):
+		"Deprecated, do not use"
+		heapq.heappush(self.lst, task)
+	def pop(self):
+		return heapq.heappop(self.lst)
+	def extend(self, lst):
+		if self.lst:
+			for x in lst:
+				self.append(x)
+		else:
+			if isinstance(lst, list):
+				self.lst = lst
+				heapq.heapify(lst)
+			else:
+				self.lst = lst.lst
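+
+# Note (descriptive): extend() takes ownership of `lst` when the heap is
+# empty; the caller's list is heapified in place rather than copied.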
+
+class Consumer(Utils.threading.Thread):
+	"""
+	Daemon thread object that executes a task. It shares a semaphore with
+	the coordinator :py:class:`waflib.Runner.Spawner`. There is one
+	instance per task to consume.
+	"""
+	def __init__(self, spawner, task):
+		Utils.threading.Thread.__init__(self)
+		self.task = task
+		"""Task to execute"""
+		self.spawner = spawner
+		"""Coordinator object"""
+		self.daemon = True
+		self.start()
+	def run(self):
+		"""
+		Processes a single task
+		"""
+		try:
+			if not self.spawner.master.stop:
+				self.spawner.master.process_task(self.task)
+		finally:
+			self.spawner.sem.release()
+			self.spawner.master.out.put(self.task)
+			self.task = None
+			self.spawner = None
+
+class Spawner(Utils.threading.Thread):
+	"""
+	Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
+	spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
+	:py:class:`waflib.Task.Task` instance.
+	"""
+	def __init__(self, master):
+		Utils.threading.Thread.__init__(self)
+		self.master = master
+		""":py:class:`waflib.Runner.Parallel` producer instance"""
+		self.sem = Utils.threading.Semaphore(master.numjobs)
+		"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
+		self.daemon = True
+		self.start()
+	def run(self):
+		"""
+		Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
+		"""
+		try:
+			self.loop()
+		except Exception:
+			# Python 2 prints unnecessary messages when shutting down
+			# we also want to stop the thread properly
+			pass
+	def loop(self):
+		"""
+		Consumes task objects from the producer; ends when the producer has no more
+		tasks to provide.
+		"""
+		master = self.master
+		while 1:
+			task = master.ready.get()
+			self.sem.acquire()
+			if not master.stop:
+				task.log_display(task.generator.bld)
+			Consumer(self, task)
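+
+# Shutdown sketch (descriptive, not upstream documentation): Parallel.start()
+# enqueues a final None into `ready`; dereferencing it in Spawner.loop raises
+# an exception that Spawner.run swallows, terminating the daemon thread.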
+
+class Parallel(object):
+	"""
+	Schedule the tasks obtained from the build context for execution.
+	"""
+	def __init__(self, bld, j=2):
+		"""
+		The initialization requires a build context reference
+		for computing the total number of jobs.
+		"""
+
+		self.numjobs = j
+		"""
+		Number of parallel consumers to use
+		"""
+
+		self.bld = bld
+		"""
+		Instance of :py:class:`waflib.Build.BuildContext`
+		"""
+
+		self.outstanding = PriorityTasks()
+		"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
+
+		self.postponed = PriorityTasks()
+		"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
+
+		self.incomplete = set()
+		"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
+
+		self.ready = PriorityQueue(0)
+		"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
+
+		self.out = Queue(0)
+		"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""
+
+		self.count = 0
+		"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
+
+		self.processed = 0
+		"""Amount of tasks processed"""
+
+		self.stop = False
+		"""Error flag to stop the build"""
+
+		self.error = []
+		"""Tasks that could not be executed"""
+
+		self.biter = None
+		"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
+
+		self.dirty = False
+		"""
+		Flag that indicates that the build cache must be saved when a task was executed
+		(calls :py:meth:`waflib.Build.BuildContext.store`)"""
+
+		self.revdeps = Utils.defaultdict(set)
+		"""
+		The reverse dependency graph of dependencies obtained from Task.run_after
+		"""
+
+		self.spawner = None
+		"""
+		Coordinating daemon thread that spawns thread consumers
+		"""
+		if self.numjobs > 1:
+			self.spawner = Spawner(self)
+
+	def get_next_task(self):
+		"""
+		Obtains the next Task instance to run
+
+		:rtype: :py:class:`waflib.Task.Task`
+		"""
+		if not self.outstanding:
+			return None
+		return self.outstanding.pop()
+
+	def postpone(self, tsk):
+		"""
+		Adds the task to the heap :py:attr:`waflib.Runner.Parallel.postponed`.
+		Tasks are kept in priority order so as to consume as many tasks in parallel as possible.
+
+		:param tsk: task instance
+		:type tsk: :py:class:`waflib.Task.Task`
+		"""
+		self.postponed.append(tsk)
+
+	def refill_task_list(self):
+		"""
+		Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+		Ensures that all tasks in the current build group are complete before processing the next one.
+		"""
+		while self.count > self.numjobs * GAP:
+			self.get_out()
+
+		while not self.outstanding:
+			if self.count:
+				self.get_out()
+				if self.outstanding:
+					break
+			elif self.postponed:
+				try:
+					cond = self.deadlock == self.processed
+				except AttributeError:
+					pass
+				else:
+					if cond:
+						# The most common reason is conflicting build order declaration
+						# for example: "X run_after Y" and "Y run_after X"
+						# Another can be changing "run_after" dependencies while the build is running
+						# for example: updating "tsk.run_after" in the "runnable_status" method
+						lst = []
+						for tsk in self.postponed:
+							deps = [id(x) for x in tsk.run_after if not x.hasrun]
+							lst.append('%s\t-> %r' % (repr(tsk), deps))
+							if not deps:
+								lst.append('\n  task %r dependencies are done, check its *runnable_status*?' % id(tsk))
+						raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
+				self.deadlock = self.processed
+
+			if self.postponed:
+				self.outstanding.extend(self.postponed)
+				self.postponed.clear()
+			elif not self.count:
+				if self.incomplete:
+					for x in self.incomplete:
+						for k in x.run_after:
+							if not k.hasrun:
+								break
+						else:
+							# dependency added after the build started without updating revdeps
+							self.incomplete.remove(x)
+							self.outstanding.append(x)
+							break
+					else:
+						if self.stop or self.error:
+							break
+						raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
+				else:
+					tasks = next(self.biter)
+					ready, waiting = self.prio_and_split(tasks)
+					self.outstanding.extend(ready)
+					self.incomplete.update(waiting)
+					self.total = self.bld.total()
+					break
+
+	def add_more_tasks(self, tsk):
+		"""
+		If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
+		in that list are added to the current build and will be processed before the next build group.
+
+		The priorities for dependent tasks are not re-calculated globally
+
+		:param tsk: task instance
+		:type tsk: :py:attr:`waflib.Task.Task`
+		"""
+		if getattr(tsk, 'more_tasks', None):
+			more = set(tsk.more_tasks)
+			groups_done = set()
+			def iteri(a, b):
+				for x in a:
+					yield x
+				for x in b:
+					yield x
+
+			# Update the dependency tree
+			# this assumes that task.run_after values were updated
+			for x in iteri(self.outstanding, self.incomplete):
+				for k in x.run_after:
+					if isinstance(k, Task.TaskGroup):
+						if k not in groups_done:
+							groups_done.add(k)
+							for j in k.prev & more:
+								self.revdeps[j].add(k)
+					elif k in more:
+						self.revdeps[k].add(x)
+
+			ready, waiting = self.prio_and_split(tsk.more_tasks)
+			self.outstanding.extend(ready)
+			self.incomplete.update(waiting)
+			self.total += len(tsk.more_tasks)
+
+	def mark_finished(self, tsk):
+		def try_unfreeze(x):
+			# DAG ancestors are likely to be in the incomplete set
+			# This assumes that the run_after contents have not changed
+			# after the build starts, else a deadlock may occur
+			if x in self.incomplete:
+				# TODO remove dependencies to free some memory?
+				# x.run_after.remove(tsk)
+				for k in x.run_after:
+					if not k.hasrun:
+						break
+				else:
+					self.incomplete.remove(x)
+					self.outstanding.append(x)
+
+		if tsk in self.revdeps:
+			for x in self.revdeps[tsk]:
+				if isinstance(x, Task.TaskGroup):
+					x.prev.remove(tsk)
+					if not x.prev:
+						for k in x.next:
+							# TODO necessary optimization?
+							k.run_after.remove(x)
+							try_unfreeze(k)
+						# TODO necessary optimization?
+						x.next = []
+				else:
+					try_unfreeze(x)
+			del self.revdeps[tsk]
+
+		if hasattr(tsk, 'semaphore'):
+			sem = tsk.semaphore
+			try:
+				sem.release(tsk)
+			except KeyError:
+				# TODO
+				pass
+			else:
+				while sem.waiting and not sem.is_locked():
+					# take a frozen task, make it ready to run
+					x = sem.waiting.pop()
+					self._add_task(x)
+
+	def get_out(self):
+		"""
+		Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
+		Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
+
+		:rtype: :py:attr:`waflib.Task.Task`
+		"""
+		tsk = self.out.get()
+		if not self.stop:
+			self.add_more_tasks(tsk)
+		self.mark_finished(tsk)
+
+		self.count -= 1
+		self.dirty = True
+		return tsk
+
+	def add_task(self, tsk):
+		"""
+		Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
+
+		:param tsk: task instance
+		:type tsk: :py:attr:`waflib.Task.Task`
+		"""
+		# TODO change in waf 2.1
+		self.ready.put(tsk)
+
+	def _add_task(self, tsk):
+		if hasattr(tsk, 'semaphore'):
+			sem = tsk.semaphore
+			try:
+				sem.acquire(tsk)
+			except IndexError:
+				sem.waiting.add(tsk)
+				return
+
+		self.count += 1
+		self.processed += 1
+		if self.numjobs == 1:
+			tsk.log_display(tsk.generator.bld)
+			try:
+				self.process_task(tsk)
+			finally:
+				self.out.put(tsk)
+		else:
+			self.add_task(tsk)
+
+	def process_task(self, tsk):
+		"""
+		Processes a task and attempts to stop the build in case of errors
+		"""
+		tsk.process()
+		if tsk.hasrun != Task.SUCCESS:
+			self.error_handler(tsk)
+
+	def skip(self, tsk):
+		"""
+		Mark a task as skipped/up-to-date
+		"""
+		tsk.hasrun = Task.SKIPPED
+		self.mark_finished(tsk)
+
+	def cancel(self, tsk):
+		"""
+		Mark a task as failed because of unsatisfiable dependencies
+		"""
+		tsk.hasrun = Task.CANCELED
+		self.mark_finished(tsk)
+
+	def error_handler(self, tsk):
+		"""
+		Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
+		unless the build is executed with::
+
+			$ waf build -k
+
+		:param tsk: task instance
+		:type tsk: :py:attr:`waflib.Task.Task`
+		"""
+		if not self.bld.keep:
+			self.stop = True
+		self.error.append(tsk)
+
+	def task_status(self, tsk):
+		"""
+		Obtains the task status to decide whether to run it immediately or not.
+
+		:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
+		:rtype: integer
+		"""
+		try:
+			return tsk.runnable_status()
+		except Exception:
+			self.processed += 1
+			tsk.err_msg = traceback.format_exc()
+			if not self.stop and self.bld.keep:
+				self.skip(tsk)
+				if self.bld.keep == 1:
+					# if -k stop on the first exception, if -kk try to go as far as possible
+					if Logs.verbose > 1 or not self.error:
+						self.error.append(tsk)
+					self.stop = True
+				else:
+					if Logs.verbose > 1:
+						self.error.append(tsk)
+				return Task.EXCEPTION
+
+			tsk.hasrun = Task.EXCEPTION
+			self.error_handler(tsk)
+
+			return Task.EXCEPTION
+
+	def start(self):
+		"""
+		Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
+		:py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
+		has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
+		and marks the build as failed by setting the ``stop`` flag when a task cannot be executed.
+		If only one job is used, then executes the tasks one by one, without consumers.
+		"""
+		self.total = self.bld.total()
+
+		while not self.stop:
+
+			self.refill_task_list()
+
+			# consider the next task
+			tsk = self.get_next_task()
+			if not tsk:
+				if self.count:
+					# tasks may add new ones after they are run
+					continue
+				else:
+					# no tasks to run, no tasks running, time to exit
+					break
+
+			if tsk.hasrun:
+				# if the task is marked as "run", just skip it
+				self.processed += 1
+				continue
+
+			if self.stop: # stop immediately after a failure is detected
+				break
+
+			st = self.task_status(tsk)
+			if st == Task.RUN_ME:
+				self._add_task(tsk)
+			elif st == Task.ASK_LATER:
+				self.postpone(tsk)
+			elif st == Task.SKIP_ME:
+				self.processed += 1
+				self.skip(tsk)
+				self.add_more_tasks(tsk)
+			elif st == Task.CANCEL_ME:
+				# A dependency problem has occurred, and the
+				# build is most likely run with `waf -k`
+				if Logs.verbose > 1:
+					self.error.append(tsk)
+				self.processed += 1
+				self.cancel(tsk)
+
+		# self.count represents the tasks that have been made available to the consumer threads
+		# collect all the tasks after an error else the message may be incomplete
+		while self.error and self.count:
+			self.get_out()
+
+		self.ready.put(None)
+		if not self.stop:
+			assert not self.count
+			assert not self.postponed
+			assert not self.incomplete
+
+	def prio_and_split(self, tasks):
+		"""
+		Label input tasks with priority values, and return a pair containing
+		the tasks that are ready to run and the tasks that are necessarily
+		waiting for other tasks to complete.
+
+		The priority system is really meant as an optional layer for optimization:
+		dependency cycles are found quickly, and builds should be more efficient.
+		A high priority number means that a task is processed first.
+
+		This method can be overridden to disable the priority system::
+
+			def prio_and_split(self, tasks):
+				return tasks, []
+
+		:return: A pair of task lists
+		:rtype: tuple
+		"""
+		# to disable:
+		#return tasks, []
+		for x in tasks:
+			x.visited = 0
+
+		reverse = self.revdeps
+
+		groups_done = set()
+		for x in tasks:
+			for k in x.run_after:
+				if isinstance(k, Task.TaskGroup):
+					if k not in groups_done:
+						groups_done.add(k)
+						for j in k.prev:
+							reverse[j].add(k)
+				else:
+					reverse[k].add(x)
+
+		# the priority number is not the tree depth
+		def visit(n):
+			if isinstance(n, Task.TaskGroup):
+				return sum(visit(k) for k in n.next)
+
+			if n.visited == 0:
+				n.visited = 1
+
+				if n in reverse:
+					rev = reverse[n]
+					n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
+				else:
+					n.prio_order = n.tree_weight
+
+				n.visited = 2
+			elif n.visited == 1:
+				raise Errors.WafError('Dependency cycle found!')
+			return n.prio_order
+
+		for x in tasks:
+			if x.visited != 0:
+				# must visit all to detect cycles
+				continue
+			try:
+				visit(x)
+			except Errors.WafError:
+				self.debug_cycles(tasks, reverse)
+
+		ready = []
+		waiting = []
+		for x in tasks:
+			for k in x.run_after:
+				if not k.hasrun:
+					waiting.append(x)
+					break
+			else:
+				ready.append(x)
+		return (ready, waiting)
+
+	def debug_cycles(self, tasks, reverse):
+		tmp = {}
+		for x in tasks:
+			tmp[x] = 0
+
+		def visit(n, acc):
+			if isinstance(n, Task.TaskGroup):
+				for k in n.next:
+					visit(k, acc)
+				return
+			if tmp[n] == 0:
+				tmp[n] = 1
+				for k in reverse.get(n, []):
+					visit(k, [n] + acc)
+				tmp[n] = 2
+			elif tmp[n] == 1:
+				lst = []
+				for tsk in acc:
+					lst.append(repr(tsk))
+					if tsk is n:
+						# exclude prior nodes, we want the minimum cycle
+						break
+				raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
+		for x in tasks:
+			visit(x, [])
+
diff --git a/third_party/waf/waflib/Scripting.py b/third_party/waf/waflib/Scripting.py
new file mode 100644
index 0000000..a80cb36
--- /dev/null
+++ b/third_party/waf/waflib/Scripting.py
@@ -0,0 +1,631 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"Module called for configuring, compiling and installing targets"
+
+from __future__ import with_statement
+
+import os, shlex, shutil, traceback, errno, sys, stat
+from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
+
+build_dir_override = None
+
+no_climb_commands = ['configure']
+
+default_cmd = "build"
+
+def waf_entry_point(current_directory, version, wafdir):
+	"""
+	This is the main entry point, all Waf execution starts here.
+
+	:param current_directory: absolute path representing the current directory
+	:type current_directory: string
+	:param version: version number
+	:type version: string
+	:param wafdir: absolute path representing the directory of the waf library
+	:type wafdir: string
+	"""
+	Logs.init_log()
+
+	if Context.WAFVERSION != version:
+		Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
+		sys.exit(1)
+
+	# Store current directory before any chdir
+	Context.waf_dir = wafdir
+	Context.run_dir = Context.launch_dir = current_directory
+	start_dir = current_directory
+	no_climb = os.environ.get('NOCLIMB')
+
+	if len(sys.argv) > 1:
+		# os.path.join handles absolute paths
+		# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
+		potential_wscript = os.path.join(current_directory, sys.argv[1])
+		if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
+			# need to explicitly normalize the path, as it may contain extra '/.'
+			path = os.path.normpath(os.path.dirname(potential_wscript))
+			start_dir = os.path.abspath(path)
+			no_climb = True
+			sys.argv.pop(1)
+
+	ctx = Context.create_context('options')
+	(options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
+	if options.top:
+		start_dir = Context.run_dir = Context.top_dir = options.top
+		no_climb = True
+	if options.out:
+		Context.out_dir = options.out
+
+	# if 'configure' is in the commands, do not search any further
+	if not no_climb:
+		for k in no_climb_commands:
+			for y in commands:
+				if y.startswith(k):
+					no_climb = True
+					break
+
+	# try to find a lock file (if the project was configured)
+	# at the same time, store the first wscript file seen
+	cur = start_dir
+	while cur:
+		try:
+			lst = os.listdir(cur)
+		except OSError:
+			lst = []
+			Logs.error('Directory %r is unreadable!', cur)
+		if Options.lockfile in lst:
+			env = ConfigSet.ConfigSet()
+			try:
+				env.load(os.path.join(cur, Options.lockfile))
+				ino = os.stat(cur)[stat.ST_INO]
+			except EnvironmentError:
+				pass
+			else:
+				# check if the folder was not moved
+				for x in (env.run_dir, env.top_dir, env.out_dir):
+					if not x:
+						continue
+					if Utils.is_win32:
+						if cur == x:
+							load = True
+							break
+					else:
+						# if the filesystem features symlinks, compare the inode numbers
+						try:
+							ino2 = os.stat(x)[stat.ST_INO]
+						except OSError:
+							pass
+						else:
+							if ino == ino2:
+								load = True
+								break
+				else:
+					Logs.warn('invalid lock file in %s', cur)
+					load = False
+
+				if load:
+					Context.run_dir = env.run_dir
+					Context.top_dir = env.top_dir
+					Context.out_dir = env.out_dir
+					break
+
+		if not Context.run_dir:
+			if Context.WSCRIPT_FILE in lst:
+				Context.run_dir = cur
+
+		next = os.path.dirname(cur)
+		if next == cur:
+			break
+		cur = next
+
+		if no_climb:
+			break
+
+	wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
+	if not os.path.exists(wscript):
+		if options.whelp:
+			Logs.warn('These are the generic options (no wscript/project found)')
+			ctx.parser.print_help()
+			sys.exit(0)
+		Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
+		sys.exit(1)
+
+	try:
+		os.chdir(Context.run_dir)
+	except OSError:
+		Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
+		sys.exit(1)
+
+	try:
+		set_main_module(wscript)
+	except Errors.WafError as e:
+		Logs.pprint('RED', e.verbose_msg)
+		Logs.error(str(e))
+		sys.exit(1)
+	except Exception as e:
+		Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
+		traceback.print_exc(file=sys.stdout)
+		sys.exit(2)
+
+	if options.profile:
+		import cProfile, pstats
+		cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
+		p = pstats.Stats('profi.txt')
+		p.sort_stats('time').print_stats(75) # or 'cumulative'
+	else:
+		try:
+			try:
+				run_commands()
+			except:
+				if options.pdb:
+					import pdb
+					type, value, tb = sys.exc_info()
+					traceback.print_exc()
+					pdb.post_mortem(tb)
+				else:
+					raise
+		except Errors.WafError as e:
+			if Logs.verbose > 1:
+				Logs.pprint('RED', e.verbose_msg)
+			Logs.error(e.msg)
+			sys.exit(1)
+		except SystemExit:
+			raise
+		except Exception as e:
+			traceback.print_exc(file=sys.stdout)
+			sys.exit(2)
+		except KeyboardInterrupt:
+			Logs.pprint('RED', 'Interrupted')
+			sys.exit(68)
+
+def set_main_module(file_path):
+	"""
+	Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
+	bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+
+	:param file_path: absolute path representing the top-level wscript file
+	:type file_path: string
+	"""
+	Context.g_module = Context.load_module(file_path)
+	Context.g_module.root_path = file_path
+
+	# note: to register the module globally, use the following:
+	# sys.modules['wscript_main'] = g_module
+
+	def set_def(obj):
+		name = obj.__name__
+		if not name in Context.g_module.__dict__:
+			setattr(Context.g_module, name, obj)
+	for k in (dist, distclean, distcheck):
+		set_def(k)
+	# add dummy init and shutdown functions if they're not defined
+	if not 'init' in Context.g_module.__dict__:
+		Context.g_module.init = Utils.nada
+	if not 'shutdown' in Context.g_module.__dict__:
+		Context.g_module.shutdown = Utils.nada
+	if not 'options' in Context.g_module.__dict__:
+		Context.g_module.options = Utils.nada
+
+def parse_options():
+	"""
+	Parses the command-line options and initializes the logging system.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
+	"""
+	ctx = Context.create_context('options')
+	ctx.execute()
+	if not Options.commands:
+		if isinstance(default_cmd, list):
+			Options.commands.extend(default_cmd)
+		else:
+			Options.commands.append(default_cmd)
+	if Options.options.whelp:
+		ctx.parser.print_help()
+		sys.exit(0)
+
+def run_command(cmd_name):
+	"""
+	Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.
+
+	:param cmd_name: command to execute, like ``build``
+	:type cmd_name: string
+	"""
+	ctx = Context.create_context(cmd_name)
+	ctx.log_timer = Utils.Timer()
+	ctx.options = Options.options # provided for convenience
+	ctx.cmd = cmd_name
+	try:
+		ctx.execute()
+	finally:
+		# Issue 1374
+		ctx.finalize()
+	return ctx
+
+def run_commands():
+	"""
+	Executes the Waf commands that were given on the command-line, and the other options.
+	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
+	after :py:func:`waflib.Scripting.parse_options`.
+	"""
+	parse_options()
+	run_command('init')
+	while Options.commands:
+		cmd_name = Options.commands.pop(0)
+		ctx = run_command(cmd_name)
+		Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
+	run_command('shutdown')
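+
+# For example (hypothetical invocation): `waf configure build` runs the
+# implicit 'init' command, then 'configure', then 'build', and finally the
+# implicit 'shutdown' command, logging the elapsed time of each explicit command.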
+
+###########################################################################################
+
+def distclean_dir(dirname):
+	"""
+	Distclean function called in the particular case when::
+
+		top == out
+
+	:param dirname: absolute path of the folder to clean
+	:type dirname: string
+	"""
+	for (root, dirs, files) in os.walk(dirname):
+		for f in files:
+			if f.endswith(('.o', '.moc', '.exe')):
+				fname = os.path.join(root, f)
+				try:
+					os.remove(fname)
+				except OSError:
+					Logs.warn('Could not remove %r', fname)
+
+	for x in (Context.DBFILE, 'config.log'):
+		try:
+			os.remove(x)
+		except OSError:
+			pass
+
+	try:
+		shutil.rmtree(Build.CACHE_DIR)
+	except OSError:
+		pass
+
+def distclean(ctx):
+	'''removes build folders and data'''
+
+	def remove_and_log(k, fun):
+		try:
+			fun(k)
+		except EnvironmentError as e:
+			if e.errno != errno.ENOENT:
+				Logs.warn('Could not remove %r', k)
+
+	# remove waf cache folders on the top-level
+	if not Options.commands:
+		for k in os.listdir('.'):
+			for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
+				if k.startswith(x):
+					remove_and_log(k, shutil.rmtree)
+
+	# remove a build folder, if any
+	cur = '.'
+	if os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top:
+		cur = ctx.options.out
+
+	try:
+		lst = os.listdir(cur)
+	except OSError:
+		Logs.warn('Could not read %r', cur)
+		return
+
+	if Options.lockfile in lst:
+		f = os.path.join(cur, Options.lockfile)
+		try:
+			env = ConfigSet.ConfigSet(f)
+		except EnvironmentError:
+			Logs.warn('Could not read %r', f)
+			return
+
+		if not env.out_dir or not env.top_dir:
+			Logs.warn('Invalid lock file %r', f)
+			return
+
+		if env.out_dir == env.top_dir:
+			distclean_dir(env.out_dir)
+		else:
+			remove_and_log(env.out_dir, shutil.rmtree)
+
+		env_dirs = [env.out_dir]
+		if not (os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top):
+			env_dirs.append(env.top_dir)
+		if not (os.environ.get('NO_LOCK_IN_RUN') or ctx.options.no_lock_in_run):
+			env_dirs.append(env.run_dir)
+		for k in env_dirs:
+			p = os.path.join(k, Options.lockfile)
+			remove_and_log(p, os.remove)
+
+class Dist(Context.Context):
+	'''creates an archive containing the project source code'''
+	cmd = 'dist'
+	fun = 'dist'
+	algo = 'tar.bz2'
+	ext_algo = {}
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.recurse([os.path.dirname(Context.g_module.root_path)])
+		self.archive()
+
+	def archive(self):
+		"""
+		Creates the source archive.
+		"""
+		import tarfile
+
+		arch_name = self.get_arch_name()
+
+		try:
+			self.base_path
+		except AttributeError:
+			self.base_path = self.path
+
+		node = self.base_path.make_node(arch_name)
+		try:
+			node.delete()
+		except OSError:
+			pass
+
+		files = self.get_files()
+
+		if self.algo.startswith('tar.'):
+			tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))
+
+			for x in files:
+				self.add_tar_file(x, tar)
+			tar.close()
+		elif self.algo == 'zip':
+			import zipfile
+			zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)
+
+			for x in files:
+				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
+				if os.environ.get('SOURCE_DATE_EPOCH'):
+					# TODO: parse that timestamp
+					zip.writestr(zipfile.ZipInfo(archive_name), x.read(), zipfile.ZIP_DEFLATED)
+				else:
+					zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
+			zip.close()
+		else:
+			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
+
+		try:
+			from hashlib import sha256
+		except ImportError:
+			digest = ''
+		else:
+			digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()
+
+		Logs.info('New archive created: %s%s', self.arch_name, digest)
+
+	def get_tar_path(self, node):
+		"""
+		Returns the path to use for a node in the tar archive; the purpose of this
+		is to let subclasses resolve symbolic links or change file names
+
+		:return: absolute path
+		:rtype: string
+		"""
+		return node.abspath()
+
+	def add_tar_file(self, x, tar):
+		"""
+		Adds a file to the tar archive. Symlinks are not verified.
+
+		:param x: file path
+		:param tar: tar file object
+		"""
+		p = self.get_tar_path(x)
+		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
+		tinfo.uid   = 0
+		tinfo.gid   = 0
+		tinfo.uname = 'root'
+		tinfo.gname = 'root'
+		if os.environ.get('SOURCE_DATE_EPOCH'):
+			tinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH'))
+
+		if os.path.isfile(p):
+			with open(p, 'rb') as f:
+				tar.addfile(tinfo, fileobj=f)
+		else:
+			tar.addfile(tinfo)
+
+	def get_tar_prefix(self):
+		"""
+		Returns the base path for files added into the archive tar file
+
+		:rtype: string
+		"""
+		try:
+			return self.tar_prefix
+		except AttributeError:
+			return self.get_base_name()
+
+	def get_arch_name(self):
+		"""
+		Returns the archive file name.
+		Set the attribute *arch_name* to change the default value::
+
+			def dist(ctx):
+				ctx.arch_name = 'ctx.tar.bz2'
+
+		:rtype: string
+		"""
+		try:
+			self.arch_name
+		except AttributeError:
+			self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
+		return self.arch_name
+
+	def get_base_name(self):
+		"""
+		Returns the default name of the main directory in the archive, which is set to *appname-version*.
+		Set the attribute *base_name* to change the default value::
+
+			def dist(ctx):
+				ctx.base_name = 'files'
+
+		:rtype: string
+		"""
+		try:
+			self.base_name
+		except AttributeError:
+			appname = getattr(Context.g_module, Context.APPNAME, 'noname')
+			version = getattr(Context.g_module, Context.VERSION, '1.0')
+			self.base_name = appname + '-' + version
+		return self.base_name
+
+	def get_excl(self):
+		"""
+		Returns the patterns to exclude for finding the files in the top-level directory.
+		Set the attribute *excl* to change the default value::
+
+			def dist(ctx):
+				ctx.excl = 'build **/*.o **/*.class'
+
+		:rtype: string
+		"""
+		try:
+			return self.excl
+		except AttributeError:
+			self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+			if Context.out_dir:
+				nd = self.root.find_node(Context.out_dir)
+				if nd:
+					self.excl += ' ' + nd.path_from(self.base_path)
+			return self.excl
+
+	def get_files(self):
+		"""
+		Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
+		Set *files* to prevent this behaviour::
+
+			def dist(ctx):
+				ctx.files = ctx.path.find_node('wscript')
+
+		Files are also searched from the directory 'base_path', to change it, set::
+
+			def dist(ctx):
+				ctx.base_path = path
+
+		:rtype: list of :py:class:`waflib.Node.Node`
+		"""
+		try:
+			files = self.files
+		except AttributeError:
+			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
+		return files
+
+def dist(ctx):
+	'''makes a tarball for redistributing the sources'''
+	pass
+
+class DistCheck(Dist):
+	"""creates an archive with dist, then tries to build it"""
+	fun = 'distcheck'
+	cmd = 'distcheck'
+
+	def execute(self):
+		"""
+		See :py:func:`waflib.Context.Context.execute`
+		"""
+		self.recurse([os.path.dirname(Context.g_module.root_path)])
+		self.archive()
+		self.check()
+
+	def make_distcheck_cmd(self, tmpdir):
+		cfg = []
+		if Options.options.distcheck_args:
+			cfg = shlex.split(Options.options.distcheck_args)
+		else:
+			cfg = [x for x in sys.argv if x.startswith('-')]
+		cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
+		return cmd
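+
+	# Sketch of a resulting command (paths and flags are hypothetical):
+	#   [sys.executable, 'waf', 'configure', 'build', 'install', 'uninstall',
+	#    '--destdir=/tmp/proj-1.0.inst', '--prefix=/usr']
+	# where the trailing flags come from --distcheck-args, or default to the
+	# dash-prefixed flags of the current invocation.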
+
+	def check(self):
+		"""
+		Uncompresses the source archive and tries to configure, build, install and uninstall the project
+		"""
+		import tempfile, tarfile
+
+		with tarfile.open(self.get_arch_name()) as t:
+			for x in t:
+				t.extract(x)
+
+		instdir = tempfile.mkdtemp('.inst', self.get_base_name())
+		cmd = self.make_distcheck_cmd(instdir)
+		ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
+		if ret:
+			raise Errors.WafError('distcheck failed with code %r' % ret)
+
+		if os.path.exists(instdir):
+			raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
+
+		shutil.rmtree(self.get_base_name())
+
+
+def distcheck(ctx):
+	'''checks if the project compiles (tarball from 'dist')'''
+	pass
+
+def autoconfigure(execute_method):
+	"""
+	Decorator that enables context commands to run *configure* as needed.
+	"""
+	def execute(self):
+		"""
+		Wraps :py:func:`waflib.Context.Context.execute` on the context class
+		"""
+		if not Configure.autoconfig:
+			return execute_method(self)
+
+		env = ConfigSet.ConfigSet()
+		do_config = False
+		try:
+			env.load(os.path.join(Context.top_dir, Options.lockfile))
+		except EnvironmentError:
+			Logs.warn('Configuring the project')
+			do_config = True
+		else:
+			if env.run_dir != Context.run_dir:
+				do_config = True
+			else:
+				h = 0
+				for f in env.files:
+					try:
+						h = Utils.h_list((h, Utils.readf(f, 'rb')))
+					except EnvironmentError:
+						do_config = True
+						break
+				else:
+					do_config = h != env.hash
+
+		if do_config:
+			cmd = env.config_cmd or 'configure'
+			if Configure.autoconfig == 'clobber':
+				tmp = Options.options.__dict__
+				launch_dir_tmp = Context.launch_dir
+				if env.options:
+					Options.options.__dict__ = env.options
+				Context.launch_dir = env.launch_dir
+				try:
+					run_command(cmd)
+				finally:
+					Options.options.__dict__ = tmp
+					Context.launch_dir = launch_dir_tmp
+			else:
+				run_command(cmd)
+			run_command(self.cmd)
+		else:
+			return execute_method(self)
+	return execute
+Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
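+
+# A subclassing sketch (hypothetical, not something upstream does here): any
+# other command context could opt into the same behaviour with
+#   MyContext.execute = autoconfigure(MyContext.execute)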
+
diff --git a/third_party/waf/waflib/Task.py b/third_party/waf/waflib/Task.py
new file mode 100644
index 0000000..cb49a73
--- /dev/null
+++ b/third_party/waf/waflib/Task.py
@@ -0,0 +1,1406 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Tasks represent atomic operations such as processes.
+"""
+
+import os, re, sys, tempfile, traceback
+from waflib import Utils, Logs, Errors
+
+# task states
+NOT_RUN = 0
+"""The task was not executed yet"""
+
+MISSING = 1
+"""The task has been executed but the files have not been created"""
+
+CRASHED = 2
+"""The task execution returned a non-zero exit status"""
+
+EXCEPTION = 3
+"""An exception occurred in the task execution"""
+
+CANCELED = 4
+"""A dependency for the task is missing so it was cancelled"""
+
+SKIPPED = 8
+"""The task did not have to be executed"""
+
+SUCCESS = 9
+"""The task was successfully executed"""
+
+ASK_LATER = -1
+"""The task is not ready to be executed"""
+
+SKIP_ME = -2
+"""The task does not need to be executed"""
+
+RUN_ME = -3
+"""The task must be executed"""
+
+CANCEL_ME = -4
+"""The task cannot be executed because of a dependency problem"""
+
+COMPILE_TEMPLATE_SHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	p = env.get_flat
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
+	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
+	return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None)
+'''
+
+COMPILE_TEMPLATE_NOSHELL = '''
+def f(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
+	def merge(lst1, lst2):
+		if lst1 and lst2:
+			return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:]
+		return lst1 + lst2
+	lst = []
+	%s
+	if '' in lst:
+		lst = [x for x in lst if x]
+	tsk.last_cmd = lst
+	return tsk.exec_command(lst, cwd=cwdx, env=env.env or None)
+'''
+
+COMPILE_TEMPLATE_SIG_VARS = '''
+def f(tsk):
+	sig = tsk.generator.bld.hash_env_vars(tsk.env, tsk.vars)
+	tsk.m.update(sig)
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	p = env.get_flat
+	buf = []
+	%s
+	tsk.m.update(repr(buf).encode())
+'''
+
+classes = {}
+"""
+The metaclass :py:class:`waflib.Task.store_task_type` stores all class tasks
+created by user scripts or Waf tools to this dict. It maps class names to class objects.
+"""
+
+class store_task_type(type):
+	"""
+	Metaclass: store the task classes into the dict pointed by the
+	class attribute 'register' which defaults to :py:const:`waflib.Task.classes`,
+
+	The attribute 'run_str' is compiled into a method 'run' bound to the task class.
+	"""
+	def __init__(cls, name, bases, dict):
+		super(store_task_type, cls).__init__(name, bases, dict)
+		name = cls.__name__
+
+		if name != 'evil' and name != 'Task':
+			if getattr(cls, 'run_str', None):
+				# if a string is provided, convert it to a method
+				(f, dvars) = compile_fun(cls.run_str, cls.shell)
+				cls.hcode = Utils.h_cmd(cls.run_str)
+				cls.orig_run_str = cls.run_str
+				# change the name of run_str or it is impossible to subclass with a function
+				cls.run_str = None
+				cls.run = f
+				# process variables
+				cls.vars = list(set(cls.vars + dvars))
+				cls.vars.sort()
+				if cls.vars:
+					fun = compile_sig_vars(cls.vars)
+					if fun:
+						cls.sig_vars = fun
+			elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__:
+				# getattr(cls, 'hcode') would look in the upper classes
+				cls.hcode = Utils.h_cmd(cls.run)
+
+			# be creative
+			getattr(cls, 'register', classes)[name] = cls
+
+evil = store_task_type('evil', (object,), {})
+"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
+
+class Task(evil):
+	"""
+	Task objects represent actions to perform, such as commands to execute by calling the `run` method.
+
+	Detecting when to execute a task occurs in the method :py:meth:`waflib.Task.Task.runnable_status`.
+
+	Detecting which tasks to execute is performed through a hash value returned by
+	:py:meth:`waflib.Task.Task.signature`. The task signature is persistent from build to build.
+	"""
+	vars = []
+	"""ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
+
+	always_run = False
+	"""Specify whether task instances must always be executed or not (class attribute)"""
+
+	shell = False
+	"""Execute the command with the shell (class attribute)"""
+
+	color = 'GREEN'
+	"""Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""
+
+	ext_in = []
+	"""File extensions that objects of this task class may use"""
+
+	ext_out = []
+	"""File extensions that objects of this task class may create"""
+
+	before = []
+	"""The instances of this class are executed before the instances of classes whose names are in this list"""
+
+	after = []
+	"""The instances of this class are executed after the instances of classes whose names are in this list"""
+
+	hcode = Utils.SIG_NIL
+	"""String representing an additional hash for the class representation"""
+
+	keep_last_cmd = False
+	"""Whether to keep the last command executed on the instance after execution.
+	This may be useful for certain extensions but it can consume a lot of memory.
+	"""
+
+	weight = 0
+	"""Optional weight to tune the priority for task instances.
+	The higher, the earlier. The weight only applies to single task objects."""
+
+	tree_weight = 0
+	"""Optional weight to tune the priority of task instances and whole subtrees.
+	The higher, the earlier."""
+
+	prio_order = 0
+	"""Priority order set by the scheduler on instances during the build phase.
+	You most likely do not need to set it.
+	"""
+
+	__slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after')
+
+	def __init__(self, *k, **kw):
+		self.hasrun = NOT_RUN
+		try:
+			self.generator = kw['generator']
+		except KeyError:
+			self.generator = self
+
+		self.env = kw['env']
+		""":py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""
+
+		self.inputs  = []
+		"""List of input nodes, which represent the files used by the task instance"""
+
+		self.outputs = []
+		"""List of output nodes, which represent the files created by the task instance"""
+
+		self.dep_nodes = []
+		"""List of additional nodes to depend on"""
+
+		self.run_after = set()
+		"""Set of tasks that must be executed before this one"""
+
+	def __lt__(self, other):
+		return self.priority() > other.priority()
+	def __le__(self, other):
+		return self.priority() >= other.priority()
+	def __gt__(self, other):
+		return self.priority() < other.priority()
+	def __ge__(self, other):
+		return self.priority() <= other.priority()
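+
+	# Note (descriptive): the comparisons above are intentionally inverted so
+	# that heapq-based containers (PriorityTasks, PriorityQueue), which pop
+	# the smallest element first, yield the highest-priority task first.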
+
+	def get_cwd(self):
+		"""
+		:return: current working directory
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		bld = self.generator.bld
+		ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode)
+		if isinstance(ret, str):
+			if os.path.isabs(ret):
+				ret = bld.root.make_node(ret)
+			else:
+				ret = self.generator.path.make_node(ret)
+		return ret
+
+	def quote_flag(self, x):
+		"""
+		Surround a process argument by quotes so that a list of arguments can be written to a file
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		old = x
+		if '\\' in x:
+			x = x.replace('\\', '\\\\')
+		if '"' in x:
+			x = x.replace('"', '\\"')
+		if old != x or ' ' in x or '\t' in x or "'" in x:
+			x = '"%s"' % x
+		return x
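+
+	# Quoting sketch (inputs are hypothetical):
+	#   quote_flag('-O2')   -> -O2        (unchanged)
+	#   quote_flag('a b')   -> "a b"      (contains a space)
+	#   quote_flag('a"b')   -> "a\"b"     (escaped, then quoted)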
+
+	def priority(self):
+		"""
+		Priority of execution; the higher, the earlier
+
+		:return: the priority value
+		:rtype: a tuple of numeric values
+		"""
+		return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0))
+
+	def split_argfile(self, cmd):
+		"""
+		Splits a list of process commands into the executable part and its list of arguments
+
+		:return: a tuple containing the executable first and then the rest of arguments
+		:rtype: tuple
+		"""
+		return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]])
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Wrapper for :py:meth:`waflib.Context.Context.exec_command`.
+		This version sets the current working directory (``build.variant_dir``),
+		applies PATH settings (if self.env.PATH is provided), and can run long
+		commands through a temporary ``@argfile``.
+
+		:param cmd: process command to execute
+		:type cmd: list of string (best) or string (process will use a shell)
+		:return: the return code
+		:rtype: int
+
+		Optional parameters:
+
+		#. cwd: current working directory (Node or string)
+		#. stdout: set to None to prevent waf from capturing the process standard output
+		#. stderr: set to None to prevent waf from capturing the process standard error
+		#. timeout: timeout value (Python 3)
+		"""
+		if not 'cwd' in kw:
+			kw['cwd'] = self.get_cwd()
+
+		if hasattr(self, 'timeout'):
+			kw['timeout'] = self.timeout
+
+		if self.env.PATH:
+			env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+			env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+
+		if hasattr(self, 'stdout'):
+			kw['stdout'] = self.stdout
+		if hasattr(self, 'stderr'):
+			kw['stderr'] = self.stderr
+
+		if not isinstance(cmd, str):
+			if Utils.is_win32:
+				# win32 compares the resulting length http://support.microsoft.com/kb/830473
+				too_long = sum([len(arg) for arg in cmd]) + len(cmd) > 8192
+			else:
+				# non-win32 counts the number of arguments (200k)
+				too_long = len(cmd) > 200000
+
+			if too_long and getattr(self, 'allow_argsfile', True):
+				# Shunt arguments to a temporary file if the command is too long.
+				cmd, args = self.split_argfile(cmd)
+				try:
+					(fd, tmp) = tempfile.mkstemp()
+					os.write(fd, '\r\n'.join(args).encode())
+					os.close(fd)
+					if Logs.verbose:
+						Logs.debug('argfile: @%r -> %r', tmp, args)
+					return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw)
+				finally:
+					try:
+						os.remove(tmp)
+					except OSError:
+						# anti-virus and indexers can keep files open -_-
+						pass
+		return self.generator.bld.exec_command(cmd, **kw)
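+
+	# Argfile sketch (names and sizes are hypothetical): a win32 link command
+	# whose arguments total more than 8192 characters, e.g.
+	#   ['link.exe', 'a.obj', 'b.obj', ...]
+	# is rewritten as ['link.exe', '@c:\\temp\\tmp123'], with one argument per
+	# line in the temporary file.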
+
+	def process(self):
+		"""
+		Runs the task and handles errors
+
+		:return: 0 or None if everything is fine
+		:rtype: integer
+		"""
+		# remove the task signature immediately before it is executed
+		# so that the task will be executed again in case of failure
+		try:
+			del self.generator.bld.task_sigs[self.uid()]
+		except KeyError:
+			pass
+
+		try:
+			ret = self.run()
+		except Exception:
+			self.err_msg = traceback.format_exc()
+			self.hasrun = EXCEPTION
+		else:
+			if ret:
+				self.err_code = ret
+				self.hasrun = CRASHED
+			else:
+				try:
+					self.post_run()
+				except Errors.WafError:
+					pass
+				except Exception:
+					self.err_msg = traceback.format_exc()
+					self.hasrun = EXCEPTION
+				else:
+					self.hasrun = SUCCESS
+
+		if self.hasrun != SUCCESS and self.scan:
+			# rescan dependencies on next run
+			try:
+				del self.generator.bld.imp_sigs[self.uid()]
+			except KeyError:
+				pass
+
+	def log_display(self, bld):
+		"Writes the execution status on the context logger"
+		if self.generator.bld.progress_bar == 3:
+			return
+
+		s = self.display()
+		if s:
+			if bld.logger:
+				logger = bld.logger
+			else:
+				logger = Logs
+
+			if self.generator.bld.progress_bar == 1:
+				c1 = Logs.colors.cursor_off
+				c2 = Logs.colors.cursor_on
+				logger.info(s, extra={'stream': sys.stderr, 'terminator':'', 'c1': c1, 'c2' : c2})
+			else:
+				logger.info(s, extra={'terminator':'', 'c1': '', 'c2' : ''})
+
+	def display(self):
+		"""
+		Returns an execution status for the console, the progress bar, or the IDE output.
+
+		:rtype: string
+		"""
+		col1 = Logs.colors(self.color)
+		col2 = Logs.colors.NORMAL
+		master = self.generator.bld.producer
+
+		def cur():
+			# the current task position, computed as late as possible
+			return master.processed - master.ready.qsize()
+
+		if self.generator.bld.progress_bar == 1:
+			return self.generator.bld.progress_line(cur(), master.total, col1, col2)
+
+		if self.generator.bld.progress_bar == 2:
+			ela = str(self.generator.bld.timer)
+			try:
+				ins  = ','.join([n.name for n in self.inputs])
+			except AttributeError:
+				ins = ''
+			try:
+				outs = ','.join([n.name for n in self.outputs])
+			except AttributeError:
+				outs = ''
+			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela)
+
+		s = str(self)
+		if not s:
+			return None
+
+		total = master.total
+		n = len(str(total))
+		fs = '[%%%dd/%%%dd] %%s%%s%%s%%s\n' % (n, n)
+		kw = self.keyword()
+		if kw:
+			kw += ' '
+		return fs % (cur(), total, kw, col1, s, col2)
+
+	def hash_constraints(self):
+		"""
+		Identifies a task type for all the constraints relevant for the scheduler: precedence, file production
+
+		:return: a hash value
+		:rtype: tuple
+		"""
+		return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode)
+
+	def format_error(self):
+		"""
+		Returns an error message to display the build failure reasons
+
+		:rtype: string
+		"""
+		if Logs.verbose:
+			msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', ''))
+		else:
+			msg = ' (run with -v to display more information)'
+		name = getattr(self.generator, 'name', '')
+		if getattr(self, "err_msg", None):
+			return self.err_msg
+		elif not self.hasrun:
+			return 'task in %r was not executed for some reason: %r' % (name, self)
+		elif self.hasrun == CRASHED:
+			try:
+				return ' -> task in %r failed with exit status %r%s' % (name, self.err_code, msg)
+			except AttributeError:
+				return ' -> task in %r failed%s' % (name, msg)
+		elif self.hasrun == MISSING:
+			return ' -> missing files in %r%s' % (name, msg)
+		elif self.hasrun == CANCELED:
+			return ' -> %r canceled because of missing dependencies' % name
+		else:
+			return 'invalid status for task in %r: %r' % (name, self.hasrun)
+
+	def colon(self, var1, var2):
+		"""
+		Enable scriptlet expressions of the form ${FOO_ST:FOO}
+		If the first variable (FOO_ST) is empty, then an empty list is returned
+
+		The results will be slightly different if FOO_ST is a list, for example::
+
+			env.FOO    = ['p1', 'p2']
+			env.FOO_ST = '-I%s'
+			# ${FOO_ST:FOO} returns
+			['-Ip1', '-Ip2']
+
+			env.FOO_ST = ['-a', '-b']
+			# ${FOO_ST:FOO} returns
+			['-a', '-b', 'p1', '-a', '-b', 'p2']
+		"""
+		tmp = self.env[var1]
+		if not tmp:
+			return []
+
+		if isinstance(var2, str):
+			it = self.env[var2]
+		else:
+			it = var2
+		if isinstance(tmp, str):
+			return [tmp % x for x in it]
+		else:
+			lst = []
+			for y in it:
+				lst.extend(tmp)
+				lst.append(y)
+			return lst
+
+	def __str__(self):
+		"string to display to the user"
+		name = self.__class__.__name__
+		if self.outputs:
+			if name.endswith(('lib', 'program')) or not self.inputs:
+				node = self.outputs[0]
+				return node.path_from(node.ctx.launch_node())
+		if not (self.inputs or self.outputs):
+			return self.__class__.__name__
+		if len(self.inputs) == 1:
+			node = self.inputs[0]
+			return node.path_from(node.ctx.launch_node())
+
+		src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
+		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+		if self.outputs:
+			sep = ' -> '
+		else:
+			sep = ''
+		return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)
+
+	def keyword(self):
+		"Display keyword used to prettify the console outputs"
+		name = self.__class__.__name__
+		if name.endswith(('lib', 'program')):
+			return 'Linking'
+		if len(self.inputs) == 1 and len(self.outputs) == 1:
+			return 'Compiling'
+		if not self.inputs:
+			if self.outputs:
+				return 'Creating'
+			else:
+				return 'Running'
+		return 'Processing'
+
+	def __repr__(self):
+		"for debugging purposes"
+		try:
+			ins = ",".join([x.name for x in self.inputs])
+			outs = ",".join([x.name for x in self.outputs])
+		except AttributeError:
+			ins = ",".join([str(x) for x in self.inputs])
+			outs = ",".join([str(x) for x in self.outputs])
+		return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ins, " -> ", outs, '}'])
+
+	def uid(self):
+		"""
+		Returns an identifier used to determine if tasks are up-to-date. Since the
+		identifier will be stored between executions, it must be:
+
+			- unique for a task: no two tasks return the same value (for a given build context)
+			- the same for a given task instance
+
+		By default, the node paths and the class name are used
+		as inputs to compute a hash.
+
+		The pointer to the object (python built-in 'id') will change between build executions,
+		and must be avoided in such hashes.
+
+		:return: hash value
+		:rtype: string
+		"""
+		try:
+			return self.uid_
+		except AttributeError:
+			m = Utils.md5(self.__class__.__name__)
+			up = m.update
+			for x in self.inputs + self.outputs:
+				up(x.abspath())
+			self.uid_ = m.digest()
+			return self.uid_
+
+	def set_inputs(self, inp):
+		"""
+		Appends the nodes to the *inputs* list
+
+		:param inp: input nodes
+		:type inp: node or list of nodes
+		"""
+		if isinstance(inp, list):
+			self.inputs += inp
+		else:
+			self.inputs.append(inp)
+
+	def set_outputs(self, out):
+		"""
+		Appends the nodes to the *outputs* list
+
+		:param out: output nodes
+		:type out: node or list of nodes
+		"""
+		if isinstance(out, list):
+			self.outputs += out
+		else:
+			self.outputs.append(out)
+
+	def set_run_after(self, task):
+		"""
+		Run this task only after the given *task*.
+
+		Calling this method from :py:meth:`waflib.Task.Task.runnable_status` may cause
+		build deadlocks; see :py:meth:`waflib.Tools.fc.fc.runnable_status` for details.
+
+		:param task: task
+		:type task: :py:class:`waflib.Task.Task`
+		"""
+		assert isinstance(task, Task)
+		self.run_after.add(task)
+
+	def signature(self):
+		"""
+		Task signatures are stored between build executions; they are used to track the changes
+		made to the input nodes (not to the outputs!). The signature hashes data from various sources:
+
+		* explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps`
+		* implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps`
+		* hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars`
+
+		If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``::
+
+			from waflib import Task
+			class cls(Task.Task):
+				def signature(self):
+					super(cls, self).signature()
+					delattr(self, 'cache_sig')
+					return super(cls, self).signature()
+
+		:return: the signature value
+		:rtype: string or bytes
+		"""
+		try:
+			return self.cache_sig
+		except AttributeError:
+			pass
+
+		self.m = Utils.md5(self.hcode)
+
+		# explicit deps
+		self.sig_explicit_deps()
+
+		# env vars
+		self.sig_vars()
+
+		# implicit deps / scanner results
+		if self.scan:
+			try:
+				self.sig_implicit_deps()
+			except Errors.TaskRescan:
+				return self.signature()
+
+		ret = self.cache_sig = self.m.digest()
+		return ret
+
+	def runnable_status(self):
+		"""
+		Returns the Task status
+
+		:return: a task state in :py:const:`waflib.Task.RUN_ME`,
+			:py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`.
+		:rtype: int
+		"""
+		bld = self.generator.bld
+		if bld.is_install < 0:
+			return SKIP_ME
+
+		for t in self.run_after:
+			if not t.hasrun:
+				return ASK_LATER
+			elif t.hasrun < SKIPPED:
+				# a dependency has an error
+				return CANCEL_ME
+
+		# first compute the signature
+		try:
+			new_sig = self.signature()
+		except Errors.TaskNotReady:
+			return ASK_LATER
+
+		# compare the signature to a signature computed previously
+		key = self.uid()
+		try:
+			prev_sig = bld.task_sigs[key]
+		except KeyError:
+			Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
+			return RUN_ME
+
+		if new_sig != prev_sig:
+			Logs.debug('task: task %r must run: the task signature changed', self)
+			return RUN_ME
+
+		# compare the signatures of the outputs
+		for node in self.outputs:
+			sig = bld.node_sigs.get(node)
+			if not sig:
+				Logs.debug('task: task %r must run: an output node has no signature', self)
+				return RUN_ME
+			if sig != key:
+				Logs.debug('task: task %r must run: an output node was produced by another task', self)
+				return RUN_ME
+			if not node.exists():
+				Logs.debug('task: task %r must run: an output node does not exist', self)
+				return RUN_ME
+
+		return (self.always_run and RUN_ME) or SKIP_ME
+
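+	# Note (sketch, not upstream code): to make a task class rerun on every
+	# build, setting the class attribute checked above is sufficient:
+	#
+	#   class regen(Task):
+	#       always_run = True
+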
+	def post_run(self):
+		"""
+		Called after successful execution to record that the task has run by
+		updating the entry in :py:attr:`waflib.Build.BuildContext.task_sigs`.
+		"""
+		bld = self.generator.bld
+		for node in self.outputs:
+			if not node.exists():
+				self.hasrun = MISSING
+				self.err_msg = '-> missing file: %r' % node.abspath()
+				raise Errors.WafError(self.err_msg)
+			bld.node_sigs[node] = self.uid() # make sure this task produced the files in question
+		bld.task_sigs[self.uid()] = self.signature()
+		if not self.keep_last_cmd:
+			try:
+				del self.last_cmd
+			except AttributeError:
+				pass
+
+	def sig_explicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.inputs`
+		and :py:attr:`waflib.Task.Task.dep_nodes` signatures.
+		"""
+		bld = self.generator.bld
+		upd = self.m.update
+
+		# the inputs
+		for x in self.inputs + self.dep_nodes:
+			upd(x.get_bld_sig())
+
+		# manual dependencies, they can slow down the builds
+		if bld.deps_man:
+			additional_deps = bld.deps_man
+			for x in self.inputs + self.outputs:
+				try:
+					d = additional_deps[x]
+				except KeyError:
+					continue
+
+				for v in d:
+					try:
+						v = v.get_bld_sig()
+					except AttributeError:
+						if hasattr(v, '__call__'):
+							v = v() # dependency is a function, call it
+					upd(v)
+
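+	# Example (sketch; the node and value are hypothetical): the manual
+	# dependencies hashed above are registered on the build context, e.g.:
+	#
+	#   def build(bld):
+	#       bld.add_manual_dependency(
+	#           bld.path.find_resource('main.c'),
+	#           b'value that triggers a rebuild when changed')
+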
+	def sig_deep_inputs(self):
+		"""
+		Enables rebuilds based on the signatures of the tasks that created the input files. Not used by default.
+
+		Example: hashes of output programs can be unchanged after being re-linked,
+		despite the libraries being different. This method can thus prevent stale unit test
+		results (waf_unit_test.py).
+
+		Hashing input file timestamps is another possibility for the implementation.
+		This may cause unnecessary rebuilds when input tasks are frequently executed.
+		Here is an implementation example::
+
+			lst = []
+			for node in self.inputs + self.dep_nodes:
+				st = os.stat(node.abspath())
+				lst.append(st.st_mtime)
+				lst.append(st.st_size)
+			self.m.update(Utils.h_list(lst))
+
+		The downside of the implementation is that it absolutely requires all build directory
+		files to be declared within the current build.
+		"""
+		bld = self.generator.bld
+		lst = [bld.task_sigs[bld.node_sigs[node]] for node in (self.inputs + self.dep_nodes) if node.is_bld()]
+		self.m.update(Utils.h_list(lst))
+
+	def sig_vars(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.env` variables/values
+		When overriding this method, and if scriptlet expressions are used, make sure to follow
+		the code in :py:meth:`waflib.Task.Task.compile_sig_vars` to enable dependencies on scriptlet results.
+
+		This method may be replaced on subclasses by the metaclass to force dependencies on scriptlet code.
+		"""
+		sig = self.generator.bld.hash_env_vars(self.env, self.vars)
+		self.m.update(sig)
+
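+	# Example (sketch): the env variables hashed by sig_vars come from the
+	# task class attribute 'vars'; a custom task class might declare:
+	#
+	#   class mytask(Task):
+	#       vars = ['CFLAGS', 'DEFINES']  # changes to these trigger a rebuild
+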
+	scan = None
+	"""
+	This method, when provided, returns a tuple containing:
+
+	* a list of nodes corresponding to real files
+	* a list of names for files that could not be found in the search paths
+
+	For example::
+
+		from waflib.Task import Task
+		class mytask(Task):
+			def scan(self, node):
+				return ([], [])
+
+	The first and second lists in the tuple are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and
+	:py:attr:`waflib.Build.BuildContext.raw_deps` respectively.
+	"""
+
+	def sig_implicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures
+		obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`).
+
+		The exception :py:class:`waflib.Errors.TaskRescan` is thrown
+		when a file has changed. In this case, the method :py:meth:`waflib.Task.Task.signature` is called
+		once again, and returns here to call :py:meth:`waflib.Task.Task.scan` and search for dependencies again.
+		"""
+		bld = self.generator.bld
+
+		# get the task signatures from previous runs
+		key = self.uid()
+		prev = bld.imp_sigs.get(key, [])
+
+		# for issue #379
+		if prev:
+			try:
+				if prev == self.compute_sig_implicit_deps():
+					return prev
+			except Errors.TaskNotReady:
+				raise
+			except EnvironmentError:
+				# when a file was renamed, remove the stale nodes (headers in folders without source files)
+				# this will break the order calculation for headers created during the build in the source directory (should be uncommon)
+				# the behaviour will differ when top != out
+				for x in bld.node_deps.get(self.uid(), []):
+					if not x.is_bld() and not x.exists():
+						try:
+							del x.parent.children[x.name]
+						except KeyError:
+							pass
+			del bld.imp_sigs[key]
+			raise Errors.TaskRescan('rescan')
+
+		# no previous run or the signature of the dependencies has changed, rescan the dependencies
+		(bld.node_deps[key], bld.raw_deps[key]) = self.scan()
+		if Logs.verbose:
+			Logs.debug('deps: scanner for %s: %r; unresolved: %r', self, bld.node_deps[key], bld.raw_deps[key])
+
+		# recompute the signature and return it
+		try:
+			bld.imp_sigs[key] = self.compute_sig_implicit_deps()
+		except EnvironmentError:
+			for k in bld.node_deps.get(self.uid(), []):
+				if not k.exists():
+					Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!', k, self)
+			raise
+
+	def compute_sig_implicit_deps(self):
+		"""
+		Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the
+		:py:class:`waflib.Node.Node` returned by the scanner.
+
+		:return: a hash value for the implicit dependencies
+		:rtype: string or bytes
+		"""
+		upd = self.m.update
+		self.are_implicit_nodes_ready()
+
+		# scanner returns a node that does not have a signature
+		# just *ignore* the error and let them figure out from the compiler output
+		# waf -k behaviour
+		for k in self.generator.bld.node_deps.get(self.uid(), []):
+			upd(k.get_bld_sig())
+		return self.m.digest()
+
+	def are_implicit_nodes_ready(self):
+		"""
+		For each node returned by the scanner, see if there is a task that creates it,
+		and infer the build order
+
+		This has a low performance impact on null builds (1.86s->1.66s) thanks to caching (28s->1.86s)
+		"""
+		bld = self.generator.bld
+		try:
+			cache = bld.dct_implicit_nodes
+		except AttributeError:
+			bld.dct_implicit_nodes = cache = {}
+
+		# one cache per build group
+		try:
+			dct = cache[bld.current_group]
+		except KeyError:
+			dct = cache[bld.current_group] = {}
+			for tsk in bld.cur_tasks:
+				for x in tsk.outputs:
+					dct[x] = tsk
+
+		modified = False
+		for x in bld.node_deps.get(self.uid(), []):
+			if x in dct:
+				self.run_after.add(dct[x])
+				modified = True
+
+		if modified:
+			for tsk in self.run_after:
+				if not tsk.hasrun:
+					#print "task is not ready..."
+					raise Errors.TaskNotReady('not ready')
+
+if sys.hexversion > 0x3000000:
+	def uid(self):
+		try:
+			return self.uid_
+		except AttributeError:
+			m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace'))
+			up = m.update
+			for x in self.inputs + self.outputs:
+				up(x.abspath().encode('latin-1', 'xmlcharrefreplace'))
+			self.uid_ = m.digest()
+			return self.uid_
+	uid.__doc__ = Task.uid.__doc__
+	Task.uid = uid
+
+def is_before(t1, t2):
+	"""
+	Returns a non-zero value if task t1 is to be executed before task t2::
+
+		t1.ext_out = '.h'
+		t2.ext_in = '.h'
+		t2.after = ['t1']
+		t1.before = ['t2']
+		waflib.Task.is_before(t1, t2) # True
+
+	:param t1: Task object
+	:type t1: :py:class:`waflib.Task.Task`
+	:param t2: Task object
+	:type t2: :py:class:`waflib.Task.Task`
+	"""
+	to_list = Utils.to_list
+	for k in to_list(t2.ext_in):
+		if k in to_list(t1.ext_out):
+			return 1
+
+	if t1.__class__.__name__ in to_list(t2.after):
+		return 1
+
+	if t2.__class__.__name__ in to_list(t1.before):
+		return 1
+
+	return 0
+
+def set_file_constraints(tasks):
+	"""
+	Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs
+
+	:param tasks: tasks
+	:type tasks: list of :py:class:`waflib.Task.Task`
+	"""
+	ins = Utils.defaultdict(set)
+	outs = Utils.defaultdict(set)
+	for x in tasks:
+		for a in x.inputs:
+			ins[a].add(x)
+		for a in x.dep_nodes:
+			ins[a].add(x)
+		for a in x.outputs:
+			outs[a].add(x)
+
+	links = set(ins.keys()).intersection(outs.keys())
+	for k in links:
+		for a in ins[k]:
+			a.run_after.update(outs[k])
+
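+# Example (sketch, assuming 'tasks' is a flat list of posted Task objects):
+#
+#   set_file_constraints(tasks)
+#   # any task consuming a node that another task produces now runs after it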
+
+class TaskGroup(object):
+	"""
+	Wraps n*m task order constraints into a single object
+	to prevent the creation of large list/set objects.
+
+	This is an optimization.
+	"""
+	def __init__(self, prev, next):
+		self.prev = prev
+		self.next = next
+		self.done = False
+
+	def get_hasrun(self):
+		for k in self.prev:
+			if not k.hasrun:
+				return NOT_RUN
+		return SUCCESS
+
+	hasrun = property(get_hasrun, None)
+
+def set_precedence_constraints(tasks):
+	"""
+	Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes
+
+	:param tasks: tasks
+	:type tasks: list of :py:class:`waflib.Task.Task`
+	"""
+	cstr_groups = Utils.defaultdict(list)
+	for x in tasks:
+		h = x.hash_constraints()
+		cstr_groups[h].append(x)
+
+	keys = list(cstr_groups.keys())
+	maxi = len(keys)
+
+	# this list should be short
+	for i in range(maxi):
+		t1 = cstr_groups[keys[i]][0]
+		for j in range(i + 1, maxi):
+			t2 = cstr_groups[keys[j]][0]
+
+			# add the constraints based on the comparisons
+			if is_before(t1, t2):
+				a = i
+				b = j
+			elif is_before(t2, t1):
+				a = j
+				b = i
+			else:
+				continue
+
+			a = cstr_groups[keys[a]]
+			b = cstr_groups[keys[b]]
+
+			if len(a) < 2 or len(b) < 2:
+				for x in b:
+					x.run_after.update(a)
+			else:
+				group = TaskGroup(set(a), set(b))
+				for x in b:
+					x.run_after.add(group)
+
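+# Example (sketch): the constraints consumed above come from class-level
+# attributes on task classes, for instance:
+#
+#   class lexer(Task):
+#       ext_out = ['.c']   # runs before tasks consuming '.c' files
+#
+#   class compile(Task):
+#       ext_in = ['.c']
+#       after = ['lexer']  # explicit class-name constraint
+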
+def funex(c):
+	"""
+	Compiles a scriptlet expression into a Python function
+
+	:param c: function to compile
+	:type c: string
+	:return: the function 'f' declared in the input string
+	:rtype: function
+	"""
+	dc = {}
+	exec(c, dc)
+	return dc['f']
+
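+# Example (sketch): funex expects the string to define a function named 'f':
+#
+#   f = funex('def f(x):\n\treturn x + 1')
+#   assert f(1) == 2
+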
+re_cond = re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
+re_novar = re.compile(r'^(SRC|TGT)\W+.*?$')
+reg_act = re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})', re.M)
+def compile_fun_shell(line):
+	"""
+	Creates a compiled function to execute a process through a sub-shell
+	"""
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'):
+			return "$"
+		elif g('backslash'):
+			return '\\\\'
+		elif g('subst'):
+			extr.append((g('var'), g('code')))
+			return "%s"
+		return None
+	line = reg_act.sub(repl, line) or line
+	dvars = []
+	def add_dvar(x):
+		if x not in dvars:
+			dvars.append(x)
+
+	def replc(m):
+		# performs substitutions and populates dvars
+		if m.group('and'):
+			return ' and '
+		elif m.group('or'):
+			return ' or '
+		else:
+			x = m.group('var')
+			add_dvar(x)
+			return 'env[%r]' % x
+
+	parm = []
+	app = parm.append
+	for (var, meth) in extr:
+		if var == 'SRC':
+			if meth:
+				app('tsk.inputs%s' % meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
+		elif var == 'TGT':
+			if meth:
+				app('tsk.outputs%s' % meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
+		elif meth:
+			if meth.startswith(':'):
+				add_dvar(var)
+				m = meth[1:]
+				if m == 'SRC':
+					m = '[a.path_from(cwdx) for a in tsk.inputs]'
+				elif m == 'TGT':
+					m = '[a.path_from(cwdx) for a in tsk.outputs]'
+				elif re_novar.match(m) and m.startswith('SRC'):
+					m = '[tsk.inputs%s]' % m[3:]
+				elif re_novar.match(m) and m.startswith('TGT'):
+					m = '[tsk.outputs%s]' % m[3:]
+				else:
+					add_dvar(m)
+					if m[:3] not in ('tsk', 'gen', 'bld'):
+						m = '%r' % m
+				app('" ".join(tsk.colon(%r, %s))' % (var, m))
+			elif meth.startswith('?'):
+				# In A?B|C output env.A if one of env.B or env.C is non-empty
+				expr = re_cond.sub(replc, meth[1:])
+				app('p(%r) if (%s) else ""' % (var, expr))
+			else:
+				call = '%s%s' % (var, meth)
+				add_dvar(call)
+				app(call)
+		else:
+			add_dvar(var)
+			app("p('%s')" % var)
+	if parm:
+		parm = "%% (%s) " % (',\n\t\t'.join(parm))
+	else:
+		parm = ''
+
+	c = COMPILE_TEMPLATE_SHELL % (line, parm)
+	Logs.debug('action: %s', c.strip().splitlines())
+	return (funex(c), dvars)
+
+reg_act_noshell = re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)", re.M)
+def compile_fun_noshell(line):
+	"""
+	Creates a compiled function to execute a process without a sub-shell
+	"""
+	buf = []
+	dvars = []
+	merge = False
+	app = buf.append
+
+	def add_dvar(x):
+		if x not in dvars:
+			dvars.append(x)
+
+	def replc(m):
+		# performs substitutions and populates dvars
+		if m.group('and'):
+			return ' and '
+		elif m.group('or'):
+			return ' or '
+		else:
+			x = m.group('var')
+			add_dvar(x)
+			return 'env[%r]' % x
+
+	for m in reg_act_noshell.finditer(line):
+		if m.group('space'):
+			merge = False
+			continue
+		elif m.group('text'):
+			app('[%r]' % m.group('text').replace('$$', '$'))
+		elif m.group('subst'):
+			var = m.group('var')
+			code = m.group('code')
+			if var == 'SRC':
+				if code:
+					app('[tsk.inputs%s]' % code)
+				else:
+					app('[a.path_from(cwdx) for a in tsk.inputs]')
+			elif var == 'TGT':
+				if code:
+					app('[tsk.outputs%s]' % code)
+				else:
+					app('[a.path_from(cwdx) for a in tsk.outputs]')
+			elif code:
+				if code.startswith(':'):
+					# a composed variable ${FOO:OUT}
+					add_dvar(var)
+					m = code[1:]
+					if m == 'SRC':
+						m = '[a.path_from(cwdx) for a in tsk.inputs]'
+					elif m == 'TGT':
+						m = '[a.path_from(cwdx) for a in tsk.outputs]'
+					elif re_novar.match(m) and m.startswith('SRC'):
+						m = '[tsk.inputs%s]' % m[3:]
+					elif re_novar.match(m) and m.startswith('TGT'):
+						m = '[tsk.outputs%s]' % m[3:]
+					else:
+						add_dvar(m)
+						if m[:3] not in ('tsk', 'gen', 'bld'):
+							m = '%r' % m
+					app('tsk.colon(%r, %s)' % (var, m))
+				elif code.startswith('?'):
+					# In A?B|C output env.A if one of env.B or env.C is non-empty
+					expr = re_cond.sub(replc, code[1:])
+					app('to_list(env[%r] if (%s) else [])' % (var, expr))
+				else:
+					# plain code such as ${tsk.inputs[0].abspath()}
+					call = '%s%s' % (var, code)
+					add_dvar(call)
+					app('to_list(%s)' % call)
+			else:
+				# a plain variable such as ${AR}
+				app('to_list(env[%r])' % var)
+				add_dvar(var)
+		if merge:
+			tmp = 'merge(%s, %s)' % (buf[-2], buf[-1])
+			del buf[-1]
+			buf[-1] = tmp
+		merge = True # next turn
+
+	buf = ['lst.extend(%s)' % x for x in buf]
+	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
+	Logs.debug('action: %s', fun.strip().splitlines())
+	return (funex(fun), dvars)
+
+def compile_fun(line, shell=False):
+	"""
+	Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:
+
+	* The function created (compiled) for use as :py:meth:`waflib.Task.Task.run`
+	* The list of variables that must cause rebuilds when *env* data is modified
+
+	for example::
+
+		from waflib.Task import compile_fun
+		compile_fun('${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
+
+		def build(bld):
+			bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
+
+	The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order.
+	The reserved keywords ``SRC`` and ``TGT`` represent the task input and output nodes.
+
+	"""
+	if isinstance(line, str):
+		if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
+			shell = True
+	else:
+		dvars_lst = []
+		funs_lst = []
+		for x in line:
+			if isinstance(x, str):
+				fun, dvars = compile_fun(x, shell)
+				dvars_lst += dvars
+				funs_lst.append(fun)
+			else:
+				# assume a function to let through
+				funs_lst.append(x)
+		def composed_fun(task):
+			for x in funs_lst:
+				ret = x(task)
+				if ret:
+					return ret
+			return None
+		return composed_fun, dvars_lst
+	if shell:
+		return compile_fun_shell(line)
+	else:
+		return compile_fun_noshell(line)
+
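+# Example (sketch): compiling a rule string by hand; the returned dvars list
+# names the env variables whose changes must trigger rebuilds:
+#
+#   fun, dvars = compile_fun('${CC} ${SRC} -o ${TGT}')
+#   # dvars == ['CC']; fun is suitable as a Task.run implementation
+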
+def compile_sig_vars(vars):
+	"""
+	This function produces a sig_vars method suitable for subclasses that provide
+	scriptlet code in their run_str code.
+	If no such method can be created, this function returns None.
+
+	The purpose of the sig_vars method returned is to ensure
+	that rebuilds occur whenever the contents of the expression change.
+	This is case B below::
+
+		import time
+		# case A: regular variables
+		tg = bld(rule='echo ${FOO}')
+		tg.env.FOO = '%s' % time.time()
+		# case B
+		bld(rule='echo ${gen.foo}', foo='%s' % time.time())
+
+	:param vars: env variables such as CXXFLAGS or gen.foo
+	:type vars: list of string
+	:return: A sig_vars method relevant for dependencies if adequate, else None
+	:rtype: A function, or None in most cases
+	"""
+	buf = []
+	for x in sorted(vars):
+		if x[:3] in ('tsk', 'gen', 'bld'):
+			buf.append('buf.append(%s)' % x)
+	if buf:
+		return funex(COMPILE_TEMPLATE_SIG_VARS % '\n\t'.join(buf))
+	return None
+
+def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None):
+	"""
+	Returns a new task subclass with the function ``run`` compiled from the line given.
+
+	:param func: method run
+	:type func: string or function
+	:param vars: list of variables to hash
+	:type vars: list of string
+	:param color: color to use
+	:type color: string
+	:param shell: when *func* is a string, enable/disable the use of the shell
+	:type shell: bool
+	:param scan: method scan
+	:type scan: function
+	:rtype: :py:class:`waflib.Task.Task`
+	"""
+
+	params = {
+		'vars': vars or [], # function arguments are static, and this one may be modified by the class
+		'color': color,
+		'name': name,
+		'shell': shell,
+		'scan': scan,
+	}
+
+	if isinstance(func, str) or isinstance(func, tuple):
+		params['run_str'] = func
+	else:
+		params['run'] = func
+
+	cls = type(Task)(name, (Task,), params)
+	classes[name] = cls
+
+	if ext_in:
+		cls.ext_in = Utils.to_list(ext_in)
+	if ext_out:
+		cls.ext_out = Utils.to_list(ext_out)
+	if before:
+		cls.before = Utils.to_list(before)
+	if after:
+		cls.after = Utils.to_list(after)
+
+	return cls
+
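+# Example (sketch; the name and command are hypothetical): creating a simple
+# task class from a rule string:
+#
+#   copy_cls = task_factory('copy', 'cp ${SRC} ${TGT}', color='YELLOW')
+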
+def deep_inputs(cls):
+	"""
+	Task class decorator to enable rebuilds on input files task signatures
+	"""
+	def sig_explicit_deps(self):
+		Task.sig_explicit_deps(self)
+		Task.sig_deep_inputs(self)
+	cls.sig_explicit_deps = sig_explicit_deps
+	return cls
+
+TaskBase = Task
+"Provided for compatibility reasons, TaskBase should not be used"
+
+class TaskSemaphore(object):
+	"""
+	Task semaphores provide a simple and efficient way of throttling the number of
+	instances of a particular task that run concurrently. The throttling value is capped
+	by the maximum number of jobs, so for example, a `TaskSemaphore(10)`
+	has no effect in a `-j2` build.
+
+	Task semaphores are typically specified on the task class level::
+
+		class compile(waflib.Task.Task):
+			semaphore = waflib.Task.TaskSemaphore(2)
+			run_str = 'touch ${TGT}'
+
+	Task semaphores are meant to be used by the build scheduler in the main
+	thread, so there are no guarantees of thread safety.
+	"""
+	def __init__(self, num):
+		"""
+		:param num: maximum value of concurrent tasks
+		:type num: int
+		"""
+		self.num = num
+		self.locking = set()
+		self.waiting = set()
+
+	def is_locked(self):
+		"""Returns True if this semaphore cannot be acquired by more tasks"""
+		return len(self.locking) >= self.num
+
+	def acquire(self, tsk):
+		"""
+		Mark the semaphore as used by the given task (not re-entrant).
+
+		:param tsk: task object
+		:type tsk: :py:class:`waflib.Task.Task`
+		:raises: :py:class:`IndexError` in case the resource is already acquired
+		"""
+		if self.is_locked():
+			raise IndexError('Cannot lock more %r' % self.locking)
+		self.locking.add(tsk)
+
+	def release(self, tsk):
+		"""
+		Mark the semaphore as unused by the given task.
+
+		:param tsk: task object
+		:type tsk: :py:class:`waflib.Task.Task`
+		:raises: :py:class:`KeyError` in case the resource is not acquired by the task
+		"""
+		self.locking.remove(tsk)
+
diff --git a/third_party/waf/waflib/TaskGen.py b/third_party/waf/waflib/TaskGen.py
new file mode 100644
index 0000000..32468f0
--- /dev/null
+++ b/third_party/waf/waflib/TaskGen.py
@@ -0,0 +1,913 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Task generators
+
+The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code).
+The instances can have various parameters, but the creation of task objects (Task.py)
+is deferred. To achieve this, various methods are called from the method "apply".
+"""
+
+import copy, re, os, functools
+from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
+
+feats = Utils.defaultdict(set)
+"""remember the methods declaring features"""
+
+HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
+
+class task_gen(object):
+	"""
+	Instances of this class create :py:class:`waflib.Task.Task` when
+	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
+	A few notes:
+
+	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
+	* The 'features' are used to add methods to self.meths and then execute them
+	* The attribute 'path' is a node representing the location of the task generator
+	* The tasks created are added to the attribute *tasks*
+	* The attribute 'idx' is a counter of task generators in the same path
+	"""
+
+	mappings = Utils.ordered_iter_dict()
+	"""Mappings are global file extension mappings that are retrieved in the order of definition"""
+
+	prec = Utils.defaultdict(set)
+	"""Dict that holds the precedence execution rules for task generator methods"""
+
+	def __init__(self, *k, **kw):
+		"""
+		Task generator objects predefine various attributes (source, target) for possible
+		processing by process_rule (make-like rules) or process_source (extensions, misc methods).
+
+		Tasks are stored on the attribute 'tasks'. They are created by calling methods
+		listed in ``self.meths`` or referenced in the attribute ``features``.
+		A topological sort is performed to execute the methods in correct order.
+
+		The extra key/value elements passed in ``kw`` are set as attributes
+		"""
+		self.source = []
+		self.target = ''
+
+		self.meths = []
+		"""
+		List of method names to execute (internal)
+		"""
+
+		self.features = []
+		"""
+		List of feature names for bringing new methods in
+		"""
+
+		self.tasks = []
+		"""
+		Tasks created are added to this list
+		"""
+
+		if not 'bld' in kw:
+			# task generators without a build context :-/
+			self.env = ConfigSet.ConfigSet()
+			self.idx = 0
+			self.path = None
+		else:
+			self.bld = kw['bld']
+			self.env = self.bld.env.derive()
+			self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts
+
+			# Provide a unique index per folder
+			# This is part of a measure to prevent output file name collisions
+			path = self.path.abspath()
+			try:
+				self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
+			except AttributeError:
+				self.bld.idx = {}
+				self.idx = self.bld.idx[path] = 1
+
+			# Record the global task generator count
+			try:
+				self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
+			except AttributeError:
+				self.tg_idx_count = self.bld.tg_idx_count = 1
+
+		for key, val in kw.items():
+			setattr(self, key, val)
+
+	def __str__(self):
+		"""Debugging helper"""
+		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
+
+	def __repr__(self):
+		"""Debugging helper"""
+		lst = []
+		for x in self.__dict__:
+			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
+				lst.append("%s=%s" % (x, repr(getattr(self, x))))
+		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
+
+	def get_cwd(self):
+		"""
+		Current working directory for the task generator, defaults to the build directory.
+		This is still used in a few places but it should disappear at some point as the classes
+		define their own working directory.
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		return self.bld.bldnode
+
+	def get_name(self):
+		"""
+		If the attribute ``name`` is not set on the instance,
+		the name is computed from the target name::
+
+			def build(bld):
+				x = bld(name='foo')
+				x.get_name() # foo
+				y = bld(target='bar')
+				y.get_name() # bar
+
+		:rtype: string
+		:return: name of this task generator
+		"""
+		try:
+			return self._name
+		except AttributeError:
+			if isinstance(self.target, list):
+				lst = [str(x) for x in self.target]
+				name = self._name = ','.join(lst)
+			else:
+				name = self._name = str(self.target)
+			return name
+	def set_name(self, name):
+		self._name = name
+
+	name = property(get_name, set_name)
+
+	def to_list(self, val):
+		"""
+		Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`
+
+		:type val: string or list of string
+		:param val: input to return as a list
+		:rtype: list
+		"""
+		if isinstance(val, str):
+			return val.split()
+		else:
+			return val
+
+	def post(self):
+		"""
+		Creates the tasks for this task generator. The following operations are performed:
+
+		#. The body of this method is called only once and sets the attribute ``posted``
+		#. The attribute ``features`` is used to add more methods in ``self.meths``
+		#. The methods are sorted by the precedence table ``self.prec`` (see :py:attr:`waflib.TaskGen.task_gen.prec`)
+		#. The methods are then executed in order
+		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
+		"""
+		if getattr(self, 'posted', None):
+			return False
+		self.posted = True
+
+		keys = set(self.meths)
+		keys.update(feats['*'])
+
+		# add the methods listed in the features
+		self.features = Utils.to_list(self.features)
+		for x in self.features:
+			st = feats[x]
+			if st:
+				keys.update(st)
+			elif not x in Task.classes:
+				Logs.warn('feature %r does not exist - bind at least one method to it?', x)
+
+		# copy the precedence table
+		prec = {}
+		prec_tbl = self.prec
+		for x in prec_tbl:
+			if x in keys:
+				prec[x] = prec_tbl[x]
+
+		# elements disconnected
+		tmp = []
+		for a in keys:
+			for x in prec.values():
+				if a in x:
+					break
+			else:
+				tmp.append(a)
+
+		tmp.sort(reverse=True)
+
+		# topological sort
+		out = []
+		while tmp:
+			e = tmp.pop()
+			if e in keys:
+				out.append(e)
+			try:
+				nlst = prec[e]
+			except KeyError:
+				pass
+			else:
+				del prec[e]
+				for x in nlst:
+					for y in prec:
+						if x in prec[y]:
+							break
+					else:
+						tmp.append(x)
+						tmp.sort(reverse=True)
+
+		if prec:
+			buf = ['Cycle detected in the method execution:']
+			for k, v in prec.items():
+				buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
+			raise Errors.WafError('\n'.join(buf))
+		self.meths = out
+
+		# then we run the methods in order
+		Logs.debug('task_gen: posting %s %d', self, id(self))
+		for x in out:
+			try:
+				v = getattr(self, x)
+			except AttributeError:
+				raise Errors.WafError('%r is not a valid task generator method' % x)
+			Logs.debug('task_gen: -> %s (%d)', x, id(self))
+			v()
+
+		Logs.debug('task_gen: posted %s', self.name)
+		return True
+
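+	# Example (sketch): posting is normally driven by the build context, but
+	# it can be forced in order to inspect the created tasks early:
+	#
+	#   tg = bld(features='c', source='a.c', target='a')
+	#   tg.post()
+	#   print(tg.tasks)
+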
+	def get_hook(self, node):
+		"""
+		Returns the ``@extension`` method to call for a Node of a particular extension.
+
+		:param node: Input file to process
+		:type node: :py:class:`waflib.Node.Node`
+		:return: A method able to process the input node by looking at the extension
+		:rtype: function
+		"""
+		name = node.name
+		for k in self.mappings:
+			try:
+				if name.endswith(k):
+					return self.mappings[k]
+			except TypeError:
+				# regexps objects
+				if k.match(name):
+					return self.mappings[k]
+		keys = list(self.mappings.keys())
+		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))
+
+	def create_task(self, name, src=None, tgt=None, **kw):
+		"""
+		Creates task instances.
+
+		:param name: task class name
+		:type name: string
+		:param src: input nodes
+		:type src: list of :py:class:`waflib.Node.Node`
+		:param tgt: output nodes
+		:type tgt: list of :py:class:`waflib.Node.Node`
+		:return: A task object
+		:rtype: :py:class:`waflib.Task.Task`
+		"""
+		task = Task.classes[name](env=self.env.derive(), generator=self)
+		if src:
+			task.set_inputs(src)
+		if tgt:
+			task.set_outputs(tgt)
+		task.__dict__.update(kw)
+		self.tasks.append(task)
+		return task
+
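+	# Example (sketch): typical call from an @extension method, here with the
+	# C compilation task class:
+	#
+	#   tsk = self.create_task('c', node, node.change_ext('.o'))
+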
+	def clone(self, env):
+		"""
+		Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that
+		it does not create the same output files as the original, or the same files may
+		be compiled several times.
+
+		:param env: A configuration set
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		:return: A copy
+		:rtype: :py:class:`waflib.TaskGen.task_gen`
+		"""
+		newobj = self.bld()
+		for x in self.__dict__:
+			if x in ('env', 'bld'):
+				continue
+			elif x in ('path', 'features'):
+				setattr(newobj, x, getattr(self, x))
+			else:
+				setattr(newobj, x, copy.copy(getattr(self, x)))
+
+		newobj.posted = False
+		if isinstance(env, str):
+			newobj.env = self.bld.all_envs[env].derive()
+		else:
+			newobj.env = env.derive()
+
+		return newobj
+
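+	# Example (sketch; 'debug' is a hypothetical env name in bld.all_envs):
+	# clone into another configuration set, then change the target to avoid
+	# duplicate outputs:
+	#
+	#   tg2 = tg.clone('debug')
+	#   tg2.target = tg.target + '_debug'
+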
+def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
+	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
+	"""
+	Creates a new mapping and a task class for processing files by extension.
+	See Tools/flex.py for an example.
+
+	:param name: name for the task class
+	:type name: string
+	:param rule: function to execute or string to be compiled in a function
+	:type rule: string or function
+	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
+	:type reentrant: int
+	:param color: color for the task output
+	:type color: string
+	:param ext_in: execute the task only after the files of such extensions are created
+	:type ext_in: list of string
+	:param ext_out: execute the task only before files of such extensions are processed
+	:type ext_out: list of string
+	:param before: execute instances of this task before classes of the given names
+	:type before: list of string
+	:param after: execute instances of this task after classes of the given names
+	:type after: list of string
+	:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
+	:type decider: function
+	:param scan: scanner function for the task
+	:type scan: function
+	:param install_path: installation path for the output nodes
+	:type install_path: string
+	"""
+	ext_in = Utils.to_list(ext_in)
+	ext_out = Utils.to_list(ext_out)
+	if not name:
+		name = rule
+	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
+
+	def x_file(self, node):
+		if ext_in:
+			_ext_in = ext_in[0]
+		else:
+			_ext_in = None
+
+		tsk = self.create_task(name, node)
+		cnt = 0
+
+		ext = decider(self, node) if decider else cls.ext_out
+		for x in ext:
+			k = node.change_ext(x, ext_in=_ext_in)
+			tsk.outputs.append(k)
+
+			if reentrant is not None:
+				if cnt < int(reentrant):
+					self.source.append(k)
+			else:
+				# reinject downstream files into the build
+				for y in self.mappings: # ~ nfile * nextensions :-/
+					if k.name.endswith(y):
+						self.source.append(k)
+						break
+			cnt += 1
+
+		if install_path:
+			self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
+		return tsk
+
+	for x in cls.ext_in:
+		task_gen.mappings[x] = x_file
+	return x_file
+
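+# Example (sketch; the FLEX variable and command are hypothetical): a
+# flex-like chain turning '.l' files into '.c' files:
+#
+#   declare_chain(name='lex', rule='${FLEX} -o${TGT} ${SRC}',
+#                 ext_in='.l', ext_out='.c', before='c')
+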
+def taskgen_method(func):
+	"""
+	Decorator that registers a method as a task generator method.
+	The function must accept a task generator as first parameter::
+
+		from waflib.TaskGen import taskgen_method
+		@taskgen_method
+		def mymethod(self):
+			pass
+
+	:param func: task generator method to add
+	:type func: function
+	:rtype: function
+	"""
+	setattr(task_gen, func.__name__, func)
+	return func
+
+def feature(*k):
+	"""
+	Decorator that registers a task generator method that will be executed when the
+	object attribute ``features`` contains the corresponding key(s)::
+
+		from waflib.TaskGen import feature
+		@feature('myfeature')
+		def myfunction(self):
+			print('that is my feature!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: feature names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for name in k:
+			feats[name].update([func.__name__])
+		return func
+	return deco
+
+def before_method(*k):
+	"""
+	Decorator that registers a task generator method which will be executed
+	before the functions of given name(s)::
+
+		from waflib.TaskGen import feature, before_method
+		@feature('myfeature')
+		@before_method('fun2')
+		def fun1(self):
+			print('feature 1!')
+		@feature('myfeature')
+		def fun2(self):
+			print('feature 2!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: method names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for fun_name in k:
+			task_gen.prec[func.__name__].add(fun_name)
+		return func
+	return deco
+before = before_method
+
+def after_method(*k):
+	"""
+	Decorator that registers a task generator method which will be executed
+	after the functions of given name(s)::
+
+		from waflib.TaskGen import feature, after_method
+		@feature('myfeature')
+		@after_method('fun2')
+		def fun1(self):
+			print('feature 1!')
+		@feature('myfeature')
+		def fun2(self):
+			print('feature 2!')
+		def build(bld):
+			bld(features='myfeature')
+
+	:param k: method names
+	:type k: list of string
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for fun_name in k:
+			task_gen.prec[fun_name].add(func.__name__)
+		return func
+	return deco
+after = after_method
+
+def extension(*k):
+	"""
+	Decorator that registers a task generator method which will be invoked during
+	the processing of source files for the extension given::
+
+		from waflib import Task
+		class mytask(Task.Task):
+			run_str = 'cp ${SRC} ${TGT}'
+		@extension('.moo')
+		def create_maa_file(self, node):
+			self.create_task('mytask', node, node.change_ext('.maa'))
+		def build(bld):
+			bld(source='foo.moo')
+	"""
+	def deco(func):
+		setattr(task_gen, func.__name__, func)
+		for x in k:
+			task_gen.mappings[x] = func
+		return func
+	return deco
+
+@taskgen_method
+def to_nodes(self, lst, path=None):
+	"""
+	Flattens the input list of strings/nodes/lists into a list of nodes.
+
+	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
+	It is designed for source files; for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`.
+
+	:param lst: input list
+	:type lst: list of string and nodes
+	:param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
+	:type path: :py:class:`waflib.Node.Node`
+	:rtype: list of :py:class:`waflib.Node.Node`
+	"""
+	tmp = []
+	path = path or self.path
+	find = path.find_resource
+
+	if isinstance(lst, Node.Node):
+		lst = [lst]
+
+	for x in Utils.to_list(lst):
+		if isinstance(x, str):
+			node = find(x)
+		elif hasattr(x, 'name'):
+			node = x
+		else:
+			tmp.extend(self.to_nodes(x))
+			continue
+		if not node:
+			raise Errors.WafError('source not found: %r in %r' % (x, self))
+		tmp.append(node)
+	return tmp
+
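+# Example (sketch; 'tg' is a hypothetical task generator): strings and nodes
+# can be mixed freely:
+#
+#   nodes = tg.to_nodes(['main.c', tg.path.find_resource('util.c')])
+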
+@feature('*')
+def process_source(self):
+	"""
+	Processes each element in the attribute ``source`` by extension.
+
+	#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
+	#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
+	#. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
+	#. When called, the methods may modify self.source to append more source to process
+	#. The mappings can map an extension or a filename (see the code below)
+	"""
+	self.source = self.to_nodes(getattr(self, 'source', []))
+	for node in self.source:
+		self.get_hook(node)(self, node)
+
+@feature('*')
+@before_method('process_source')
+def process_rule(self):
+	"""
+	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
+
+		def build(bld):
+			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
+
+	Main attributes processed:
+
+	* rule: command to execute, it can be a tuple of strings for multiple commands
+	* chmod: permissions for the resulting files (integer value such as Utils.O755)
+	* shell: set to False to execute the command directly (default is True to use a shell)
+	* scan: scanner function
+	* vars: list of variables to trigger rebuilds, such as CFLAGS
+	* cls_str: string to display when executing the task
+	* cls_keyword: label to display when executing the task
+	* cache_rule: by default, try to re-use similar classes, set to False to disable
+	* source: list of Node or string objects representing the source files required by this task
+	* target: list of Node or string objects representing the files that this task creates
+	* cwd: current working directory (Node or string)
+	* stdout: standard output, set to None to prevent waf from capturing the text
+	* stderr: standard error, set to None to prevent waf from capturing the text
+	* timeout: timeout for command execution (Python 3)
+	* always: whether to always run the command (False by default)
+	* deep_inputs: whether the task must depend on the input file tasks too (False by default)
+	"""
+	if not getattr(self, 'rule', None):
+		return
+
+	# create the task class
+	name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))
+
+	# or we can put the class in a cache for performance reasons
+	try:
+		cache = self.bld.cache_rule_attr
+	except AttributeError:
+		cache = self.bld.cache_rule_attr = {}
+
+	chmod = getattr(self, 'chmod', None)
+	shell = getattr(self, 'shell', True)
+	color = getattr(self, 'color', 'BLUE')
+	scan = getattr(self, 'scan', None)
+	_vars = getattr(self, 'vars', [])
+	cls_str = getattr(self, 'cls_str', None)
+	cls_keyword = getattr(self, 'cls_keyword', None)
+	use_cache = getattr(self, 'cache_rule', True)
+	deep_inputs = getattr(self, 'deep_inputs', False)
+
+	scan_val = has_deps = hasattr(self, 'deps')
+	if scan:
+		scan_val = id(scan)
+
+	key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))
+
+	cls = None
+	if use_cache:
+		try:
+			cls = cache[key]
+		except KeyError:
+			pass
+	if not cls:
+		rule = self.rule
+		if chmod is not None:
+			def chmod_fun(tsk):
+				for x in tsk.outputs:
+					os.chmod(x.abspath(), tsk.generator.chmod)
+			if isinstance(rule, tuple):
+				rule = list(rule)
+				rule.append(chmod_fun)
+				rule = tuple(rule)
+			else:
+				rule = (rule, chmod_fun)
+
+		cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
+
+		if cls_str:
+			setattr(cls, '__str__', self.cls_str)
+
+		if cls_keyword:
+			setattr(cls, 'keyword', self.cls_keyword)
+
+		if deep_inputs:
+			Task.deep_inputs(cls)
+
+		if scan:
+			cls.scan = self.scan
+		elif has_deps:
+			def scan(self):
+				deps = getattr(self.generator, 'deps', None)
+				nodes = self.generator.to_nodes(deps)
+				return [nodes, []]
+			cls.scan = scan
+
+		if use_cache:
+			cache[key] = cls
+
+	# now create one instance
+	tsk = self.create_task(name)
+
+	for x in ('after', 'before', 'ext_in', 'ext_out'):
+		setattr(tsk, x, getattr(self, x, []))
+
+	if hasattr(self, 'stdout'):
+		tsk.stdout = self.stdout
+
+	if hasattr(self, 'stderr'):
+		tsk.stderr = self.stderr
+
+	if getattr(self, 'timeout', None):
+		tsk.timeout = self.timeout
+
+	if getattr(self, 'always', None):
+		tsk.always_run = True
+
+	if getattr(self, 'target', None):
+		if isinstance(self.target, str):
+			self.target = self.target.split()
+		if not isinstance(self.target, list):
+			self.target = [self.target]
+		for x in self.target:
+			if isinstance(x, str):
+				tsk.outputs.append(self.path.find_or_declare(x))
+			else:
+				x.parent.mkdir() # if a node was given, create the required folders
+				tsk.outputs.append(x)
+		if getattr(self, 'install_path', None):
+			self.install_task = self.add_install_files(install_to=self.install_path,
+				install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
+
+	if getattr(self, 'source', None):
+		tsk.inputs = self.to_nodes(self.source)
+		# bypass the execution of process_source by setting the source to an empty list
+		self.source = []
+
+	if getattr(self, 'cwd', None):
+		tsk.cwd = self.cwd
+
+	if isinstance(tsk.run, functools.partial):
+		# Python documentation says: "partial objects defined in classes
+		# behave like static methods and do not transform into bound
+		# methods during instance attribute look-up."
+		tsk.run = functools.partial(tsk.run, tsk)
+
+@feature('seq')
+def sequence_order(self):
+	"""
+	Adds a strict sequential constraint between the tasks generated by task generators.
+	It works because task generators are posted in order.
+	It will not post objects which belong to other folders.
+
+	Example::
+
+		bld(features='javac seq')
+		bld(features='jar seq')
+
+	To start a new sequence, set the attribute seq_start, for example::
+
+		obj = bld(features='seq')
+		obj.seq_start = True
+
+	Note that the method is executed in last position. This is more an
+	example than a widely-used solution.
+	"""
+	if self.meths and self.meths[-1] != 'sequence_order':
+		self.meths.append('sequence_order')
+		return
+
+	if getattr(self, 'seq_start', None):
+		return
+
+	# all the tasks previously declared must be run before these
+	if getattr(self.bld, 'prev', None):
+		self.bld.prev.post()
+		for x in self.bld.prev.tasks:
+			for y in self.tasks:
+				y.set_run_after(x)
+
+	self.bld.prev = self
+
+
+re_m4 = re.compile(r'@(\w+)@', re.M)
+
+class subst_pc(Task.Task):
+	"""
+	Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
+	in the substitution changes.
+	"""
+
+	def force_permissions(self):
+		"Private for the time being, we will probably refactor this into run_str=[run1,chmod]"
+		if getattr(self.generator, 'chmod', None):
+			for x in self.outputs:
+				os.chmod(x.abspath(), self.generator.chmod)
+
+	def run(self):
+		"Substitutes variables in a .in file"
+
+		if getattr(self.generator, 'is_copy', None):
+			for i, x in enumerate(self.outputs):
+				x.write(self.inputs[i].read('rb'), 'wb')
+				stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
+				os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
+			self.force_permissions()
+			return None
+
+		if getattr(self.generator, 'fun', None):
+			ret = self.generator.fun(self)
+			if not ret:
+				self.force_permissions()
+			return ret
+
+		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
+		if getattr(self.generator, 'subst_fun', None):
+			code = self.generator.subst_fun(self, code)
+			if code is not None:
+				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
+			self.force_permissions()
+			return None
+
+		# replace all % by %% to prevent formatting errors caused by % signs
+		code = code.replace('%', '%%')
+
+		# extract the vars foo into lst and replace @foo@ by %(foo)s
+		lst = []
+		def repl(match):
+			g = match.group
+			if g(1):
+				lst.append(g(1))
+				return "%%(%s)s" % g(1)
+			return ''
+		code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
+
+		try:
+			d = self.generator.dct
+		except AttributeError:
+			d = {}
+			for x in lst:
+				tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
+				try:
+					tmp = ''.join(tmp)
+				except TypeError:
+					tmp = str(tmp)
+				d[x] = tmp
+
+		code = code % d
+		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
+		self.generator.bld.raw_deps[self.uid()] = lst
+
+		# make sure the signature is updated
+		try:
+			delattr(self, 'cache_sig')
+		except AttributeError:
+			pass
+
+		self.force_permissions()
+
+	def sig_vars(self):
+		"""
+		Compute a hash (signature) of the variables used in the substitution
+		"""
+		bld = self.generator.bld
+		env = self.env
+		upd = self.m.update
+
+		if getattr(self.generator, 'fun', None):
+			upd(Utils.h_fun(self.generator.fun).encode())
+		if getattr(self.generator, 'subst_fun', None):
+			upd(Utils.h_fun(self.generator.subst_fun).encode())
+
+		# raw_deps: persistent custom values returned by the scanner
+		vars = self.generator.bld.raw_deps.get(self.uid(), [])
+
+		# hash both env vars and task generator attributes
+		act_sig = bld.hash_env_vars(env, vars)
+		upd(act_sig)
+
+		lst = [getattr(self.generator, x, '') for x in vars]
+		upd(Utils.h_list(lst))
+
+		return self.m.digest()
+
+@extension('.pc.in')
+def add_pcfile(self, node):
+	"""
+	Processes *.pc.in* files into *.pc* files. Installs the results to ``${LIBDIR}/pkgconfig/`` by default::
+
+		def build(bld):
+			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
+	"""
+	tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
+	self.install_task = self.add_install_files(
+		install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)
+
+class subst(subst_pc):
+	pass
+
+@feature('subst')
+@before_method('process_source', 'process_rule')
+def process_subst(self):
+	"""
+	Defines a transformation that substitutes the contents of *source* files to *target* files::
+
+		def build(bld):
+			bld(
+				features='subst',
+				source='foo.c.in',
+				target='foo.c',
+				install_path='${LIBDIR}/pkgconfig',
+				VAR = 'val'
+			)
+
+	The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
+	of the task generator object.
+
+	This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
+	"""
+
+	src = Utils.to_list(getattr(self, 'source', []))
+	if isinstance(src, Node.Node):
+		src = [src]
+	tgt = Utils.to_list(getattr(self, 'target', []))
+	if isinstance(tgt, Node.Node):
+		tgt = [tgt]
+	if len(src) != len(tgt):
+		raise Errors.WafError('invalid number of source/target for %r' % self)
+
+	for x, y in zip(src, tgt):
+		if not x or not y:
+			raise Errors.WafError('null source or target for %r' % self)
+		a, b = None, None
+
+		if isinstance(x, str) and isinstance(y, str) and x == y:
+			a = self.path.find_node(x)
+			b = self.path.get_bld().make_node(y)
+			if not os.path.isfile(b.abspath()):
+				b.parent.mkdir()
+		else:
+			if isinstance(x, str):
+				a = self.path.find_resource(x)
+			elif isinstance(x, Node.Node):
+				a = x
+			if isinstance(y, str):
+				b = self.path.find_or_declare(y)
+			elif isinstance(y, Node.Node):
+				b = y
+
+		if not a:
+			raise Errors.WafError('could not find %r for %r' % (x, self))
+
+		tsk = self.create_task('subst', a, b)
+		for k in ('after', 'before', 'ext_in', 'ext_out'):
+			val = getattr(self, k, None)
+			if val:
+				setattr(tsk, k, val)
+
+		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
+		for xt in HEADER_EXTS:
+			if b.name.endswith(xt):
+				tsk.ext_out = tsk.ext_out + ['.h']
+				break
+
+		inst_to = getattr(self, 'install_path', None)
+		if inst_to:
+			self.install_task = self.add_install_files(install_to=inst_to,
+				install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
+
+	self.source = []
+
diff --git a/third_party/waf/waflib/Tools/__init__.py b/third_party/waf/waflib/Tools/__init__.py
new file mode 100644
index 0000000..079df35
--- /dev/null
+++ b/third_party/waf/waflib/Tools/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
diff --git a/third_party/waf/waflib/Tools/ar.py b/third_party/waf/waflib/Tools/ar.py
new file mode 100644
index 0000000..b39b645
--- /dev/null
+++ b/third_party/waf/waflib/Tools/ar.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+
+"""
+The **ar** program creates static libraries. This tool is almost always loaded
+from others (C, C++, D, etc) for static library support.
+"""
+
+from waflib.Configure import conf
+
+@conf
+def find_ar(conf):
+	"""Configuration helper used by C/C++ tools to enable the support for static libraries"""
+	conf.load('ar')
+
+def configure(conf):
+	"""Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
+	conf.find_program('ar', var='AR')
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS = ['rcs']
+
diff --git a/third_party/waf/waflib/Tools/asm.py b/third_party/waf/waflib/Tools/asm.py
new file mode 100644
index 0000000..1d34dda
--- /dev/null
+++ b/third_party/waf/waflib/Tools/asm.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2018 (ita)
+
+"""
+Assembly support, used by tools such as gas and nasm
+
+To declare targets using assembly::
+
+	def configure(conf):
+		conf.load('gcc gas')
+
+	def build(bld):
+		bld(
+			features='c cstlib asm',
+			source = 'test.S',
+			target = 'asmtest')
+
+		bld(
+			features='asm asmprogram',
+			source = 'test.S',
+			target = 'asmtest')
+
+Support for pure asm programs and libraries should also work::
+
+	def configure(conf):
+		conf.load('nasm')
+		conf.find_program('ld', 'ASLINK')
+
+	def build(bld):
+		bld(
+			features='asm asmprogram',
+			source = 'test.S',
+			target = 'asmtest')
+"""
+
+import re
+from waflib import Errors, Logs, Task
+from waflib.Tools.ccroot import link_task, stlink_task
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+
+re_lines = re.compile(
+	'^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$',
+	re.IGNORECASE | re.MULTILINE)
+
+class asm_parser(c_preproc.c_parser):
+	def filter_comments(self, node):
+		code = node.read()
+		code = c_preproc.re_nl.sub('', code)
+		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+		return re_lines.findall(code)
+
+class asm(Task.Task):
+	"""
+	Compiles asm files by gas/nasm/yasm/...
+	"""
+	color = 'BLUE'
+	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${ASMDEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+
+	def scan(self):
+		if self.env.ASM_NAME == 'gas':
+			return c_preproc.scan(self)
+		elif self.env.ASM_NAME == 'nasm':
+			Logs.warn('The Nasm dependency scanner is incomplete!')
+
+		try:
+			incn = self.generator.includes_nodes
+		except AttributeError:
+			raise Errors.WafError('%r is missing the "asm" feature' % self.generator)
+
+		if c_preproc.go_absolute:
+			nodepaths = incn
+		else:
+			nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+		tmp = asm_parser(nodepaths)
+		tmp.start(self.inputs[0], self.env)
+		return (tmp.nodes, tmp.names)
+
+@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
+def asm_hook(self, node):
+	"""
+	Binds the asm extension to the asm task
+
+	:param node: input file
+	:type node: :py:class:`waflib.Node.Node`
+	"""
+	return self.create_compiled_task('asm', node)
+
+class asmprogram(link_task):
+	"Links object files into a c program"
+	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
+	ext_out = ['.bin']
+	inst_to = '${BINDIR}'
+
+class asmshlib(asmprogram):
+	"Links object files into a c shared library"
+	inst_to = '${LIBDIR}'
+
+class asmstlib(stlink_task):
+	"Links object files into a c static library"
+	pass # do not remove
+
+def configure(conf):
+	conf.env.ASMPATH_ST = '-I%s'
+	conf.env.ASMDEFINES_ST = '-D%s'
diff --git a/third_party/waf/waflib/Tools/bison.py b/third_party/waf/waflib/Tools/bison.py
new file mode 100644
index 0000000..eef56dc
--- /dev/null
+++ b/third_party/waf/waflib/Tools/bison.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# John O'Meara, 2006
+# Thomas Nagy 2009-2018 (ita)
+
+"""
+The **bison** program is a code generator which creates C or C++ files.
+The generated files are compiled into object files.
+"""
+
+from waflib import Task
+from waflib.TaskGen import extension
+
+class bison(Task.Task):
+	"""Compiles bison files"""
+	color   = 'BLUE'
+	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
+	ext_out = ['.h'] # just to make sure
+
+@extension('.y', '.yc', '.yy')
+def big_bison(self, node):
+	"""
+	Creates a bison task, which must be executed from the directory of the output file.
+	"""
+	has_h = '-d' in self.env.BISONFLAGS
+
+	outs = []
+	if node.name.endswith('.yc'):
+		outs.append(node.change_ext('.tab.cc'))
+		if has_h:
+			outs.append(node.change_ext('.tab.hh'))
+	else:
+		outs.append(node.change_ext('.tab.c'))
+		if has_h:
+			outs.append(node.change_ext('.tab.h'))
+
+	tsk = self.create_task('bison', node, outs)
+	tsk.cwd = node.parent.get_bld()
+
+	# and the c/cxx file must be compiled too
+	self.source.append(outs[0])
+
+def configure(conf):
+	"""
+	Detects the *bison* program
+	"""
+	conf.find_program('bison', var='BISON')
+	conf.env.BISONFLAGS = ['-d']
+
diff --git a/third_party/waf/waflib/Tools/c.py b/third_party/waf/waflib/Tools/c.py
new file mode 100644
index 0000000..effd6b6
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"Base for c programs/libraries"
+
+from waflib import TaskGen, Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task, stlink_task
+
+@TaskGen.extension('.c')
+def c_hook(self, node):
+	"Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
+	if not self.env.CC and self.env.CXX:
+		return self.create_compiled_task('cxx', node)
+	return self.create_compiled_task('c', node)
+
+class c(Task.Task):
+	"Compiles C files into object files"
+	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+	vars    = ['CCDEPS'] # unused variable to depend on, just in case
+	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
+	scan    = c_preproc.scan
+
+class cprogram(link_task):
+	"Links object files into c programs"
+	run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
+	ext_out = ['.bin']
+	vars    = ['LINKDEPS']
+	inst_to = '${BINDIR}'
+
+class cshlib(cprogram):
+	"Links object files into c shared libraries"
+	inst_to = '${LIBDIR}'
+
+class cstlib(stlink_task):
+	"Links object files into a c static libraries"
+	pass # do not remove
+
diff --git a/third_party/waf/waflib/Tools/c_aliases.py b/third_party/waf/waflib/Tools/c_aliases.py
new file mode 100644
index 0000000..928cfe2
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c_aliases.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2015 (ita)
+
+"base for all c/c++ programs and libraries"
+
+from waflib import Utils, Errors
+from waflib.Configure import conf
+
+def get_extensions(lst):
+	"""
+	Returns the file extensions for the list of files given as input
+
+	:param lst: files to process
+	:type lst: list of string or :py:class:`waflib.Node.Node`
+	:return: list of file extensions
+	:rtype: list of string
+	"""
+	ret = []
+	for x in Utils.to_list(lst):
+		if not isinstance(x, str):
+			x = x.name
+		ret.append(x[x.rfind('.') + 1:])
+	return ret
+
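+# For instance (illustrative values; a name without a dot is returned whole):
+#   get_extensions(['foo.c', 'bar.cpp', 'baz'])  ->  ['c', 'cpp', 'baz']
+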
+def sniff_features(**kw):
+	"""
+	Computes and returns the features required for a task generator by
+	looking at the file extensions. This is mainly aimed at C/C++::
+
+		sniff_features(source=['foo.c', 'foo.cxx'], typ='shlib')
+		# returns  ['cxx', 'c', 'cxxshlib', 'cshlib']
+
+	:param source: source files to process
+	:type source: list of string or :py:class:`waflib.Node.Node`
+	:param typ: object type in *program*, *shlib* or *stlib*
+	:type typ: string
+	:return: the list of features for a task generator processing the source files
+	:rtype: list of string
+	"""
+	exts = get_extensions(kw.get('source', []))
+	typ = kw['typ']
+	feats = []
+
+	# watch the order, cxx will have the precedence
+	for x in 'cxx cpp c++ cc C'.split():
+		if x in exts:
+			feats.append('cxx')
+			break
+	if 'c' in exts or 'vala' in exts or 'gs' in exts:
+		feats.append('c')
+
+	if 's' in exts or 'S' in exts:
+		feats.append('asm')
+
+	for x in 'f f90 F F90 for FOR'.split():
+		if x in exts:
+			feats.append('fc')
+			break
+
+	if 'd' in exts:
+		feats.append('d')
+
+	if 'java' in exts:
+		feats.append('java')
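+		# note: the caller (set_features) applies Utils.to_list(), which
+		# converts this string back into the list ['java']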
+		return 'java'
+
+	if typ in ('program', 'shlib', 'stlib'):
+		will_link = False
+		for x in feats:
+			if x in ('cxx', 'd', 'fc', 'c', 'asm'):
+				feats.append(x + typ)
+				will_link = True
+		if not will_link and not kw.get('features', []):
+			raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?' % kw)
+	return feats
+
+def set_features(kw, typ):
+	"""
+	Inserts data in the input dict *kw* based on existing data and on the type of target
+	required (typ).
+
+	:param kw: task generator parameters
+	:type kw: dict
+	:param typ: type of target
+	:type typ: string
+	"""
+	kw['typ'] = typ
+	kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
+
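+# For instance (illustrative):
+#   kw = {'source': ['main.c']}
+#   set_features(kw, 'program')
+#   # kw['typ'] == 'program' and kw['features'] == ['c', 'cprogram']
+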
+@conf
+def program(bld, *k, **kw):
+	"""
+	Alias for creating programs by looking at the file extensions::
+
+		def build(bld):
+			bld.program(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c cprogram', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'program')
+	return bld(*k, **kw)
+
+@conf
+def shlib(bld, *k, **kw):
+	"""
+	Alias for creating shared libraries by looking at the file extensions::
+
+		def build(bld):
+			bld.shlib(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c cshlib', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'shlib')
+	return bld(*k, **kw)
+
+@conf
+def stlib(bld, *k, **kw):
+	"""
+	Alias for creating static libraries by looking at the file extensions::
+
+		def build(bld):
+			bld.stlib(source='foo.cpp', target='app')
+			# equivalent to:
+			# bld(features='cxx cxxstlib', source='foo.cpp', target='app')
+
+	"""
+	set_features(kw, 'stlib')
+	return bld(*k, **kw)
+
+@conf
+def objects(bld, *k, **kw):
+	"""
+	Alias for creating object files by looking at the file extensions::
+
+		def build(bld):
+			bld.objects(source='foo.c', target='app')
+			# equivalent to:
+			# bld(features='c', source='foo.c', target='app')
+
+	"""
+	set_features(kw, 'objects')
+	return bld(*k, **kw)
+
diff --git a/third_party/waf/waflib/Tools/c_config.py b/third_party/waf/waflib/Tools/c_config.py
new file mode 100644
index 0000000..f5ab19b
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c_config.py
@@ -0,0 +1,1370 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+C/C++/D configuration helpers
+"""
+
+from __future__ import with_statement
+
+import os, re, shlex
+from waflib import Build, Utils, Task, Options, Logs, Errors, Runner
+from waflib.TaskGen import after_method, feature
+from waflib.Configure import conf
+
+WAF_CONFIG_H   = 'config.h'
+"""default name for the config.h file"""
+
+DEFKEYS = 'define_key'
+INCKEYS = 'include_key'
+
+SNIP_EMPTY_PROGRAM = '''
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return 0;
+}
+'''
+
+MACRO_TO_DESTOS = {
+'__linux__'                                      : 'linux',
+'__GNU__'                                        : 'gnu', # hurd
+'__FreeBSD__'                                    : 'freebsd',
+'__NetBSD__'                                     : 'netbsd',
+'__OpenBSD__'                                    : 'openbsd',
+'__sun'                                          : 'sunos',
+'__hpux'                                         : 'hpux',
+'__sgi'                                          : 'irix',
+'_AIX'                                           : 'aix',
+'__CYGWIN__'                                     : 'cygwin',
+'__MSYS__'                                       : 'cygwin',
+'_UWIN'                                          : 'uwin',
+'_WIN64'                                         : 'win32',
+'_WIN32'                                         : 'win32',
+# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file.
+'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__'  : 'darwin',
+'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone
+'__QNX__'                                        : 'qnx',
+'__native_client__'                              : 'nacl' # google native client platform
+}
+
+MACRO_TO_DEST_CPU = {
+'__x86_64__'  : 'x86_64',
+'__amd64__'   : 'x86_64',
+'__i386__'    : 'x86',
+'__ia64__'    : 'ia',
+'__mips__'    : 'mips',
+'__sparc__'   : 'sparc',
+'__alpha__'   : 'alpha',
+'__aarch64__' : 'aarch64',
+'__thumb__'   : 'thumb',
+'__arm__'     : 'arm',
+'__hppa__'    : 'hppa',
+'__powerpc__' : 'powerpc',
+'__ppc__'     : 'powerpc',
+'__convex__'  : 'convex',
+'__m68k__'    : 'm68k',
+'__s390x__'   : 's390x',
+'__s390__'    : 's390',
+'__sh__'      : 'sh',
+'__xtensa__'  : 'xtensa',
+'__e2k__'     : 'e2k',
+'__riscv'     : 'riscv',
+}
+
+@conf
+def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None):
+	"""
+	Parses flags from the input lines, and adds them to the relevant use variables::
+
+		def configure(conf):
+			conf.parse_flags('-O3', 'FOO')
+			# conf.env.CXXFLAGS_FOO = ['-O3']
+			# conf.env.CFLAGS_FOO = ['-O3']
+
+	:param line: flags
+	:type line: string
+	:param uselib_store: where to add the flags
+	:type uselib_store: string
+	:param env: config set or conf.env by default
+	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+	:param force_static: force usage of static libraries
+	:type force_static: bool default False
+	:param posix: usage of POSIX mode for the shlex lexical analysis library
+	:type posix: bool default True
+	"""
+
+	assert isinstance(line, str)
+
+	env = env or self.env
+
+	# Issue 811 and 1371
+	if posix is None:
+		posix = True
+		if '\\' in line:
+			posix = ('\\ ' in line) or ('\\\\' in line)
+
+	lex = shlex.shlex(line, posix=posix)
+	lex.whitespace_split = True
+	lex.commenters = ''
+	lst = list(lex)
+
+	so_re = re.compile(r"\.so(?:\.[0-9]+)*$")
+
+	# append_unique is not always possible
+	# for example, apple flags may require both -arch i386 and -arch ppc
+	uselib = uselib_store
+	def app(var, val):
+		env.append_value('%s_%s' % (var, uselib), val)
+	def appu(var, val):
+		env.append_unique('%s_%s' % (var, uselib), val)
+	static = False
+	while lst:
+		x = lst.pop(0)
+		st = x[:2]
+		ot = x[2:]
+
+		if st == '-I' or st == '/I':
+			if not ot:
+				ot = lst.pop(0)
+			appu('INCLUDES', ot)
+		elif st == '-i':
+			tmp = [x, lst.pop(0)]
+			app('CFLAGS', tmp)
+			app('CXXFLAGS', tmp)
+		elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but..
+			if not ot:
+				ot = lst.pop(0)
+			app('DEFINES', ot)
+		elif st == '-l':
+			if not ot:
+				ot = lst.pop(0)
+			prefix = 'STLIB' if (force_static or static) else 'LIB'
+			app(prefix, ot)
+		elif st == '-L':
+			if not ot:
+				ot = lst.pop(0)
+			prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
+			appu(prefix, ot)
+		elif x.startswith('/LIBPATH:'):
+			prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
+			appu(prefix, x.replace('/LIBPATH:', ''))
+		elif x.startswith('-std='):
+			prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
+			app(prefix, x)
+		elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie', '-flto', '-fno-lto'):
+			app('CFLAGS', x)
+			app('CXXFLAGS', x)
+			app('LINKFLAGS', x)
+		elif x == '-framework':
+			appu('FRAMEWORK', lst.pop(0))
+		elif x.startswith('-F'):
+			appu('FRAMEWORKPATH', x[2:])
+		elif x == '-Wl,-rpath' or x == '-Wl,-R':
+			app('RPATH', lst.pop(0).lstrip('-Wl,'))
+		elif x.startswith('-Wl,-R,'):
+			app('RPATH', x[7:])
+		elif x.startswith('-Wl,-R'):
+			app('RPATH', x[6:])
+		elif x.startswith('-Wl,-rpath,'):
+			app('RPATH', x[11:])
+		elif x == '-Wl,-Bstatic' or x == '-Bstatic':
+			static = True
+		elif x == '-Wl,-Bdynamic' or x == '-Bdynamic':
+			static = False
+		elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
+			app('LINKFLAGS', x)
+		elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')):
+			# Adding the -W option breaks python builds on Openindiana
+			app('CFLAGS', x)
+			app('CXXFLAGS', x)
+		elif x.startswith('-bundle'):
+			app('LINKFLAGS', x)
+		elif x.startswith(('-undefined', '-Xlinker')):
+			arg = lst.pop(0)
+			app('LINKFLAGS', [x, arg])
+		elif x.startswith(('-arch', '-isysroot')):
+			tmp = [x, lst.pop(0)]
+			app('CFLAGS', tmp)
+			app('CXXFLAGS', tmp)
+			app('LINKFLAGS', tmp)
+		elif x.endswith(('.a', '.dylib', '.lib')) or so_re.search(x):
+			appu('LINKFLAGS', x) # not cool, #762
+		else:
+			self.to_log('Unhandled flag %r' % x)
+
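+# For instance (a sketch with made-up flags):
+#   conf.parse_flags('-I/opt/foo/include -L/opt/foo/lib -lfoo -DBAR=1', 'FOO')
+#   # conf.env.INCLUDES_FOO = ['/opt/foo/include']
+#   # conf.env.LIBPATH_FOO  = ['/opt/foo/lib']
+#   # conf.env.LIB_FOO      = ['foo']
+#   # conf.env.DEFINES_FOO  = ['BAR=1']
+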
+@conf
+def validate_cfg(self, kw):
+	"""
+	Searches for the program *pkg-config* if missing, and validates the
+	parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`.
+
+	:param path: the **-config program to use** (default is *pkg-config*)
+	:type path: list of string
+	:param msg: message to display to describe the test executed
+	:type msg: string
+	:param okmsg: message to display when the test is successful
+	:type okmsg: string
+	:param errmsg: message to display in case of error
+	:type errmsg: string
+	"""
+	if not 'path' in kw:
+		if not self.env.PKGCONFIG:
+			self.find_program('pkg-config', var='PKGCONFIG')
+		kw['path'] = self.env.PKGCONFIG
+
+	# verify that exactly one action is requested
+	s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw)
+	if s != 1:
+		raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
+	if not 'msg' in kw:
+		if 'atleast_pkgconfig_version' in kw:
+			kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
+		elif 'modversion' in kw:
+			kw['msg'] = 'Checking for %r version' % kw['modversion']
+		else:
+			kw['msg'] = 'Checking for %r' % kw['package']
+
+	# let the modversion check set the okmsg to the detected version
+	if not 'okmsg' in kw and not 'modversion' in kw:
+		kw['okmsg'] = 'yes'
+	if not 'errmsg' in kw:
+		kw['errmsg'] = 'not found'
+
+	# pkg-config version
+	if 'atleast_pkgconfig_version' in kw:
+		pass
+	elif 'modversion' in kw:
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = kw['modversion']
+		if not 'define_name' in kw:
+			kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
+	else:
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper()
+		if not 'define_name' in kw:
+			kw['define_name'] = self.have_define(kw['uselib_store'])
+
+@conf
+def exec_cfg(self, kw):
+	"""
+	Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:
+
+	* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
+	* if modversion is given, then return the module version
+	* else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable
+
+	:param path: the **-config program to use**
+	:type path: list of string
+	:param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
+	:type atleast_pkgconfig_version: string
+	:param package: package name, for example *gtk+-2.0*
+	:type package: string
+	:param uselib_store: if the test is successful, define HAVE\\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
+	:type uselib_store: string
+	:param modversion: if provided, return the version of the given module and define *name*\\_VERSION
+	:type modversion: string
+	:param args: arguments to give to *package* when retrieving flags
+	:type args: list of string
+	:param variables: return the values of particular variables
+	:type variables: list of string
+	:param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
+	:type define_variable: dict(string: string)
+	:param pkg_config_path: paths where pkg-config should search for .pc config files (overrides env.PKG_CONFIG_PATH if exists)
+	:type pkg_config_path: string, list of directories separated by colon
+	:param force_static: force usage of static libraries
+	:type force_static: bool default False
+	:param posix: usage of POSIX mode for the shlex lexical analysis library
+	:type posix: bool default True
+	"""
+
+	path = Utils.to_list(kw['path'])
+	env = self.env.env or None
+	if kw.get('pkg_config_path'):
+		if not env:
+			env = dict(self.environ)
+		env['PKG_CONFIG_PATH'] = kw['pkg_config_path']
+
+	def define_it():
+		define_name = kw['define_name']
+		# by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X
+		if kw.get('global_define', 1):
+			self.define(define_name, 1, False)
+		else:
+			self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name)
+
+		if kw.get('add_have_to_env', 1):
+			self.env[define_name] = 1
+
+	# pkg-config version
+	if 'atleast_pkgconfig_version' in kw:
+		cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
+		self.cmd_and_log(cmd, env=env)
+		return
+
+	# single version for a module
+	if 'modversion' in kw:
+		version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
+		if not 'okmsg' in kw:
+			kw['okmsg'] = version
+		self.define(kw['define_name'], version)
+		return version
+
+	lst = [] + path
+
+	defi = kw.get('define_variable')
+	if not defi:
+		defi = self.env.PKG_CONFIG_DEFINES or {}
+	for key, val in defi.items():
+		lst.append('--define-variable=%s=%s' % (key, val))
+
+	static = kw.get('force_static', False)
+	if 'args' in kw:
+		args = Utils.to_list(kw['args'])
+		if '--static' in args or '--static-libs' in args:
+			static = True
+		lst += args
+
+	# tools like pkgconf expect the package argument after the -- ones -_-
+	lst.extend(Utils.to_list(kw['package']))
+
+	# retrieving variables of a module
+	if 'variables' in kw:
+		v_env = kw.get('env', self.env)
+		vars = Utils.to_list(kw['variables'])
+		for v in vars:
+			val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
+			var = '%s_%s' % (kw['uselib_store'], v)
+			v_env[var] = val
+		return
+
+	# so we assume the command-line will output flags to be parsed afterwards
+	ret = self.cmd_and_log(lst, env=env)
+
+	define_it()
+	self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
+	return ret
+
+@conf
+def check_cfg(self, *k, **kw):
+	"""
+	Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
+	This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
+	so check exec_cfg parameters descriptions for more details on kw passed
+
+	A few examples::
+
+		def configure(conf):
+			conf.load('compiler_c')
+			conf.check_cfg(package='glib-2.0', args='--libs --cflags')
+			conf.check_cfg(package='pango')
+			conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
+			conf.check_cfg(package='pango',
+				args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'],
+				msg="Checking for 'pango 0.1.0'")
+			conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
+			conf.check_cfg(path='mpicc', args='--showme:compile --showme:link',
+				package='', uselib_store='OPEN_MPI', mandatory=False)
+			# variables
+			conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
+			print(conf.env.FOO_includedir)
+	"""
+	self.validate_cfg(kw)
+	if 'msg' in kw:
+		self.start_msg(kw['msg'], **kw)
+	ret = None
+	try:
+		ret = self.exec_cfg(kw)
+	except self.errors.WafError as e:
+		if 'errmsg' in kw:
+			self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		if Logs.verbose > 1:
+			self.to_log('Command failure: %s' % e)
+		self.fatal('The configuration failed')
+	else:
+		if not ret:
+			ret = True
+		kw['success'] = ret
+		if 'okmsg' in kw:
+			self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
+
+	return ret
+
+def build_fun(bld):
+	"""
+	Build function that is used for running configuration tests with ``conf.check()``
+	"""
+	if bld.kw['compile_filename']:
+		node = bld.srcnode.make_node(bld.kw['compile_filename'])
+		node.write(bld.kw['code'])
+
+	o = bld(features=bld.kw['features'], source=bld.kw['compile_filename'], target='testprog')
+
+	for k, v in bld.kw.items():
+		setattr(o, k, v)
+
+	if not bld.kw.get('quiet'):
+		bld.conf.to_log("==>\n%s\n<==" % bld.kw['code'])
+
+@conf
+def validate_c(self, kw):
+	"""
+	Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`
+
+	:param compiler: c or cxx (tries to guess what is best)
+	:type compiler: string
+	:param type: cprogram, cshlib, cstlib - not required if *features* are given directly
+	:type type: string
+	:param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
+	:type feature: list of string
+	:param fragment: provide a piece of code for the test (default is to let the system create one)
+	:type fragment: string
+	:param uselib_store: define variables after the test is executed (IMPORTANT!)
+	:type uselib_store: string
+	:param use: parameters to use for building (just like the normal *use* keyword)
+	:type use: list of string
+	:param define_name: define to set when the check is over
+	:type define_name: string
+	:param execute: execute the resulting binary
+	:type execute: bool
+	:param define_ret: if execute is set to True, use the execution output in both the define and the return value
+	:type define_ret: bool
+	:param header_name: check for a particular header
+	:type header_name: string
+	:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
+	:type auto_add_header_name: bool
+	"""
+	for x in ('type_name', 'field_name', 'function_name'):
+		if x in kw:
+			Logs.warn('Invalid argument %r in test' % x)
+
+	if not 'build_fun' in kw:
+		kw['build_fun'] = build_fun
+
+	if not 'env' in kw:
+		kw['env'] = self.env.derive()
+	env = kw['env']
+
+	if not 'compiler' in kw and not 'features' in kw:
+		kw['compiler'] = 'c'
+		if env.CXX_NAME and Task.classes.get('cxx'):
+			kw['compiler'] = 'cxx'
+			if not self.env.CXX:
+				self.fatal('a c++ compiler is required')
+		else:
+			if not self.env.CC:
+				self.fatal('a c compiler is required')
+
+	if not 'compile_mode' in kw:
+		kw['compile_mode'] = 'c'
+		if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx':
+			kw['compile_mode'] = 'cxx'
+
+	if not 'type' in kw:
+		kw['type'] = 'cprogram'
+
+	if not 'features' in kw:
+		if not 'header_name' in kw or kw.get('link_header_test', True):
+			kw['features'] = [kw['compile_mode'], kw['type']] # "c cprogram"
+		else:
+			kw['features'] = [kw['compile_mode']]
+	else:
+		kw['features'] = Utils.to_list(kw['features'])
+
+	if not 'compile_filename' in kw:
+		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
+
+	def to_header(dct):
+		if 'header_name' in dct:
+			dct = Utils.to_list(dct['header_name'])
+			return ''.join(['#include <%s>\n' % x for x in dct])
+		return ''
+
+	if 'framework_name' in kw:
+		# OSX, not sure this is used anywhere
+		fwkname = kw['framework_name']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = fwkname.upper()
+		if not kw.get('no_header'):
+			fwk = '%s/%s.h' % (fwkname, fwkname)
+			if kw.get('remove_dot_h'):
+				fwk = fwk[:-2]
+			val = kw.get('header_name', [])
+			kw['header_name'] = Utils.to_list(val) + [fwk]
+		kw['msg'] = 'Checking for framework %s' % fwkname
+		kw['framework'] = fwkname
+
+	elif 'header_name' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for header %s' % kw['header_name']
+
+		l = Utils.to_list(kw['header_name'])
+		assert len(l), 'list of headers in header_name is empty'
+
+		kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = l[0].upper()
+		if not 'define_name' in kw:
+			kw['define_name'] = self.have_define(l[0])
+
+	if 'lib' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for library %s' % kw['lib']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = kw['lib'].upper()
+
+	if 'stlib' in kw:
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for static library %s' % kw['stlib']
+		if not 'uselib_store' in kw:
+			kw['uselib_store'] = kw['stlib'].upper()
+
+	if 'fragment' in kw:
+		# an additional code fragment may be provided to replace the predefined code
+		# in custom headers
+		kw['code'] = kw['fragment']
+		if not 'msg' in kw:
+			kw['msg'] = 'Checking for code snippet'
+		if not 'errmsg' in kw:
+			kw['errmsg'] = 'no'
+
+	for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')):
+		if flagsname in kw:
+			if not 'msg' in kw:
+				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
+			if not 'errmsg' in kw:
+				kw['errmsg'] = 'no'
+
+	if not 'execute' in kw:
+		kw['execute'] = False
+	if kw['execute']:
+		kw['features'].append('test_exec')
+		kw['chmod'] = Utils.O755
+
+	if not 'errmsg' in kw:
+		kw['errmsg'] = 'not found'
+
+	if not 'okmsg' in kw:
+		kw['okmsg'] = 'yes'
+
+	if not 'code' in kw:
+		kw['code'] = SNIP_EMPTY_PROGRAM
+
+	# if there are headers to append automatically to the next tests
+	if self.env[INCKEYS]:
+		kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
+
+	# in case defines lead to very long command-lines
+	if kw.get('merge_config_header') or env.merge_config_header:
+		kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
+		env.DEFINES = [] # modify the copy
+
+	if not kw.get('success'):
+		kw['success'] = None
+
+	if 'define_name' in kw:
+		self.undefine(kw['define_name'])
+	if not 'msg' in kw:
+		self.fatal('missing "msg" in conf.check(...)')
+
+@conf
+def post_check(self, *k, **kw):
+	"""
+	Sets the variables after a test executed in
+	:py:func:`waflib.Tools.c_config.check` was run successfully
+	"""
+	is_success = 0
+	if kw['execute']:
+		if kw['success'] is not None:
+			if kw.get('define_ret'):
+				is_success = kw['success']
+			else:
+				is_success = (kw['success'] == 0)
+	else:
+		is_success = (kw['success'] == 0)
+
+	if kw.get('define_name'):
+		comment = kw.get('comment', '')
+		define_name = kw['define_name']
+		if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
+			if kw.get('global_define', 1):
+				self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment)
+			else:
+				if kw.get('quote', 1):
+					succ = '"%s"' % is_success
+				else:
+					succ = int(is_success)
+				val = '%s=%s' % (define_name, succ)
+				var = 'DEFINES_%s' % kw['uselib_store']
+				self.env.append_value(var, val)
+		else:
+			if kw.get('global_define', 1):
+				self.define_cond(define_name, is_success, comment=comment)
+			else:
+				var = 'DEFINES_%s' % kw['uselib_store']
+				self.env.append_value(var, '%s=%s' % (define_name, int(is_success)))
+
+		# define conf.env.HAVE_X to 1
+		if kw.get('add_have_to_env', 1):
+			if kw.get('uselib_store'):
+				self.env[self.have_define(kw['uselib_store'])] = 1
+			elif kw['execute'] and kw.get('define_ret'):
+				self.env[define_name] = is_success
+			else:
+				self.env[define_name] = int(is_success)
+
+	if 'header_name' in kw:
+		if kw.get('auto_add_header_name'):
+			self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
+
+	if is_success and 'uselib_store' in kw:
+		from waflib.Tools import ccroot
+		# See get_uselib_vars in ccroot.py
+		_vars = set()
+		for x in kw['features']:
+			if x in ccroot.USELIB_VARS:
+				_vars |= ccroot.USELIB_VARS[x]
+
+		for k in _vars:
+			x = k.lower()
+			if x in kw:
+				self.env.append_value(k + '_' + kw['uselib_store'], kw[x])
+	return is_success
+
+@conf
+def check(self, *k, **kw):
+	"""
+	Performs a configuration test by calling :py:func:`waflib.Configure.run_build`.
+	For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
+	To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments
+
+	Besides build targets, complete builds can be given through a build function. All files will
+	be written to a temporary directory::
+
+		def build(bld):
+			lib_node = bld.srcnode.make_node('libdir/liblc1.c')
+			lib_node.parent.mkdir()
+			lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
+			bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
+		conf.check(build_fun=build, msg=msg)
+	"""
+	self.validate_c(kw)
+	self.start_msg(kw['msg'], **kw)
+	ret = None
+	try:
+		ret = self.run_build(*k, **kw)
+	except self.errors.ConfigurationError:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		if Logs.verbose > 1:
+			raise
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success'] = ret
+
+	ret = self.post_check(*k, **kw)
+	if not ret:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		self.fatal('The configuration failed %r' % ret)
+	else:
+		self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
+	return ret
+
+class test_exec(Task.Task):
+	"""
+	A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
+	"""
+	color = 'PINK'
+	def run(self):
+		cmd = [self.inputs[0].abspath()] + getattr(self.generator, 'test_args', [])
+		if getattr(self.generator, 'rpath', None):
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd)
+			else:
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd)
+		else:
+			env = self.env.env or {}
+			env.update(dict(os.environ))
+			for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
+				env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd, env=env)
+			else:
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd, env=env)
+
+@feature('test_exec')
+@after_method('apply_link')
+def test_exec_fun(self):
+	"""
+	The feature **test_exec** is used to create a task that will execute the binary
+	created (link task output) during the build. The exit status will be set
+	on the build context, so only one program may have the feature *test_exec*.
+	This is used by configuration tests::
+
+		def configure(conf):
+			conf.check(execute=True)
+	"""
+	self.create_task('test_exec', self.link_task.outputs[0])
+
+@conf
+def check_cxx(self, *k, **kw):
+	"""
+	Runs a test with a task generator of the form::
+
+		conf.check(features='cxx cxxprogram', ...)
+	"""
+	kw['compiler'] = 'cxx'
+	return self.check(*k, **kw)
+
+@conf
+def check_cc(self, *k, **kw):
+	"""
+	Runs a test with a task generator of the form::
+
+		conf.check(features='c cprogram', ...)
+	"""
+	kw['compiler'] = 'c'
+	return self.check(*k, **kw)
+
+@conf
+def set_define_comment(self, key, comment):
+	"""
+	Sets a comment that will appear in the configuration header
+
+	:type key: string
+	:type comment: string
+	"""
+	coms = self.env.DEFINE_COMMENTS
+	if not coms:
+		coms = self.env.DEFINE_COMMENTS = {}
+	coms[key] = comment or ''
+
+@conf
+def get_define_comment(self, key):
+	"""
+	Returns the comment associated to a define
+
+	:type key: string
+	"""
+	coms = self.env.DEFINE_COMMENTS or {}
+	return coms.get(key, '')
+
+@conf
+def define(self, key, val, quote=True, comment=''):
+	"""
+	Stores a single define and its state into ``conf.env.DEFINES``. Boolean values are cast to integers (True becomes 1; False and None become 0).
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	:param quote: enclose strings in quotes (yes by default)
+	:type quote: bool
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	if val is True:
+		val = 1
+	elif val in (False, None):
+		val = 0
+
+	if isinstance(val, int) or isinstance(val, float):
+		s = '%s=%s'
+	else:
+		s = quote and '%s="%s"' or '%s=%s'
+	app = s % (key, str(val))
+
+	ban = key + '='
+	lst = self.env.DEFINES
+	for x in lst:
+		if x.startswith(ban):
+			lst[lst.index(x)] = app
+			break
+	else:
+		self.env.append_value('DEFINES', app)
+
+	self.env.append_unique(DEFKEYS, key)
+	self.set_define_comment(key, comment)
+
+@conf
+def undefine(self, key, comment=''):
+	"""
+	Removes a global define from ``conf.env.DEFINES``
+
+	:param key: define name
+	:type key: string
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	ban = key + '='
+	lst = [x for x in self.env.DEFINES if not x.startswith(ban)]
+	self.env.DEFINES = lst
+	self.env.append_unique(DEFKEYS, key)
+	self.set_define_comment(key, comment)
+
+@conf
+def define_cond(self, key, val, comment=''):
+	"""
+	Conditionally defines a name::
+
+		def configure(conf):
+			conf.define_cond('A', True)
+			# equivalent to:
+			# if val: conf.define('A', 1)
+			# else: conf.undefine('A')
+
+	:param key: define name
+	:type key: string
+	:param val: value
+	:type val: int or string
+	"""
+	assert isinstance(key, str)
+	if not key:
+		return
+	if val:
+		self.define(key, 1, comment=comment)
+	else:
+		self.undefine(key, comment=comment)
+
+@conf
+def is_defined(self, key):
+	"""
+	Indicates whether a particular define is globally set in ``conf.env.DEFINES``.
+
+	:param key: define name
+	:type key: string
+	:return: True if the define is set
+	:rtype: bool
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	for x in self.env.DEFINES:
+		if x.startswith(ban):
+			return True
+	return False
+
+@conf
+def get_define(self, key):
+	"""
+	Returns the value of an existing define, or None if not found
+
+	:param key: define name
+	:type key: string
+	:rtype: string
+	"""
+	assert key and isinstance(key, str)
+
+	ban = key + '='
+	for x in self.env.DEFINES:
+		if x.startswith(ban):
+			return x[len(ban):]
+	return None
+
+@conf
+def have_define(self, key):
+	"""
+	Returns a variable suitable for command-line or header use by removing invalid characters
+	and prefixing it with ``HAVE_``
+
+	:param key: define name
+	:type key: string
+	:return: the input key prefixed by *HAVE_*, with any invalid characters substituted
+	:rtype: string
+	"""
+	return (self.env.HAVE_PAT or 'HAVE_%s') % Utils.quote_define_name(key)
+
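+# For instance (illustrative):
+#   conf.have_define('gtk+-2.0')  ->  'HAVE_GTK_2_0'
+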
+@conf
+def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''):
+	"""
+	Writes a configuration header containing defines and includes::
+
+		def configure(cnf):
+			cnf.define('A', 1)
+			cnf.write_config_header('config.h')
+
+	This function only adds include guards (if necessary), consult
+	:py:func:`waflib.Tools.c_config.get_config_header` for details on the body.
+
+	:param configfile: path to the file to create (relative or absolute)
+	:type configfile: string
+	:param guard: include guard name to add, by default it is computed from the file name
+	:type guard: string
+	:param top: write the configuration header from the build directory (default is from the current path)
+	:type top: bool
+	:param defines: add the defines (yes by default)
+	:type defines: bool
+	:param headers: add #include in the file
+	:type headers: bool
+	:param remove: remove the defines after they are added (yes by default, works like in autoconf)
+	:type remove: bool
+	:type define_prefix: string
+	:param define_prefix: prefix all the defines in the file with a particular prefix
+	"""
+	if not configfile:
+		configfile = WAF_CONFIG_H
+	waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
+
+	node = top and self.bldnode or self.path.get_bld()
+	node = node.make_node(configfile)
+	node.parent.mkdir()
+
+	lst = ['/* WARNING! All changes made to this file will be lost! */\n']
+	lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard))
+	lst.append(self.get_config_header(defines, headers, define_prefix=define_prefix))
+	lst.append('\n#endif /* %s */\n' % waf_guard)
+
+	node.write('\n'.join(lst))
+
+	# config files must not be removed on "waf clean"
+	self.env.append_unique(Build.CFG_FILES, [node.abspath()])
+
+	if remove:
+		for key in self.env[DEFKEYS]:
+			self.undefine(key)
+		self.env[DEFKEYS] = []
+
+@conf
+def get_config_header(self, defines=True, headers=False, define_prefix=''):
+	"""
+	Creates the contents of a ``config.h`` file from the defines and includes
+	set in conf.env.define_key / conf.env.include_key. No include guards are added.
+
+	A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
+	can be used to insert complex macros or include guards::
+
+		def configure(conf):
+			conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
+			conf.write_config_header('config.h')
+
+	:param defines: write the defines values
+	:type defines: bool
+	:param headers: write include entries for each element in self.env.INCKEYS
+	:type headers: bool
+	:type define_prefix: string
+	:param define_prefix: prefix all the defines with a particular prefix
+	:return: the contents of a ``config.h`` file
+	:rtype: string
+	"""
+	lst = []
+
+	if self.env.WAF_CONFIG_H_PRELUDE:
+		lst.append(self.env.WAF_CONFIG_H_PRELUDE)
+
+	if headers:
+		for x in self.env[INCKEYS]:
+			lst.append('#include <%s>' % x)
+
+	if defines:
+		tbl = {}
+		for k in self.env.DEFINES:
+			a, _, b = k.partition('=')
+			tbl[a] = b
+
+		for k in self.env[DEFKEYS]:
+			caption = self.get_define_comment(k)
+			if caption:
+				caption = ' /* %s */' % caption
+			try:
+				txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption)
+			except KeyError:
+				txt = '/* #undef %s%s */%s' % (define_prefix, k, caption)
+			lst.append(txt)
+	return "\n".join(lst)
+
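+# For instance, after conf.define('A', 1) and conf.define('NAME', 'app'),
+# get_config_header() would return something like (illustrative):
+#
+#   #define A 1
+#   #define NAME "app"
+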
+@conf
+def cc_add_flags(conf):
+	"""
+	Adds CFLAGS / CPPFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('CPPFLAGS', dup=False)
+	conf.add_os_flags('CFLAGS', dup=False)
+
+@conf
+def cxx_add_flags(conf):
+	"""
+	Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('CPPFLAGS', dup=False)
+	conf.add_os_flags('CXXFLAGS', dup=False)
+
+@conf
+def link_add_flags(conf):
+	"""
+	Adds LINKFLAGS / LDFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('LINKFLAGS', dup=False)
+	conf.add_os_flags('LDFLAGS', dup=False)
+
+@conf
+def cc_load_tools(conf):
+	"""
+	Loads the Waf c extensions
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('c')
+
+@conf
+def cxx_load_tools(conf):
+	"""
+	Loads the Waf c++ extensions
+	"""
+	if not conf.env.DEST_OS:
+		conf.env.DEST_OS = Utils.unversioned_sys_platform()
+	conf.load('cxx')
+
+@conf
+def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
+	"""
+	Runs the preprocessor to determine the gcc/icc/clang version
+
+	The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
+
+	:raise: :py:class:`waflib.Errors.ConfigurationError`
+	"""
+	cmd = cc + ['-dM', '-E', '-']
+	env = conf.env.env or None
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
+	except Errors.WafError:
+		conf.fatal('Could not determine the compiler version %r' % cmd)
+
+	if gcc:
+		if out.find('__INTEL_COMPILER') >= 0:
+			conf.fatal('The intel compiler pretends to be gcc')
+		if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
+			conf.fatal('Could not determine the compiler type')
+
+	if icc and out.find('__INTEL_COMPILER') < 0:
+		conf.fatal('Not icc/icpc')
+
+	if clang and out.find('__clang__') < 0:
+		conf.fatal('Not clang/clang++')
+	if not clang and out.find('__clang__') >= 0:
+		conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
+
+	k = {}
+	if icc or gcc or clang:
+		out = out.splitlines()
+		for line in out:
+			lst = shlex.split(line)
+			if len(lst)>2:
+				key = lst[1]
+				val = lst[2]
+				k[key] = val
+
+		def isD(var):
+			return var in k
+
+		# Some documentation is available at http://predef.sourceforge.net
+		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
+		if not conf.env.DEST_OS:
+			conf.env.DEST_OS = ''
+		for i in MACRO_TO_DESTOS:
+			if isD(i):
+				conf.env.DEST_OS = MACRO_TO_DESTOS[i]
+				break
+		else:
+			if isD('__APPLE__') and isD('__MACH__'):
+				conf.env.DEST_OS = 'darwin'
+			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
+				conf.env.DEST_OS = 'generic'
+
+		if isD('__ELF__'):
+			conf.env.DEST_BINFMT = 'elf'
+		elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
+			conf.env.DEST_BINFMT = 'pe'
+			if not conf.env.IMPLIBDIR:
+				conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
+			conf.env.LIBDIR = conf.env.BINDIR
+		elif isD('__APPLE__'):
+			conf.env.DEST_BINFMT = 'mac-o'
+
+		if not conf.env.DEST_BINFMT:
+			# Infer the binary format from the os name.
+			conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
+
+		for i in MACRO_TO_DEST_CPU:
+			if isD(i):
+				conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
+				break
+
+		Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
+		if icc:
+			ver = k['__INTEL_COMPILER']
+			conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
+		else:
+			if isD('__clang__') and isD('__clang_major__'):
+				conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+			else:
+				# older clang versions and gcc
+				conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
+	return k
+
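+# For instance (a sketch; the values depend on the toolchain in use):
+#   conf.get_cc_version(conf.env.CC, gcc=True)
+#   # conf.env.CC_VERSION  -> e.g. ('9', '4', '0')
+#   # conf.env.DEST_OS     -> e.g. 'linux'
+#   # conf.env.DEST_BINFMT -> e.g. 'elf'
+#   # conf.env.DEST_CPU    -> e.g. 'x86_64'
+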
+@conf
+def get_xlc_version(conf, cc):
+	"""
+	Returns the Aix compiler version
+
+	:raise: :py:class:`waflib.Errors.ConfigurationError`
+	"""
+	cmd = cc + ['-qversion']
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find xlc %r' % cmd)
+
+	# the intention is to catch the 8.0 in "IBM XL C/C++ Enterprise Edition V8.0 for AIX..."
+	for v in (r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+		version_re = re.compile(v, re.I).search
+		match = version_re(out or err)
+		if match:
+			k = match.groupdict()
+			conf.env.CC_VERSION = (k['major'], k['minor'])
+			break
+	else:
+		conf.fatal('Could not determine the XLC version.')
+
+@conf
+def get_suncc_version(conf, cc):
+	"""
+	Returns the Sun compiler version
+
+	:raise: :py:class:`waflib.Errors.ConfigurationError`
+	"""
+	cmd = cc + ['-V']
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError as e:
+		# Older versions of the compiler exit with non-zero status when reporting their version
+		if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
+			conf.fatal('Could not find suncc %r' % cmd)
+		out = e.stdout
+		err = e.stderr
+
+	version = (out or err)
+	version = version.splitlines()[0]
+
+	# cc: Sun C 5.10 SunOS_i386 2009/06/03
+	# cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
+	# cc: WorkShop Compilers 5.0 98/12/15 C 5.0
+	version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
+	match = version_re(version)
+	if match:
+		k = match.groupdict()
+		conf.env.CC_VERSION = (k['major'], k['minor'])
+	else:
+		conf.fatal('Could not determine the suncc version.')
+
+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
+
+@conf
+def add_as_needed(self):
+	"""
+	Adds ``--as-needed`` to the *LINKFLAGS*
+	On some platforms it is a default flag; in some cases (e.g., in NS-3) it is necessary to disable it explicitly with the `-Wl,--no-as-needed` flag.
+	"""
+	if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
+		self.env.append_unique('LINKFLAGS', '-Wl,--as-needed')
+
+# ============ parallel configuration
+
+class cfgtask(Task.Task):
+	"""
+	A task that executes build configuration tests (calls conf.check)
+
+	Make sure to use locks if concurrent access to the same conf.env data is necessary.
+	"""
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.run_after = set()
+
+	def display(self):
+		return ''
+
+	def runnable_status(self):
+		for x in self.run_after:
+			if not x.hasrun:
+				return Task.ASK_LATER
+		return Task.RUN_ME
+
+	def uid(self):
+		return Utils.SIG_NIL
+
+	def signature(self):
+		return Utils.SIG_NIL
+
+	def run(self):
+		conf = self.conf
+		bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
+		bld.env = conf.env
+		bld.init_dirs()
+		bld.in_msg = 1 # suppress top-level start_msg
+		bld.logger = self.logger
+		bld.multicheck_task = self
+		args = self.args
+		try:
+			if 'func' in args:
+				bld.test(build_fun=args['func'],
+					msg=args.get('msg', ''),
+					okmsg=args.get('okmsg', ''),
+					errmsg=args.get('errmsg', ''),
+					)
+			else:
+				args['multicheck_mandatory'] = args.get('mandatory', True)
+				args['mandatory'] = True
+				try:
+					bld.check(**args)
+				finally:
+					args['mandatory'] = args['multicheck_mandatory']
+		except Exception:
+			return 1
+
+	def process(self):
+		Task.Task.process(self)
+		if 'msg' in self.args:
+			with self.generator.bld.multicheck_lock:
+				self.conf.start_msg(self.args['msg'])
+				if self.hasrun == Task.NOT_RUN:
+					self.conf.end_msg('test cancelled', 'YELLOW')
+				elif self.hasrun != Task.SUCCESS:
+					self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW')
+				else:
+					self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN')
+
+@conf
+def multicheck(self, *k, **kw):
+	"""
+	Runs configuration tests in parallel; results are printed sequentially once all tests
+	have completed. Each test must provide its own *msg* value for its result line to be displayed::
+
+		def test_build(ctx):
+			ctx.in_msg = True # suppress console outputs
+			ctx.check_large_file(mandatory=False)
+
+		conf.multicheck(
+			{'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
+			{'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
+			{'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
+			{'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
+			msg       = 'Checking for headers in parallel',
+			mandatory = True, # mandatory tests raise an error at the end
+			run_all_tests = True, # try running all tests
+		)
+
+	The configuration tests may modify the values in conf.env in any order, and the define
+	values can affect configuration tests being executed. It is hence recommended
+	to provide `uselib_store` values with `global_define=False` to prevent such issues.
+	"""
+	self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw)
+
+	# Force a copy so that threads append to the same list at least
+	# no order is guaranteed, but the values should not disappear at least
+	for var in ('DEFINES', DEFKEYS):
+		self.env.append_value(var, [])
+	self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}
+
+	# define a task object that will execute our tests
+	class par(object):
+		def __init__(self):
+			self.keep = False
+			self.task_sigs = {}
+			self.progress_bar = 0
+		def total(self):
+			return len(tasks)
+		def to_log(self, *k, **kw):
+			return
+
+	bld = par()
+	bld.keep = kw.get('run_all_tests', True)
+	bld.imp_sigs = {}
+	tasks = []
+
+	id_to_task = {}
+	for counter, dct in enumerate(k):
+		x = Task.classes['cfgtask'](bld=bld, env=None)
+		tasks.append(x)
+		x.args = dct
+		x.args['multicheck_counter'] = counter
+		x.bld = bld
+		x.conf = self
+		x.args = dct
+
+		# bind a logger that will keep the info in memory
+		x.logger = Logs.make_mem_logger(str(id(x)), self.logger)
+
+		if 'id' in dct:
+			id_to_task[dct['id']] = x
+
+	# second pass to set dependencies with after_test/before_test
+	for x in tasks:
+		for key in Utils.to_list(x.args.get('before_tests', [])):
+			tsk = id_to_task[key]
+			if not tsk:
+				raise ValueError('No test named %r' % key)
+			tsk.run_after.add(x)
+		for key in Utils.to_list(x.args.get('after_tests', [])):
+			tsk = id_to_task[key]
+			if not tsk:
+				raise ValueError('No test named %r' % key)
+			x.run_after.add(tsk)
+
+	def it():
+		yield tasks
+		while 1:
+			yield []
+	bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
+	bld.multicheck_lock = Utils.threading.Lock()
+	p.biter = it()
+
+	self.end_msg('started')
+	p.start()
+
+	# flush the logs in order into the config.log
+	for x in tasks:
+		x.logger.memhandler.flush()
+
+	self.start_msg('-> processing test results')
+	if p.error:
+		for x in p.error:
+			if getattr(x, 'err_msg', None):
+				self.to_log(x.err_msg)
+				self.end_msg('fail', color='RED')
+				raise Errors.WafError('There is an error in the library, read config.log for more information')
+
+	failure_count = 0
+	for x in tasks:
+		if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
+			failure_count += 1
+
+	if failure_count:
+		self.end_msg(kw.get('errmsg', '%s test(s) failed' % failure_count), color='YELLOW', **kw)
+	else:
+		self.end_msg('all ok', **kw)
+
+	for x in tasks:
+		if x.hasrun != Task.SUCCESS:
+			if x.args.get('mandatory', True):
+				self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
+
+@conf
+def check_gcc_o_space(self, mode='c'):
+	if int(self.env.CC_VERSION[0]) > 4:
+		# this is for old compilers
+		return
+	self.env.stash()
+	if mode == 'c':
+		self.env.CCLNK_TGT_F = ['-o', '']
+	elif mode == 'cxx':
+		self.env.CXXLNK_TGT_F = ['-o', '']
+	features = '%s %sshlib' % (mode, mode)
+	try:
+		self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
+	except self.errors.ConfigurationError:
+		self.env.revert()
+	else:
+		self.env.commit()
+
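+# The stash()/revert()/commit() calls above implement a common try-a-flag
+# pattern: the modified env is kept only if the test build succeeds, e.g.
+# (a sketch; the flag being probed is hypothetical):
+#   conf.env.stash()
+#   conf.env.append_value('LINKFLAGS', '-Wl,--some-flag')
+#   try:
+#       conf.check(msg='Checking for --some-flag', fragment=SNIP_EMPTY_PROGRAM)
+#   except conf.errors.ConfigurationError:
+#       conf.env.revert()
+#   else:
+#       conf.env.commit()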
diff --git a/third_party/waf/waflib/Tools/c_osx.py b/third_party/waf/waflib/Tools/c_osx.py
new file mode 100644
index 0000000..f70b128
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c_osx.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2008-2018 (ita)
+
+"""
+MacOSX related tools
+"""
+
+import os, shutil, platform
+from waflib import Task, Utils
+from waflib.TaskGen import taskgen_method, feature, after_method, before_method
+
+app_info = '''
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleGetInfoString</key>
+	<string>Created by Waf</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>NOTE</key>
+	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+	<key>CFBundleExecutable</key>
+	<string>{app_name}</string>
+</dict>
+</plist>
+'''
+"""
+plist template
+"""
+
+@feature('c', 'cxx')
+def set_macosx_deployment_target(self):
+	"""
+	see WAF issue 285 and also http://trac.macports.org/ticket/17059
+	"""
+	if self.env.MACOSX_DEPLOYMENT_TARGET:
+		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
+	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
+		if Utils.unversioned_sys_platform() == 'darwin':
+			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
+
+@taskgen_method
+def create_bundle_dirs(self, name, out):
+	"""
+	Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
+	"""
+	dir = out.parent.find_or_declare(name)
+	dir.mkdir()
+	macos = dir.find_or_declare(['Contents', 'MacOS'])
+	macos.mkdir()
+	return dir
+
+def bundle_name_for_output(out):
+	name = out.name
+	k = name.rfind('.')
+	if k >= 0:
+		name = name[:k] + '.app'
+	else:
+		name = name + '.app'
+	return name
+
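+# For instance (illustrative; the input is the link task's output node):
+#   'foo'     -> 'foo.app'
+#   'foo.bin' -> 'foo.app'
+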
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macapp(self):
+	"""
+	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
+
+		def build(bld):
+			bld.program(source='a.c', target='foo', mac_app=True)
+
+	To force *all* executables to be transformed into Mac applications::
+
+		def build(bld):
+			bld.env.MACAPP = True
+			bld.program(source='a.c', target='foo')
+	"""
+	if self.env.MACAPP or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+		dir = self.create_bundle_dirs(name, out)
+
+		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
+
+		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
+		self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)
+
+		if getattr(self, 'mac_files', None):
+			# this only accepts files; they will be installed as seen from mac_files_root
+			mac_files_root = getattr(self, 'mac_files_root', None)
+			if isinstance(mac_files_root, str):
+				mac_files_root = self.path.find_node(mac_files_root)
+				if not mac_files_root:
+					self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
+			res_dir = n1.parent.parent.make_node('Resources')
+			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
+			for node in self.to_nodes(self.mac_files):
+				relpath = node.path_from(mac_files_root or node.parent)
+				self.create_task('macapp', node, res_dir.make_node(relpath))
+				self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)
+
+		if getattr(self.bld, 'is_install', None):
+			# disable regular binary installation
+			self.install_task.hasrun = Task.SKIP_ME
+
+@feature('cprogram', 'cxxprogram')
+@after_method('apply_link')
+def create_task_macplist(self):
+	"""
+	Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
+	"""
+	if self.env.MACAPP or getattr(self, 'mac_app', False):
+		out = self.link_task.outputs[0]
+
+		name = bundle_name_for_output(out)
+
+		dir = self.create_bundle_dirs(name, out)
+		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
+		self.plisttask = plisttask = self.create_task('macplist', [], n1)
+		plisttask.context = {
+			'app_name': self.link_task.outputs[0].name,
+			'env': self.env
+		}
+
+		plist_ctx = getattr(self, 'plist_context', None)
+		if plist_ctx:
+			plisttask.context.update(plist_ctx)
+
+		if getattr(self, 'mac_plist', False):
+			node = self.path.find_resource(self.mac_plist)
+			if node:
+				plisttask.inputs.append(node)
+			else:
+				plisttask.code = self.mac_plist
+		else:
+			plisttask.code = app_info
+
+		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
+		self.add_install_files(install_to=inst_to, install_from=n1)
+
+@feature('cshlib', 'cxxshlib')
+@before_method('apply_link', 'propagate_uselib_vars')
+def apply_bundle(self):
+	"""
+	To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', mac_bundle = True)
+
+	To force *all* shared libraries to be transformed into bundles::
+
+		def build(bld):
+			bld.env.MACBUNDLE = True
+			bld.shlib(source='a.c', target='foo')
+	"""
+	if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
+		self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
+		self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
+		use = self.use = self.to_list(getattr(self, 'use', []))
+		if not 'MACBUNDLE' in use:
+			use.append('MACBUNDLE')
+
+app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
+
+class macapp(Task.Task):
+	"""
+	Creates mac applications
+	"""
+	color = 'PINK'
+	def run(self):
+		self.outputs[0].parent.mkdir()
+		shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
+
+class macplist(Task.Task):
+	"""
+	Creates plist files
+	"""
+	color = 'PINK'
+	ext_in = ['.bin']
+	def run(self):
+		if getattr(self, 'code', None):
+			txt = self.code
+		else:
+			txt = self.inputs[0].read()
+		context = getattr(self, 'context', {})
+		txt = txt.format(**context)
+		self.outputs[0].write(txt)
+
diff --git a/third_party/waf/waflib/Tools/c_preproc.py b/third_party/waf/waflib/Tools/c_preproc.py
new file mode 100644
index 0000000..68e5f5a
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c_preproc.py
@@ -0,0 +1,1091 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+C/C++ preprocessor for finding dependencies
+
+Reasons for using the Waf preprocessor by default
+
+#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
+#. Not all compilers provide .d files for obtaining the dependencies (portability)
+#. A naive file scanner will not catch the constructs such as "#include foo()"
+#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
+
+Regarding the speed concerns:
+
+* the preprocessing is performed only when files must be compiled
+* the macros are evaluated only for #if/#elif/#include
+* system headers are not scanned by default
+
+Now if you do not want the Waf preprocessor, the tool *gccdeps* uses the .d files produced
+during the compilation to track the dependencies (useful when used with the boost libraries).
+It only works with gcc >= 4.4 though.
+
+A dumb preprocessor is also available in the tool *c_dumbpreproc*
+"""
+# TODO: more varargs, pragma once
+
+import re, string, traceback
+from waflib import Logs, Utils, Errors
+
+class PreprocError(Errors.WafError):
+	pass
+
+FILE_CACHE_SIZE = 100000
+LINE_CACHE_SIZE = 100000
+
+POPFILE = '-'
+"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
+
+recursion_limit = 150
+"Limit on the amount of files to read in the dependency scanner"
+
+go_absolute = False
+"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
+
+standard_includes = ['/usr/local/include', '/usr/include']
+if Utils.is_win32:
+	standard_includes = []
+
+use_trigraphs = 0
+"""Apply trigraph rules (False by default)"""
+
+# obsolete, do not use
+strict_quotes = 0
+
+g_optrans = {
+'not':'!',
+'not_eq':'!',
+'and':'&&',
+'and_eq':'&=',
+'or':'||',
+'or_eq':'|=',
+'xor':'^',
+'xor_eq':'^=',
+'bitand':'&',
+'bitor':'|',
+'compl':'~',
+}
+"""Operators such as and/or/xor for c++. Set an empty dict to disable."""
+
+# ignore #warning and #error
+re_lines = re.compile(
+	'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
+	re.IGNORECASE | re.MULTILINE)
+"""Match #include lines"""
+
+re_mac = re.compile(r"^[a-zA-Z_]\w*")
+"""Match macro definitions"""
+
+re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
+"""Match macro functions"""
+
+re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE)
+"""Match #pragma once statements"""
+
+re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
+"""Match newlines"""
+
+re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE )
+"""Filter C/C++ comments"""
+
+trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
+"""Trigraph definitions"""
+
+chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
+"""Escape characters"""
+
+NUM   = 'i'
+"""Number token"""
+
+OP    = 'O'
+"""Operator token"""
+
+IDENT = 'T'
+"""Identifier token"""
+
+STR   = 's'
+"""String token"""
+
+CHAR  = 'c'
+"""Character token"""
+
+tok_types = [NUM, STR, IDENT, OP]
+"""Token types"""
+
+exp_types = [
+	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
+	r'L?"([^"\\]|\\.)*"',
+	r'[a-zA-Z_]\w*',
+	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
+]
+"""Expression types"""
+
+re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
+"""Match expressions into tokens"""
+
+accepted  = 'a'
+"""Parser state is *accepted*"""
+
+ignored   = 'i'
+"""Parser state is *ignored*, for example preprocessor lines in an #if 0 block"""
+
+undefined = 'u'
+"""Parser state is *undefined* at the moment"""
+
+skipped   = 's'
+"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block"""
+
+def repl(m):
+	"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
+	s = m.group()
+	if s[0] == '/':
+		return ' '
+	return s
+
+prec = {}
+"""
+Operator precedence rules required for parsing expressions of the form::
+
+	#if 1 && 2 != 0
+"""
+ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
+for x, syms in enumerate(ops):
+	for u in syms.split():
+		prec[u] = x
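+
+# A lower index means a tighter binding: prec['*'] == 0 and prec['+'] == 1,
+# so get_term() reduces '2 * 3' before applying '+' in '1 + 2 * 3'.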
+
+def reduce_nums(val_1, val_2, val_op):
+	"""
+	Apply arithmetic rules to compute a result
+
+	:param val_1: input parameter
+	:type val_1: int or string
+	:param val_2: input parameter
+	:type val_2: int or string
+	:param val_op: C operator in *+*, */*, *-*, etc
+	:type val_op: string
+	:rtype: int
+	"""
+	#print val_1, val_2, val_op
+
+	# now perform the operation, make certain a and b are numeric
+	try:
+		a = 0 + val_1
+	except TypeError:
+		a = int(val_1)
+	try:
+		b = 0 + val_2
+	except TypeError:
+		b = int(val_2)
+
+	d = val_op
+	if d == '%':
+		c = a % b
+	elif d=='+':
+		c = a + b
+	elif d=='-':
+		c = a - b
+	elif d=='*':
+		c = a * b
+	elif d=='/':
+		c = a / b
+	elif d=='^':
+		c = a ^ b
+	elif d=='==':
+		c = int(a == b)
+	elif d=='|'  or d == 'bitor':
+		c = a | b
+	elif d=='||' or d == 'or' :
+		c = int(a or b)
+	elif d=='&'  or d == 'bitand':
+		c = a & b
+	elif d=='&&' or d == 'and':
+		c = int(a and b)
+	elif d=='!=' or d == 'not_eq':
+		c = int(a != b)
+	elif d=='^'  or d == 'xor':
+		c = int(a^b)
+	elif d=='<=':
+		c = int(a <= b)
+	elif d=='<':
+		c = int(a < b)
+	elif d=='>':
+		c = int(a > b)
+	elif d=='>=':
+		c = int(a >= b)
+	elif d=='<<':
+		c = a << b
+	elif d=='>>':
+		c = a >> b
+	else:
+		c = 0
+	return c
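+
+# For instance, reduce_nums('2', 3, '+') coerces the string '2' to an int and
+# returns 5; unsupported operators fall through to the default result 0.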
+
+def get_num(lst):
+	"""
+	Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.c_preproc.tok_types`.
+
+	:param lst: list of preprocessor tokens
+	:type lst: list of tuple (tokentype, value)
+	:return: a pair containing the number and the rest of the list
+	:rtype: tuple(value, list)
+	"""
+	if not lst:
+		raise PreprocError('empty list for get_num')
+	(p, v) = lst[0]
+	if p == OP:
+		if v == '(':
+			count_par = 1
+			i = 1
+			while i < len(lst):
+				(p, v) = lst[i]
+
+				if p == OP:
+					if v == ')':
+						count_par -= 1
+						if count_par == 0:
+							break
+					elif v == '(':
+						count_par += 1
+				i += 1
+			else:
+				raise PreprocError('rparen expected %r' % lst)
+
+			(num, _) = get_term(lst[1:i])
+			return (num, lst[i+1:])
+
+		elif v == '+':
+			return get_num(lst[1:])
+		elif v == '-':
+			num, lst = get_num(lst[1:])
+			return (reduce_nums('-1', num, '*'), lst)
+		elif v == '!':
+			num, lst = get_num(lst[1:])
+			return (int(not int(num)), lst)
+		elif v == '~':
+			num, lst = get_num(lst[1:])
+			return (~ int(num), lst)
+		else:
+			raise PreprocError('Invalid op token %r for get_num' % lst)
+	elif p == NUM:
+		return v, lst[1:]
+	elif p == IDENT:
+		# all macros should have been replaced, remaining identifiers eval to 0
+		return 0, lst[1:]
+	else:
+		raise PreprocError('Invalid token %r for get_num' % lst)
+
+def get_term(lst):
+	"""
+	Evaluate an expression recursively, for example::
+
+		1+1+1 -> 2+1 -> 3
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:return: the value and the remaining tokens
+	:rtype: value, list
+	"""
+
+	if not lst:
+		raise PreprocError('empty list for get_term')
+	num, lst = get_num(lst)
+	if not lst:
+		return (num, [])
+	(p, v) = lst[0]
+	if p == OP:
+		if v == ',':
+			# skip
+			return get_term(lst[1:])
+		elif v == '?':
+			count_par = 0
+			i = 1
+			while i < len(lst):
+				(p, v) = lst[i]
+
+				if p == OP:
+					if v == ')':
+						count_par -= 1
+					elif v == '(':
+						count_par += 1
+					elif v == ':':
+						if count_par == 0:
+							break
+				i += 1
+			else:
+				raise PreprocError('rparen expected %r' % lst)
+
+			if int(num):
+				return get_term(lst[1:i])
+			else:
+				return get_term(lst[i+1:])
+
+		else:
+			num2, lst = get_num(lst[1:])
+
+			if not lst:
+				# no more tokens to process
+				num2 = reduce_nums(num, num2, v)
+				return get_term([(NUM, num2)] + lst)
+
+			# operator precedence
+			p2, v2 = lst[0]
+			if p2 != OP:
+				raise PreprocError('op expected %r' % lst)
+
+			if prec[v2] >= prec[v]:
+				num2 = reduce_nums(num, num2, v)
+				return get_term([(NUM, num2)] + lst)
+			else:
+				num3, lst = get_num(lst[1:])
+				num3 = reduce_nums(num2, num3, v2)
+				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
+
+
+	raise PreprocError('cannot reduce %r' % lst)
+
+def reduce_eval(lst):
+	"""
+	Take a list of tokens and output true or false for #if/#elif conditions.
+
+	:param lst: a list of tokens
+	:type lst: list of tuple(token, value)
+	:return: a token
+	:rtype: tuple(NUM, int)
+	"""
+	num, lst = get_term(lst)
+	return (NUM, num)
+
+def stringize(lst):
+	"""
+	Merge a list of tokens into a string
+
+	:param lst: a list of tokens
+	:type lst: list of tuple(token, value)
+	:rtype: string
+	"""
+	lst = [str(v2) for (p2, v2) in lst]
+	return "".join(lst)
+
+def paste_tokens(t1, t2):
+	"""
+	Token pasting works between identifiers, certain operators, and between identifiers and numbers::
+
+		a ## b  ->  ab
+		> ## =  ->  >=
+		a ## 2  ->  a2
+
+	:param t1: token
+	:type t1: tuple(type, value)
+	:param t2: token
+	:type t2: tuple(type, value)
+	"""
+	p1 = None
+	if t1[0] == OP and t2[0] == OP:
+		p1 = OP
+	elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
+		p1 = IDENT
+	elif t1[0] == NUM and t2[0] == NUM:
+		p1 = NUM
+	if not p1:
+		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
+	return (p1, t1[1] + t2[1])
+
+def reduce_tokens(lst, defs, ban=[]):
+	"""
+	Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:param defs: macro definitions
+	:type defs: dict
+	:param ban: macros that cannot be substituted (recursion is not allowed)
+	:type ban: list of string
+	:return: None; the list *lst* is modified in place
+	"""
+
+	i = 0
+	while i < len(lst):
+		(p, v) = lst[i]
+
+		if p == IDENT and v == "defined":
+			del lst[i]
+			if i < len(lst):
+				(p2, v2) = lst[i]
+				if p2 == IDENT:
+					if v2 in defs:
+						lst[i] = (NUM, 1)
+					else:
+						lst[i] = (NUM, 0)
+				elif p2 == OP and v2 == '(':
+					del lst[i]
+					(p2, v2) = lst[i]
+					del lst[i] # remove the ident, and change the ) for the value
+					if v2 in defs:
+						lst[i] = (NUM, 1)
+					else:
+						lst[i] = (NUM, 0)
+				else:
+					raise PreprocError('Invalid define expression %r' % lst)
+
+		elif p == IDENT and v in defs:
+
+			if isinstance(defs[v], str):
+				a, b = extract_macro(defs[v])
+				defs[v] = b
+			macro_def = defs[v]
+			to_add = macro_def[1]
+
+			if isinstance(macro_def[0], list):
+				# macro without arguments
+				del lst[i]
+				accu = to_add[:]
+				reduce_tokens(accu, defs, ban+[v])
+				for tmp in accu:
+					lst.insert(i, tmp)
+					i += 1
+			else:
+				# collect the arguments for the funcall
+
+				args = []
+				del lst[i]
+
+				if i >= len(lst):
+					raise PreprocError('expected ( after %r (got nothing)' % v)
+
+				(p2, v2) = lst[i]
+				if p2 != OP or v2 != '(':
+					raise PreprocError('expected ( after %r' % v)
+
+				del lst[i]
+
+				one_param = []
+				count_paren = 0
+				while i < len(lst):
+					p2, v2 = lst[i]
+
+					del lst[i]
+					if p2 == OP and count_paren == 0:
+						if v2 == '(':
+							one_param.append((p2, v2))
+							count_paren += 1
+						elif v2 == ')':
+							if one_param:
+								args.append(one_param)
+							break
+						elif v2 == ',':
+							if not one_param:
+								raise PreprocError('empty param in funcall %r' % v)
+							args.append(one_param)
+							one_param = []
+						else:
+							one_param.append((p2, v2))
+					else:
+						one_param.append((p2, v2))
+						if   v2 == '(':
+							count_paren += 1
+						elif v2 == ')':
+							count_paren -= 1
+				else:
+					raise PreprocError('malformed macro')
+
+				# substitute the arguments within the define expression
+				accu = []
+				arg_table = macro_def[0]
+				j = 0
+				while j < len(to_add):
+					(p2, v2) = to_add[j]
+
+					if p2 == OP and v2 == '#':
+						# stringize is for arguments only
+						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
+							toks = args[arg_table[to_add[j+1][1]]]
+							accu.append((STR, stringize(toks)))
+							j += 1
+						else:
+							accu.append((p2, v2))
+					elif p2 == OP and v2 == '##':
+						# token pasting; how could anyone invent such a complicated system?
+						if accu and j+1 < len(to_add):
+							# we have at least two tokens
+
+							t1 = accu[-1]
+
+							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
+								toks = args[arg_table[to_add[j+1][1]]]
+
+								if toks:
+									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
+									accu.extend(toks[1:])
+								else:
+									# error, case "a##"
+									accu.append((p2, v2))
+									accu.extend(toks)
+							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
+								# first collect the tokens
+								va_toks = []
+								st = len(macro_def[0])
+								pt = len(args)
+								for x in args[pt-st+1:]:
+									va_toks.extend(x)
+									va_toks.append((OP, ','))
+								if va_toks:
+									va_toks.pop() # extra comma
+								if len(accu)>1:
+									(p3, v3) = accu[-1]
+									(p4, v4) = accu[-2]
+									if v3 == '##':
+										# remove the token paste
+										accu.pop()
+										if v4 == ',' and pt < st:
+											# remove the comma
+											accu.pop()
+								accu += va_toks
+							else:
+								accu[-1] = paste_tokens(t1, to_add[j+1])
+
+							j += 1
+						else:
+							# Invalid paste, case    "##a" or "b##"
+							accu.append((p2, v2))
+
+					elif p2 == IDENT and v2 in arg_table:
+						toks = args[arg_table[v2]]
+						reduce_tokens(toks, defs, ban+[v])
+						accu.extend(toks)
+					else:
+						accu.append((p2, v2))
+
+					j += 1
+
+
+				reduce_tokens(accu, defs, ban+[v])
+
+				for x in range(len(accu)-1, -1, -1):
+					lst.insert(i, accu[x])
+
+		i += 1
+
+
+def eval_macro(lst, defs):
+	"""
+	Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`.
+
+	:param lst: list of tokens
+	:type lst: list of tuple(token, value)
+	:param defs: macro definitions
+	:type defs: dict
+	:rtype: int
+	"""
+	reduce_tokens(lst, defs, [])
+	if not lst:
+		raise PreprocError('missing tokens to evaluate')
+
+	p, v = lst[0]
+	if p == IDENT and v not in defs:
+		raise PreprocError('missing macro %r' % lst)
+
+	p, v = reduce_eval(lst)
+	return int(v) != 0
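+
+# Example: eval_macro(tokenize('defined(FOO) || 0'), {'FOO': 'FOO 1'}) returns True,
+# while the same expression evaluates to False with an empty defs dict.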
+
+def extract_macro(txt):
+	"""
+	Process a macro definition of the form::
+		 #define f(x, y) x * y
+
+	into a function or a simple macro without arguments
+
+	:param txt: expression to extract a macro definition from
+	:type txt: string
+	:return: a tuple containing the name, the list of arguments and the replacement
+	:rtype: tuple(string, [list, list])
+	"""
+	t = tokenize(txt)
+	if re_fun.search(txt):
+		p, name = t[0]
+
+		p, v = t[1]
+		if p != OP:
+			raise PreprocError('expected (')
+
+		i = 1
+		pindex = 0
+		params = {}
+		prev = '('
+
+		while 1:
+			i += 1
+			p, v = t[i]
+
+			if prev == '(':
+				if p == IDENT:
+					params[v] = pindex
+					pindex += 1
+					prev = p
+				elif p == OP and v == ')':
+					break
+				else:
+					raise PreprocError('unexpected token (3)')
+			elif prev == IDENT:
+				if p == OP and v == ',':
+					prev = v
+				elif p == OP and v == ')':
+					break
+				else:
+					raise PreprocError('comma or ... expected')
+			elif prev == ',':
+				if p == IDENT:
+					params[v] = pindex
+					pindex += 1
+					prev = p
+				elif p == OP and v == '...':
+					raise PreprocError('not implemented (1)')
+				else:
+					raise PreprocError('comma or ... expected (2)')
+			elif prev == '...':
+				raise PreprocError('not implemented (2)')
+			else:
+				raise PreprocError('unexpected else')
+
+		#~ print (name, [params, t[i+1:]])
+		return (name, [params, t[i+1:]])
+	else:
+		(p, v) = t[0]
+		if len(t) > 1:
+			return (v, [[], t[1:]])
+		else:
+			# empty define, assign an empty token
+			return (v, [[], [('T','')]])
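+
+# For instance, extract_macro('f(x, y) x*y') returns
+# ('f', [{'x': 0, 'y': 1}, <the tokens of 'x*y'>]), while the simple
+# definition 'FOO 1' becomes ('FOO', [[], [('i', '1')]]).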
+
+re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")')
+def extract_include(txt, defs):
+	"""
+	Process a line in the form::
+
+		#include foo
+
+	:param txt: include line to process
+	:type txt: string
+	:param defs: macro definitions
+	:type defs: dict
+	:return: the file name
+	:rtype: string
+	"""
+	m = re_include.search(txt)
+	if m:
+		txt = m.group(1)
+		return txt[0], txt[1:-1]
+
+	# perform preprocessing and look at the result; it must match an include
+	toks = tokenize(txt)
+	reduce_tokens(toks, defs, ['waf_include'])
+
+	if not toks:
+		raise PreprocError('could not parse include %r' % txt)
+
+	if len(toks) == 1:
+		if toks[0][0] == STR:
+			return '"', toks[0][1]
+	else:
+		if toks[0][1] == '<' and toks[-1][1] == '>':
+			ret = '<', stringize(toks).lstrip('<').rstrip('>')
+			return ret
+
+	raise PreprocError('could not parse include %r' % txt)
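+
+# Example: extract_include('<stdio.h>', {}) returns ('<', 'stdio.h') and
+# extract_include('"conf.h"', {}) returns ('"', 'conf.h'); the first element
+# lets tryfind() decide whether to search the including file's directory first.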
+
+def parse_char(txt):
+	"""
+	Parse a c character
+
+	:param txt: character to parse
+	:type txt: string
+	:return: the character value as an integer
+	:rtype: int
+	"""
+
+	if not txt:
+		raise PreprocError('attempted to parse a null char')
+	if txt[0] != '\\':
+		return ord(txt)
+	c = txt[1]
+	if c == 'x':
+		# hexadecimal escapes of any supported length parse the same way
+		return int(txt[2:], 16)
+	elif c.isdigit():
+		if c == '0' and len(txt)==2:
+			return 0
+		for i in 3, 2, 1:
+			if len(txt) > i and txt[1:1+i].isdigit():
+				# octal escape: return the numeric value (an int, consistent with the other branches)
+				return int(txt[1:1+i], 8)
+	else:
+		try:
+			return chr_esc[c]
+		except KeyError:
+			raise PreprocError('could not parse char literal %r' % txt)
+
+def tokenize(s):
+	"""
+	Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)
+
+	:param s: input to tokenize
+	:type s: string
+	:return: a list of tokens
+	:rtype: list of tuple(token, value)
+	"""
+	return tokenize_private(s)[:] # force a copy of the results
+
+def tokenize_private(s):
+	ret = []
+	for match in re_clexer.finditer(s):
+		m = match.group
+		for name in tok_types:
+			v = m(name)
+			if v:
+				if name == IDENT:
+					if v in g_optrans:
+						name = OP
+					elif v.lower() == "true":
+						v = 1
+						name = NUM
+					elif v.lower() == "false":
+						v = 0
+						name = NUM
+				elif name == NUM:
+					if m('oct'):
+						v = int(v, 8)
+					elif m('hex'):
+						v = int(m('hex'), 16)
+					elif m('n0'):
+						v = m('n0')
+					else:
+						v = m('char')
+						if v:
+							v = parse_char(v)
+						else:
+							v = m('n2') or m('n4')
+				elif name == OP:
+					if v == '%:':
+						v = '#'
+					elif v == '%:%:':
+						v = '##'
+				elif name == STR:
+					# remove the quotes around the string
+					v = v[1:-1]
+				ret.append((name, v))
+				break
+	return ret
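+
+# Example: tokenize('FOO && 0x10') returns [('T', 'FOO'), ('O', '&&'), ('i', 16)];
+# note that plain decimal literals are kept as strings and coerced later by reduce_nums().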
+
+def format_defines(lst):
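+	"""
+	Convert command-line defines such as 'FOO' or 'FOO=bar' into the
+	'FOO bar' form expected by :py:meth:`waflib.Tools.c_preproc.c_parser.start`
+	"""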
+	ret = []
+	for y in lst:
+		if y:
+			pos = y.find('=')
+			if pos == -1:
+				# "-DFOO" should give "#define FOO 1"
+				ret.append(y)
+			elif pos > 0:
+				# all others are assumed to be -DX=Y
+				ret.append('%s %s' % (y[:pos], y[pos+1:]))
+			else:
+				raise ValueError('Invalid define expression %r' % y)
+	return ret
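+
+# Example: format_defines(['FOO', 'BAR=1']) returns ['FOO', 'BAR 1'].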
+
+class c_parser(object):
+	"""
+	Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default,
+	only project headers are parsed.
+	"""
+	def __init__(self, nodepaths=None, defines=None):
+		self.lines = []
+		"""list of lines read"""
+
+		if defines is None:
+			self.defs  = {}
+		else:
+			self.defs  = dict(defines) # make a copy
+		self.state = []
+
+		self.count_files = 0
+		self.currentnode_stack = []
+
+		self.nodepaths = nodepaths or []
+		"""Include paths"""
+
+		self.nodes = []
+		"""List of :py:class:`waflib.Node.Node` found so far"""
+
+		self.names = []
+		"""List of file names that could not be matched by any file"""
+
+		self.curfile = ''
+		"""Current file"""
+
+		self.ban_includes = set()
+		"""Includes that must not be read (#pragma once)"""
+
+		self.listed = set()
+		"""Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
+
+	def cached_find_resource(self, node, filename):
+		"""
+		Find a file from the input directory
+
+		:param node: directory
+		:type node: :py:class:`waflib.Node.Node`
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found, or None
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		try:
+			cache = node.ctx.preproc_cache_node
+		except AttributeError:
+			cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
+
+		key = (node, filename)
+		try:
+			return cache[key]
+		except KeyError:
+			ret = node.find_resource(filename)
+			if ret:
+				if getattr(ret, 'children', None):
+					ret = None
+				elif ret.is_child_of(node.ctx.bldnode):
+					tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
+					if tmp and getattr(tmp, 'children', None):
+						ret = None
+			cache[key] = ret
+			return ret
+
+	def tryfind(self, filename, kind='"', env=None):
+		"""
+		Try to obtain a node from the filename based from the include paths. Will add
+		the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
+		:py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by
+		:py:attr:`waflib.Tools.c_preproc.c_parser.start`.
+
+		:param filename: header to find
+		:type filename: string
+		:return: the node if found
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		if filename.endswith('.moc'):
+			# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
+			# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
+			self.names.append(filename)
+			return None
+
+		self.curfile = filename
+
+		found = None
+		if kind == '"':
+			if env.MSVC_VERSION:
+				for n in reversed(self.currentnode_stack):
+					found = self.cached_find_resource(n, filename)
+					if found:
+						break
+			else:
+				found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+
+		if not found:
+			for n in self.nodepaths:
+				found = self.cached_find_resource(n, filename)
+				if found:
+					break
+
+		listed = self.listed
+		if found and found not in self.ban_includes:
+			if found not in listed:
+				listed.add(found)
+				self.nodes.append(found)
+			self.addlines(found)
+		else:
+			if filename not in listed:
+				listed.add(filename)
+				self.names.append(filename)
+		return found
+
+	def filter_comments(self, node):
+		"""
+		Filter the comments from a c/h file, and return the preprocessor lines.
+		The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
+
+		:return: the preprocessor directives as a list of (keyword, line)
+		:rtype: a list of string pairs
+		"""
+		# return a list of tuples : keyword, line
+		code = node.read()
+		if use_trigraphs:
+			for (a, b) in trig_def:
+				code = code.split(a).join(b)
+		code = re_nl.sub('', code)
+		code = re_cpp.sub(repl, code)
+		return re_lines.findall(code)
+
+	def parse_lines(self, node):
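+		"""
+		Return the preprocessor directives of a node as a reversed list of
+		(keyword, line) tuples, caching the results on the build context
+		"""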
+		try:
+			cache = node.ctx.preproc_cache_lines
+		except AttributeError:
+			cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
+		try:
+			return cache[node]
+		except KeyError:
+			cache[node] = lines = self.filter_comments(node)
+			lines.append((POPFILE, ''))
+			lines.reverse()
+			return lines
+
+	def addlines(self, node):
+		"""
+		Add the lines from a header in the list of preprocessor lines to parse
+
+		:param node: header
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+
+		self.currentnode_stack.append(node.parent)
+
+		self.count_files += 1
+		if self.count_files > recursion_limit:
+			# issue #812
+			raise PreprocError('recursion limit exceeded')
+
+		if Logs.verbose:
+			Logs.debug('preproc: reading file %r', node)
+		try:
+			lines = self.parse_lines(node)
+		except EnvironmentError:
+			raise PreprocError('could not read the file %r' % node)
+		except Exception:
+			if Logs.verbose > 0:
+				Logs.error('parsing %r failed %s', node, traceback.format_exc())
+		else:
+			self.lines.extend(lines)
+
+	def start(self, node, env):
+		"""
+		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
+		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.
+
+		:param node: source file
+		:type node: :py:class:`waflib.Node.Node`
+		:param env: config set containing additional defines to take into account
+		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+		"""
+		Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
+
+		self.current_file = node
+		self.addlines(node)
+
+		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
+		if env.DEFINES:
+			lst = format_defines(env.DEFINES)
+			lst.reverse()
+			self.lines.extend([('define', x) for x in lst])
+
+		while self.lines:
+			(token, line) = self.lines.pop()
+			if token == POPFILE:
+				self.count_files -= 1
+				self.currentnode_stack.pop()
+				continue
+
+			try:
+				state = self.state
+
+				# make certain we define the state if we are about to enter an if block
+				if token[:2] == 'if':
+					state.append(undefined)
+				elif token == 'endif':
+					state.pop()
+
+				# skip lines when in a dead 'if' branch, wait for the endif
+				if token[0] != 'e':
+					if skipped in self.state or ignored in self.state:
+						continue
+
+				if token == 'if':
+					ret = eval_macro(tokenize(line), self.defs)
+					if ret:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
+				elif token == 'ifdef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
+				elif token == 'ifndef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs:
+						state[-1] = ignored
+					else:
+						state[-1] = accepted
+				elif token == 'include' or token == 'import':
+					(kind, inc) = extract_include(line, self.defs)
+					self.current_file = self.tryfind(inc, kind, env)
+					if token == 'import':
+						self.ban_includes.add(self.current_file)
+				elif token == 'elif':
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						if eval_macro(tokenize(line), self.defs):
+							state[-1] = accepted
+				elif token == 'else':
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						state[-1] = accepted
+				elif token == 'define':
+					try:
+						self.defs[self.define_name(line)] = line
+					except AttributeError:
+						raise PreprocError('Invalid define line %r' % line)
+				elif token == 'undef':
+					m = re_mac.match(line)
+					if m and m.group() in self.defs:
+						self.defs.__delitem__(m.group())
+						#print "undef %s" % name
+				elif token == 'pragma':
+					if re_pragma_once.match(line.lower()):
+						self.ban_includes.add(self.current_file)
+			except Exception as e:
+				if Logs.verbose:
+					Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
+
+	def define_name(self, line):
+		"""
+		:param line: define line
+		:type line: string
+		:rtype: string
+		:return: the define name
+		"""
+		return re_mac.match(line).group()
+
+def scan(task):
+	"""
+	Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind::
+
+		#include some_macro()
+
+	This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
+	"""
+	try:
+		incn = task.generator.includes_nodes
+	except AttributeError:
+		raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator)
+
+	if go_absolute:
+		nodepaths = incn + [task.generator.bld.root.find_dir(x) for x in standard_includes]
+	else:
+		nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+	tmp = c_parser(nodepaths)
+	tmp.start(task.inputs[0], task.env)
+	return (tmp.nodes, tmp.names)
diff --git a/third_party/waf/waflib/Tools/c_tests.py b/third_party/waf/waflib/Tools/c_tests.py
new file mode 100644
index 0000000..bdd186c
--- /dev/null
+++ b/third_party/waf/waflib/Tools/c_tests.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016-2018 (ita)
+
+"""
+Various configuration tests.
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method, after_method
+
+LIB_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllexport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void) { return 9; }
+'''
+
+MAIN_CODE = '''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllimport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void);
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(lib_func() == 9);
+}
+'''
+
+@feature('link_lib_test')
+@before_method('process_source')
+def link_lib_test_fun(self):
+	"""
+	The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
+	so we need to create other task generators from here to check if the linker is able to link libraries.
+	"""
+	def write_test_file(task):
+		task.outputs[0].write(task.generator.code)
+
+	rpath = []
+	if getattr(self, 'add_rpath', False):
+		rpath = [self.bld.path.get_bld().abspath()]
+
+	mode = self.mode
+	m = '%s %s' % (mode, mode)
+	ex = self.test_exec and 'test_exec' or ''
+	bld = self.bld
+	bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
+	bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
+	bld(features='%sshlib' % m, source='test.' + mode, target='test')
+	bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
+
+@conf
+def check_library(self, mode=None, test_exec=True):
+	"""
+	Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
+
+	:param mode: c or cxx or d
+	:type mode: string
+	"""
+	if not mode:
+		mode = 'c'
+		if self.env.CXX:
+			mode = 'cxx'
+	self.check(
+		compile_filename = [],
+		features = 'link_lib_test',
+		msg = 'Checking for libraries',
+		mode = mode,
+		test_exec = test_exec)
+
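+# Illustrative wscript usage (assuming a C compiler has been configured):
+#
+#   def configure(conf):
+#       conf.load('compiler_c')
+#       conf.check_library(test_exec=True)
+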
+########################################################################################
+
+INLINE_CODE = '''
+typedef int foo_t;
+static %s foo_t static_foo () {return 0; }
+%s foo_t foo () {
+	return 0;
+}
+'''
+INLINE_VALUES = ['inline', '__inline__', '__inline']
+
+@conf
+def check_inline(self, **kw):
+	"""
+	Checks for the right value for the inline keyword.
+	Defines INLINE_MACRO to 1 if a working keyword is found.
+	If that keyword is not 'inline', also adds a define to ``config.h`` (#define inline __inline__)
+
+	:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
+	:type define_name: string
+	:param features: by default *c* or *cxx* depending on the compiler present
+	:type features: list of string
+	"""
+	self.start_msg('Checking for inline')
+
+	if 'define_name' not in kw:
+		kw['define_name'] = 'INLINE_MACRO'
+	if 'features' not in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx']
+		else:
+			kw['features'] = ['c']
+
+	for x in INLINE_VALUES:
+		kw['fragment'] = INLINE_CODE % (x, x)
+
+		try:
+			self.check(**kw)
+		except self.errors.ConfigurationError:
+			continue
+		else:
+			self.end_msg(x)
+			if x != 'inline':
+				self.define('inline', x, quote=False)
+			return x
+	self.fatal('could not use inline functions')
+
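+# Illustrative wscript usage:
+#
+#   def configure(conf):
+#       conf.check_inline()   # defines INLINE_MACRO, plus 'inline' when a fallback keyword is used
+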
+########################################################################################
+
+LARGE_FRAGMENT = '''#include <unistd.h>
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(sizeof(off_t) >= 8);
+}
+'''
+
+@conf
+def check_large_file(self, **kw):
+	"""
+	Checks for large file support and defines the macro HAVE_LARGEFILE.
+	The test is skipped on win32 systems (DEST_BINFMT == pe).
+
+	:param define_name: define to set, by default *HAVE_LARGEFILE*
+	:type define_name: string
+	:param execute: execute the test (yes by default)
+	:type execute: bool
+	"""
+	if 'define_name' not in kw:
+		kw['define_name'] = 'HAVE_LARGEFILE'
+	if 'execute' not in kw:
+		kw['execute'] = True
+
+	if 'features' not in kw:
+		if self.env.CXX:
+			kw['features'] = ['cxx', 'cxxprogram']
+		else:
+			kw['features'] = ['c', 'cprogram']
+
+	kw['fragment'] = LARGE_FRAGMENT
+
+	kw['msg'] = 'Checking for large file support'
+	ret = True
+	try:
+		if self.env.DEST_BINFMT != 'pe':
+			ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		if ret:
+			return True
+
+	kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
+	kw['defines'] = ['_FILE_OFFSET_BITS=64']
+	try:
+		ret = self.check(**kw)
+	except self.errors.ConfigurationError:
+		pass
+	else:
+		self.define('_FILE_OFFSET_BITS', 64)
+		return ret
+
+	self.fatal('There is no support for large files')
+
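+# Illustrative wscript usage:
+#
+#   def configure(conf):
+#       conf.check_large_file()   # may define HAVE_LARGEFILE and _FILE_OFFSET_BITS
+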
+########################################################################################
+
+ENDIAN_FRAGMENT = '''
+#ifdef _MSC_VER
+#define testshlib_EXPORT __declspec(dllexport)
+#else
+#define testshlib_EXPORT
+#endif
+
+short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+int testshlib_EXPORT use_ascii (int i) {
+	return ascii_mm[i] + ascii_ii[i];
+}
+short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+int use_ebcdic (int i) {
+	return ebcdic_mm[i] + ebcdic_ii[i];
+}
+extern int foo;
+'''
+
+class grep_for_endianness(Task.Task):
+	"""
+	Task that reads a binary and tries to determine the endianness
+	"""
+	color = 'PINK'
+	def run(self):
+		txt = self.inputs[0].read(flags='rb').decode('latin-1')
+		if txt.find('LiTTleEnDian') > -1:
+			self.generator.tmp.append('little')
+		elif txt.find('BIGenDianSyS') > -1:
+			self.generator.tmp.append('big')
+		else:
+			return -1
+
+@feature('grep_for_endianness')
+@after_method('apply_link')
+def grep_for_endianness_fun(self):
+	"""
+	Used by the endianness configuration test
+	"""
+	self.create_task('grep_for_endianness', self.link_task.outputs[0])
+
+@conf
+def check_endianness(self):
+	"""
+	Executes a configuration test to determine the endianness
+	"""
+	tmp = []
+	def check_msg(self):
+		return tmp[0]
+
+	self.check(fragment=ENDIAN_FRAGMENT, features='c cshlib grep_for_endianness',
+		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp,
+		okmsg=check_msg, confcache=None)
+	return tmp[0]
+
diff --git a/third_party/waf/waflib/Tools/ccroot.py b/third_party/waf/waflib/Tools/ccroot.py
new file mode 100644
index 0000000..76deff5
--- /dev/null
+++ b/third_party/waf/waflib/Tools/ccroot.py
@@ -0,0 +1,792 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Classes and methods shared by tools providing support for C-like languages such
+as C/C++/D/Assembly/Go (this support module is almost never used alone).
+"""
+
+import os, re
+from waflib import Task, Utils, Node, Errors, Logs
+from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
+from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
+from waflib.Configure import conf
+
+SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']
+
+USELIB_VARS = Utils.defaultdict(set)
+"""
+Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
+"""
+
+USELIB_VARS['c']        = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
+USELIB_VARS['cxx']      = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
+USELIB_VARS['d']        = set(['INCLUDES', 'DFLAGS'])
+USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
+
+USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
+USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
+USELIB_VARS['cstlib']   = USELIB_VARS['cxxstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+USELIB_VARS['dstlib']   = set(['ARFLAGS', 'LINKDEPS'])
+
+USELIB_VARS['asm'] = set(['ASFLAGS'])
+
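+# For example, a task generator with features='c cprogram' collects the flags
+# named in USELIB_VARS['c'] | USELIB_VARS['cprogram'] (see propagate_uselib_vars below).
+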
+# =================================================================================================
+
+@taskgen_method
+def create_compiled_task(self, name, node):
+	"""
+	Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
+	The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`
+
+	:param name: name of the task class
+	:type name: string
+	:param node: the file to compile
+	:type node: :py:class:`waflib.Node.Node`
+	:return: The task created
+	:rtype: :py:class:`waflib.Task.Task`
+	"""
+	out = '%s.%d.o' % (node.name, self.idx)
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	return task
+
+@taskgen_method
+def to_incnodes(self, inlst):
+	"""
+	Task generator method provided to convert a list of string/nodes into a list of includes folders.
+
+	The paths are assumed to be relative to the task generator path, except if they begin by **#**
+	in which case they are searched from the top-level directory (``bld.srcnode``).
+	The folders are simply assumed to exist.
+
+	The node objects in the list are returned in the output list. The strings are converted
+	into node objects if possible. The node is searched from the source directory, and if a match is found,
+	the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.
+
+	:param inlst: list of folders
+	:type inlst: space-delimited string or a list of string/nodes
+	:rtype: list of :py:class:`waflib.Node.Node`
+	:return: list of include folders as nodes
+	"""
+	lst = []
+	seen = set()
+	for x in self.to_list(inlst):
+		if x in seen or not x:
+			continue
+		seen.add(x)
+
+		# with a very large number of targets, it can be worthwhile to cache the results below
+		if isinstance(x, Node.Node):
+			lst.append(x)
+		else:
+			if os.path.isabs(x):
+				lst.append(self.bld.root.make_node(x) or x)
+			else:
+				if x[0] == '#':
+					p = self.bld.bldnode.make_node(x[1:])
+					v = self.bld.srcnode.make_node(x[1:])
+				else:
+					p = self.path.get_bld().make_node(x)
+					v = self.path.make_node(x)
+				if p.is_child_of(self.bld.bldnode):
+					p.mkdir()
+				lst.append(p)
+				lst.append(v)
+	return lst
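+
+# For instance, to_incnodes('. inc') returns both the build and the source node of
+# each folder, so that generated headers in the build directory are found as well.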
+
+@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+	"""
+	Task generator method that processes the attribute *includes*::
+
+		tg = bld(features='includes', includes='.')
+
+	The folders only need to be relative to the current directory, the equivalent build directory is
+	added automatically (for headers created in the build directory). This enables using a build directory
+	or not (``top == out``).
+
+	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
+	and the list of include paths in ``tg.env.INCLUDES``.
+	"""
+
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
+	self.includes_nodes = lst
+	cwd = self.get_cwd()
+	self.env.INCPATHS = [x.path_from(cwd) for x in lst]
+
+class link_task(Task.Task):
+	"""
+	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.
+
+	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib  waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
+	  :top-classes: waflib.Tools.ccroot.link_task
+	"""
+	color   = 'YELLOW'
+
+	weight  = 3
+	"""Try to process link tasks as early as possible"""
+
+	inst_to = None
+	"""Default installation path for the link task outputs, or None to disable"""
+
+	chmod   = Utils.O755
+	"""Default installation mode for the link task outputs"""
+
+	def add_target(self, target):
+		"""
+		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
+		The settings are retrieved from ``env.clsname_PATTERN``
+		"""
+		if isinstance(target, str):
+			base = self.generator.path
+			if target.startswith('#'):
+				# for those who like flat structures
+				target = target[1:]
+				base = self.generator.bld.bldnode
+
+			pattern = self.env[self.__class__.__name__ + '_PATTERN']
+			if not pattern:
+				pattern = '%s'
+			folder, name = os.path.split(target)
+
+			if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
+				nums = self.generator.vnum.split('.')
+				if self.env.DEST_BINFMT == 'pe':
+					# include the version in the dll file name,
+					# the import lib file name stays unversioned.
+					name = name + '-' + nums[0]
+				elif self.env.DEST_OS == 'openbsd':
+					pattern = '%s.%s' % (pattern, nums[0])
+					if len(nums) >= 2:
+						pattern += '.%s' % nums[1]
+
+			if folder:
+				tmp = folder + os.sep + pattern % name
+			else:
+				tmp = pattern % name
+			target = base.find_or_declare(tmp)
+		self.set_outputs(target)
+
+	def exec_command(self, *k, **kw):
+		ret = super(link_task, self).exec_command(*k, **kw)
+		if not ret and self.env.DO_MANIFEST:
+			ret = self.exec_mf()
+		return ret
+
+	def exec_mf(self):
+		"""
+		Create manifest files for VS-like compilers (msvc, ifort, ...)
+		"""
+		if not self.env.MT:
+			return 0
+
+		manifest = None
+		for out_node in self.outputs:
+			if out_node.name.endswith('.manifest'):
+				manifest = out_node.abspath()
+				break
+		else:
+			# Should never get here.  If we do, it means the manifest file was
+			# never added to the outputs list, thus we don't have a manifest file
+			# to embed, so we just return.
+			return 0
+
+		# embedding mode. Different for EXE's and DLL's.
+		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+		mode = ''
+		for x in Utils.to_list(self.generator.features):
+			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
+				mode = 1
+			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
+				mode = 2
+
+		Logs.debug('msvc: embedding manifest in mode %r', mode)
+
+		lst = [] + self.env.MT
+		lst.extend(Utils.to_list(self.env.MTFLAGS))
+		lst.extend(['-manifest', manifest])
+		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
+
+		return super(link_task, self).exec_command(lst)
+
+class stlink_task(link_task):
+	"""
+	Base for static link tasks, which use *ar* most of the time.
+	The target is always removed before being written.
+	"""
+	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+
+	chmod   = Utils.O644
+	"""Default installation mode for the static libraries"""
+
+def rm_tgt(cls):
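+	"""Wrap the class *run* method to remove the target before running the task (used by stlink_task)"""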
+	old = cls.run
+	def wrap(self):
+		try:
+			os.remove(self.outputs[0].abspath())
+		except OSError:
+			pass
+		return old(self)
+	setattr(cls, 'run', wrap)
+rm_tgt(stlink_task)
+
+@feature('skip_stlib_link_deps')
+@before_method('process_use')
+def apply_skip_stlib_link_deps(self):
+	"""
+	This enables an optimization in the :py:func:`waflib.Tools.ccroot.process_use` method that skips dependency and
+	link flag optimizations for targets that generate static libraries (via the :py:class:`waflib.Tools.ccroot.stlink_task` task).
+	The actual behavior is implemented in :py:func:`waflib.Tools.ccroot.process_use`, so this feature only tells waf
+	to enable the new behavior.
+	"""
+	self.env.SKIP_STLIB_LINK_DEPS = True
+
+@feature('c', 'cxx', 'd', 'fc', 'asm')
+@after_method('process_source')
+def apply_link(self):
+	"""
+	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
+	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
+	matching a name from the attribute *features*, for example::
+
+			def build(bld):
+				tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')
+
+	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
+	"""
+
+	for x in self.features:
+		if x == 'cprogram' and 'cxx' in self.features: # limited compat
+			x = 'cxxprogram'
+		elif x == 'cshlib' and 'cxx' in self.features:
+			x = 'cxxshlib'
+
+		if x in Task.classes:
+			if issubclass(Task.classes[x], link_task):
+				link = x
+				break
+	else:
+		return
+
+	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
+	self.link_task = self.create_task(link, objs)
+	self.link_task.add_target(self.target)
+
+	# remember that the install paths are given by the task generators
+	try:
+		inst_to = self.install_path
+	except AttributeError:
+		inst_to = self.link_task.inst_to
+	if inst_to:
+		# install a copy of the node list we have at this moment (implib not added)
+		self.install_task = self.add_install_files(
+			install_to=inst_to, install_from=self.link_task.outputs[:],
+			chmod=self.link_task.chmod, task=self.link_task)
+
+@taskgen_method
+def use_rec(self, name, **kw):
+	"""
+	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
+	"""
+
+	if name in self.tmp_use_not or name in self.tmp_use_seen:
+		return
+
+	try:
+		y = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.uselib.append(name)
+		self.tmp_use_not.add(name)
+		return
+
+	self.tmp_use_seen.append(name)
+	y.post()
+
+	# bind temporary attributes on the task generator
+	y.tmp_use_objects = objects = kw.get('objects', True)
+	y.tmp_use_stlib   = stlib   = kw.get('stlib', True)
+	try:
+		link_task = y.link_task
+	except AttributeError:
+		y.tmp_use_var = ''
+	else:
+		objects = False
+		if not isinstance(link_task, stlink_task):
+			stlib = False
+			y.tmp_use_var = 'LIB'
+		else:
+			y.tmp_use_var = 'STLIB'
+
+	p = self.tmp_use_prec
+	for x in self.to_list(getattr(y, 'use', [])):
+		if self.env["STLIB_" + x]:
+			continue
+		try:
+			p[x].append(name)
+		except KeyError:
+			p[x] = [name]
+		self.use_rec(x, objects=objects, stlib=stlib)
+
+@feature('c', 'cxx', 'd', 'use', 'fc')
+@before_method('apply_incpaths', 'propagate_uselib_vars')
+@after_method('apply_link', 'process_source')
+def process_use(self):
+	"""
+	Process the ``use`` attribute which contains a list of task generator names::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='lib1')
+			bld.program(source='main.c', target='app', use='lib1')
+
+	See :py:func:`waflib.Tools.ccroot.use_rec`.
+	"""
+
+	use_not = self.tmp_use_not = set()
+	self.tmp_use_seen = [] # we would like an ordered set
+	use_prec = self.tmp_use_prec = {}
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	self.includes = self.to_list(getattr(self, 'includes', []))
+	names = self.to_list(getattr(self, 'use', []))
+
+	for x in names:
+		self.use_rec(x)
+
+	for x in use_not:
+		if x in use_prec:
+			del use_prec[x]
+
+	# topological sort
+	out = self.tmp_use_sorted = []
+	tmp = []
+	for x in self.tmp_use_seen:
+		for k in use_prec.values():
+			if x in k:
+				break
+		else:
+			tmp.append(x)
+
+	while tmp:
+		e = tmp.pop()
+		out.append(e)
+		try:
+			nlst = use_prec[e]
+		except KeyError:
+			pass
+		else:
+			del use_prec[e]
+			for x in nlst:
+				for y in use_prec:
+					if x in use_prec[y]:
+						break
+				else:
+					tmp.append(x)
+	if use_prec:
+		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
+	out.reverse()
+
+	link_task = getattr(self, 'link_task', None)
+	for x in out:
+		y = self.bld.get_tgen_by_name(x)
+		var = y.tmp_use_var
+		if var and link_task:
+			if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task, stlink_task):
+				# If the skip_stlib_link_deps feature is enabled then we should
+				# avoid adding lib deps to the stlink_task instance.
+				pass
+			elif var == 'LIB' or y.tmp_use_stlib or x in names:
+				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
+				self.link_task.dep_nodes.extend(y.link_task.outputs)
+				tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
+				self.env.append_unique(var + 'PATH', [tmp_path])
+		else:
+			if y.tmp_use_objects:
+				self.add_objects_from_tgen(y)
+
+		if getattr(y, 'export_includes', None):
+			# self.includes may come from a global variable #2035
+			self.includes = self.includes + y.to_incnodes(y.export_includes)
+
+		if getattr(y, 'export_defines', None):
+			self.env.append_value('DEFINES', self.to_list(y.export_defines))
+
+
+	# and finally, add the use variables (no recursion needed)
+	for x in names:
+		try:
+			y = self.bld.get_tgen_by_name(x)
+		except Errors.WafError:
+			if not self.env['STLIB_' + x] and not x in self.uselib:
+				self.uselib.append(x)
+		else:
+			for k in self.to_list(getattr(y, 'use', [])):
+				if not self.env['STLIB_' + k] and not k in self.uselib:
+					self.uselib.append(k)
+
+@taskgen_method
+def accept_node_to_link(self, node):
+	"""
+	PRIVATE INTERNAL USE ONLY
+	"""
+	return not node.name.endswith('.pdb')
+
+@taskgen_method
+def add_objects_from_tgen(self, tg):
+	"""
+	Add the objects from the depending compiled tasks as link task inputs.
+
+	Some objects are filtered: for instance, .pdb files are added
+	to the compiled tasks but not to the link tasks (to avoid errors)
+	PRIVATE INTERNAL USE ONLY
+	"""
+	try:
+		link_task = self.link_task
+	except AttributeError:
+		pass
+	else:
+		for tsk in getattr(tg, 'compiled_tasks', []):
+			for x in tsk.outputs:
+				if self.accept_node_to_link(x):
+					link_task.inputs.append(x)
+
+@taskgen_method
+def get_uselib_vars(self):
+	"""
+	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
+	:rtype: list of string
+	"""
+	_vars = set()
+	for x in self.features:
+		if x in USELIB_VARS:
+			_vars |= USELIB_VARS[x]
+	return _vars
+
+@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
+@after_method('process_use')
+def propagate_uselib_vars(self):
+	"""
+	Process uselib variables for adding flags. For example, the following target::
+
+		def build(bld):
+			bld.env.AFLAGS_aaa = ['bar']
+			from waflib.Tools.ccroot import USELIB_VARS
+			USELIB_VARS['aaa'] = ['AFLAGS']
+
+			tg = bld(features='aaa', aflags='test')
+
+	The *aflags* attribute will be processed and this method will set::
+
+			tg.env.AFLAGS = ['bar', 'test']
+	"""
+	_vars = self.get_uselib_vars()
+	env = self.env
+	app = env.append_value
+	feature_uselib = self.features + self.to_list(getattr(self, 'uselib', []))
+	for var in _vars:
+		y = var.lower()
+		val = getattr(self, y, [])
+		if val:
+			app(var, self.to_list(val))
+
+		for x in feature_uselib:
+			val = env['%s_%s' % (var, x)]
+			if val:
+				app(var, val)
+
+# ============ the code above must not know anything about import libs ==========
+
+@feature('cshlib', 'cxxshlib', 'fcshlib')
+@after_method('apply_link')
+def apply_implib(self):
+	"""
+	Handle dlls and their import libs on Windows-like systems.
+
+	A ``.dll.a`` file called *import library* is generated.
+	It must be installed as it is required for linking the library.
+	"""
+	if not self.env.DEST_BINFMT == 'pe':
+		return
+
+	dll = self.link_task.outputs[0]
+	if isinstance(self.target, Node.Node):
+		name = self.target.name
+	else:
+		name = os.path.split(self.target)[1]
+	implib = self.env.implib_PATTERN % name
+	implib = dll.parent.find_or_declare(implib)
+	self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
+	self.link_task.outputs.append(implib)
+
+	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
+		node = self.path.find_resource(self.defs)
+		if not node:
+			raise Errors.WafError('invalid def file %r' % self.defs)
+		if self.env.def_PATTERN:
+			self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
+			self.link_task.dep_nodes.append(node)
+		else:
+			# gcc for windows takes *.def file as input without any special flag
+			self.link_task.inputs.append(node)
+
+	# where to put the import library
+	if getattr(self, 'install_task', None):
+		try:
+			# user has given a specific installation path for the import library
+			inst_to = self.install_path_implib
+		except AttributeError:
+			try:
+				# user has given an installation path for the main library, put the import library in it
+				inst_to = self.install_path
+			except AttributeError:
+				# else, put the library in BINDIR and the import library in LIBDIR
+				inst_to = '${IMPLIBDIR}'
+				self.install_task.install_to = '${BINDIR}'
+				if not self.env.IMPLIBDIR:
+					self.env.IMPLIBDIR = self.env.LIBDIR
+		self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
+			chmod=self.link_task.chmod, task=self.link_task)
+
+# ============ the code above must not know anything about vnum processing on unix platforms =========
+
+re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
+@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
+@after_method('apply_link', 'propagate_uselib_vars')
+def apply_vnum(self):
+	"""
+	Enforce version numbering on shared libraries. The valid version numbers must have at most two dots::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16')
+
+	In this example, on the Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
+
+	* ``libfoo.so    → libfoo.so.14.15.16``
+	* ``libfoo.so.14 → libfoo.so.14.15.16``
+
+	By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library.  When necessary, the compatibility can be explicitly defined using the ``cnum`` parameter::
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
+
+	In this case, the assigned SONAME will be ``libfoo.so.14.15``, with ABI compatibility only between patch releases for a specific major and minor version of the library.
+
+	On the OS X platform, the install-name parameter follows the above SONAME logic, with the exception that it also specifies an absolute path (based on install_path) to the library.
+	"""
+	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
+		return
+
+	link = self.link_task
+	if not re_vnum.match(self.vnum):
+		raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
+	nums = self.vnum.split('.')
+	node = link.outputs[0]
+
+	cnum = getattr(self, 'cnum', str(nums[0]))
+	cnums = cnum.split('.')
+	if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
+		raise Errors.WafError('invalid compatibility version %s' % cnum)
+
+	libname = node.name
+	if libname.endswith('.dylib'):
+		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
+		name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
+	else:
+		name3 = libname + '.' + self.vnum
+		name2 = libname + '.' + cnum
+
+	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
+	if self.env.SONAME_ST:
+		v = self.env.SONAME_ST % name2
+		self.env.append_value('LINKFLAGS', v.split())
+
+	# the following task is just to enable execution from the build dir :-/
+	if self.env.DEST_OS != 'openbsd':
+		outs = [node.parent.make_node(name3)]
+		if name2 != name3:
+			outs.append(node.parent.make_node(name2))
+		self.create_task('vnum', node, outs)
+
+	if getattr(self, 'install_task', None):
+		self.install_task.hasrun = Task.SKIPPED
+		self.install_task.no_errcheck_out = True
+		path = self.install_task.install_to
+		if self.env.DEST_OS == 'openbsd':
+			libname = self.link_task.outputs[0].name
+			t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
+			self.vnum_install_task = (t1,)
+		else:
+			t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
+			t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
+			if name2 != name3:
+				t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
+				self.vnum_install_task = (t1, t2, t3)
+			else:
+				self.vnum_install_task = (t1, t3)
+
+	if '-dynamiclib' in self.env.LINKFLAGS:
+		# this requires after(propagate_uselib_vars)
+		try:
+			inst_to = self.install_path
+		except AttributeError:
+			inst_to = self.link_task.inst_to
+		if inst_to:
+			p = Utils.subst_vars(inst_to, self.env)
+			path = os.path.join(p, name2)
+			self.env.append_value('LINKFLAGS', ['-install_name', path])
+			self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
+			self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
+
+class vnum(Task.Task):
+	"""
+	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
+	"""
+	color = 'CYAN'
+	ext_in = ['.bin']
+	def keyword(self):
+		return 'Symlinking'
+	def run(self):
+		for x in self.outputs:
+			path = x.abspath()
+			try:
+				os.remove(path)
+			except OSError:
+				pass
+
+			try:
+				os.symlink(self.inputs[0].name, path)
+			except OSError:
+				return 1
+
+class fake_shlib(link_task):
+	"""
+	Task used for reading a system library and adding the dependency on it
+	"""
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+		return Task.SKIP_ME
+
+class fake_stlib(stlink_task):
+	"""
+	Task used for reading a system library and adding the dependency on it
+	"""
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+		return Task.SKIP_ME
+
+@conf
+def read_shlib(self, name, paths=[], export_includes=[], export_defines=[]):
+	"""
+	Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::
+
+		def build(bld):
+			bld.read_shlib('m')
+			bld.program(source='main.c', use='m')
+	"""
+	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines)
+
+@conf
+def read_stlib(self, name, paths=[], export_includes=[], export_defines=[]):
+	"""
+	Read a system static library, enabling its use as a local library. Will trigger a rebuild if the file changes.
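+
+	A minimal usage sketch, mirroring :py:func:`waflib.Tools.ccroot.read_shlib` (the library name 'mystatic' is illustrative)::
+
+		def build(bld):
+			bld.read_stlib('mystatic')
+			bld.program(source='main.c', use='mystatic')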
+	"""
+	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines)
+
+lib_patterns = {
+	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
+	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
+}
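+# e.g. read_shlib('m') probes each search path for 'libm.so', 'm.so',
+# 'libm.dylib', 'libm.dll' and 'm.dll', in this order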
+
+@feature('fake_lib')
+def process_lib(self):
+	"""
+	Find the location of a foreign library. Used by :py:func:`waflib.Tools.ccroot.read_shlib` and :py:func:`waflib.Tools.ccroot.read_stlib`.
+	"""
+	node = None
+
+	names = [x % self.name for x in lib_patterns[self.lib_type]]
+	for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
+		if not isinstance(x, Node.Node):
+			x = self.bld.root.find_node(x) or self.path.find_node(x)
+			if not x:
+				continue
+
+		for y in names:
+			node = x.find_node(y)
+			if node:
+				try:
+					Utils.h_file(node.abspath())
+				except EnvironmentError:
+					raise ValueError('Could not read %r' % y)
+				break
+		else:
+			continue
+		break
+	else:
+		raise Errors.WafError('could not find library %r' % self.name)
+	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
+	self.target = self.name
+
+
+class fake_o(Task.Task):
+	def runnable_status(self):
+		return Task.SKIP_ME
+
+@extension('.o', '.obj')
+def add_those_o_files(self, node):
+	tsk = self.create_task('fake_o', [], node)
+	try:
+		self.compiled_tasks.append(tsk)
+	except AttributeError:
+		self.compiled_tasks = [tsk]
+
+@feature('fake_obj')
+@before_method('process_source')
+def process_objs(self):
+	"""
+	Puts object files in the task generator outputs
+	"""
+	for node in self.to_nodes(self.source):
+		self.add_those_o_files(node)
+	self.source = []
+
+@conf
+def read_object(self, obj):
+	"""
+	Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.
+
+	:param obj: object file path, as string or Node
+	"""
+	if not isinstance(obj, self.path.__class__):
+		obj = self.path.find_resource(obj)
+	return self(features='fake_obj', source=obj, name=obj.name)
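+# A minimal usage sketch; 'foo.o' stands for an illustrative pre-built object file:
+#
+#	def build(bld):
+#		bld.read_object('foo.o')
+#		bld.program(source='main.c', use='foo.o')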
+
+@feature('cxxprogram', 'cprogram')
+@after_method('apply_link', 'process_use')
+def set_full_paths_hpux(self):
+	"""
+	On hp-ux, extend the libpaths and static library paths to absolute paths
+	"""
+	if self.env.DEST_OS != 'hp-ux':
+		return
+	base = self.bld.bldnode.abspath()
+	for var in ['LIBPATH', 'STLIBPATH']:
+		lst = []
+		for x in self.env[var]:
+			if x.startswith('/'):
+				lst.append(x)
+			else:
+				lst.append(os.path.normpath(os.path.join(base, x)))
+		self.env[var] = lst
+
diff --git a/third_party/waf/waflib/Tools/clang.py b/third_party/waf/waflib/Tools/clang.py
new file mode 100644
index 0000000..3828e39
--- /dev/null
+++ b/third_party/waf/waflib/Tools/clang.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Krzysztof Kosiński 2014
+
+"""
+Detect the Clang C compiler
+"""
+
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+
+@conf
+def find_clang(conf):
+	"""
+	Finds the program clang and executes it to ensure it really is clang
+	"""
+	cc = conf.find_program('clang', var='CC')
+	conf.get_cc_version(cc, clang=True)
+	conf.env.CC_NAME = 'clang'
+
+def configure(conf):
+	conf.find_clang()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/clangxx.py b/third_party/waf/waflib/Tools/clangxx.py
new file mode 100644
index 0000000..152013c
--- /dev/null
+++ b/third_party/waf/waflib/Tools/clangxx.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2018 (ita)
+
+"""
+Detect the Clang++ C++ compiler
+"""
+
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+
+@conf
+def find_clangxx(conf):
+	"""
+	Finds the program clang++ and executes it to ensure it really is clang++
+	"""
+	cxx = conf.find_program('clang++', var='CXX')
+	conf.get_cc_version(cxx, clang=True)
+	conf.env.CXX_NAME = 'clang'
+
+def configure(conf):
+	conf.find_clangxx()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/compiler_c.py b/third_party/waf/waflib/Tools/compiler_c.py
new file mode 100644
index 0000000..e033ce6
--- /dev/null
+++ b/third_party/waf/waflib/Tools/compiler_c.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
+
+"""
+Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc)::
+
+	def options(opt):
+		opt.load('compiler_c')
+	def configure(cnf):
+		cnf.load('compiler_c')
+	def build(bld):
+		bld.program(source='main.c', target='app')
+
+The compilers are associated with platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register
+a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
+
+	from waflib.Tools.compiler_c import c_compiler
+	c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
+	def options(opt):
+		opt.load('compiler_c')
+	def configure(cnf):
+		cnf.load('compiler_c')
+	def build(bld):
+		bld.program(source='main.c', target='app')
+
+Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
+
+	$ CC=clang waf configure
+"""
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+
+c_compiler = {
+'win32':       ['msvc', 'gcc', 'clang'],
+'cygwin':      ['gcc', 'clang'],
+'darwin':      ['clang', 'gcc'],
+'aix':         ['xlc', 'gcc', 'clang'],
+'linux':       ['gcc', 'clang', 'icc'],
+'sunos':       ['suncc', 'gcc'],
+'irix':        ['gcc', 'irixcc'],
+'hpux':        ['gcc'],
+'osf1V':       ['gcc'],
+'gnu':         ['gcc', 'clang'],
+'java':        ['gcc', 'msvc', 'clang', 'icc'],
+'gnukfreebsd': ['gcc', 'clang'],
+'default':     ['clang', 'gcc'],
+}
+"""
+Dict mapping platform names to Waf tools finding specific C compilers::
+
+	from waflib.Tools.compiler_c import c_compiler
+	c_compiler['linux'] = ['gcc', 'icc', 'suncc']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = c_compiler.get(build_platform, c_compiler['default'])
+	return ' '.join(possible_compiler_list)
+
+def configure(conf):
+	"""
+	Detects a suitable C compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+	"""
+	try:
+		test_for_compiler = conf.options.check_c_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_c')")
+
+	for compiler in re.split('[ ,]+', test_for_compiler):
+		conf.env.stash()
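+		# the snapshot taken above is reverted when the probe fails and
+		# committed once a working compiler is found below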
+		conf.start_msg('Checking for %r (C compiler)' % compiler)
+		try:
+			conf.load(compiler)
+		except conf.errors.ConfigurationError as e:
+			conf.env.revert()
+			conf.end_msg(False)
+			debug('compiler_c: %r', e)
+		else:
+			if conf.env.CC:
+				conf.end_msg(conf.env.get_flat('CC'))
+				conf.env.COMPILER_CC = compiler
+				conf.env.commit()
+				break
+			conf.env.revert()
+			conf.end_msg(False)
+	else:
+		conf.fatal('could not configure a C compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-c-compiler=gcc
+	"""
+	test_for_compiler = default_compilers()
+	opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
+	cc_compiler_opts = opt.add_option_group('Configuration options')
+	cc_compiler_opts.add_option('--check-c-compiler', default=None,
+		help='list of C compilers to try [%s]' % test_for_compiler,
+		dest="check_c_compiler")
+
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
+
diff --git a/third_party/waf/waflib/Tools/compiler_cxx.py b/third_party/waf/waflib/Tools/compiler_cxx.py
new file mode 100644
index 0000000..42658c5
--- /dev/null
+++ b/third_party/waf/waflib/Tools/compiler_cxx.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
+
+"""
+Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc)::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(cnf):
+		cnf.load('compiler_cxx')
+	def build(bld):
+		bld.program(source='main.cpp', target='app')
+
+The compilers are associated with platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register
+a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
+
+	from waflib.Tools.compiler_cxx import cxx_compiler
+	cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(cnf):
+		cnf.load('compiler_cxx')
+	def build(bld):
+		bld.program(source='main.cpp', target='app')
+
+Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
+
+	$ CXX=clang waf configure
+"""
+
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+
+cxx_compiler = {
+'win32':       ['msvc', 'g++', 'clang++'],
+'cygwin':      ['g++', 'clang++'],
+'darwin':      ['clang++', 'g++'],
+'aix':         ['xlc++', 'g++', 'clang++'],
+'linux':       ['g++', 'clang++', 'icpc'],
+'sunos':       ['sunc++', 'g++'],
+'irix':        ['g++'],
+'hpux':        ['g++'],
+'osf1V':       ['g++'],
+'gnu':         ['g++', 'clang++'],
+'java':        ['g++', 'msvc', 'clang++', 'icpc'],
+'gnukfreebsd': ['g++', 'clang++'],
+'default':     ['clang++', 'g++']
+}
+"""
+Dict mapping the platform names to Waf tools finding specific C++ compilers::
+
+	from waflib.Tools.compiler_cxx import cxx_compiler
+	cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default'])
+	return ' '.join(possible_compiler_list)
+
+def configure(conf):
+	"""
+	Detects a suitable C++ compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+	"""
+	try:
+		test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+
+	for compiler in re.split('[ ,]+', test_for_compiler):
+		conf.env.stash()
+		conf.start_msg('Checking for %r (C++ compiler)' % compiler)
+		try:
+			conf.load(compiler)
+		except conf.errors.ConfigurationError as e:
+			conf.env.revert()
+			conf.end_msg(False)
+			debug('compiler_cxx: %r', e)
+		else:
+			if conf.env.CXX:
+				conf.end_msg(conf.env.get_flat('CXX'))
+				conf.env.COMPILER_CXX = compiler
+				conf.env.commit()
+				break
+			conf.env.revert()
+			conf.end_msg(False)
+	else:
+		conf.fatal('could not configure a C++ compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-cxx-compiler=gxx
+	"""
+	test_for_compiler = default_compilers()
+	opt.load_special_tools('cxx_*.py')
+	cxx_compiler_opts = opt.add_option_group('Configuration options')
+	cxx_compiler_opts.add_option('--check-cxx-compiler', default=None,
+		help='list of C++ compilers to try [%s]' % test_for_compiler,
+		dest="check_cxx_compiler")
+
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
+
diff --git a/third_party/waf/waflib/Tools/compiler_d.py b/third_party/waf/waflib/Tools/compiler_d.py
new file mode 100644
index 0000000..43bb1f6
--- /dev/null
+++ b/third_party/waf/waflib/Tools/compiler_d.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2016-2018 (ita)
+
+"""
+Try to detect a D compiler from the list of supported compilers::
+
+	def options(opt):
+		opt.load('compiler_d')
+	def configure(cnf):
+		cnf.load('compiler_d')
+	def build(bld):
+		bld.program(source='main.d', target='app')
+
+Only three D compilers are really present at the moment:
+
+* gdc
+* dmd (the ldc compiler has a very similar command-line interface)
+* ldc2
+"""
+
+import re
+from waflib import Utils, Logs
+
+d_compiler = {
+'default' : ['gdc', 'dmd', 'ldc2']
+}
+"""
+Dict mapping the platform names to lists of names of D compilers to try, in order of preference::
+
+	from waflib.Tools.compiler_d import d_compiler
+	d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = d_compiler.get(build_platform, d_compiler['default'])
+	return ' '.join(possible_compiler_list)
+
+def configure(conf):
+	"""
+	Detects a suitable D compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+	"""
+	try:
+		test_for_compiler = conf.options.check_d_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_d')")
+
+	for compiler in re.split('[ ,]+', test_for_compiler):
+		conf.env.stash()
+		conf.start_msg('Checking for %r (D compiler)' % compiler)
+		try:
+			conf.load(compiler)
+		except conf.errors.ConfigurationError as e:
+			conf.env.revert()
+			conf.end_msg(False)
+			Logs.debug('compiler_d: %r', e)
+		else:
+			if conf.env.D:
+				conf.end_msg(conf.env.get_flat('D'))
+				conf.env.COMPILER_D = compiler
+				conf.env.commit()
+				break
+			conf.env.revert()
+			conf.end_msg(False)
+	else:
+		conf.fatal('could not configure a D compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-d-compiler=dmd
+	"""
+	test_for_compiler = default_compilers()
+	d_compiler_opts = opt.add_option_group('Configuration options')
+	d_compiler_opts.add_option('--check-d-compiler', default=None,
+		help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')
+
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
+
diff --git a/third_party/waf/waflib/Tools/compiler_fc.py b/third_party/waf/waflib/Tools/compiler_fc.py
new file mode 100644
index 0000000..96b58e7
--- /dev/null
+++ b/third_party/waf/waflib/Tools/compiler_fc.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+import re
+from waflib import Utils, Logs
+from waflib.Tools import fc
+
+fc_compiler = {
+	'win32'  : ['gfortran','ifort'],
+	'darwin' : ['gfortran', 'g95', 'ifort'],
+	'linux'  : ['gfortran', 'g95', 'ifort'],
+	'java'   : ['gfortran', 'g95', 'ifort'],
+	'default': ['gfortran'],
+	'aix'    : ['gfortran']
+}
+"""
+Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::
+
+	from waflib.Tools.compiler_fc import fc_compiler
+	fc_compiler['linux'] = ['gfortran', 'g95', 'ifort']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default'])
+	return ' '.join(possible_compiler_list)
+
+def configure(conf):
+	"""
+	Detects a suitable Fortran compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+	"""
+	try:
+		test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_fc')")
+	for compiler in re.split('[ ,]+', test_for_compiler):
+		conf.env.stash()
+		conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
+		try:
+			conf.load(compiler)
+		except conf.errors.ConfigurationError as e:
+			conf.env.revert()
+			conf.end_msg(False)
+			Logs.debug('compiler_fortran: %r', e)
+		else:
+			if conf.env.FC:
+				conf.end_msg(conf.env.get_flat('FC'))
+				conf.env.COMPILER_FORTRAN = compiler
+				conf.env.commit()
+				break
+			conf.env.revert()
+			conf.end_msg(False)
+	else:
+		conf.fatal('could not configure a Fortran compiler!')
+
+def options(opt):
+	"""
+	This is how to provide compiler preferences on the command-line::
+
+		$ waf configure --check-fortran-compiler=ifort
+	"""
+	test_for_compiler = default_compilers()
+	opt.load_special_tools('fc_*.py')
+	fortran_compiler_opts = opt.add_option_group('Configuration options')
+	fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
+		help='list of Fortran compilers to try [%s]' % test_for_compiler,
+		dest="check_fortran_compiler")
+
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
+
diff --git a/third_party/waf/waflib/Tools/cs.py b/third_party/waf/waflib/Tools/cs.py
new file mode 100644
index 0000000..aecca6d
--- /dev/null
+++ b/third_party/waf/waflib/Tools/cs.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+C# support. A simple example::
+
+	def configure(conf):
+		conf.load('cs')
+	def build(bld):
+		bld(features='cs', source='main.cs', gen='foo')
+
+Note that the configuration may compile C# snippets::
+
+	FRAG = '''
+	namespace Moo {
+		public class Test { public static int Main(string[] args) { return 0; } }
+	}'''
+	def configure(conf):
+		conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
+			bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
+"""
+
+from waflib import Utils, Task, Options, Errors
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+
+ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
+ccroot.lib_patterns['csshlib'] = ['%s']
+
+@feature('cs')
+@before_method('process_source')
+def apply_cs(self):
+	"""
+	Create a C# task bound to the attribute *cs_task*. There can be only one C# task per task generator.
+	"""
+	cs_nodes = []
+	no_nodes = []
+	for x in self.to_nodes(self.source):
+		if x.name.endswith('.cs'):
+			cs_nodes.append(x)
+		else:
+			no_nodes.append(x)
+	self.source = no_nodes
+
+	bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
+	self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
+	tsk.env.CSTYPE = '/target:%s' % bintype
+	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
+	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))
+
+	inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
+	if inst_to:
+		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
+		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
+		self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
+
+@feature('cs')
+@after_method('apply_cs')
+def use_cs(self):
+	"""
+	C# applications honor the **use** keyword::
+
+		def build(bld):
+			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
+			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
+	"""
+	names = self.to_list(getattr(self, 'use', []))
+	get = self.bld.get_tgen_by_name
+	for x in names:
+		try:
+			y = get(x)
+		except Errors.WafError:
+			self.env.append_value('CSFLAGS', '/reference:%s' % x)
+			continue
+		y.post()
+
+		tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None)
+		if not tsk:
+			self.bld.fatal('cs task has no link task for use %r' % self)
+		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
+		self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
+		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
+
+@feature('cs')
+@after_method('apply_cs', 'use_cs')
+def debug_cs(self):
+	"""
+	The C# targets may create .mdb or .pdb files::
+
+		def build(bld):
+			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
+			# csdebug is a value in (True, 'full', 'pdbonly')
+	"""
+	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
+	if not csdebug:
+		return
+
+	node = self.cs_task.outputs[0]
+	if self.env.CS_NAME == 'mono':
+		out = node.parent.find_or_declare(node.name + '.mdb')
+	else:
+		out = node.change_ext('.pdb')
+	self.cs_task.outputs.append(out)
+
+	if getattr(self, 'install_task', None):
+		self.pdb_install_task = self.add_install_files(
+			install_to=self.install_task.install_to, install_from=out)
+
+	if csdebug == 'pdbonly':
+		val = ['/debug+', '/debug:pdbonly']
+	elif csdebug == 'full':
+		val = ['/debug+', '/debug:full']
+	else:
+		val = ['/debug-']
+	self.env.append_value('CSFLAGS', val)
+
+@feature('cs')
+@after_method('debug_cs')
+def doc_cs(self):
+	"""
+	The C# targets may create .xml documentation files::
+
+		def build(bld):
+			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
+			# csdoc is a boolean value
+	"""
+	csdoc = getattr(self, 'csdoc', self.env.CSDOC)
+	if not csdoc:
+		return
+
+	node = self.cs_task.outputs[0]
+	out = node.change_ext('.xml')
+	self.cs_task.outputs.append(out)
+
+	if getattr(self, 'install_task', None):
+		self.doc_install_task = self.add_install_files(
+			install_to=self.install_task.install_to, install_from=out)
+
+	self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath())
+
+class mcs(Task.Task):
+	"""
+	Compile C# files
+	"""
+	color   = 'YELLOW'
+	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+
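+	# when the command line exceeds the OS limits, Task.exec_command calls
+	# split_argfile and passes the 'infile' part through a temporary @argfile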
+	def split_argfile(self, cmd):
+		inline = [cmd[0]]
+		infile = []
+		for x in cmd[1:]:
+			# csc doesn't want /noconfig in @file
+			if x.lower() == '/noconfig':
+				inline.append(x)
+			else:
+				infile.append(self.quote_flag(x))
+		return (inline, infile)
+
+def configure(conf):
+	"""
+	Finds a C# compiler and sets the variables MCS (the compiler command) and CS_NAME (mono or csc)
+	"""
+	csc = getattr(Options.options, 'cscbinary', None)
+	if csc:
+		conf.env.MCS = csc
+	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
+	conf.env.ASS_ST = '/r:%s'
+	conf.env.RES_ST = '/resource:%s'
+
+	conf.env.CS_NAME = 'csc'
+	if str(conf.env.MCS).lower().find('mcs') > -1:
+		conf.env.CS_NAME = 'mono'
+
+def options(opt):
+	"""
+	Add a command-line option for the configuration::
+
+		$ waf configure --with-csc-binary=/foo/bar/mcs
+	"""
+	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
+
+class fake_csshlib(Task.Task):
+	"""
+	Task used for reading a foreign .net assembly and adding the dependency on it
+	"""
+	color   = 'YELLOW'
+	inst_to = None
+
+	def runnable_status(self):
+		return Task.SKIP_ME
+
+@conf
+def read_csshlib(self, name, paths=[]):
+	"""
+	Read a foreign .net assembly for the *use* system::
+
+		def build(bld):
+			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
+			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')
+
+	:param name: Name of the library
+	:type name: string
+	:param paths: Folders in which the library may be found
+	:type paths: list of string
+	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
+	:rtype: :py:class:`waflib.TaskGen.task_gen`
+	"""
+	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
+
diff --git a/third_party/waf/waflib/Tools/cxx.py b/third_party/waf/waflib/Tools/cxx.py
new file mode 100644
index 0000000..194fad7
--- /dev/null
+++ b/third_party/waf/waflib/Tools/cxx.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"Base for c++ programs and libraries"
+
+from waflib import TaskGen, Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task, stlink_task
+
+@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
+def cxx_hook(self, node):
+	"Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
+	return self.create_compiled_task('cxx', node)
+
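+# if the 'c' tool is not loaded, .c files are compiled as C++ files by default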
+if not '.c' in TaskGen.task_gen.mappings:
+	TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
+
+class cxx(Task.Task):
+	"Compiles C++ files into object files"
+	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+	vars    = ['CXXDEPS'] # unused variable to depend on, just in case
+	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
+	scan    = c_preproc.scan
+
+class cxxprogram(link_task):
+	"Links object files into c++ programs"
+	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
+	vars    = ['LINKDEPS']
+	ext_out = ['.bin']
+	inst_to = '${BINDIR}'
+
+class cxxshlib(cxxprogram):
+	"Links object files into c++ shared libraries"
+	inst_to = '${LIBDIR}'
+
+class cxxstlib(stlink_task):
+	"Links object files into c++ static libraries"
+	pass # do not remove
+
diff --git a/third_party/waf/waflib/Tools/d.py b/third_party/waf/waflib/Tools/d.py
new file mode 100644
index 0000000..e4cf73b
--- /dev/null
+++ b/third_party/waf/waflib/Tools/d.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2007-2018 (ita)
+
+from waflib import Utils, Task, Errors
+from waflib.TaskGen import taskgen_method, feature, extension
+from waflib.Tools import d_scan, d_config
+from waflib.Tools.ccroot import link_task, stlink_task
+
+class d(Task.Task):
+	"Compile a d file into an object file"
+	color   = 'GREEN'
+	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
+	scan    = d_scan.scan
+
+class d_with_header(d):
+	"Compile a d file and generate a header"
+	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
+
+class d_header(Task.Task):
+	"Compile d headers"
+	color   = 'BLUE'
+	run_str = '${D} ${D_HEADER} ${SRC}'
+
+class dprogram(link_task):
+	"Link object files into a d program"
+	run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
+	inst_to = '${BINDIR}'
+
+class dshlib(dprogram):
+	"Link object files into a d shared library"
+	inst_to = '${LIBDIR}'
+
+class dstlib(stlink_task):
+	"Link object files into a d static library"
+	pass # do not remove
+
+@extension('.d', '.di', '.D')
+def d_hook(self, node):
+	"""
+	Compile *D* files. To get .di files as well as .o files, set the following::
+
+		def build(bld):
+			bld.program(source='foo.d', target='app', generate_headers=True)
+
+	"""
+	ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o'
+	out = '%s.%d.%s' % (node.name, self.idx, ext)
+	def create_compiled_task(self, name, node):
+		task = self.create_task(name, node, node.parent.find_or_declare(out))
+		try:
+			self.compiled_tasks.append(task)
+		except AttributeError:
+			self.compiled_tasks = [task]
+		return task
+
+	if getattr(self, 'generate_headers', None):
+		tsk = create_compiled_task(self, 'd_with_header', node)
+		tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
+	else:
+		tsk = create_compiled_task(self, 'd', node)
+	return tsk
+
+@taskgen_method
+def generate_header(self, filename):
+	"""
+	See feature request #104::
+
+		def build(bld):
+			tg = bld.program(source='foo.d', target='app')
+			tg.generate_header('blah.d')
+			# is equivalent to:
+			#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')
+
+	:param filename: header to create
+	:type filename: string
+	"""
+	try:
+		self.header_lst.append([filename, self.install_path])
+	except AttributeError:
+		self.header_lst = [[filename, self.install_path]]
+
+@feature('d')
+def process_header(self):
+	"""
+	Process the attribute 'header_lst' to create the d header compilation tasks::
+
+		def build(bld):
+			bld.program(source='foo.d', target='app', header_lst='blah.d')
+	"""
+	for i in getattr(self, 'header_lst', []):
+		node = self.path.find_resource(i[0])
+		if not node:
+			raise Errors.WafError('file %r not found on d obj' % i[0])
+		self.create_task('d_header', node, node.change_ext('.di'))
+
diff --git a/third_party/waf/waflib/Tools/d_config.py b/third_party/waf/waflib/Tools/d_config.py
new file mode 100644
index 0000000..6637556
--- /dev/null
+++ b/third_party/waf/waflib/Tools/d_config.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016-2018 (ita)
+
+from waflib import Utils
+from waflib.Configure import conf
+
+@conf
+def d_platform_flags(self):
+	"""
+	Sets the extensions dll/so for d programs and libraries
+	"""
+	v = self.env
+	if not v.DEST_OS:
+		v.DEST_OS = Utils.unversioned_sys_platform()
+	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
+	if binfmt == 'pe':
+		v.dprogram_PATTERN = '%s.exe'
+		v.dshlib_PATTERN   = 'lib%s.dll'
+		v.dstlib_PATTERN   = 'lib%s.a'
+	elif binfmt == 'mac-o':
+		v.dprogram_PATTERN = '%s'
+		v.dshlib_PATTERN   = 'lib%s.dylib'
+		v.dstlib_PATTERN   = 'lib%s.a'
+	else:
+		v.dprogram_PATTERN = '%s'
+		v.dshlib_PATTERN   = 'lib%s.so'
+		v.dstlib_PATTERN   = 'lib%s.a'
+
+DLIB = '''
+version(D_Version2) {
+	import std.stdio;
+	int main() {
+		writefln("phobos2");
+		return 0;
+	}
+} else {
+	version(Tango) {
+		import tango.stdc.stdio;
+		int main() {
+			printf("tango");
+			return 0;
+		}
+	} else {
+		import std.stdio;
+		int main() {
+			writefln("phobos1");
+			return 0;
+		}
+	}
+}
+'''
+"""Detection string for the D standard library"""
+
+@conf
+def check_dlibrary(self, execute=True):
+	"""
+	Detects the kind of standard library that comes with the compiler,
+	and sets conf.env.DLIBRARY to tango, phobos1 or phobos2
+	"""
+	ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
+	if execute:
+		self.env.DLIBRARY = ret.strip()
+
diff --git a/third_party/waf/waflib/Tools/d_scan.py b/third_party/waf/waflib/Tools/d_scan.py
new file mode 100644
index 0000000..4e807a6
--- /dev/null
+++ b/third_party/waf/waflib/Tools/d_scan.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016-2018 (ita)
+
+"""
+Provide a scanner for finding dependencies on d files
+"""
+
+import re
+from waflib import Utils
+
+def filter_comments(filename):
+	"""
+	:param filename: d file name
+	:type filename: string
+	:rtype: list
+	:return: a list of characters
+	"""
+	txt = Utils.readf(filename)
+	i = 0
+	buf = []
+	max = len(txt)
+	begin = 0
+	while i < max:
+		c = txt[i]
+		if c == '"' or c == "'":  # skip a string or character literal
+			buf.append(txt[begin:i])
+			delim = c
+			i += 1
+			while i < max:
+				c = txt[i]
+				if c == delim:
+					break
+				elif c == '\\':  # skip the character following backslash
+					i += 1
+				i += 1
+			i += 1
+			begin = i
+		elif c == '/':  # try to replace a comment with whitespace
+			buf.append(txt[begin:i])
+			i += 1
+			if i == max:
+				break
+			c = txt[i]
+			if c == '+':  # eat nesting /+ +/ comment
+				i += 1
+				nesting = 1
+				c = None
+				while i < max:
+					prev = c
+					c = txt[i]
+					if prev == '/' and c == '+':
+						nesting += 1
+						c = None
+					elif prev == '+' and c == '/':
+						nesting -= 1
+						if nesting == 0:
+							break
+						c = None
+					i += 1
+			elif c == '*':  # eat /* */ comment
+				i += 1
+				c = None
+				while i < max:
+					prev = c
+					c = txt[i]
+					if prev == '*' and c == '/':
+						break
+					i += 1
+			elif c == '/':  # eat // comment
+				i += 1
+				while i < max and txt[i] != '\n':
+					i += 1
+			else:  # no comment
+				begin = i - 1
+				continue
+			i += 1
+			begin = i
+			buf.append(' ')
+		else:
+			i += 1
+	buf.append(txt[begin:])
+	return buf
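+# A small illustration: in 'int a; /+ note +/ import std.stdio;' the comment is
+# replaced by whitespace in the joined output, so commented-out import
+# statements never reach the scanner below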
+
+class d_parser(object):
+	"""
+	Parser for d files
+	"""
+	def __init__(self, env, incpaths):
+		#self.code = ''
+		#self.module = ''
+		#self.imports = []
+
+		self.allnames = []
+
+		self.re_module = re.compile(r"module\s+([^;]+)")
+		self.re_import = re.compile(r"import\s+([^;]+)")
+		self.re_import_bindings = re.compile("([^:]+):(.*)")
+		self.re_import_alias = re.compile("[^=]+=(.+)")
+
+		self.env = env
+
+		self.nodes = []
+		self.names = []
+
+		self.incpaths = incpaths
+
+	def tryfind(self, filename):
+		"""
+		Searches for a file matching a module/import directive
+
+		:param filename: file to read
+		:type filename: string
+		"""
+		found = 0
+		for n in self.incpaths:
+			found = n.find_resource(filename.replace('.', '/') + '.d')
+			if found:
+				self.nodes.append(found)
+				self.waiting.append(found)
+				break
+		if not found:
+			if not filename in self.names:
+				self.names.append(filename)
+
+	def get_strings(self, code):
+		"""
+		:param code: d code to parse
+		:type code: string
+		:return: the modules that the code uses
+		:rtype: a list of strings
+		"""
+		#self.imports = []
+		self.module = ''
+		lst = []
+
+		# get the module name (if present)
+
+		mod_name = self.re_module.search(code)
+		if mod_name:
+			self.module = re.sub(r'\s+', '', mod_name.group(1)) # strip all whitespaces
+
+		# go through the code, have a look at all import occurrences
+
+		# first, lets look at anything beginning with "import" and ending with ";"
+		import_iterator = self.re_import.finditer(code)
+		if import_iterator:
+			for import_match in import_iterator:
+				import_match_str = re.sub(r'\s+', '', import_match.group(1)) # strip all whitespaces
+
+				# does this end with an import bindings declaration?
+				# (import bindings always terminate the list of imports)
+				bindings_match = self.re_import_bindings.match(import_match_str)
+				if bindings_match:
+					import_match_str = bindings_match.group(1)
+					# if so, extract the part before the ":" (since the module declaration(s) is/are located there)
+
+				# split the matching string into a bunch of strings, separated by a comma
+				matches = import_match_str.split(',')
+
+				for match in matches:
+					alias_match = self.re_import_alias.match(match)
+					if alias_match:
+						# is this an alias declaration? (alias = module name) if so, extract the module name
+						match = alias_match.group(1)
+
+					lst.append(match)
+		return lst
+
+	def start(self, node):
+		"""
+		The parsing starts here
+
+		:param node: input file
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.waiting = [node]
+		# while the stack is not empty, add the dependencies
+		while self.waiting:
+			nd = self.waiting.pop(0)
+			self.iter(nd)
+
+	def iter(self, node):
+		"""
+		Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files
+
+		:param node: input file
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		path = node.abspath() # obtain the absolute path
+		code = "".join(filter_comments(path)) # read the file and filter the comments
+		names = self.get_strings(code) # obtain the import strings
+		for x in names:
+			# optimization
+			if x in self.allnames:
+				continue
+			self.allnames.append(x)
+
+			# for each name, see if it is like a node or not
+			self.tryfind(x)
+
+def scan(self):
+	"look for .d/.di used by a d file"
+	env = self.env
+	gruik = d_parser(env, self.generator.includes_nodes)
+	node = self.inputs[0]
+	gruik.start(node)
+	nodes = gruik.nodes
+	names = gruik.names
+	return (nodes, names)
+
diff --git a/third_party/waf/waflib/Tools/dbus.py b/third_party/waf/waflib/Tools/dbus.py
new file mode 100644
index 0000000..d520f1c
--- /dev/null
+++ b/third_party/waf/waflib/Tools/dbus.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+"""
+Compiles dbus files with **dbus-binding-tool**
+
+Typical usage::
+
+	def options(opt):
+		opt.load('compiler_c dbus')
+	def configure(conf):
+		conf.load('compiler_c dbus')
+	def build(bld):
+		tg = bld.program(
+			includes = '.',
+			source = bld.path.ant_glob('*.c'),
+			target = 'gnome-hello')
+		tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
+"""
+
+from waflib import Task, Errors
+from waflib.TaskGen import taskgen_method, before_method
+
+@taskgen_method
+def add_dbus_file(self, filename, prefix, mode):
+	"""
+	Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
+
+	:param filename: xml file to compile
+	:type filename: string
+	:param prefix: dbus binding tool prefix (--prefix=prefix)
+	:type prefix: string
+	:param mode: dbus binding tool mode (--mode=mode)
+	:type mode: string
+	"""
+	if not hasattr(self, 'dbus_lst'):
+		self.dbus_lst = []
+	if not 'process_dbus' in self.meths:
+		self.meths.append('process_dbus')
+	self.dbus_lst.append([filename, prefix, mode])
+
+@before_method('process_source')
+def process_dbus(self):
+	"""
+	Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
+	"""
+	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
+		node = self.path.find_resource(filename)
+		if not node:
+			raise Errors.WafError('file not found ' + filename)
+		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
+		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
+		tsk.env.DBUS_BINDING_TOOL_MODE   = mode
+
+class dbus_binding_tool(Task.Task):
+	"""
+	Compiles a dbus file
+	"""
+	color   = 'BLUE'
+	ext_out = ['.h']
+	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
+	shell   = True # temporary workaround for #795
+
+def configure(conf):
+	"""
+	Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
+	"""
+	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
+
diff --git a/third_party/waf/waflib/Tools/dmd.py b/third_party/waf/waflib/Tools/dmd.py
new file mode 100644
index 0000000..8917ca1
--- /dev/null
+++ b/third_party/waf/waflib/Tools/dmd.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+# Thomas Nagy, 2008-2018 (ita)
+
+import sys
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
+@conf
+def find_dmd(conf):
+	"""
+	Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
+	"""
+	conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
+
+	# make sure that we're dealing with dmd1, dmd2, or ldc(1)
+	out = conf.cmd_and_log(conf.env.D + ['--help'])
+	if out.find("D Compiler v") == -1:
+		out = conf.cmd_and_log(conf.env.D + ['-version'])
+		if out.find("based on DMD v1.") == -1:
+			conf.fatal("detected compiler is not dmd/ldc")
+
+@conf
+def common_flags_ldc(conf):
+	"""
+	Sets the D flags required by *ldc*
+	"""
+	v = conf.env
+	v.DFLAGS        = ['-d-version=Posix']
+	v.LINKFLAGS     = []
+	v.DFLAGS_dshlib = ['-relocation-model=pic']
+
+@conf
+def common_flags_dmd(conf):
+	"""
+	Set the flags required by *dmd* or *dmd2*
+	"""
+	v = conf.env
+
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-of%s'
+
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-of%s'
+	v.DINC_ST           = '-I%s'
+
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-L-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L-L%s'
+
+	v.LINKFLAGS_dprogram= ['-quiet']
+
+	v.DFLAGS_dshlib     = ['-fPIC']
+	v.LINKFLAGS_dshlib  = ['-L-shared']
+
+	v.DHEADER_ext       = '.di'
+	v.DFLAGS_d_with_header = ['-H', '-Hf']
+	v.D_HDR_F           = '%s'
+
+def configure(conf):
+	"""
+	Configuration for *dmd*, *dmd2*, and *ldc*
+	"""
+	conf.find_dmd()
+
+	if sys.platform == 'win32':
+		out = conf.cmd_and_log(conf.env.D + ['--help'])
+		if out.find('D Compiler v2.') > -1:
+			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
+
+	conf.load('ar')
+	conf.load('d')
+	conf.common_flags_dmd()
+	conf.d_platform_flags()
+
+	if str(conf.env.D).find('ldc') > -1:
+		conf.common_flags_ldc()
+
diff --git a/third_party/waf/waflib/Tools/errcheck.py b/third_party/waf/waflib/Tools/errcheck.py
new file mode 100644
index 0000000..de8d75a
--- /dev/null
+++ b/third_party/waf/waflib/Tools/errcheck.py
@@ -0,0 +1,237 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Highlights common mistakes in user build scripts.
+
+There is a performance impact, so this tool is only loaded when running ``waf -v``
+"""
+
+typos = {
+'feature':'features',
+'sources':'source',
+'targets':'target',
+'include':'includes',
+'export_include':'export_includes',
+'define':'defines',
+'importpath':'includes',
+'installpath':'install_path',
+'iscopy':'is_copy',
+'uses':'use',
+}
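+# e.g. bld.program(source='main.c', include='.') would log
+# "Fix the typo 'include' -> 'includes'" against the offending call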
+
+meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
+
+import sys
+from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
+from waflib.Tools import ccroot
+
+def check_same_targets(self):
+	mp = Utils.defaultdict(list)
+	uids = {}
+
+	def check_task(tsk):
+		if not isinstance(tsk, Task.Task):
+			return
+		if hasattr(tsk, 'no_errcheck_out'):
+			return
+
+		for node in tsk.outputs:
+			mp[node].append(tsk)
+		try:
+			uids[tsk.uid()].append(tsk)
+		except KeyError:
+			uids[tsk.uid()] = [tsk]
+
+	for g in self.groups:
+		for tg in g:
+			try:
+				for tsk in tg.tasks:
+					check_task(tsk)
+			except AttributeError:
+				# raised if not a task generator, which should be uncommon
+				check_task(tg)
+
+	dupe = False
+	for (k, v) in mp.items():
+		if len(v) > 1:
+			dupe = True
+			msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
+			Logs.error(msg)
+			for x in v:
+				if Logs.verbose > 1:
+					Logs.error('  %d. %r', 1 + v.index(x), x.generator)
+				else:
+					Logs.error('  %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
+			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
+
+	if not dupe:
+		for (k, v) in uids.items():
+			if len(v) > 1:
+				Logs.error('* Several tasks use the same identifier. Please check the information on\n   https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+				for tsk in v:
+					# compute the details from each conflicting task, not from a stale loop variable
+					tg_details = tsk.generator.name
+					if Logs.verbose > 2:
+						tg_details = tsk.generator
+					Logs.error('  - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
+
+def check_invalid_constraints(self):
+	feat = set()
+	for x in list(TaskGen.feats.values()):
+		feat.update(x) # set.union returns a new set; update modifies feat in place
+	for (x, y) in TaskGen.task_gen.prec.items():
+		feat.add(x)
+		feat.update(y)
+	ext = set()
+	for x in TaskGen.task_gen.mappings.values():
+		ext.add(x.__name__)
+	invalid = ext & feat
+	if invalid:
+		Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method', list(invalid))
+
+	# the build scripts have been read, so we can check for invalid after/before attributes on task classes
+	for cls in list(Task.classes.values()):
+		if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str):
+			raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)' % (cls, cls.hcode))
+
+		for x in ('before', 'after'):
+			for y in Utils.to_list(getattr(cls, x, [])):
+				if not Task.classes.get(y):
+					Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
+		if getattr(cls, 'rule', None):
+			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)
+
+def replace(m):
+	"""
+	Replaces existing BuildContext methods to verify parameter names,
+	for example ``bld(source=)`` has no ending *s*
+	"""
+	oldcall = getattr(Build.BuildContext, m)
+	def call(self, *k, **kw):
+		ret = oldcall(self, *k, **kw)
+		for x in typos:
+			if x in kw:
+				if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
+					continue
+				Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
+		return ret
+	setattr(Build.BuildContext, m, call)
+
+def enhance_lib():
+	"""
+	Modifies existing classes and methods to enable error verification
+	"""
+	for m in meths_typos:
+		replace(m)
+
+	# catch '..' in ant_glob patterns
+	def ant_glob(self, *k, **kw):
+		if k:
+			lst = Utils.to_list(k[0])
+			for pat in lst:
+				sp = pat.split('/')
+				if '..' in sp:
+					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
+				if '.' in sp:
+					Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
+		return self.old_ant_glob(*k, **kw)
+	Node.Node.old_ant_glob = Node.Node.ant_glob
+	Node.Node.ant_glob = ant_glob
+
+	# catch ant_glob on build folders
+	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
+		if remove:
+			try:
+				if self.is_child_of(self.ctx.bldnode) and not quiet:
+					quiet = True
+					Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
+			except AttributeError:
+				pass
+		return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
+	Node.Node.old_ant_iter = Node.Node.ant_iter
+	Node.Node.ant_iter = ant_iter
+
+	# catch conflicting ext_in/ext_out/before/after declarations
+	old = Task.is_before
+	def is_before(t1, t2):
+		ret = old(t1, t2)
+		if ret and old(t2, t1):
+			Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
+		return ret
+	Task.is_before = is_before
+
+	# check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose
+	# so we only issue a warning
+	def check_err_features(self):
+		lst = self.to_list(self.features)
+		if 'shlib' in lst:
+			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
+		for x in ('c', 'cxx', 'd', 'fc'):
+			if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
+				Logs.error('%r features is probably missing %r', self, x)
+	TaskGen.feature('*')(check_err_features)
+
+	# check for erroneous order constraints
+	def check_err_order(self):
+		if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
+			for x in ('before', 'after', 'ext_in', 'ext_out'):
+				if hasattr(self, x):
+					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
+		else:
+			for x in ('before', 'after'):
+				for y in self.to_list(getattr(self, x, [])):
+					if not Task.classes.get(y):
+						Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
+	TaskGen.feature('*')(check_err_order)
+
+	# check for @extension used with @feature/@before_method/@after_method
+	def check_compile(self):
+		check_invalid_constraints(self)
+		try:
+			ret = self.orig_compile()
+		finally:
+			check_same_targets(self)
+		return ret
+	Build.BuildContext.orig_compile = Build.BuildContext.compile
+	Build.BuildContext.compile = check_compile
+
+	# check for invalid build groups #914
+	def use_rec(self, name, **kw):
+		try:
+			y = self.bld.get_tgen_by_name(name)
+		except Errors.WafError:
+			pass
+		else:
+			idx = self.bld.get_group_idx(self)
+			odx = self.bld.get_group_idx(y)
+			if odx > idx:
+				msg = "Invalid 'use' across build groups:"
+				if Logs.verbose > 1:
+					msg += '\n  target %r\n  uses:\n  %r' % (self, y)
+				else:
+					msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
+				raise Errors.WafError(msg)
+		self.orig_use_rec(name, **kw)
+	TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
+	TaskGen.task_gen.use_rec = use_rec
+
+	# check for env.append
+	def _getattr(self, name, default=None):
+		if name == 'append' or name == 'add':
+			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
+		elif name == 'prepend':
+			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
+		if name in self.__slots__:
+			return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
+		else:
+			return self[name]
+	ConfigSet.ConfigSet.__getattr__ = _getattr
+
+
+def options(opt):
+	"""
+	Loading this tool from the user script ``options()`` enables the error verification by default (not just on ``waf -v``)
+	"""
+	enhance_lib()
+
diff --git a/third_party/waf/waflib/Tools/fc.py b/third_party/waf/waflib/Tools/fc.py
new file mode 100644
index 0000000..7fbd76d
--- /dev/null
+++ b/third_party/waf/waflib/Tools/fc.py
@@ -0,0 +1,203 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016-2018 (ita)
+
+"""
+Fortran support
+"""
+
+from waflib import Utils, Task, Errors
+from waflib.Tools import ccroot, fc_config, fc_scan
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+
+ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
+ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
+ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
+ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
+
+@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
+def fc_hook(self, node):
+	"Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
+	return self.create_compiled_task('fc', node)
+
+@conf
+def modfile(conf, name):
+	"""
+	Turns a module name into the right module file name.
+	Defaults to all lower case.
+	"""
+	if name.find(':') >= 0:
+		# Depending on a submodule!
+		separator = conf.env.FC_SUBMOD_SEPARATOR or '@'
+		# Ancestors of the submodule will be prefixed to the
+		# submodule name, separated by a colon.
+		modpath = name.split(':')
+		# Only the ancestor (actual) module and the submodule name
+		# will be used for the filename.
+		modname = modpath[0] + separator + modpath[-1]
+		suffix = conf.env.FC_SUBMOD_SUFFIX or '.smod'
+	else:
+		modname = name
+		suffix = '.mod'
+
+	return {'lower'     :modname.lower() + suffix.lower(),
+		'lower.MOD' :modname.lower() + suffix.upper(),
+		'UPPER.mod' :modname.upper() + suffix.lower(),
+		'UPPER'     :modname.upper() + suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
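+# e.g. with the default 'lower' capitalization, conf.modfile('Foo') returns
+# 'foo.mod' and conf.modfile('parent:child') returns 'parent@child.smod'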
+
+def get_fortran_tasks(tsk):
+	"""
+	Obtains all fortran tasks from the same build group. Those tasks must not have
+	the attribute 'nomod' or 'mod_fortran_done'
+
+	:return: a list of :py:class:`waflib.Tools.fc.fc` instances
+	"""
+	bld = tsk.generator.bld
+	tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
+	return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]
+
+class fc(Task.Task):
+	"""
+	Fortran tasks can only run when all fortran tasks in the current task group are ready to be executed.
+	This may cause a deadlock if some fortran task is waiting for something that cannot happen (a circular dependency);
+	should this ever happen, set 'nomod=True' on those task instances to break the loop.
+	"""
+	color = 'GREEN'
+	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
+	vars = ["FORTRANMODPATHFLAG"]
+
+	def scan(self):
+		"""Fortran dependency scanner"""
+		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
+		tmp.task = self
+		tmp.start(self.inputs[0])
+		return (tmp.nodes, tmp.names)
+
+	def runnable_status(self):
+		"""
+		Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
+		executed by the main thread so there are no concurrency issues
+		"""
+		if getattr(self, 'mod_fortran_done', None):
+			return super(fc, self).runnable_status()
+
+		# now, if we reach this part it is because this fortran task is the first in the list
+		bld = self.generator.bld
+
+		# obtain the fortran tasks
+		lst = get_fortran_tasks(self)
+
+		# disable this method for other tasks
+		for tsk in lst:
+			tsk.mod_fortran_done = True
+
+		# wait for all the .f tasks to be ready for execution
+		# and ensure that the scanners are called at least once
+		for tsk in lst:
+			ret = tsk.runnable_status()
+			if ret == Task.ASK_LATER:
+				# we have to wait for one of the other fortran tasks to be ready
+				# this may deadlock if there are dependencies between fortran tasks
+				# but this should not happen (we are setting them here!)
+				for x in lst:
+					x.mod_fortran_done = None
+
+				return Task.ASK_LATER
+
+		ins = Utils.defaultdict(set)
+		outs = Utils.defaultdict(set)
+
+		# the .mod files to create
+		for tsk in lst:
+			key = tsk.uid()
+			for x in bld.raw_deps[key]:
+				if x.startswith('MOD@'):
+					name = bld.modfile(x.replace('MOD@', ''))
+					node = bld.srcnode.find_or_declare(name)
+					tsk.set_outputs(node)
+					outs[node].add(tsk)
+
+		# the .mod files to use
+		for tsk in lst:
+			key = tsk.uid()
+			for x in bld.raw_deps[key]:
+				if x.startswith('USE@'):
+					name = bld.modfile(x.replace('USE@', ''))
+					node = bld.srcnode.find_resource(name)
+					if node and node not in tsk.outputs:
+						if not node in bld.node_deps[key]:
+							bld.node_deps[key].append(node)
+						ins[node].add(tsk)
+
+		# if the intersection matches, set the order
+		for k in ins.keys():
+			for a in ins[k]:
+				a.run_after.update(outs[k])
+				for x in outs[k]:
+					self.generator.bld.producer.revdeps[x].add(a)
+
+				# the scanner cannot output nodes, so we have to set them
+				# ourselves as task.dep_nodes (additional input nodes)
+				tmp = []
+				for t in outs[k]:
+					tmp.extend(t.outputs)
+				a.dep_nodes.extend(tmp)
+				a.dep_nodes.sort(key=lambda x: x.abspath())
+
+		# the task objects have changed: clear the signature cache
+		for tsk in lst:
+			try:
+				delattr(tsk, 'cache_sig')
+			except AttributeError:
+				pass
+
+		return super(fc, self).runnable_status()
+
+class fcprogram(ccroot.link_task):
+	"""Links Fortran programs"""
+	color = 'YELLOW'
+	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
+	inst_to = '${BINDIR}'
+
+class fcshlib(fcprogram):
+	"""Links Fortran libraries"""
+	inst_to = '${LIBDIR}'
+
+class fcstlib(ccroot.stlink_task):
+	"""Links Fortran static libraries (uses ar by default)"""
+	pass # do not remove the pass statement
+
+class fcprogram_test(fcprogram):
+	"""Custom link task to obtain compiler outputs for Fortran configuration tests"""
+
+	def runnable_status(self):
+		"""This task is always executed"""
+		ret = super(fcprogram_test, self).runnable_status()
+		if ret == Task.SKIP_ME:
+			ret = Task.RUN_ME
+		return ret
+
+	def exec_command(self, cmd, **kw):
+		"""Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
+		bld = self.generator.bld
+
+		kw['shell'] = isinstance(cmd, str)
+		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
+		kw['cwd'] = self.get_cwd()
+		bld.out = bld.err = ''
+
+		bld.to_log('command: %s\n' % cmd)
+
+		kw['output'] = 0
+		try:
+			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
+		except Errors.WafError:
+			return -1
+
+		if bld.out:
+			bld.to_log('out: %s\n' % bld.out)
+		if bld.err:
+			bld.to_log('err: %s\n' % bld.err)
+
diff --git a/third_party/waf/waflib/Tools/fc_config.py b/third_party/waf/waflib/Tools/fc_config.py
new file mode 100644
index 0000000..dc5e5c9
--- /dev/null
+++ b/third_party/waf/waflib/Tools/fc_config.py
@@ -0,0 +1,488 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016-2018 (ita)
+
+"""
+Fortran configuration helpers
+"""
+
+import re, os, sys, shlex
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method
+
+FC_FRAGMENT = '        program main\n        end     program main\n'
+FC_FRAGMENT2 = '        PROGRAM MAIN\n        END\n' # what's the actual difference between these?
+
+@conf
+def fc_flags(conf):
+	"""
+	Defines common fortran configuration flags and file extensions
+	"""
+	v = conf.env
+
+	v.FC_SRC_F    = []
+	v.FC_TGT_F    = ['-c', '-o']
+	v.FCINCPATH_ST  = '-I%s'
+	v.FCDEFINES_ST  = '-D%s'
+
+	if not v.LINK_FC:
+		v.LINK_FC = v.FC
+
+	v.FCLNK_SRC_F = []
+	v.FCLNK_TGT_F = ['-o']
+
+	v.FCFLAGS_fcshlib   = ['-fpic']
+	v.LINKFLAGS_fcshlib = ['-shared']
+	v.fcshlib_PATTERN   = 'lib%s.so'
+
+	v.fcstlib_PATTERN   = 'lib%s.a'
+
+	v.FCLIB_ST       = '-l%s'
+	v.FCLIBPATH_ST   = '-L%s'
+	v.FCSTLIB_ST     = '-l%s'
+	v.FCSTLIBPATH_ST = '-L%s'
+	v.FCSTLIB_MARKER = '-Wl,-Bstatic'
+	v.FCSHLIB_MARKER = '-Wl,-Bdynamic'
+
+	v.SONAME_ST      = '-Wl,-h,%s'
+
+@conf
+def fc_add_flags(conf):
+	"""
+	Adds FCPPFLAGS / FCFLAGS / LINKFLAGS / LDFLAGS from os.environ to conf.env
+	"""
+	conf.add_os_flags('FCPPFLAGS', dup=False)
+	conf.add_os_flags('FCFLAGS', dup=False)
+	conf.add_os_flags('LINKFLAGS', dup=False)
+	conf.add_os_flags('LDFLAGS', dup=False)
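+
+# Example: once fc_add_flags() has run, these variables can be injected from
+# the environment at configure time, for instance:
+#
+#     FCFLAGS="-O2 -g" LDFLAGS="-L/opt/lib" waf configure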
+
+@conf
+def check_fortran(self, *k, **kw):
+	"""
+	Compiles a Fortran program to ensure that the settings are correct
+	"""
+	self.check_cc(
+		fragment         = FC_FRAGMENT,
+		compile_filename = 'test.f',
+		features         = 'fc fcprogram',
+		msg              = 'Compiling a simple fortran app')
+
+@conf
+def check_fc(self, *k, **kw):
+	"""
+	Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
+	(this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
+	"""
+	kw['compiler'] = 'fc'
+	if 'compile_mode' not in kw:
+		kw['compile_mode'] = 'fc'
+	if 'type' not in kw:
+		kw['type'] = 'fcprogram'
+	if 'compile_filename' not in kw:
+		kw['compile_filename'] = 'test.f90'
+	if 'code' not in kw:
+		kw['code'] = FC_FRAGMENT
+	return self.check(*k, **kw)
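+
+# Usage sketch (hypothetical wscript, names are illustrative):
+#
+#     def configure(conf):
+#         conf.load('compiler_fc')
+#         conf.check_fc(fragment='program p\n  implicit none\nend program p\n',
+#                       msg='Checking for a working Fortran compiler')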
+
+# ------------------------------------------------------------------------
+# --- These are the default platform modifiers, refactored here for
+#     convenience.  gfortran and g95 have much overlap.
+# ------------------------------------------------------------------------
+
+@conf
+def fortran_modifier_darwin(conf):
+	"""
+	Defines Fortran flags and extensions for OSX systems
+	"""
+	v = conf.env
+	v.FCFLAGS_fcshlib   = ['-fPIC']
+	v.LINKFLAGS_fcshlib = ['-dynamiclib']
+	v.fcshlib_PATTERN   = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST  = '-F%s'
+	v.FRAMEWORK_ST      = ['-framework']
+
+	v.LINKFLAGS_fcstlib = []
+
+	v.FCSHLIB_MARKER    = ''
+	v.FCSTLIB_MARKER    = ''
+	v.SONAME_ST         = ''
+
+@conf
+def fortran_modifier_win32(conf):
+	"""
+	Defines Fortran flags for Windows platforms
+	"""
+	v = conf.env
+	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN  = '%s.exe'
+
+	v.fcshlib_PATTERN   = '%s.dll'
+	v.implib_PATTERN    = '%s.dll.a'
+	v.IMPLIB_ST         = '-Wl,--out-implib,%s'
+
+	v.FCFLAGS_fcshlib   = []
+
+	# Auto-import is enabled by default even without this option,
+	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
+	# that the linker emits otherwise.
+	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
+@conf
+def fortran_modifier_cygwin(conf):
+	"""
+	Defines Fortran flags for use on cygwin
+	"""
+	fortran_modifier_win32(conf)
+	v = conf.env
+	v.fcshlib_PATTERN = 'cyg%s.dll'
+	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
+	v.FCFLAGS_fcshlib = []
+
+# ------------------------------------------------------------------------
+
+@conf
+def check_fortran_dummy_main(self, *k, **kw):
+	"""
+	Determines if a main function is needed by compiling a code snippet with
+	the C compiler and linking it with the Fortran compiler (useful on unix-like systems)
+	"""
+	if not self.env.CC:
+		self.fatal('A C compiler is required for check_fortran_dummy_main')
+
+	lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
+	lst.extend([m.lower() for m in lst])
+	lst.append('')
+
+	self.start_msg('Detecting whether we need a dummy main')
+	for main in lst:
+		kw['fortran_main'] = main
+		try:
+			self.check_cc(
+				fragment = 'int %s() { return 0; }\n' % (main or 'test'),
+				features = 'c fcprogram',
+				mandatory = True
+			)
+			if not main:
+				self.env.FC_MAIN = -1
+				self.end_msg('no')
+			else:
+				self.env.FC_MAIN = main
+				self.end_msg('yes %s' % main)
+			break
+		except self.errors.ConfigurationError:
+			pass
+	else:
+		self.end_msg('not found')
+		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
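+
+# On success, env.FC_MAIN holds either the mangled main symbol that C code
+# must provide (e.g. 'MAIN__') or -1 when no dummy main is required.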
+
+# ------------------------------------------------------------------------
+
+GCC_DRIVER_LINE = re.compile('^Driving:')
+POSIX_STATIC_EXT = re.compile(r'\S+\.a')
+POSIX_LIB_FLAGS = re.compile(r'-l\S+')
+
+@conf
+def is_link_verbose(self, txt):
+	"""Returns True if 'useful' link options can be found in txt"""
+	assert isinstance(txt, str)
+	for line in txt.splitlines():
+		if not GCC_DRIVER_LINE.search(line):
+			if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
+				return True
+	return False
+
+@conf
+def check_fortran_verbose_flag(self, *k, **kw):
+	"""
+	Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG
+	"""
+	self.start_msg('fortran link verbose flag')
+	for x in ('-v', '--verbose', '-verbose', '-V'):
+		try:
+			self.check_cc(
+				features = 'fc fcprogram_test',
+				fragment = FC_FRAGMENT2,
+				compile_filename = 'test.f',
+				linkflags = [x],
+				mandatory=True)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			# output is on stderr or stdout (for xlf)
+			if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
+				self.end_msg(x)
+				break
+	else:
+		self.end_msg('failure')
+		self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
+
+	self.env.FC_VERBOSE_FLAG = x
+	return x
+
+# ------------------------------------------------------------------------
+
+# linkflags which match those are ignored
+LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
+if os.name == 'nt':
+	LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
+else:
+	LINKFLAGS_IGNORED.append(r'-lgcc*')
+RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
+
+def _match_ignore(line):
+	"""Returns True if the line should be ignored (Fortran verbose flag test)"""
+	for i in RLINKFLAGS_IGNORED:
+		if i.match(line):
+			return True
+	return False
+
+def parse_fortran_link(lines):
+	"""Given the output of verbose link of Fortran compiler, this returns a
+	list of flags necessary for linking using the standard linker."""
+	final_flags = []
+	for line in lines:
+		if not GCC_DRIVER_LINE.match(line):
+			_parse_flink_line(line, final_flags)
+	return final_flags
+
+SPACE_OPTS = re.compile('^-[LRuYz]$')
+NOSPACE_OPTS = re.compile('^-[RL]')
+
+def _parse_flink_token(lexer, token, tmp_flags):
+	# Here we go (convention for wildcard is shell, not regex !)
+	#   1 TODO: we first get some root .a libraries
+	#   2 TODO: take everything starting by -bI:*
+	#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
+	#   -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
+	#   4 take into account -lkernel32
+	#   5 For options of the kind -[LRuYz], as they take one argument
+	#   after, the actual option is the next token
+	#   6 For -YP,*: take and replace by -Larg where arg is the old
+	#   argument
+	#   7 For -[lLR]*: take
+
+	# step 3
+	if _match_ignore(token):
+		pass
+	# step 4
+	elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
+		tmp_flags.append(token)
+	# step 5
+	elif SPACE_OPTS.match(token):
+		t = lexer.get_token()
+		if t.startswith('P,'):
+			t = t[2:]
+		for opt in t.split(os.pathsep):
+			tmp_flags.append('-L%s' % opt)
+	# step 6
+	elif NOSPACE_OPTS.match(token):
+		tmp_flags.append(token)
+	# step 7
+	elif POSIX_LIB_FLAGS.match(token):
+		tmp_flags.append(token)
+	else:
+		# ignore anything not explicitly taken into account
+		pass
+
+	t = lexer.get_token()
+	return t
+
+def _parse_flink_line(line, final_flags):
+	"""private"""
+	lexer = shlex.shlex(line, posix = True)
+	lexer.whitespace_split = True
+
+	t = lexer.get_token()
+	tmp_flags = []
+	while t:
+		t = _parse_flink_token(lexer, t, tmp_flags)
+
+	final_flags.extend(tmp_flags)
+	return final_flags
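+
+# Example (POSIX case; on 'nt' additional flags are ignored): a verbose
+# linker line such as
+#     /usr/bin/ld -L/usr/lib -lgfortran -lm crt1.o
+# yields ['-L/usr/lib', '-lgfortran', '-lm']; the loader path and crt1.o
+# are dropped as they match none of the accepted patterns.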
+
+@conf
+def check_fortran_clib(self, autoadd=True, *k, **kw):
+	"""
+	Obtains the flags for linking with the C library
+	if this check works, add uselib='CLIB' to your task generators
+	"""
+	if not self.env.FC_VERBOSE_FLAG:
+		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
+
+	self.start_msg('Getting fortran runtime link flags')
+	try:
+		self.check_cc(
+			fragment = FC_FRAGMENT2,
+			compile_filename = 'test.f',
+			features = 'fc fcprogram_test',
+			linkflags = [self.env.FC_VERBOSE_FLAG]
+		)
+	except Exception:
+		self.end_msg(False)
+		if kw.get('mandatory', True):
+			self.fatal('Could not find the C library flags')
+	else:
+		out = self.test_bld.err
+		flags = parse_fortran_link(out.splitlines())
+		self.end_msg('ok (%s)' % ' '.join(flags))
+		self.env.LINKFLAGS_CLIB = flags
+		return flags
+	return []
+
+def getoutput(conf, cmd, stdin=False):
+	"""
+	Obtains Fortran command outputs
+	"""
+	from waflib import Errors
+	if conf.env.env:
+		env = conf.env.env
+	else:
+		env = dict(os.environ)
+		env['LANG'] = 'C'
+	input = stdin and '\n'.encode() or None
+	try:
+		out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
+	except Errors.WafError as e:
+		# A WafError might indicate an error code during the command
+		# execution, in this case we still obtain the stderr and stdout,
+		# which we can use to find the version string.
+		if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
+			raise e
+		else:
+			# Ignore the return code and return the original
+			# stdout and stderr.
+			out = e.stdout
+			err = e.stderr
+	except Exception:
+		conf.fatal('could not determine the compiler version %r' % cmd)
+	return (out, err)
+
+# ------------------------------------------------------------------------
+
+ROUTINES_CODE = """\
+      subroutine foobar()
+      return
+      end
+      subroutine foo_bar()
+      return
+      end
+"""
+
+MAIN_CODE = """
+void %(dummy_func_nounder)s(void);
+void %(dummy_func_under)s(void);
+int %(main_func_name)s() {
+  %(dummy_func_nounder)s();
+  %(dummy_func_under)s();
+  return 0;
+}
+"""
+
+@feature('link_main_routines_func')
+@before_method('process_source')
+def link_main_routines_tg_method(self):
+	"""
+	The configuration test declares a unique task generator,
+	so we create other task generators from there for fortran link tests
+	"""
+	def write_test_file(task):
+		task.outputs[0].write(task.generator.code)
+	bld = self.bld
+	bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
+	bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
+	bld(features='fc fcstlib', source='test.f', target='test')
+	bld(features='c fcprogram', source='main.c', target='app', use='test')
+
+def mangling_schemes():
+	"""
+	Generate triplets for use with mangle_name
+	(used in check_fortran_mangling)
+	the order is tuned for gfortran
+	"""
+	for u in ('_', ''):
+		for du in ('', '_'):
+			for c in ("lower", "upper"):
+				yield (u, du, c)
+
+def mangle_name(u, du, c, name):
+	"""Mangle a name from a triplet (used in check_fortran_mangling)"""
+	return getattr(name, c)() + u + (name.find('_') != -1 and du or '')
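+
+# Examples:
+#     mangle_name('_', '',  'lower', 'foo_bar') -> 'foo_bar_'
+#     mangle_name('_', '_', 'lower', 'foo_bar') -> 'foo_bar__'
+#     mangle_name('',  '',  'upper', 'foobar')  -> 'FOOBAR'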
+
+@conf
+def check_fortran_mangling(self, *k, **kw):
+	"""
+	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found
+
+	This test will compile a fortran static library, then link a c app against it
+	"""
+	if not self.env.CC:
+		self.fatal('A C compiler is required for link_main_routines')
+	if not self.env.FC:
+		self.fatal('A Fortran compiler is required for link_main_routines')
+	if not self.env.FC_MAIN:
+		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
+
+	self.start_msg('Getting fortran mangling scheme')
+	for (u, du, c) in mangling_schemes():
+		try:
+			self.check_cc(
+				compile_filename   = [],
+				features           = 'link_main_routines_func',
+				msg                = 'nomsg',
+				errmsg             = 'nomsg',
+				dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
+				dummy_func_under   = mangle_name(u, du, c, 'foo_bar'),
+				main_func_name     = self.env.FC_MAIN
+			)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
+			self.env.FORTRAN_MANGLING = (u, du, c)
+			break
+	else:
+		self.end_msg(False)
+		self.fatal('mangler not found')
+	return (u, du, c)
+
+@feature('pyext')
+@before_method('propagate_uselib_vars', 'apply_link')
+def set_lib_pat(self):
+	"""Sets the Fortran flags for linking with Python"""
+	self.env.fcshlib_PATTERN = self.env.pyext_PATTERN
+
+@conf
+def detect_openmp(self):
+	"""
+	Detects OpenMP flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
+	"""
+	for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
+		try:
+			self.check_fc(
+				msg          = 'Checking for OpenMP flag %s' % x,
+				fragment     = 'program main\n  call omp_get_num_threads()\nend program main',
+				fcflags      = x,
+				linkflags    = x,
+				uselib_store = 'OPENMP'
+			)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			break
+	else:
+		self.fatal('Could not find OpenMP')
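+
+# Usage sketch (illustrative target names): once detect_openmp() succeeds,
+# task generators can pull in the stored flags through the 'OPENMP' name:
+#
+#     def build(bld):
+#         bld(features='fc fcprogram', source='x.f90', target='x', use='OPENMP')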
+
+@conf
+def check_gfortran_o_space(self):
+	if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
+		# This is for old compilers and only for gfortran.
+		# No idea how other implementations handle this. Be safe and bail out.
+		return
+	self.env.stash()
+	self.env.FCLNK_TGT_F = ['-o', '']
+	try:
+		self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
+	except self.errors.ConfigurationError:
+		self.env.revert()
+	else:
+		self.env.commit()
diff --git a/third_party/waf/waflib/Tools/fc_scan.py b/third_party/waf/waflib/Tools/fc_scan.py
new file mode 100644
index 0000000..0824c92
--- /dev/null
+++ b/third_party/waf/waflib/Tools/fc_scan.py
@@ -0,0 +1,120 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016-2018 (ita)
+
+import re
+
+INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
+USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)"""
+SMD_REGEX = r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)"""
+
+re_inc = re.compile(INC_REGEX, re.I)
+re_use = re.compile(USE_REGEX, re.I)
+re_mod = re.compile(MOD_REGEX, re.I)
+re_smd = re.compile(SMD_REGEX, re.I)
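+
+# Examples of lines matched by these case-insensitive patterns:
+#     re_inc: "      include 'params.inc'"       -> group(1) == 'params.inc'
+#     re_use: "use, intrinsic :: iso_c_binding"  -> group(1) == 'iso_c_binding'
+#     re_mod: "module foo"                       -> group(1) == 'foo'
+#     re_smd: "submodule (foo) bar"              -> groups() == ('foo', 'bar')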
+
+class fortran_parser(object):
+	"""
+	This parser returns:
+
+	* the nodes corresponding to the module names to produce
+	* the nodes corresponding to the include files used
+	* the module names used by the fortran files
+	"""
+	def __init__(self, incpaths):
+		self.seen = []
+		"""Files already parsed"""
+
+		self.nodes = []
+		"""List of :py:class:`waflib.Node.Node` representing the dependencies to return"""
+
+		self.names = []
+		"""List of module names to return"""
+
+		self.incpaths = incpaths
+		"""List of :py:class:`waflib.Node.Node` representing the include paths"""
+
+	def find_deps(self, node):
+		"""
+		Parses a Fortran file to obtain the dependencies used/provided
+
+		:param node: fortran file to read
+		:type node: :py:class:`waflib.Node.Node`
+		:return: lists representing the includes, the modules used, and the modules created by a fortran file
+		:rtype: tuple of list of strings
+		"""
+		txt = node.read()
+		incs = []
+		uses = []
+		mods = []
+		for line in txt.splitlines():
+			# line by line regexp search? optimize?
+			m = re_inc.search(line)
+			if m:
+				incs.append(m.group(1))
+			m = re_use.search(line)
+			if m:
+				uses.append(m.group(1))
+			m = re_mod.search(line)
+			if m:
+				mods.append(m.group(1))
+			m = re_smd.search(line)
+			if m:
+				uses.append(m.group(1))
+				mods.append('{0}:{1}'.format(m.group(1),m.group(2)))
+		return (incs, uses, mods)
+
+	def start(self, node):
+		"""
+		Starts parsing. Uses the queue ``self.waiting`` to hold the nodes to process
+
+		:param node: fortran file
+		:type node: :py:class:`waflib.Node.Node`
+		"""
+		self.waiting = [node]
+		while self.waiting:
+			nd = self.waiting.pop(0)
+			self.iter(nd)
+
+	def iter(self, node):
+		"""
+		Processes a single file during dependency parsing. Extracts the files
+		included, the modules used and the modules provided.
+		"""
+		incs, uses, mods = self.find_deps(node)
+		for x in incs:
+			if x in self.seen:
+				continue
+			self.seen.append(x)
+			self.tryfind_header(x)
+
+		for x in uses:
+			name = "USE@%s" % x
+			if name not in self.names:
+				self.names.append(name)
+
+		for x in mods:
+			name = "MOD@%s" % x
+			if name not in self.names:
+				self.names.append(name)
+
+	def tryfind_header(self, filename):
+		"""
+		Adds an include file to the list of nodes to process
+
+		:param filename: file name
+		:type filename: string
+		"""
+		found = None
+		for n in self.incpaths:
+			found = n.find_resource(filename)
+			if found:
+				self.nodes.append(found)
+				self.waiting.append(found)
+				break
+		if not found:
+			if filename not in self.names:
+				self.names.append(filename)
+
diff --git a/third_party/waf/waflib/Tools/flex.py b/third_party/waf/waflib/Tools/flex.py
new file mode 100644
index 0000000..2256657
--- /dev/null
+++ b/third_party/waf/waflib/Tools/flex.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# John O'Meara, 2006
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+The **flex** program is a code generator which creates C or C++ files.
+The generated files are compiled into object files.
+"""
+
+import os, re
+from waflib import Task, TaskGen
+from waflib.Tools import ccroot
+
+def decide_ext(self, node):
+	if 'cxx' in self.features:
+		return ['.lex.cc']
+	return ['.lex.c']
+
+def flexfun(tsk):
+	env = tsk.env
+	bld = tsk.generator.bld
+	wd = bld.variant_dir
+	def to_list(xx):
+		if isinstance(xx, str):
+			return [xx]
+		return xx
+	tsk.last_cmd = lst = []
+	lst.extend(to_list(env.FLEX))
+	lst.extend(to_list(env.FLEXFLAGS))
+	inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
+	if env.FLEX_MSYS:
+		inputs = [x.replace(os.sep, '/') for x in inputs]
+	lst.extend(inputs)
+	lst = [x for x in lst if x]
+	txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
+	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
+
+TaskGen.declare_chain(
+	name = 'flex',
+	rule = flexfun, # issue #854
+	ext_in = '.l',
+	decider = decide_ext,
+)
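+
+# Usage sketch (hypothetical file names): with the chain above declared,
+# listing a .l file among the sources of a C task generator is enough:
+#
+#     def build(bld):
+#         bld.program(source='scanner.l main.c', target='app')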
+
+# To support the following:
+# bld(features='c', flexflags='-P/foo')
+Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
+ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
+ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
+
+def configure(conf):
+	"""
+	Detect the *flex* program
+	"""
+	conf.find_program('flex', var='FLEX')
+	conf.env.FLEXFLAGS = ['-t']
+
+	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
+		# this is the flex shipped with MSYS
+		conf.env.FLEX_MSYS = True
+
diff --git a/third_party/waf/waflib/Tools/g95.py b/third_party/waf/waflib/Tools/g95.py
new file mode 100644
index 0000000..f69ba4f
--- /dev/null
+++ b/third_party/waf/waflib/Tools/g95.py
@@ -0,0 +1,66 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# KWS 2010
+# Thomas Nagy 2016-2018 (ita)
+
+import re
+from waflib import Utils
+from waflib.Tools import fc, fc_config, fc_scan, ar
+from waflib.Configure import conf
+
+@conf
+def find_g95(conf):
+	fc = conf.find_program('g95', var='FC')
+	conf.get_g95_version(fc)
+	conf.env.FC_NAME = 'G95'
+
+@conf
+def g95_flags(conf):
+	v = conf.env
+	v.FCFLAGS_fcshlib   = ['-fPIC']
+	v.FORTRANMODFLAG  = ['-fmod=', ''] # template for module path
+	v.FCFLAGS_DEBUG = ['-Werror'] # why not
+
+@conf
+def g95_modifier_win32(conf):
+	fc_config.fortran_modifier_win32(conf)
+
+@conf
+def g95_modifier_cygwin(conf):
+	fc_config.fortran_modifier_cygwin(conf)
+
+@conf
+def g95_modifier_darwin(conf):
+	fc_config.fortran_modifier_darwin(conf)
+
+@conf
+def g95_modifier_platform(conf):
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
+	g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
+	if g95_modifier_func:
+		g95_modifier_func()
+
+@conf
+def get_g95_version(conf, fc):
+	"""get the compiler version"""
+
+	version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
+	cmd = fc + ['--version']
+	out, err = fc_config.getoutput(conf, cmd, stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('cannot determine g95 version')
+	k = match.groupdict()
+	conf.env.FC_VERSION = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_g95()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.g95_flags()
+	conf.g95_modifier_platform()
+
diff --git a/third_party/waf/waflib/Tools/gas.py b/third_party/waf/waflib/Tools/gas.py
new file mode 100644
index 0000000..4a8745a
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gas.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2018 (ita)
+
+"Detect as/gas/gcc for compiling assembly files"
+
+import waflib.Tools.asm # - leave this
+from waflib.Tools import ar
+
+def configure(conf):
+	"""
+	Find the programs gas/as/gcc and set the variable *AS*
+	"""
+	conf.find_program(['gas', 'gcc'], var='AS')
+	conf.env.AS_TGT_F = ['-c', '-o']
+	conf.env.ASLNK_TGT_F = ['-o']
+	conf.find_ar()
+	conf.load('asm')
+	conf.env.ASM_NAME = 'gas'
diff --git a/third_party/waf/waflib/Tools/gcc.py b/third_party/waf/waflib/Tools/gcc.py
new file mode 100644
index 0000000..acdd473
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gcc.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+
+"""
+gcc/llvm detection.
+"""
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_gcc(conf):
+	"""
+	Finds the program gcc and, if present, tries to detect its version number
+	"""
+	cc = conf.find_program(['gcc', 'cc'], var='CC')
+	conf.get_cc_version(cc, gcc=True)
+	conf.env.CC_NAME = 'gcc'
+
+@conf
+def gcc_common_flags(conf):
+	"""
+	Common flags for gcc on nearly all platforms
+	"""
+	v = conf.env
+
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o']
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = []
+	v.CCLNK_TGT_F         = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
+	v.STLIB_MARKER        = '-Wl,-Bstatic'
+
+	v.cprogram_PATTERN    = '%s'
+
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-shared']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = ['-Wl,-Bstatic']
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
+	v.CFLAGS_MACBUNDLE    = ['-fPIC']
+	v.macbundle_PATTERN   = '%s.bundle'
+
+@conf
+def gcc_modifier_win32(conf):
+	"""Configuration flags for executing gcc on Windows"""
+	v = conf.env
+	v.cprogram_PATTERN    = '%s.exe'
+
+	v.cshlib_PATTERN      = '%s.dll'
+	v.implib_PATTERN      = '%s.dll.a'
+	v.IMPLIB_ST           = '-Wl,--out-implib,%s'
+
+	v.CFLAGS_cshlib       = []
+
+	# Auto-import is enabled by default even without this option,
+	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
+	# that the linker emits otherwise.
+	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
+@conf
+def gcc_modifier_cygwin(conf):
+	"""Configuration flags for executing gcc on Cygwin"""
+	gcc_modifier_win32(conf)
+	v = conf.env
+	v.cshlib_PATTERN = 'cyg%s.dll'
+	v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
+	v.CFLAGS_cshlib = []
+
+@conf
+def gcc_modifier_darwin(conf):
+	"""Configuration flags for executing gcc on MacOS"""
+	v = conf.env
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-dynamiclib']
+	v.cshlib_PATTERN      = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST    = '-F%s'
+	v.FRAMEWORK_ST        = ['-framework']
+	v.ARCH_ST             = ['-arch']
+
+	v.LINKFLAGS_cstlib    = []
+
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
+
+@conf
+def gcc_modifier_aix(conf):
+	"""Configuration flags for executing gcc on AIX"""
+	v = conf.env
+	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
+	v.LINKFLAGS_cshlib    = ['-shared','-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER        = []
+
+@conf
+def gcc_modifier_hpux(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.CFLAGS_cshlib       = ['-fPIC','-DPIC']
+	v.cshlib_PATTERN      = 'lib%s.sl'
+
+@conf
+def gcc_modifier_openbsd(conf):
+	conf.env.SONAME_ST = []
+
+@conf
+def gcc_modifier_osf1V(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
+
+@conf
+def gcc_modifier_platform(conf):
+	"""Execute platform-specific functions based on *gcc_modifier_+NAME*"""
+	# * set configurations specific for a platform.
+	# * the destination platform is detected automatically by looking at the macros the compiler predefines,
+	#   and if it's not recognised, it falls back to sys.platform.
+	gcc_modifier_func = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None)
+	if gcc_modifier_func:
+		gcc_modifier_func()
+
+def configure(conf):
+	"""
+	Configuration for gcc
+	"""
+	conf.find_gcc()
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+	conf.check_gcc_o_space()
+
diff --git a/third_party/waf/waflib/Tools/gdc.py b/third_party/waf/waflib/Tools/gdc.py
new file mode 100644
index 0000000..d89a66d
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gdc.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2007 (dv)
+
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
+@conf
+def find_gdc(conf):
+	"""
+	Finds the program gdc and sets the variable *D*
+	"""
+	conf.find_program('gdc', var='D')
+
+	out = conf.cmd_and_log(conf.env.D + ['--version'])
+	if out.find("gdc") == -1:
+		conf.fatal("detected compiler is not gdc")
+
+@conf
+def common_flags_gdc(conf):
+	"""
+	Sets the flags required by *gdc*
+	"""
+	v = conf.env
+
+	v.DFLAGS            = []
+
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-o%s'
+
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-o%s'
+	v.DINC_ST           = '-I%s'
+
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L%s'
+
+	v.LINKFLAGS_dshlib  = ['-shared']
+
+	v.DHEADER_ext       = '.di'
+	v.DFLAGS_d_with_header = '-fintfc'
+	v.D_HDR_F           = '-fintfc-file=%s'
+
+def configure(conf):
+	"""
+	Configuration for gdc
+	"""
+	conf.find_gdc()
+	conf.load('ar')
+	conf.load('d')
+	conf.common_flags_gdc()
+	conf.d_platform_flags()
+
diff --git a/third_party/waf/waflib/Tools/gfortran.py b/third_party/waf/waflib/Tools/gfortran.py
new file mode 100644
index 0000000..1050667
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gfortran.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016-2018 (ita)
+
+import re
+from waflib import Utils
+from waflib.Tools import fc, fc_config, fc_scan, ar
+from waflib.Configure import conf
+
+@conf
+def find_gfortran(conf):
+	"""Find the gfortran program (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['gfortran','g77'], var='FC')
+	# (fallback to g77 for systems, where no gfortran is available)
+	conf.get_gfortran_version(fc)
+	conf.env.FC_NAME = 'GFORTRAN'
+
+@conf
+def gfortran_flags(conf):
+	v = conf.env
+	v.FCFLAGS_fcshlib = ['-fPIC']
+	v.FORTRANMODFLAG = ['-J', ''] # template for module path
+	v.FCFLAGS_DEBUG = ['-Werror'] # why not
+
+@conf
+def gfortran_modifier_win32(conf):
+	fc_config.fortran_modifier_win32(conf)
+
+@conf
+def gfortran_modifier_cygwin(conf):
+	fc_config.fortran_modifier_cygwin(conf)
+
+@conf
+def gfortran_modifier_darwin(conf):
+	fc_config.fortran_modifier_darwin(conf)
+
+@conf
+def gfortran_modifier_platform(conf):
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
+	gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
+	if gfortran_modifier_func:
+		gfortran_modifier_func()
+
+@conf
+def get_gfortran_version(conf, fc):
+	"""Get the compiler version"""
+
+	# ensure this is actually gfortran, not an imposter.
+	version_re = re.compile(r"GNU\s*Fortran", re.I).search
+	cmd = fc + ['--version']
+	out, err = fc_config.getoutput(conf, cmd, stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the compiler type')
+
+	# --- now get more detailed info -- see c_config.get_cc_version
+	cmd = fc + ['-dM', '-E', '-']
+	out, err = fc_config.getoutput(conf, cmd, stdin=True)
+
+	if out.find('__GNUC__') < 0:
+		conf.fatal('Could not determine the compiler type')
+
+	k = {}
+	out = out.splitlines()
+	import shlex
+
+	for line in out:
+		lst = shlex.split(line)
+		if len(lst)>2:
+			key = lst[1]
+			val = lst[2]
+			k[key] = val
+
+	def isD(var):
+		return var in k
+
+	def isT(var):
+		return var in k and k[var] != '0'
+
+	conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
+
+def configure(conf):
+	conf.find_gfortran()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.gfortran_flags()
+	conf.gfortran_modifier_platform()
+	conf.check_gfortran_o_space()
diff --git a/third_party/waf/waflib/Tools/glib2.py b/third_party/waf/waflib/Tools/glib2.py
new file mode 100644
index 0000000..949fe37
--- /dev/null
+++ b/third_party/waf/waflib/Tools/glib2.py
@@ -0,0 +1,489 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+Support for GLib2 tools:
+
+* marshal
+* enums
+* gsettings
+* gresource
+"""
+
+import os
+import functools
+from waflib import Context, Task, Utils, Options, Errors, Logs
+from waflib.TaskGen import taskgen_method, before_method, feature, extension
+from waflib.Configure import conf
+
+################## marshal files
+
+@taskgen_method
+def add_marshal_file(self, filename, prefix):
+	"""
+	Adds a file to the list of marshal files to process. Stores them in the attribute *marshal_list*.
+
+	:param filename: xml file to compile
+	:type filename: string
+	:param prefix: marshal prefix (--prefix=prefix)
+	:type prefix: string
+	"""
+	if not hasattr(self, 'marshal_list'):
+		self.marshal_list = []
+	self.meths.append('process_marshal')
+	self.marshal_list.append((filename, prefix))
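+
+# Usage sketch (hypothetical names): from a wscript build function, assuming
+# a file 'marshal.list' exists next to the wscript:
+#
+#     def build(bld):
+#         tg = bld(features='c cprogram', source='main.c', target='app')
+#         tg.add_marshal_file('marshal.list', 'my_marshal')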
+
+@before_method('process_source')
+def process_marshal(self):
+	"""
+	Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
+	Adds the generated C file to the list of sources to process.
+	"""
+	for f, prefix in getattr(self, 'marshal_list', []):
+		node = self.path.find_resource(f)
+
+		if not node:
+			raise Errors.WafError('file not found %r' % f)
+
+		h_node = node.change_ext('.h')
+		c_node = node.change_ext('.c')
+
+		task = self.create_task('glib_genmarshal', node, [h_node, c_node])
+		task.env.GLIB_GENMARSHAL_PREFIX = prefix
+	self.source = self.to_nodes(getattr(self, 'source', []))
+	self.source.append(c_node)
+
+class glib_genmarshal(Task.Task):
+	vars    = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
+	color   = 'BLUE'
+	ext_out = ['.h']
+	def run(self):
+		bld = self.generator.bld
+
+		get = self.env.get_flat
+		cmd1 = "%s %s --prefix=%s --header > %s" % (
+			get('GLIB_GENMARSHAL'),
+			self.inputs[0].srcpath(),
+			get('GLIB_GENMARSHAL_PREFIX'),
+			self.outputs[0].abspath()
+		)
+
+		ret = bld.exec_command(cmd1)
+		if ret:
+			return ret
+
+		#print self.outputs[1].abspath()
+		c = '''#include "%s"\n''' % self.outputs[0].name
+		self.outputs[1].write(c)
+
+		cmd2 = "%s %s --prefix=%s --body >> %s" % (
+			get('GLIB_GENMARSHAL'),
+			self.inputs[0].srcpath(),
+			get('GLIB_GENMARSHAL_PREFIX'),
+			self.outputs[1].abspath()
+		)
+		return bld.exec_command(cmd2)
+
+########################## glib-mkenums
+
+@taskgen_method
+def add_enums_from_template(self, source='', target='', template='', comments=''):
+	"""
+	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
+
+	:param source: enum file to process
+	:type source: string
+	:param target: target file
+	:type target: string
+	:param template: template file
+	:type template: string
+	:param comments: comments
+	:type comments: string
+	"""
+	if not hasattr(self, 'enums_list'):
+		self.enums_list = []
+	self.meths.append('process_enums')
+	self.enums_list.append({'source': source,
+	                        'target': target,
+	                        'template': template,
+	                        'file-head': '',
+	                        'file-prod': '',
+	                        'file-tail': '',
+	                        'enum-prod': '',
+	                        'value-head': '',
+	                        'value-prod': '',
+	                        'value-tail': '',
+	                        'comments': comments})
+
+@taskgen_method
+def add_enums(self, source='', target='',
+              file_head='', file_prod='', file_tail='', enum_prod='',
+              value_head='', value_prod='', value_tail='', comments=''):
+	"""
+	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
+
+	:param source: enum file to process
+	:type source: string
+	:param target: target file
+	:type target: string
+	:param file_head: unused
+	:param file_prod: unused
+	:param file_tail: unused
+	:param enum_prod: unused
+	:param value_head: unused
+	:param value_prod: unused
+	:param value_tail: unused
+	:param comments: comments
+	:type comments: string
+	"""
+	if not hasattr(self, 'enums_list'):
+		self.enums_list = []
+	self.meths.append('process_enums')
+	self.enums_list.append({'source': source,
+	                        'template': '',
+	                        'target': target,
+	                        'file-head': file_head,
+	                        'file-prod': file_prod,
+	                        'file-tail': file_tail,
+	                        'enum-prod': enum_prod,
+	                        'value-head': value_head,
+	                        'value-prod': value_prod,
+	                        'value-tail': value_tail,
+	                        'comments': comments})
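+
+# Usage sketch (hypothetical names): from any task generator, since this is
+# a taskgen_method:
+#
+#     tg = bld(features='c cshlib', source='lib.c', target='mylib')
+#     tg.add_enums(source='hdr.h', target='hdr-enums.c',
+#                  file_head='#include "hdr.h"')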
+
+@before_method('process_source')
+def process_enums(self):
+	"""
+	Processes the enum files stored in the attribute *enums_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
+	"""
+	for enum in getattr(self, 'enums_list', []):
+		task = self.create_task('glib_mkenums')
+		env = task.env
+
+		inputs = []
+
+		# process the source
+		source_list = self.to_list(enum['source'])
+		if not source_list:
+			raise Errors.WafError('missing source ' + str(enum))
+		source_list = [self.path.find_resource(k) for k in source_list]
+		inputs += source_list
+		env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
+
+		# find the target
+		if not enum['target']:
+			raise Errors.WafError('missing target ' + str(enum))
+		tgt_node = self.path.find_or_declare(enum['target'])
+		if tgt_node.name.endswith('.c'):
+			self.source.append(tgt_node)
+		env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
+
+
+		options = []
+
+		if enum['template']: # template, if provided
+			template_node = self.path.find_resource(enum['template'])
+			options.append('--template %s' % (template_node.abspath()))
+			inputs.append(template_node)
+		params = {'file-head' : '--fhead',
+		           'file-prod' : '--fprod',
+		           'file-tail' : '--ftail',
+		           'enum-prod' : '--eprod',
+		           'value-head' : '--vhead',
+		           'value-prod' : '--vprod',
+		           'value-tail' : '--vtail',
+		           'comments': '--comments'}
+		for param, option in params.items():
+			if enum[param]:
+				options.append('%s %r' % (option, enum[param]))
+
+		env.GLIB_MKENUMS_OPTIONS = ' '.join(options)
+
+		# update the task instance
+		task.set_inputs(inputs)
+		task.set_outputs(tgt_node)
+
+class glib_mkenums(Task.Task):
+	"""
+	Processes enum files
+	"""
+	run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
+	color   = 'PINK'
+	ext_out = ['.h']
+
+######################################### gsettings
+
+@taskgen_method
+def add_settings_schemas(self, filename_list):
+	"""
+	Adds settings files to process to *settings_schema_files*
+
+	:param filename_list: files
+	:type filename_list: list of string
+	"""
+	if not hasattr(self, 'settings_schema_files'):
+		self.settings_schema_files = []
+
+	if not isinstance(filename_list, list):
+		filename_list = [filename_list]
+
+	self.settings_schema_files.extend(filename_list)
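+
+# Usage sketch (hypothetical schema name): schemas are registered on a
+# 'glib2' task generator and validated/installed at build time:
+#
+#     bld(features='glib2', name='schemas').add_settings_schemas(
+#         ['org.example.app.gschema.xml'])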
+
+@taskgen_method
+def add_settings_enums(self, namespace, filename_list):
+	"""
+	Called at most once per task generator to set the enums namespace.
+
+	:param namespace: namespace
+	:type namespace: string
+	:param filename_list: enum files to process
+	:type filename_list: file list
+	"""
+	if hasattr(self, 'settings_enum_namespace'):
+		raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
+	self.settings_enum_namespace = namespace
+
+	if not isinstance(filename_list, list):
+		filename_list = [filename_list]
+	self.settings_enum_files = filename_list
+
+@feature('glib2')
+def process_settings(self):
+	"""
+	Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
+	same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.
+
+	"""
+	enums_tgt_node = []
+	install_files = []
+
+	settings_schema_files = getattr(self, 'settings_schema_files', [])
+	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
+		raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
+
+	# 1. process gsettings_enum_files (generate .enums.xml)
+	#
+	if hasattr(self, 'settings_enum_files'):
+		enums_task = self.create_task('glib_mkenums')
+
+		source_list = self.settings_enum_files
+		source_list = [self.path.find_resource(k) for k in source_list]
+		enums_task.set_inputs(source_list)
+		enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
+
+		target = self.settings_enum_namespace + '.enums.xml'
+		tgt_node = self.path.find_or_declare(target)
+		enums_task.set_outputs(tgt_node)
+		enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
+		enums_tgt_node = [tgt_node]
+
+		install_files.append(tgt_node)
+
+		options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
+		enums_task.env.GLIB_MKENUMS_OPTIONS = options
+
+	# 2. process gsettings_schema_files (validate .gschema.xml files)
+	#
+	for schema in settings_schema_files:
+		schema_task = self.create_task ('glib_validate_schema')
+
+		schema_node = self.path.find_resource(schema)
+		if not schema_node:
+			raise Errors.WafError("Cannot find the schema file %r" % schema)
+		install_files.append(schema_node)
+		source_list = enums_tgt_node + [schema_node]
+
+		schema_task.set_inputs (source_list)
+		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]
+
+		target_node = schema_node.change_ext('.xml.valid')
+		schema_task.set_outputs (target_node)
+		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()
+
+	# 3. schemas install task
+	def compile_schemas_callback(bld):
+		if not bld.is_install:
+			return
+		compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
+		destdir = Options.options.destdir
+		paths = bld._compile_schemas_registered
+		if destdir:
+			paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
+		for path in paths:
+			Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
+			if self.bld.exec_command(compile_schemas + [path]):
+				Logs.warn('Could not update GSettings schema cache %r' % path)
+
+	if self.bld.is_install:
+		schemadir = self.env.GSETTINGSSCHEMADIR
+		if not schemadir:
+			raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
+
+		if install_files:
+			self.add_install_files(install_to=schemadir, install_from=install_files)
+			registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
+			if not registered_schemas:
+				registered_schemas = self.bld._compile_schemas_registered = set()
+				self.bld.add_post_fun(compile_schemas_callback)
+			registered_schemas.add(schemadir)
+
+class glib_validate_schema(Task.Task):
+	"""
+	Validates schema files
+	"""
+	run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
+	color   = 'PINK'
+
+################## gresource
+
+@extension('.gresource.xml')
+def process_gresource_source(self, node):
+	"""
+	Creates tasks that turn ``.gresource.xml`` files to C code
+	"""
+	if not self.env.GLIB_COMPILE_RESOURCES:
+		raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
+
+	if 'gresource' in self.features:
+		return
+
+	h_node = node.change_ext('_xml.h')
+	c_node = node.change_ext('_xml.c')
+	self.create_task('glib_gresource_source', node, [h_node, c_node])
+	self.source.append(c_node)
+
+@feature('gresource')
+def process_gresource_bundle(self):
+	"""
+	Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::
+
+		def build(bld):
+			bld(
+				features='gresource',
+				source=['resources1.gresource.xml', 'resources2.gresource.xml'],
+				install_path='${LIBDIR}/${PACKAGE}'
+			)
+
+	:param source: XML files to process
+	:type source: list of string
+	:param install_path: installation path
+	:type install_path: string
+	"""
+	for i in self.to_list(self.source):
+		node = self.path.find_resource(i)
+
+		task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
+		inst_to = getattr(self, 'install_path', None)
+		if inst_to:
+			self.add_install_files(install_to=inst_to, install_from=task.outputs)
+
+class glib_gresource_base(Task.Task):
+	"""
+	Base class for gresource based tasks
+	"""
+	color    = 'BLUE'
+	base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
+
+	def scan(self):
+		"""
+		Scans gresource dependencies with the ``glib-compile-resources --generate-dependencies`` command
+		"""
+		bld = self.generator.bld
+		kw = {}
+		kw['cwd'] = self.get_cwd()
+		kw['quiet'] = Context.BOTH
+
+		cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
+			self.inputs[0].parent.srcpath(),
+			self.inputs[0].bld_dir(),
+			self.inputs[0].bldpath()
+		), self.env)
+
+		output = bld.cmd_and_log(cmd, **kw)
+
+		nodes = []
+		names = []
+		for dep in output.splitlines():
+			if dep:
+				node = bld.bldnode.find_node(dep)
+				if node:
+					nodes.append(node)
+				else:
+					names.append(dep)
+
+		return (nodes, names)
+
+class glib_gresource_source(glib_gresource_base):
+	"""
+	Task to generate C source code (.h and .c files) from a gresource.xml file
+	"""
+	vars    = ['GLIB_COMPILE_RESOURCES']
+	fun_h   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
+	fun_c   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
+	ext_out = ['.h']
+
+	def run(self):
+		return self.fun_h[0](self) or self.fun_c[0](self)
+
+class glib_gresource_bundle(glib_gresource_base):
+	"""
+	Task to generate a .gresource binary file from a gresource.xml file
+	"""
+	run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
+	shell   = True # temporary workaround for #795
+
+@conf
+def find_glib_genmarshal(conf):
+	conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
+
+@conf
+def find_glib_mkenums(conf):
+	if not conf.env.PERL:
+		conf.find_program('perl', var='PERL')
+	conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
+
+@conf
+def find_glib_compile_schemas(conf):
+	# when cross-compiling, gsettings.m4 locates the program with the following:
+	#   pkg-config --variable glib_compile_schemas gio-2.0
+	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')
+
+	def getstr(varname):
+		return getattr(Options.options, varname, getattr(conf.env,varname, ''))
+
+	gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
+	if not gsettingsschemadir:
+		datadir = getstr('DATADIR')
+		if not datadir:
+			prefix = conf.env.PREFIX
+			datadir = os.path.join(prefix, 'share')
+		gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
+
+	conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
+
+@conf
+def find_glib_compile_resources(conf):
+	conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
+
+def configure(conf):
+	"""
+	Finds the following programs:
+
+	* *glib-genmarshal* and set *GLIB_GENMARSHAL*
+	* *glib-mkenums* and set *GLIB_MKENUMS*
+	* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
+	* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
+	"""
+	conf.find_glib_genmarshal()
+	conf.find_glib_mkenums()
+	conf.find_glib_compile_schemas(mandatory=False)
+	conf.find_glib_compile_resources(mandatory=False)
+
+def options(opt):
+	"""
+	Adds the ``--gsettingsschemadir`` command-line option
+	"""
+	gr = opt.add_option_group('Installation directories')
+	gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
+
diff --git a/third_party/waf/waflib/Tools/gnu_dirs.py b/third_party/waf/waflib/Tools/gnu_dirs.py
new file mode 100644
index 0000000..2847071
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gnu_dirs.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+"""
+Sets various standard variables such as INCLUDEDIR, SBINDIR and others. To use this module just call::
+
+	opt.load('gnu_dirs')
+
+and::
+
+	conf.load('gnu_dirs')
+
+This tool adds the options for the standard GNU directories found in
+autotools and updates the environment with the following installation
+variables:
+
+============== ========================================= =======================
+Variable       Description                               Default Value
+============== ========================================= =======================
+PREFIX         installation prefix                       /usr/local
+EXEC_PREFIX    installation prefix for binaries          PREFIX
+BINDIR         user commands                             EXEC_PREFIX/bin
+SBINDIR        system binaries                           EXEC_PREFIX/sbin
+LIBEXECDIR     program-specific binaries                 EXEC_PREFIX/libexec
+SYSCONFDIR     host-specific configuration               PREFIX/etc
+SHAREDSTATEDIR architecture-independent variable data    PREFIX/com
+LOCALSTATEDIR  variable data                             PREFIX/var
+LIBDIR         object code libraries                     EXEC_PREFIX/lib
+INCLUDEDIR     header files                              PREFIX/include
+OLDINCLUDEDIR  header files for non-GCC compilers        /usr/include
+DATAROOTDIR    architecture-independent data root        PREFIX/share
+DATADIR        architecture-independent data             DATAROOTDIR
+INFODIR        GNU "info" documentation                  DATAROOTDIR/info
+LOCALEDIR      locale-dependent data                     DATAROOTDIR/locale
+MANDIR         manual pages                              DATAROOTDIR/man
+DOCDIR         documentation root                        DATAROOTDIR/doc/APPNAME
+HTMLDIR        HTML documentation                        DOCDIR
+DVIDIR         DVI documentation                         DOCDIR
+PDFDIR         PDF documentation                         DOCDIR
+PSDIR          PostScript documentation                  DOCDIR
+============== ========================================= =======================
+"""
+
+import os, re
+from waflib import Utils, Options, Context
+
+gnuopts = '''
+bindir, user commands, ${EXEC_PREFIX}/bin
+sbindir, system binaries, ${EXEC_PREFIX}/sbin
+libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
+sysconfdir, host-specific configuration, ${PREFIX}/etc
+sharedstatedir, architecture-independent variable data, ${PREFIX}/com
+localstatedir, variable data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib%s
+includedir, header files, ${PREFIX}/include
+oldincludedir, header files for non-GCC compilers, /usr/include
+datarootdir, architecture-independent data root, ${PREFIX}/share
+datadir, architecture-independent data, ${DATAROOTDIR}
+infodir, GNU "info" documentation, ${DATAROOTDIR}/info
+localedir, locale-dependent data, ${DATAROOTDIR}/locale
+mandir, manual pages, ${DATAROOTDIR}/man
+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
+htmldir, HTML documentation, ${DOCDIR}
+dvidir, DVI documentation, ${DOCDIR}
+pdfdir, PDF documentation, ${DOCDIR}
+psdir, PostScript documentation, ${DOCDIR}
+''' % Utils.lib64()
+
+_options = [x.split(', ') for x in gnuopts.splitlines() if x]
+
+def configure(conf):
+	"""
+	Reads the command-line options to set lots of variables in *conf.env*. The variables
+	BINDIR and LIBDIR will be overwritten.
+	"""
+	def get_param(varname, default):
+		return getattr(Options.options, varname, '') or default
+
+	env = conf.env
+	env.LIBDIR = env.BINDIR = []
+	env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
+	env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE
+
+	complete = False
+	iter = 0
+	while not complete and iter < len(_options) + 1:
+		iter += 1
+		complete = True
+		for name, help, default in _options:
+			name = name.upper()
+			if not env[name]:
+				try:
+					env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
+				except TypeError:
+					complete = False
+
+	if not complete:
+		lst = [x for x, _, _ in _options if not env[x.upper()]]
+		raise conf.errors.WafError('Variable substitution failure %r' % lst)
+
+def options(opt):
+	"""
+	Adds lots of command-line options, for example::
+
+		--exec-prefix: EXEC_PREFIX
+	"""
+	inst_dir = opt.add_option_group('Installation prefix',
+'By default, "waf install" will put the files in\
+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
+
+	for k in ('--prefix', '--destdir'):
+		option = opt.parser.get_option(k)
+		if option:
+			opt.parser.remove_option(k)
+			inst_dir.add_option(option)
+
+	inst_dir.add_option('--exec-prefix',
+		help = 'installation prefix for binaries [PREFIX]',
+		default = '',
+		dest = 'EXEC_PREFIX')
+
+	dirs_options = opt.add_option_group('Installation directories')
+
+	for name, help, default in _options:
+		option_name = '--' + name
+		str_default = default
+		str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
+		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
+
diff --git a/third_party/waf/waflib/Tools/gxx.py b/third_party/waf/waflib/Tools/gxx.py
new file mode 100644
index 0000000..22c5d26
--- /dev/null
+++ b/third_party/waf/waflib/Tools/gxx.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+
+"""
+g++/llvm detection.
+"""
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_gxx(conf):
+	"""
+	Finds the program g++ and, if present, tries to detect its version number
+	"""
+	cxx = conf.find_program(['g++', 'c++'], var='CXX')
+	conf.get_cc_version(cxx, gcc=True)
+	conf.env.CXX_NAME = 'gcc'
+
+@conf
+def gxx_common_flags(conf):
+	"""
+	Common flags for g++ on nearly all platforms
+	"""
+	v = conf.env
+
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o']
+
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
+
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
+	v.STLIB_MARKER        = '-Wl,-Bstatic'
+
+	v.cxxprogram_PATTERN  = '%s'
+
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-shared']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = ['-Wl,-Bstatic']
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
+	v.CXXFLAGS_MACBUNDLE  = ['-fPIC']
+	v.macbundle_PATTERN   = '%s.bundle'
+
+@conf
+def gxx_modifier_win32(conf):
+	"""Configuration flags for executing gcc on Windows"""
+	v = conf.env
+	v.cxxprogram_PATTERN  = '%s.exe'
+
+	v.cxxshlib_PATTERN    = '%s.dll'
+	v.implib_PATTERN      = '%s.dll.a'
+	v.IMPLIB_ST           = '-Wl,--out-implib,%s'
+
+	v.CXXFLAGS_cxxshlib   = []
+
+	# Auto-import is enabled by default even without this option,
+	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
+	# that the linker emits otherwise.
+	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
+
+@conf
+def gxx_modifier_cygwin(conf):
+	"""Configuration flags for executing g++ on Cygwin"""
+	gxx_modifier_win32(conf)
+	v = conf.env
+	v.cxxshlib_PATTERN    = 'cyg%s.dll'
+	v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
+	v.CXXFLAGS_cxxshlib   = []
+
+@conf
+def gxx_modifier_darwin(conf):
+	"""Configuration flags for executing g++ on MacOS"""
+	v = conf.env
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-dynamiclib']
+	v.cxxshlib_PATTERN    = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST    = '-F%s'
+	v.FRAMEWORK_ST        = ['-framework']
+	v.ARCH_ST             = ['-arch']
+
+	v.LINKFLAGS_cxxstlib  = []
+
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
+
+@conf
+def gxx_modifier_aix(conf):
+	"""Configuration flags for executing g++ on AIX"""
+	v = conf.env
+	v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
+
+	v.LINKFLAGS_cxxshlib  = ['-shared', '-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER        = []
+
+@conf
+def gxx_modifier_hpux(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.CXXFLAGS_cxxshlib   = ['-fPIC','-DPIC']
+	v.cxxshlib_PATTERN    = 'lib%s.sl'
+
+@conf
+def gxx_modifier_openbsd(conf):
+	conf.env.SONAME_ST = []
+
+@conf
+def gxx_modifier_osf1V(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
+
+@conf
+def gxx_modifier_platform(conf):
+	"""Execute platform-specific functions based on *gxx_modifier_+NAME*"""
+	# * set configurations specific for a platform.
+	# * the destination platform is detected automatically by looking at the macros the compiler predefines,
+	#   and if it's not recognised, it falls back to sys.platform.
+	gxx_modifier_func = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None)
+	if gxx_modifier_func:
+		gxx_modifier_func()
+
+def configure(conf):
+	"""
+	Configuration for g++
+	"""
+	conf.find_gxx()
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+	conf.check_gcc_o_space('cxx')
+
diff --git a/third_party/waf/waflib/Tools/icc.py b/third_party/waf/waflib/Tools/icc.py
new file mode 100644
index 0000000..b6492c8
--- /dev/null
+++ b/third_party/waf/waflib/Tools/icc.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Stian Selnes 2008
+# Thomas Nagy 2009-2018 (ita)
+
+"""
+Detects the Intel C compiler
+"""
+
+import sys
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+
+@conf
+def find_icc(conf):
+	"""
+	Finds the program icc and executes it to ensure it really is icc
+	"""
+	cc = conf.find_program(['icc', 'ICL'], var='CC')
+	conf.get_cc_version(cc, icc=True)
+	conf.env.CC_NAME = 'icc'
+
+def configure(conf):
+	conf.find_icc()
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/Tools/icpc.py b/third_party/waf/waflib/Tools/icpc.py
new file mode 100644
index 0000000..8a6cc6c
--- /dev/null
+++ b/third_party/waf/waflib/Tools/icpc.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2018 (ita)
+
+"""
+Detects the Intel C++ compiler
+"""
+
+import sys
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+
+@conf
+def find_icpc(conf):
+	"""
+	Finds the program icpc and executes it to ensure it really is icpc
+	"""
+	cxx = conf.find_program('icpc', var='CXX')
+	conf.get_cc_version(cxx, icc=True)
+	conf.env.CXX_NAME = 'icc'
+
+def configure(conf):
+	conf.find_icpc()
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/ifort.py b/third_party/waf/waflib/Tools/ifort.py
new file mode 100644
index 0000000..17d3052
--- /dev/null
+++ b/third_party/waf/waflib/Tools/ifort.py
@@ -0,0 +1,413 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# DC 2008
+# Thomas Nagy 2016-2018 (ita)
+
+import os, re, traceback
+from waflib import Utils, Logs, Errors
+from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
+from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
+
+@conf
+def find_ifort(conf):
+	fc = conf.find_program('ifort', var='FC')
+	conf.get_ifort_version(fc)
+	conf.env.FC_NAME = 'IFORT'
+
+@conf
+def ifort_modifier_win32(self):
+	v = self.env
+	v.IFORT_WIN32 = True
+	v.FCSTLIB_MARKER = ''
+	v.FCSHLIB_MARKER = ''
+
+	v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
+	v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
+	v.FCINCPATH_ST = '/I%s'
+	v.FCDEFINES_ST = '/D%s'
+
+	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
+	v.fcshlib_PATTERN = '%s.dll'
+	v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'
+
+	v.FCLNK_TGT_F = '/out:'
+	v.FC_TGT_F = ['/c', '/o', '']
+	v.FCFLAGS_fcshlib = ''
+	v.LINKFLAGS_fcshlib = '/DLL'
+	v.AR_TGT_F = '/out:'
+	v.IMPLIB_ST = '/IMPLIB:%s'
+
+	v.append_value('LINKFLAGS', '/subsystem:console')
+	if v.IFORT_MANIFEST:
+		v.append_value('LINKFLAGS', ['/MANIFEST'])
+
+@conf
+def ifort_modifier_darwin(conf):
+	fc_config.fortran_modifier_darwin(conf)
+
+@conf
+def ifort_modifier_platform(conf):
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
+	ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
+	if ifort_modifier_func:
+		ifort_modifier_func()
+
+@conf
+def get_ifort_version(conf, fc):
+	"""
+	Detects the compiler version and sets ``conf.env.FC_VERSION``
+	"""
+	version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+	if Utils.is_win32:
+		cmd = fc
+	else:
+		cmd = fc + ['-logo']
+
+	out, err = fc_config.getoutput(conf, cmd, stdin=False)
+	match = version_re(out) or version_re(err)
+	if not match:
+		conf.fatal('cannot determine ifort version.')
+	k = match.groupdict()
+	conf.env.FC_VERSION = (k['major'], k['minor'])
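+
+	# Illustrative only: the regex above targets banners such as
+	# "Intel(R) Fortran Compiler ... Version 19.0 ..." (a hypothetical banner);
+	# on such input, match.groupdict() yields {'major': '19', 'minor': '0'}.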
+
+def configure(conf):
+	"""
+	Detects the Intel Fortran compilers
+	"""
+	if Utils.is_win32:
+		compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
+		v = conf.env
+		v.DEST_CPU = arch
+		v.PATH = path
+		v.INCLUDES = includes
+		v.LIBPATH = libdirs
+		v.MSVC_COMPILER = compiler
+		try:
+			v.MSVC_VERSION = float(version)
+		except ValueError:
+			v.MSVC_VERSION = float(version[:-3])
+
+		conf.find_ifort_win32()
+		conf.ifort_modifier_win32()
+	else:
+		conf.find_ifort()
+		conf.find_program('xiar', var='AR')
+		conf.find_ar()
+		conf.fc_flags()
+		conf.fc_add_flags()
+		conf.ifort_modifier_platform()
+
+
+all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+"""List of icl platforms"""
+
+@conf
+def gather_ifort_versions(conf, versions):
+	"""
+	List compiler versions by looking up registry keys
+	"""
+	version_pattern = re.compile(r'^...?.?\....?.?')
+	try:
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
+		except OSError:
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		targets = {}
+		for target,arch in all_ifort_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file=os.path.join(path,'bin','ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+
+		for target,arch in all_ifort_platforms:
+			try:
+				icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
+				path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				continue
+			else:
+				batch_file=os.path.join(path,'bin','ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+		major = version[0:2]
+		versions['intel ' + major] = targets
+
+@conf
+def setup_ifort(conf, versiondict):
+	"""
+	Checks installed compilers and targets and returns the first combination that validates,
+	chosen from the user's options, the environment, or the globally supported lists.
+
+	:param versiondict: dict(platform -> dict(architecture -> configuration))
+	:type versiondict: dict(string -> dict(string -> target_compiler)
+	:return: the compiler, revision, path, include dirs, library paths and target architecture
+	:rtype: tuple of strings
+	"""
+	platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
+	desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
+	for version in desired_versions:
+		try:
+			targets = versiondict[version]
+		except KeyError:
+			continue
+		for arch in platforms:
+			try:
+				cfg = targets[arch]
+			except KeyError:
+				continue
+			cfg.evaluate()
+			if cfg.is_valid:
+				compiler,revision = version.rsplit(' ', 1)
+				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+	conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
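+
+# Shape of ``versiondict``, for illustration (values are hypothetical):
+#
+#	{'intel 19': {'intel64': <target_compiler>, 'ia32': <target_compiler>}}
+#
+# i.e. keys are '<compiler> <major>' strings as built by gather_ifort_versions,
+# and each inner dict maps a target architecture to a target_compiler instance.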
+
+@conf
+def get_ifort_version_win32(conf, compiler, version, target, vcvars):
+	# FIXME hack
+	try:
+		conf.msvc_cnt += 1
+	except AttributeError:
+		conf.msvc_cnt = 1
+	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
+	batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+""" % (vcvars,target))
+	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
+	batfile.delete()
+	lines = sout.splitlines()
+
+	if not lines[0]:
+		lines.pop(0)
+
+	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
+	for line in lines:
+		if line.startswith('PATH='):
+			path = line[5:]
+			MSVC_PATH = path.split(';')
+		elif line.startswith('INCLUDE='):
+			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
+		elif line.startswith('LIB='):
+			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
+	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
+		conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
+
+	# Check if the compiler is usable at all.
+	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
+	env = dict(os.environ)
+	env.update(PATH = path)
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	fc = conf.find_program(compiler_name, path_list=MSVC_PATH)
+
+	# delete CL if it exists, because it could contain parameters that change cl's behaviour rather catastrophically.
+	if 'CL' in env:
+		del(env['CL'])
+
+	try:
+		conf.cmd_and_log(fc + ['/help'], env=env)
+	except UnicodeError:
+		st = traceback.format_exc()
+		if conf.logger:
+			conf.logger.error(st)
+		conf.fatal('ifort: Unicode error - check the code page?')
+	except Exception as e:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
+		conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
+	else:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
+	finally:
+		conf.env[compiler_name] = ''
+
+	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
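+
+# For reference, the batch file above is expected to print lines of the form
+# (paths are hypothetical):
+#
+#	PATH=C:\Program Files (x86)\Intel\bin;...
+#	INCLUDE=C:\Program Files (x86)\Intel\include;...
+#	LIB=C:\Program Files (x86)\Intel\lib;...
+#
+# which the parsing loop above turns into MSVC_PATH, MSVC_INCDIR and MSVC_LIBDIR.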
+
+class target_compiler(object):
+	"""
+	Wraps a compiler configuration; call evaluate() to determine
+	whether the configuration is usable.
+	"""
+	def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
+		"""
+		:param ctx: configuration context to use to eventually get the version environment
+		:param compiler: compiler name
+		:param cpu: target cpu
+		:param version: compiler version number
+		:param bat_target: ?
+		:param bat: path to the batch file to run
+		:param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
+		"""
+		self.conf = ctx
+		self.name = None
+		self.is_valid = False
+		self.is_done = False
+
+		self.compiler = compiler
+		self.cpu = cpu
+		self.version = version
+		self.bat_target = bat_target
+		self.bat = bat
+		self.callback = callback
+
+	def evaluate(self):
+		if self.is_done:
+			return
+		self.is_done = True
+		try:
+			vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
+		except Errors.ConfigurationError:
+			self.is_valid = False
+			return
+		if self.callback:
+			vs = self.callback(self, vs)
+		self.is_valid = True
+		(self.bindirs, self.incdirs, self.libdirs) = vs
+
+	def __str__(self):
+		return str((self.bindirs, self.incdirs, self.libdirs))
+
+	def __repr__(self):
+		return repr((self.bindirs, self.incdirs, self.libdirs))
+
+@conf
+def detect_ifort(self):
+	return self.setup_ifort(self.get_ifort_versions(False))
+
+@conf
+def get_ifort_versions(self, eval_and_save=True):
+	"""
+	:return: platforms to compiler configurations
+	:rtype: dict
+	"""
+	dct = {}
+	self.gather_ifort_versions(dct)
+	return dct
+
+def _get_prog_names(self, compiler):
+	if compiler=='intel':
+		compiler_name = 'ifort'
+		linker_name = 'XILINK'
+		lib_name = 'XILIB'
+	else:
+		# assumes CL.exe
+		compiler_name = 'CL'
+		linker_name = 'LINK'
+		lib_name = 'LIB'
+	return compiler_name, linker_name, lib_name
+
+@conf
+def find_ifort_win32(conf):
+	# the autodetection is supposed to be performed before entering this method
+	v = conf.env
+	path = v.PATH
+	compiler = v.MSVC_COMPILER
+	version = v.MSVC_VERSION
+
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
+
+	# compiler
+	fc = conf.find_program(compiler_name, var='FC', path_list=path)
+
+	# before setting anything, check if the compiler is really intel fortran
+	env = dict(conf.environ)
+	if path:
+		env.update(PATH = ';'.join(path))
+	if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
+		conf.fatal('an Intel Fortran compiler could not be identified')
+
+	v.FC_NAME = 'IFORT'
+
+	if not v.LINK_FC:
+		conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
+
+	if not v.AR:
+		conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
+		v.ARFLAGS = ['/nologo']
+
+	# manifest tool. Not required for VS 2003 and below, but required for VS 2005 and later
+	if v.IFORT_MANIFEST:
+		conf.find_program('MT', path_list=path, var='MT')
+		v.MTFLAGS = ['/nologo']
+
+	try:
+		conf.load('winres')
+	except Errors.WafError:
+		Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
+#######################################################################################################
+##### conf above, build below
+
+@after_method('apply_link')
+@feature('fc')
+def apply_flags_ifort(self):
+	"""
+	Adds additional flags implied by msvc, such as subsystems and pdb files::
+
+		def build(bld):
+			bld.stlib(source='main.c', target='bar', subsystem='gruik')
+	"""
+	if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
+		return
+
+	is_static = isinstance(self.link_task, ccroot.stlink_task)
+
+	subsystem = getattr(self, 'subsystem', '')
+	if subsystem:
+		subsystem = '/subsystem:%s' % subsystem
+		flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
+		self.env.append_value(flags, subsystem)
+
+	if not is_static:
+		for f in self.env.LINKFLAGS:
+			d = f.lower()
+			if d[1:] == 'debug':
+				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
+				self.link_task.outputs.append(pdbnode)
+
+				if getattr(self, 'install_task', None):
+					self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)
+
+				break
+
+@feature('fcprogram', 'fcshlib', 'fcprogram_test')
+@after_method('apply_link')
+def apply_manifest_ifort(self):
+	"""
+	Enables manifest embedding in Fortran DLLs when using ifort on Windows
+	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
+	"""
+	if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
+		# it seems ifort.exe cannot be called for linking
+		self.link_task.env.FC = self.env.LINK_FC
+
+	if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
+		out_node = self.link_task.outputs[0]
+		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+		self.link_task.outputs.append(man_node)
+		self.env.DO_MANIFEST = True
+
diff --git a/third_party/waf/waflib/Tools/intltool.py b/third_party/waf/waflib/Tools/intltool.py
new file mode 100644
index 0000000..af95ba8
--- /dev/null
+++ b/third_party/waf/waflib/Tools/intltool.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+Support for translation tools such as msgfmt and intltool
+
+Usage::
+
+	def configure(conf):
+		conf.load('gnu_dirs intltool')
+
+	def build(bld):
+		# process the .po files into .gmo files, and install them in LOCALEDIR
+		bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+		# process an input file, substituting the translations from the po dir
+		bld(
+			features  = "intltool_in",
+			podir     = "../po",
+			style     = "desktop",
+			flags     = ["-u"],
+			source    = 'kupfer.desktop.in',
+			install_path = "${DATADIR}/applications",
+		)
+
+Usage of the :py:mod:`waflib.Tools.gnu_dirs` tool is recommended, but not obligatory.
+"""
+
+from __future__ import with_statement
+
+import os, re
+from waflib import Context, Task, Utils, Logs
+import waflib.Tools.ccroot
+from waflib.TaskGen import feature, before_method, taskgen_method
+from waflib.Logs import error
+from waflib.Configure import conf
+
+_style_flags = {
+	'ba': '-b',
+	'desktop': '-d',
+	'keys': '-k',
+	'quoted': '--quoted-style',
+	'quotedxml': '--quotedxml-style',
+	'rfc822deb': '-r',
+	'schemas': '-s',
+	'xml': '-x',
+}
+
+@taskgen_method
+def ensure_localedir(self):
+	"""
+	Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
+	"""
+	# use the tool gnu_dirs to provide options to define this
+	if not self.env.LOCALEDIR:
+		if self.env.DATAROOTDIR:
+			self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
+		else:
+			self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
+
+@before_method('process_source')
+@feature('intltool_in')
+def apply_intltool_in_f(self):
+	"""
+	Creates tasks to translate files by intltool-merge::
+
+		def build(bld):
+			bld(
+				features  = "intltool_in",
+				podir     = "../po",
+				style     = "desktop",
+				flags     = ["-u"],
+				source    = 'kupfer.desktop.in',
+				install_path = "${DATADIR}/applications",
+			)
+
+	:param podir: location of the .po files
+	:type podir: string
+	:param source: source files to process
+	:type source: list of string
+	:param style: the intltool-merge mode of operation, can be one of the following values:
+	  ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
+	  See the ``intltool-merge`` man page for more information about supported modes of operation.
+	:type style: string
+	:param flags: compilation flags ("-quc" by default)
+	:type flags: list of string
+	:param install_path: installation path
+	:type install_path: string
+	"""
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
+
+	self.ensure_localedir()
+
+	podir = getattr(self, 'podir', '.')
+	podirnode = self.path.find_dir(podir)
+	if not podirnode:
+		error("could not find the podir %r" % podir)
+		return
+
+	cache = getattr(self, 'intlcache', '.intlcache')
+	self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
+	self.env.INTLPODIR = podirnode.bldpath()
+	self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
+
+	if '-c' in self.env.INTLFLAGS:
+		self.bld.fatal('Redundant -c flag in intltool task %r' % self)
+
+	style = getattr(self, 'style', None)
+	if style:
+		try:
+			style_flag = _style_flags[style]
+		except KeyError:
+			self.bld.fatal('intltool_in style "%s" is not valid' % style)
+
+		self.env.append_unique('INTLFLAGS', [style_flag])
+
+	for i in self.to_list(self.source):
+		node = self.path.find_resource(i)
+
+		task = self.create_task('intltool', node, node.change_ext(''))
+		inst = getattr(self, 'install_path', None)
+		if inst:
+			self.add_install_files(install_to=inst, install_from=task.outputs)
+
+@feature('intltool_po')
+def apply_intltool_po(self):
+	"""
+	Creates tasks to process po files::
+
+		def build(bld):
+			bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
+
+	The relevant task generator arguments are:
+
+	:param podir: directory of the .po files
+	:type podir: string
+	:param appname: name of the application
+	:type appname: string
+	:param install_path: installation directory
+	:type install_path: string
+
+	The file LINGUAS must be present in the directory pointed to by *podir* and list the translation files to process.
+	"""
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
+
+	self.ensure_localedir()
+
+	appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
+	podir = getattr(self, 'podir', '.')
+	inst = getattr(self, 'install_path', '${LOCALEDIR}')
+
+	linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
+	if linguas:
+		# scan LINGUAS file for locales to process
+		with open(linguas.abspath()) as f:
+			langs = []
+			for line in f.readlines():
+				# ignore comment lines (starting with '#')
+				if not line.startswith('#'):
+					langs += line.split()
+		re_linguas = re.compile('[-a-zA-Z_@.]+')
+		for lang in langs:
+			# Make sure that we only process lines which contain locales
+			if re_linguas.match(lang):
+				node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
+				task = self.create_task('po', node, node.change_ext('.mo'))
+
+				if inst:
+					filename = task.outputs[0].name
+					(langname, ext) = os.path.splitext(filename)
+					inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
+					self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
+						chmod=getattr(self, 'chmod', Utils.O644))
+
+	else:
+		Logs.pprint('RED', "Error no LINGUAS file found in po directory")
+
+class po(Task.Task):
+	"""
+	Compiles .po files into .gmo files
+	"""
+	run_str = '${MSGFMT} -o ${TGT} ${SRC}'
+	color   = 'BLUE'
+
+class intltool(Task.Task):
+	"""
+	Calls intltool-merge to update translation files
+	"""
+	run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+	color   = 'BLUE'
+
+@conf
+def find_msgfmt(conf):
+	"""
+	Detects msgfmt and sets the ``MSGFMT`` variable
+	"""
+	conf.find_program('msgfmt', var='MSGFMT')
+
+@conf
+def find_intltool_merge(conf):
+	"""
+	Detects intltool-merge
+	"""
+	if not conf.env.PERL:
+		conf.find_program('perl', var='PERL')
+	conf.env.INTLCACHE_ST = '--cache=%s'
+	conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
+	conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
+
+def configure(conf):
+	"""
+	Detects the program *msgfmt* and sets *conf.env.MSGFMT*.
+	Detects the program *intltool-merge* and sets *conf.env.INTLTOOL*.
+	It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
+
+		$ INTLTOOL="/path/to/the program/intltool" waf configure
+
+	If a C/C++ compiler is present, a compilation test is executed to find the header *locale.h*.
+	"""
+	conf.find_msgfmt()
+	conf.find_intltool_merge()
+	if conf.env.CC or conf.env.CXX:
+		conf.check(header_name='locale.h')
+
diff --git a/third_party/waf/waflib/Tools/irixcc.py b/third_party/waf/waflib/Tools/irixcc.py
new file mode 100644
index 0000000..0335c13
--- /dev/null
+++ b/third_party/waf/waflib/Tools/irixcc.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# imported from samba
+
+"""
+Compiler definition for the IRIX/MIPSpro cc compiler
+"""
+
+from waflib import Errors
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_irixcc(conf):
+	v = conf.env
+	cc = conf.find_program('cc', var='CC')
+	try:
+		conf.cmd_and_log(cc + ['-version'])
+	except Errors.WafError:
+		conf.fatal('%r -version could not be executed' % cc)
+	v.CC_NAME = 'irix'
+
+@conf
+def irixcc_common_flags(conf):
+	v = conf.env
+
+	v.CC_SRC_F            = ''
+	v.CC_TGT_F            = ['-c', '-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = ''
+	v.CCLNK_TGT_F         = ['-o']
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+
+	v.cprogram_PATTERN    = '%s'
+	v.cshlib_PATTERN      = 'lib%s.so'
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+	conf.find_irixcc()
+	conf.find_ar()
+	conf.irixcc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/javaw.py b/third_party/waf/waflib/Tools/javaw.py
new file mode 100644
index 0000000..b7f5dd1
--- /dev/null
+++ b/third_party/waf/waflib/Tools/javaw.py
@@ -0,0 +1,593 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+Java support
+
+Javac is one of the few compilers that behaves very badly:
+
+#. it outputs files where it wants to (-d is only for the package root)
+
+#. it recompiles files silently behind your back
+
+#. it outputs an undefined number of files (inner classes)
+
+Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
+running one of the following commands::
+
+   ./waf configure
+   python waf configure
+
+You would have to run::
+
+   java -jar /path/to/jython.jar waf configure
+
+[1] http://www.jython.org/
+
+Usage
+=====
+
+Load the "java" tool.
+
+def configure(conf):
+	conf.load('java')
+
+Java tools will be autodetected and eventually, if present, the quite
+standard JAVA_HOME environment variable will be used. The also standard
+CLASSPATH variable is used for library searching.
+
+In configuration phase checks can be done on the system environment, for
+example to check if a class is known in the classpath::
+
+	conf.check_java_class('java.io.FileOutputStream')
+
+or if the system supports JNI applications building::
+
+	conf.check_jni_headers()
+
+
+The java tool supports compiling java code, creating jar files and
+creating javadoc documentation. This can be either done separately or
+together in a single definition. For example to manage them separately::
+
+	bld(features  = 'javac',
+		srcdir    = 'src',
+		compat    = '1.7',
+		use       = 'animals',
+		name      = 'cats-src',
+	)
+
+	bld(features  = 'jar',
+		basedir   = '.',
+		destfile  = '../cats.jar',
+		name      = 'cats',
+		use       = 'cats-src'
+	)
+
+
+Or together by defining all the needed attributes::
+
+	bld(features   = 'javac jar javadoc',
+		srcdir     = 'src/',  # folder containing the sources to compile
+		outdir     = 'src',   # folder where to output the classes (in the build directory)
+		compat     = '1.6',   # java compatibility version number
+		classpath  = ['.', '..'],
+
+		# jar
+		basedir    = 'src', # folder containing the classes and other files to package (must match outdir)
+		destfile   = 'foo.jar', # do not put the destfile in the folder of the java classes!
+		use        = 'NNN',
+		jaropts    = ['-C', 'default/src/', '.'], # can be used to give files
+		manifest   = 'src/Manifest.mf', # Manifest file to include
+
+		# javadoc
+		javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'],
+		javadoc_output  = 'javadoc',
+	)
+
+External jar dependencies can be mapped to a standard waf "use" dependency by
+setting an environment variable with a CLASSPATH prefix in the configuration,
+for example::
+
+	conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar']
+
+and then NNN can be freely used in rules as::
+
+	use        = 'NNN',
+
+In the java tool, dependencies via *use* are not transitive by default, as
+whether this is needed depends on the code. To enable recursive dependency
+scanning on a specific rule::
+
+		recurse_use = True
+
+Or build-wide, by setting RECURSE_JAVA::
+
+		bld.env.RECURSE_JAVA = True
+
+Unit tests can be integrated in the waf unit test environment using the javatest extra.
+"""
+
+import os, shutil
+from waflib import Task, Utils, Errors, Node
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method, after_method, taskgen_method
+
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])
+
+SOURCE_RE = '**/*.java'
+JAR_RE = '**/*'
+
+class_check_source = '''
+public class Test {
+	public static void main(String[] argv) {
+		Class lib;
+		if (argv.length < 1) {
+			System.err.println("Missing argument");
+			System.exit(77);
+		}
+		try {
+			lib = Class.forName(argv[0]);
+		} catch (ClassNotFoundException e) {
+			System.err.println("ClassNotFoundException");
+			System.exit(1);
+		}
+		lib = null;
+		System.exit(0);
+	}
+}
+'''
+
+@feature('javac')
+@before_method('process_source')
+def apply_java(self):
+	"""
+	Creates a javac task for compiling *.java* files. There can be
+	only one javac task per task generator.
+	"""
+	Utils.def_attrs(self, jarname='', classpath='',
+		sourcepath='.', srcdir='.',
+		jar_mf_attributes={}, jar_mf_classpath=[])
+
+	outdir = getattr(self, 'outdir', None)
+	if outdir:
+		if not isinstance(outdir, Node.Node):
+			outdir = self.path.get_bld().make_node(self.outdir)
+	else:
+		outdir = self.path.get_bld()
+	outdir.mkdir()
+	self.outdir = outdir
+	self.env.OUTDIR = outdir.abspath()
+
+	self.javac_task = tsk = self.create_task('javac')
+	tmp = []
+
+	srcdir = getattr(self, 'srcdir', '')
+	if isinstance(srcdir, Node.Node):
+		srcdir = [srcdir]
+	for x in Utils.to_list(srcdir):
+		if isinstance(x, Node.Node):
+			y = x
+		else:
+			y = self.path.find_dir(x)
+			if not y:
+				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
+		tmp.append(y)
+
+	tsk.srcdir = tmp
+
+	if getattr(self, 'compat', None):
+		tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])
+
+	if hasattr(self, 'sourcepath'):
+		fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
+		names = os.pathsep.join([x.srcpath() for x in fold])
+	else:
+		names = [x.srcpath() for x in tsk.srcdir]
+
+	if names:
+		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
+
+
+@taskgen_method
+def java_use_rec(self, name, **kw):
+	"""
+	Recursively processes the *use* attribute for each referenced java compilation
+	"""
+	if name in self.tmp_use_seen:
+		return
+
+	self.tmp_use_seen.append(name)
+
+	try:
+		y = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.uselib.append(name)
+		return
+	else:
+		y.post()
+		# Add generated JAR name for CLASSPATH. Task ordering (set_run_after)
+		# is already guaranteed by ordering done between the single tasks
+		if hasattr(y, 'jar_task'):
+			self.use_lst.append(y.jar_task.outputs[0].abspath())
+		else:
+			if hasattr(y,'outdir'):
+				self.use_lst.append(y.outdir.abspath())
+			else:
+				self.use_lst.append(y.path.get_bld().abspath())
+
+	for x in self.to_list(getattr(y, 'use', [])):
+		self.java_use_rec(x)
+
+@feature('javac')
+@before_method('propagate_uselib_vars')
+@after_method('apply_java')
+def use_javac_files(self):
+	"""
+	Processes the *use* attribute referring to other java compilations
+	"""
+	self.use_lst = []
+	self.tmp_use_seen = []
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	names = self.to_list(getattr(self, 'use', []))
+	get = self.bld.get_tgen_by_name
+	for x in names:
+		try:
+			tg = get(x)
+		except Errors.WafError:
+			self.uselib.append(x)
+		else:
+			tg.post()
+			if hasattr(tg, 'jar_task'):
+				self.use_lst.append(tg.jar_task.outputs[0].abspath())
+				self.javac_task.set_run_after(tg.jar_task)
+				self.javac_task.dep_nodes.extend(tg.jar_task.outputs)
+			else:
+				if hasattr(tg, 'outdir'):
+					base_node = tg.outdir
+				else:
+					base_node = tg.path.get_bld()
+
+				self.use_lst.append(base_node.abspath())
+				self.javac_task.dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+
+				for tsk in tg.tasks:
+					self.javac_task.set_run_after(tsk)
+
+		# If recurse use scan is enabled recursively add use attribute for each used one
+		if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA:
+			self.java_use_rec(x)
+
+	self.env.append_value('CLASSPATH', self.use_lst)
+
+@feature('javac')
+@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
+def set_classpath(self):
+	"""
+	Sets the CLASSPATH value on the *javac* task previously created.
+	"""
+	if getattr(self, 'classpath', None):
+		self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
+	for x in self.tasks:
+		x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
+
+@feature('jar')
+@after_method('apply_java', 'use_javac_files')
+@before_method('process_source')
+def jar_files(self):
+	"""
+	Creates a jar task (at most one per task generator)
+	"""
+	destfile = getattr(self, 'destfile', 'test.jar')
+	jaropts = getattr(self, 'jaropts', [])
+	manifest = getattr(self, 'manifest', None)
+
+	basedir = getattr(self, 'basedir', None)
+	if basedir:
+		if not isinstance(self.basedir, Node.Node):
+			basedir = self.path.get_bld().make_node(basedir)
+	else:
+		basedir = self.path.get_bld()
+	if not basedir:
+		self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))
+
+	self.jar_task = tsk = self.create_task('jar_create')
+	if manifest:
+		jarcreate = getattr(self, 'jarcreate', 'cfm')
+		if not isinstance(manifest,Node.Node):
+			node = self.path.find_resource(manifest)
+		else:
+			node = manifest
+		if not node:
+			self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
+		tsk.dep_nodes.append(node)
+		jaropts.insert(0, node.abspath())
+	else:
+		jarcreate = getattr(self, 'jarcreate', 'cf')
+	if not isinstance(destfile, Node.Node):
+		destfile = self.path.find_or_declare(destfile)
+	if not destfile:
+		self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
+	tsk.set_outputs(destfile)
+	tsk.basedir = basedir
+
+	jaropts.append('-C')
+	jaropts.append(basedir.bldpath())
+	jaropts.append('.')
+
+	tsk.env.JAROPTS = jaropts
+	tsk.env.JARCREATE = jarcreate
+
+	if getattr(self, 'javac_task', None):
+		tsk.set_run_after(self.javac_task)
+
+@feature('jar')
+@after_method('jar_files')
+def use_jar_files(self):
+	"""
+	Processes the *use* attribute to set the build order on the
+	tasks created by another task generator.
+	"""
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	names = self.to_list(getattr(self, 'use', []))
+	get = self.bld.get_tgen_by_name
+	for x in names:
+		try:
+			y = get(x)
+		except Errors.WafError:
+			self.uselib.append(x)
+		else:
+			y.post()
+			self.jar_task.run_after.update(y.tasks)
+
+class JTask(Task.Task):
+	"""
+	Base class for java and jar tasks; provides functionality to run long commands
+	"""
+	def split_argfile(self, cmd):
+		inline = [cmd[0]]
+		infile = []
+		for x in cmd[1:]:
+			# jar and javac do not want -J flags in @file
+			if x.startswith('-J'):
+				inline.append(x)
+			else:
+				infile.append(self.quote_flag(x))
+		return (inline, infile)
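+
+	# Illustrative: for cmd = ['javac', '-J-Xmx512m', 'Foo.java'], this yields
+	# inline = ['javac', '-J-Xmx512m'] and infile = ['Foo.java'], so JVM flags
+	# stay on the command line while the rest goes into the @argfile.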
+
+class jar_create(JTask):
+	"""
+	Creates a jar file
+	"""
+	color   = 'GREEN'
+	run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
+
+	def runnable_status(self):
+		"""
+		Waits for dependent tasks to be executed, then reads the
+		files to update the list of inputs.
+		"""
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+		if not self.inputs:
+			try:
+				self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False, quiet=True) if id(x) != id(self.outputs[0])]
+			except Exception:
+				raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
+		return super(jar_create, self).runnable_status()
+
+class javac(JTask):
+	"""
+	Compiles java files
+	"""
+	color   = 'BLUE'
+	run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
+	vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
+	"""
+	The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
+	"""
+	def uid(self):
+		"""Identify java tasks by input&output folder"""
+		lst = [self.__class__.__name__, self.generator.outdir.abspath()]
+		for x in self.srcdir:
+			lst.append(x.abspath())
+		return Utils.h_list(lst)
+
+	def runnable_status(self):
+		"""
+		Waits for dependent tasks to be complete, then reads the file system to find the input nodes.
+		"""
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		if not self.inputs:
+			self.inputs  = []
+			for x in self.srcdir:
+				if x.exists():
+					self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False, quiet=True))
+		return super(javac, self).runnable_status()
+
+	def post_run(self):
+		"""
+		List class files created
+		"""
+		for node in self.generator.outdir.ant_glob('**/*.class', quiet=True):
+			self.generator.bld.node_sigs[node] = self.uid()
+		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
+
+@feature('javadoc')
+@after_method('process_rule')
+def create_javadoc(self):
+	"""
+	Creates a javadoc task (feature 'javadoc')
+	"""
+	tsk = self.create_task('javadoc')
+	tsk.classpath = getattr(self, 'classpath', [])
+	self.javadoc_package = Utils.to_list(self.javadoc_package)
+	if not isinstance(self.javadoc_output, Node.Node):
+		self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
+
+class javadoc(Task.Task):
+	"""
+	Builds java documentation
+	"""
+	color = 'BLUE'
+
+	def __str__(self):
+		return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)
+
+	def run(self):
+		env = self.env
+		bld = self.generator.bld
+		wd = bld.bldnode
+
+		#add src node + bld node (for generated java code)
+		srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
+		srcpath += os.pathsep
+		srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir
+
+		classpath = env.CLASSPATH
+		classpath += os.pathsep
+		classpath += os.pathsep.join(self.classpath)
+		classpath = "".join(classpath)
+
+		self.last_cmd = lst = []
+		lst.extend(Utils.to_list(env.JAVADOC))
+		lst.extend(['-d', self.generator.javadoc_output.abspath()])
+		lst.extend(['-sourcepath', srcpath])
+		lst.extend(['-classpath', classpath])
+		lst.extend(['-subpackages'])
+		lst.extend(self.generator.javadoc_package)
+		lst = [x for x in lst if x]
+
+		self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
+
+	def post_run(self):
+		nodes = self.generator.javadoc_output.ant_glob('**', quiet=True)
+		for node in nodes:
+			self.generator.bld.node_sigs[node] = self.uid()
+		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
+
+def configure(self):
+	"""
+	Detects the javac, java and jar programs
+	"""
+	# If JAVA_HOME is set, its bin directory is prepended to the path list
+	java_path = self.environ['PATH'].split(os.pathsep)
+	v = self.env
+
+	if 'JAVA_HOME' in self.environ:
+		java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
+		self.env.JAVA_HOME = [self.environ['JAVA_HOME']]
+
+	for x in 'javac java jar javadoc'.split():
+		self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc',)))
+
+	if 'CLASSPATH' in self.environ:
+		v.CLASSPATH = self.environ['CLASSPATH']
+
+	if not v.JAR:
+		self.fatal('jar is required for making java packages')
+	if not v.JAVAC:
+		self.fatal('javac is required for compiling java classes')
+
+	v.JARCREATE = 'cf' # can use cvf
+	v.JAVACFLAGS = []
+
+@conf
+def check_java_class(self, classname, with_classpath=None):
+	"""
+	Checks if the specified java class exists
+
+	:param classname: class to check, like java.util.HashMap
+	:type classname: string
+	:param with_classpath: additional classpath to give
+	:type with_classpath: string
+	"""
+	javatestdir = '.waf-javatest'
+
+	classpath = javatestdir
+	if self.env.CLASSPATH:
+		classpath += os.pathsep + self.env.CLASSPATH
+	if isinstance(with_classpath, str):
+		classpath += os.pathsep + with_classpath
+
+	shutil.rmtree(javatestdir, True)
+	os.mkdir(javatestdir)
+
+	Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
+
+	# Compile the source
+	self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)
+
+	# Try to run the app
+	cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
+	self.to_log("%s\n" % str(cmd))
+	found = self.exec_command(cmd, shell=False)
+
+	self.msg('Checking for java class %s' % classname, not found)
+
+	shutil.rmtree(javatestdir, True)
+
+	return found
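+
+# Illustrative configuration-time usage (the class and jar path are hypothetical):
+#
+#	def configure(conf):
+#		conf.load('java')
+#		conf.check_java_class('org.example.Widget', with_classpath='libs/widget.jar')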
+
+@conf
+def check_jni_headers(conf):
+	"""
+	Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
+
+		def options(opt):
+			opt.load('compiler_c')
+
+		def configure(conf):
+			conf.load('compiler_c java')
+			conf.check_jni_headers()
+
+		def build(bld):
+			bld.shlib(source='a.c', target='app', use='JAVA')
+	"""
+	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
+		conf.fatal('load a compiler first (gcc, g++, ..)')
+
+	if not conf.env.JAVA_HOME:
+		conf.fatal('set JAVA_HOME in the system environment')
+
+	# jni requires the jvm
+	javaHome = conf.env.JAVA_HOME[0]
+
+	dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
+	if dir is None:
+		dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
+	if dir is None:
+		conf.fatal('JAVA_HOME does not seem to be set properly')
+
+	f = dir.ant_glob('**/(jni|jni_md).h')
+	incDirs = [x.parent.abspath() for x in f]
+
+	dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
+	f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
+	libDirs = [x.parent.abspath() for x in f] or [javaHome]
+
+	# On windows, we need both the .dll and .lib to link.  On my JDK, they are
+	# in different directories...
+	f = dir.ant_glob('**/*jvm.(lib)')
+	if f:
+		libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]
+
+	if conf.env.DEST_OS == 'freebsd':
+		conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
+	for d in libDirs:
+		try:
+			conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
+				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
+		except Exception:
+			pass
+		else:
+			break
+	else:
+		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
+
diff --git a/third_party/waf/waflib/Tools/ldc2.py b/third_party/waf/waflib/Tools/ldc2.py
new file mode 100644
index 0000000..a51c344
--- /dev/null
+++ b/third_party/waf/waflib/Tools/ldc2.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Alex Rønne Petersen, 2012 (alexrp/Zor)
+
+from waflib.Tools import ar, d
+from waflib.Configure import conf
+
+@conf
+def find_ldc2(conf):
+	"""
+	Finds the program *ldc2* and sets the variable *D*
+	"""
+	conf.find_program(['ldc2'], var='D')
+
+	out = conf.cmd_and_log(conf.env.D + ['-version'])
+	if out.find("based on DMD v2.") == -1:
+		conf.fatal("detected compiler is not ldc2")
+
+@conf
+def common_flags_ldc2(conf):
+	"""
+	Sets the D flags required by *ldc2*
+	"""
+	v = conf.env
+
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-of%s'
+
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-of%s'
+	v.DINC_ST           = '-I%s'
+
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-L-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L-L%s'
+
+	v.LINKFLAGS_dshlib  = ['-L-shared']
+
+	v.DHEADER_ext       = '.di'
+	v.DFLAGS_d_with_header = ['-H', '-Hf']
+	v.D_HDR_F           = '%s'
+
+	v.LINKFLAGS     = []
+	v.DFLAGS_dshlib = ['-relocation-model=pic']
+
+def configure(conf):
+	"""
+	Configuration for *ldc2*
+	"""
+	conf.find_ldc2()
+	conf.load('ar')
+	conf.load('d')
+	conf.common_flags_ldc2()
+	conf.d_platform_flags()
+
diff --git a/third_party/waf/waflib/Tools/lua.py b/third_party/waf/waflib/Tools/lua.py
new file mode 100644
index 0000000..15a333a
--- /dev/null
+++ b/third_party/waf/waflib/Tools/lua.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Sebastian Schlingmann, 2008
+# Thomas Nagy, 2008-2018 (ita)
+
+"""
+Lua support.
+
+Compile *.lua* files into *.luac*::
+
+	def configure(conf):
+		conf.load('lua')
+		conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
+	def build(bld):
+		bld(source='foo.lua')
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+
+@extension('.lua')
+def add_lua(self, node):
+	tsk = self.create_task('luac', node, node.change_ext('.luac'))
+	inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
+	if inst_to:
+		self.add_install_files(install_to=inst_to, install_from=tsk.outputs)
+	return tsk
+
+class luac(Task.Task):
+	run_str = '${LUAC} -s -o ${TGT} ${SRC}'
+	color   = 'PINK'
+
+def configure(conf):
+	"""
+	Detects the luac compiler and sets *conf.env.LUAC*
+	"""
+	conf.find_program('luac', var='LUAC')
+
diff --git a/third_party/waf/waflib/Tools/md5_tstamp.py b/third_party/waf/waflib/Tools/md5_tstamp.py
new file mode 100644
index 0000000..d1569fa
--- /dev/null
+++ b/third_party/waf/waflib/Tools/md5_tstamp.py
@@ -0,0 +1,41 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Re-calculates md5 hashes of files only when the file timestamp has changed::
+
+	def options(opt):
+		opt.load('md5_tstamp')
+
+The hashes reflect either the file contents (STRONGEST=True) or the
+file time and file size.
+
+The performance benefits of this module are usually insignificant.
+"""
+
+import os, stat
+from waflib import Utils, Build, Node
+
+STRONGEST = True
+
+Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+def h_file(self):
+	filename = self.abspath()
+	st = os.stat(filename)
+
+	cache = self.ctx.hashes_md5_tstamp
+	if filename in cache and cache[filename][0] == st.st_mtime:
+		return cache[filename][1]
+
+	if STRONGEST:
+		ret = Utils.h_file(filename)
+	else:
+		if stat.S_ISDIR(st[stat.ST_MODE]):
+			raise IOError('Not a file')
+		ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
+
+	cache[filename] = (st.st_mtime, ret)
+	return ret
+h_file.__doc__ = Node.Node.h_file.__doc__
+Node.Node.h_file = h_file
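+
+# Illustrative: a project can opt into the cheaper timestamp+size hash from
+# its wscript after loading this tool:
+#
+#	from waflib.Tools import md5_tstamp
+#	md5_tstamp.STRONGEST = False  # hash (mtime, size) instead of file contents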
+
diff --git a/third_party/waf/waflib/Tools/msvc.py b/third_party/waf/waflib/Tools/msvc.py
new file mode 100644
index 0000000..d60f670
--- /dev/null
+++ b/third_party/waf/waflib/Tools/msvc.py
@@ -0,0 +1,1041 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2006 (dv)
+# Tamas Pal, 2007 (folti)
+# Nicolas Mercier, 2009
+# Matt Clarkson, 2012
+
+"""
+Microsoft Visual C++/Intel C++ compiler support
+
+If you get detection problems, first try any of the following::
+
+	chcp 65001
+	set PYTHONIOENCODING=...
+	set PYTHONLEGACYWINDOWSSTDIO=1
+
+Usage::
+
+	$ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64"
+
+or::
+
+	def configure(conf):
+		conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
+		conf.env.MSVC_TARGETS = ['x64']
+		conf.load('msvc')
+
+or::
+
+	def configure(conf):
+		conf.load('msvc', funs='no_autodetect')
+		conf.check_lib_msvc('gdi32')
+		conf.check_libs_msvc('kernel32 user32')
+	def build(bld):
+		tg = bld.program(source='main.c', target='app', use='KERNEL32 USER32 GDI32')
+
+Platforms and targets will be tested in the order they appear;
+the first good configuration will be used.
+
+To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option
+or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
+
+Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
+
+Compilers supported:
+
+* msvc       => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
+* wsdk       => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
+* icl        => Intel compiler, versions 9, 10, 11, 13
+* winphone   => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
+* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
+* PocketPC   => Compiler/SDK for PocketPC devices (armv4/v4i)
+
+To use WAF in a VS2008 Make file project (see http://code.google.com/p/waf/issues/detail?id=894),
+consider setting the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf,
+e.g. in your project settings::
+
+	cmd.exe /C "set VS_UNICODE_OUTPUT=& set PYTHONUNBUFFERED=true & waf build"
+	cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure"
+
+Setting PYTHONUNBUFFERED gives unbuffered output.
+"""
+
+import os, sys, re, traceback
+from waflib import Utils, Logs, Options, Errors
+from waflib.TaskGen import after_method, feature
+
+from waflib.Configure import conf
+from waflib.Tools import ccroot, c, cxx, ar
+
+g_msvc_systemlibs = '''
+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glaux glu32 gpedit gpmuuid
+gtrts32w gtrtst32 hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
+netapi32 nmapi nmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlg olepro32 opends60 opengl32
+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
+version vfw32 wbemuuid  webpost wiaguid wininet winmm winscard winspool winstrm
+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
+'''.split()
+"""importlibs provided by MSVC/Platform SDK. Do NOT search them"""
+
+all_msvc_platforms = [	('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'),
+						('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'),
+						('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ]
+"""List of msvc platforms"""
+
+all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
+"""List of wince platforms"""
+
+all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+"""List of icl platforms"""
+
+def options(opt):
+	default_ver = ''
+	vsver = os.getenv('VSCMD_VER')
+	if vsver:
+		m = re.match(r'(^\d+\.\d+).*', vsver)
+		if m:
+			default_ver = 'msvc %s' % m.group(1)
+	opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default=default_ver)
+	opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
+	opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
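+
+	# Illustrative: in a Visual Studio developer prompt, VSCMD_VER might be
+	# "16.11.5" (hypothetical), making the default --msvc_version "msvc 16.11".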
+
+class MSVCVersion(object):
+	def __init__(self, ver):
+		m = re.search(r'^(.*)\s+(\d+[.]\d+)', ver)
+		if m:
+			self.name = m.group(1)
+			self.number = float(m.group(2))
+		else:
+			self.name = ver
+			self.number = 0.
+
+	def __lt__(self, other):
+		if self.number == other.number:
+			return self.name < other.name
+		return self.number < other.number
+
+@conf
+def setup_msvc(conf, versiondict):
+	"""
+	Checks installed compilers and targets and returns the first combination that validates,
+	chosen from the user's options, the environment, or the globally supported lists.
+
+	:param versiondict: dict(platform -> dict(architecture -> configuration))
+	:type versiondict: dict(string -> dict(string -> target_compiler)
+	:return: the compiler, revision, path, include dirs, library paths and target architecture
+	:rtype: tuple of strings
+	"""
+	platforms = getattr(Options.options, 'msvc_targets', '').split(',')
+	if platforms == ['']:
+		platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+	desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
+	if desired_versions == ['']:
+		desired_versions = conf.env.MSVC_VERSIONS or list(sorted(versiondict.keys(), key=MSVCVersion, reverse=True))
+
+	# Override lazy detection by evaluating after the fact.
+	lazy_detect = getattr(Options.options, 'msvc_lazy', True)
+	if conf.env.MSVC_LAZY_AUTODETECT is False:
+		lazy_detect = False
+
+	if not lazy_detect:
+		for val in versiondict.values():
+			for arch in list(val.keys()):
+				cfg = val[arch]
+				cfg.evaluate()
+				if not cfg.is_valid:
+					del val[arch]
+		conf.env.MSVC_INSTALLED_VERSIONS = versiondict
+
+	for version in desired_versions:
+		Logs.debug('msvc: detecting %r - %r', version, desired_versions)
+		try:
+			targets = versiondict[version]
+		except KeyError:
+			continue
+
+		seen = set()
+		for arch in platforms:
+			if arch in seen:
+				continue
+			else:
+				seen.add(arch)
+			try:
+				cfg = targets[arch]
+			except KeyError:
+				continue
+
+			cfg.evaluate()
+			if cfg.is_valid:
+				compiler,revision = version.rsplit(' ', 1)
+				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+	conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
+
+@conf
+def get_msvc_version(conf, compiler, version, target, vcvars):
+	"""
+	Checks that an installed compiler actually runs and uses vcvars to obtain the
+	environment needed by the compiler.
+
+	:param compiler: compiler type, for looking up the executable name
+	:param version: compiler version, for debugging only
+	:param target: target architecture
+	:param vcvars: batch file to run to check the environment
+	:return: the location of the compiler executable, the location of include dirs, and the library paths
+	:rtype: tuple of strings
+	"""
+	Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
+
+	try:
+		conf.msvc_cnt += 1
+	except AttributeError:
+		conf.msvc_cnt = 1
+	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
+	batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+""" % (vcvars,target))
+	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()], stdin=getattr(Utils.subprocess, 'DEVNULL', None))
+	lines = sout.splitlines()
+
+	if not lines[0]:
+		lines.pop(0)
+
+	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
+	for line in lines:
+		if line.startswith('PATH='):
+			path = line[5:]
+			MSVC_PATH = path.split(';')
+		elif line.startswith('INCLUDE='):
+			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
+		elif line.startswith('LIB='):
+			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
+	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
+		conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
+
+	# Check if the compiler is usable at all.
+	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
+	env = dict(os.environ)
+	env.update(PATH = path)
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
+
+	# delete CL if it exists, because it could contain parameters that change cl's behaviour rather catastrophically.
+	if 'CL' in env:
+		del(env['CL'])
+
+	try:
+		conf.cmd_and_log(cxx + ['/help'], env=env)
+	except UnicodeError:
+		st = traceback.format_exc()
+		if conf.logger:
+			conf.logger.error(st)
+		conf.fatal('msvc: Unicode error - check the code page?')
+	except Exception as e:
+		Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
+		conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
+	else:
+		Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
+	finally:
+		conf.env[compiler_name] = ''
+
+	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
+
+def gather_wince_supported_platforms():
+	"""
+	Checks for installed Windows CE (SmartPhone/PocketPC) SDKs
+
+	:return: a list of (device, platforms) tuples for the supported platforms
+	:rtype: list
+	"""
+	supported_wince_platforms = []
+	try:
+		ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
+	except OSError:
+		try:
+			ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
+		except OSError:
+			ce_sdk = ''
+	if not ce_sdk:
+		return supported_wince_platforms
+
+	index = 0
+	while 1:
+		try:
+			sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
+			sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
+		except OSError:
+			break
+		index += 1
+		try:
+			path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
+		except OSError:
+			try:
+				path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
+			except OSError:
+				continue
+			path,xml = os.path.split(path)
+		path = str(path)
+		path,device = os.path.split(path)
+		if not device:
+			path,device = os.path.split(path)
+		platforms = []
+		for arch,compiler in all_wince_platforms:
+			if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
+				platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
+		if platforms:
+			supported_wince_platforms.append((device, platforms))
+	return supported_wince_platforms
+
+def gather_msvc_detected_versions():
+	#Detected MSVC versions!
+	version_pattern = re.compile(r'^(\d\d?\.\d\d?)(Exp)?$')
+	detected_versions = []
+	for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')):
+		prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
+		except OSError:
+			prefix = 'SOFTWARE\\Microsoft\\' + vcver
+			try:
+				all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
+			except OSError:
+				continue
+
+		index = 0
+		while 1:
+			try:
+				version = Utils.winreg.EnumKey(all_versions, index)
+			except OSError:
+				break
+			index += 1
+			match = version_pattern.match(version)
+			if match:
+				versionnumber = float(match.group(1))
+			else:
+				continue
+			detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version))
+	detected_versions.sort(key=lambda tup: tup[0])
+	return detected_versions
+
+class target_compiler(object):
+	"""
+	Wrap a compiler configuration; call evaluate() to determine
+	whether the configuration is usable.
+	"""
+	def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
+		"""
+		:param ctx: configuration context to use to eventually get the version environment
+		:param compiler: compiler name
+		:param cpu: target cpu
+		:param version: compiler version number
+		:param bat_target: ?
+		:param bat: path to the batch file to run
+		"""
+		self.conf = ctx
+		self.name = None
+		self.is_valid = False
+		self.is_done = False
+
+		self.compiler = compiler
+		self.cpu = cpu
+		self.version = version
+		self.bat_target = bat_target
+		self.bat = bat
+		self.callback = callback
+
+	def evaluate(self):
+		if self.is_done:
+			return
+		self.is_done = True
+		try:
+			vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
+		except Errors.ConfigurationError:
+			self.is_valid = False
+			return
+		if self.callback:
+			vs = self.callback(self, vs)
+		self.is_valid = True
+		(self.bindirs, self.incdirs, self.libdirs) = vs
+
+	def __str__(self):
+		return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
+
+	def __repr__(self):
+		return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
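+
+# A minimal usage sketch (illustrative values only; the real callers are the
+# gather_*_targets functions below):
+#
+#	tc = target_compiler(conf, 'msvc', 'amd64', '14.0', 'x64',
+#		r'C:\path\to\vcvarsall.bat')
+#	tc.evaluate()
+#	if tc.is_valid:
+#		bindirs, incdirs, libdirs = tc.bindirs, tc.incdirs, tc.libdirs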
+
+@conf
+def gather_wsdk_versions(conf, versions):
+	"""
+	Use winreg to add the msvc versions to the input list
+
+	:param versions: list to modify
+	:type versions: list
+	"""
+	version_pattern = re.compile(r'^v..?.?\...?.?')
+	try:
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+		except OSError:
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		try:
+			msvc_version = Utils.winreg.OpenKey(all_versions, version)
+			path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+		except OSError:
+			continue
+		if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
+			targets = {}
+			for target,arch in all_msvc_platforms:
+				targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
+			versions['wsdk ' + version[1:]] = targets
+
+@conf
+def gather_msvc_targets(conf, versions, version, vc_path):
+	# Look for regular MSVC compilers
+	targets = {}
+
+	if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')):
+		for target,realtarget in all_msvc_platforms[::-1]:
+			targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat'))
+	elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
+		for target,realtarget in all_msvc_platforms[::-1]:
+			targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat'))
+	elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
+		targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))
+	elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
+		targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
+	if targets:
+		versions['msvc %s' % version] = targets
+
+@conf
+def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
+	# Look for Windows CE compilers
+	for device,platforms in supported_platforms:
+		targets = {}
+		for platform,compiler,include,lib in platforms:
+			winCEpath = os.path.join(vc_path, 'ce')
+			if not os.path.isdir(winCEpath):
+				continue
+
+			if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
+				bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
+				incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
+				libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
+				def combine_common(obj, compiler_env):
+					# TODO this is likely broken, remove in waf 2.1
+					(common_bindirs,_1,_2) = compiler_env
+					return (bindirs + common_bindirs, incdirs, libdirs)
+				targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
+		if targets:
+			versions[device + ' ' + version] = targets
+
+@conf
+def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
+	# Look for Windows Phone compilers
+	targets = {}
+	for target,realtarget in all_msvc_platforms[::-1]:
+		targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
+	if targets:
+		versions['winphone ' + version] = targets
+
+@conf
+def gather_vswhere_versions(conf, versions):
+	try:
+		import json
+	except ImportError:
+		Logs.error('Visual Studio 2017 detection requires the json module (Python 2.6 or later)')
+		return
+
+	prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
+
+	vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
+	args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
+	try:
+		txt = conf.cmd_and_log(args)
+	except Errors.WafError as e:
+		Logs.debug('msvc: vswhere.exe failed %s', e)
+		return
+
+	if sys.version_info[0] < 3:
+		txt = txt.decode(Utils.console_encoding())
+
+	arr = json.loads(txt)
+	arr.sort(key=lambda x: x['installationVersion'])
+	for entry in arr:
+		ver = entry['installationVersion']
+		ver = str('.'.join(ver.split('.')[:2]))
+		path = str(os.path.abspath(entry['installationPath']))
+		if os.path.exists(path) and ('msvc %s' % ver) not in versions:
+			conf.gather_msvc_targets(versions, ver, path)
+
+@conf
+def gather_msvc_versions(conf, versions):
+	vc_paths = []
+	for (v,version,reg) in gather_msvc_detected_versions():
+		try:
+			try:
+				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
+			except OSError:
+				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
+			path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
+		except OSError:
+			try:
+				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
+				path,type = Utils.winreg.QueryValueEx(msvc_version, version)
+			except OSError:
+				continue
+			else:
+				vc_paths.append((version, os.path.abspath(str(path))))
+			continue
+		else:
+			vc_paths.append((version, os.path.abspath(str(path))))
+
+	wince_supported_platforms = gather_wince_supported_platforms()
+
+	for version,vc_path in vc_paths:
+		vs_path = os.path.dirname(vc_path)
+		vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
+		if wince_supported_platforms and os.path.isfile(vsvars):
+			conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
+
+	# WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
+	# Stop after one is found.
+	for version,vc_path in vc_paths:
+		vs_path = os.path.dirname(vc_path)
+		vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
+		if os.path.isfile(vsvars):
+			conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
+			break
+
+	for version,vc_path in vc_paths:
+		vs_path = os.path.dirname(vc_path)
+		conf.gather_msvc_targets(versions, version, vc_path)
+
+@conf
+def gather_icl_versions(conf, versions):
+	"""
+	Checks ICL compilers
+
+	:param versions: list to modify
+	:type versions: list
+	"""
+	version_pattern = re.compile(r'^...?.?\....?.?')
+	try:
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
+		except OSError:
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		targets = {}
+		for target,arch in all_icl_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+		for target,arch in all_icl_platforms:
+			try:
+				icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
+				path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				continue
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+		major = version[0:2]
+		versions['intel ' + major] = targets
+
+@conf
+def gather_intel_composer_versions(conf, versions):
+	"""
+	Check for ICL compilers that are part of Intel Composer Suites
+
+	:param versions: dict to modify
+	:type versions: dict
+	"""
+	version_pattern = re.compile(r'^...?.?\...?.?.?')
+	try:
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
+		except OSError:
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		targets = {}
+		for target,arch in all_icl_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				try:
+					defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
+				except OSError:
+					if targetDir == 'EM64T_NATIVE':
+						defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
+					else:
+						raise
+				uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
+				Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+				# The Intel compilervars_arch.bat is broken when used with Visual Studio Express 2012
+				# http://software.intel.com/en-us/forums/topic/328487
+				compilervars_warning_attr = '_compilervars_warning_key'
+				if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
+					setattr(conf, compilervars_warning_attr, False)
+					patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
+					compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
+					for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
+						if vscomntool in os.environ:
+							vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
+							dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
+							if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and
+								not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
+								Logs.warn(('The Intel compilervars_arch.bat only checks for one Visual Studio SKU '
+								'(VSWinExpress.exe) but it does not seem to be installed at %r. '
+								'The Intel command line setup will fail to configure unless the file %r '
+								'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
+		major = version[0:2]
+		versions['intel ' + major] = targets
+
+@conf
+def detect_msvc(self):
+	return self.setup_msvc(self.get_msvc_versions())
+
+@conf
+def get_msvc_versions(self):
+	"""
+	:return: platform to compiler configurations
+	:rtype: dict
+	"""
+	dct = Utils.ordered_iter_dict()
+	self.gather_icl_versions(dct)
+	self.gather_intel_composer_versions(dct)
+	self.gather_wsdk_versions(dct)
+	self.gather_msvc_versions(dct)
+	self.gather_vswhere_versions(dct)
+	Logs.debug('msvc: detected versions %r', list(dct.keys()))
+	return dct
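+
+# The mapping returned by get_msvc_versions typically has the shape
+# (illustrative keys and values only):
+#	{'intel 13': {...},
+#	 'wsdk 7.1': {...},
+#	 'msvc 14.0': {'x86': <target_compiler>, 'x64': <target_compiler>}}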
+
+@conf
+def find_lt_names_msvc(self, libname, is_static=False):
+	"""
+	Win32/MSVC-specific code to extract information from libtool .la files.
+	This function is not attached to the task_gen class. Returns a triplet:
+	(library absolute path, library name without extension, whether the library is static)
+	"""
+	lt_names=[
+		'lib%s.la' % libname,
+		'%s.la' % libname,
+	]
+
+	for path in self.env.LIBPATH:
+		for la in lt_names:
+			laf=os.path.join(path,la)
+			dll=None
+			if os.path.exists(laf):
+				ltdict = Utils.read_la_file(laf)
+				lt_libdir=None
+				if ltdict.get('libdir', ''):
+					lt_libdir = ltdict['libdir']
+				if not is_static and ltdict.get('library_names', ''):
+					dllnames=ltdict['library_names'].split()
+					dll=dllnames[0].lower()
+					dll=re.sub(r'\.dll$', '', dll)
+					return (lt_libdir, dll, False)
+				elif ltdict.get('old_library', ''):
+					olib=ltdict['old_library']
+					if os.path.exists(os.path.join(path,olib)):
+						return (path, olib, True)
+					elif lt_libdir and os.path.exists(os.path.join(lt_libdir, olib)):
+						return (lt_libdir, olib, True)
+					else:
+						return (None, olib, True)
+				else:
+					raise self.errors.WafError('invalid libtool object file: %s' % laf)
+	return (None, None, None)
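+
+# For reference, the fields read from a libtool .la file look like this
+# (hypothetical sample values):
+#	library_names='libfoo.dll.a'
+#	old_library='libfoo.a'
+#	libdir='/usr/local/lib'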
+
+@conf
+def libname_msvc(self, libname, is_static=False):
+	lib = libname.lower()
+	lib = re.sub(r'\.lib$','',lib)
+
+	if lib in g_msvc_systemlibs:
+		return lib
+
+	lib=re.sub('^lib','',lib)
+
+	if lib == 'm':
+		return None
+
+	(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
+
+	if lt_path is not None and lt_libname is not None:
+		if lt_static:
+			# file existence check has been made by find_lt_names
+			return os.path.join(lt_path,lt_libname)
+
+	if lt_path is not None:
+		_libpaths = [lt_path] + self.env.LIBPATH
+	else:
+		_libpaths = self.env.LIBPATH
+
+	static_libs=[
+		'lib%ss.lib' % lib,
+		'lib%s.lib' % lib,
+		'%ss.lib' % lib,
+		'%s.lib' % lib,
+		]
+
+	dynamic_libs=[
+		'lib%s.dll.lib' % lib,
+		'lib%s.dll.a' % lib,
+		'%s.dll.lib' % lib,
+		'%s.dll.a' % lib,
+		'lib%s_d.lib' % lib,
+		'%s_d.lib' % lib,
+		'%s.lib' % lib,
+		]
+
+	libnames=static_libs
+	if not is_static:
+		libnames=dynamic_libs + static_libs
+
+	for path in _libpaths:
+		for libn in libnames:
+			if os.path.exists(os.path.join(path, libn)):
+				Logs.debug('msvc: lib found: %s', os.path.join(path,libn))
+				return re.sub(r'\.lib$', '',libn)
+
+	# if no lib can be found, raise an error; the return below is a fallback
+	# that keeps the libname as msvc expects it
+	self.fatal('The library %r could not be found' % libname)
+	return re.sub(r'\.lib$', '', libname)
+
+@conf
+def check_lib_msvc(self, libname, is_static=False, uselib_store=None):
+	"""
+	Ideally we should be able to place the lib in the right env var, either STLIB or LIB,
+	but we don't distinguish static libs from shared libs.
+	This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER)
+	"""
+	libn = self.libname_msvc(libname, is_static)
+
+	if not uselib_store:
+		uselib_store = libname.upper()
+
+	if False and is_static: # disabled
+		self.env['STLIB_' + uselib_store] = [libn]
+	else:
+		self.env['LIB_' + uselib_store] = [libn]
+
+@conf
+def check_libs_msvc(self, libnames, is_static=False):
+	for libname in Utils.to_list(libnames):
+		self.check_lib_msvc(libname, is_static)
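+
+# Typical configuration usage (sketch):
+#	def configure(conf):
+#		conf.check_lib_msvc('gdi32')
+#		conf.check_libs_msvc('kernel32 user32')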
+
+def configure(conf):
+	"""
+	Configuration methods to call for detecting msvc
+	"""
+	conf.autodetect(True)
+	conf.find_msvc()
+	conf.msvc_common_flags()
+	conf.cc_load_tools()
+	conf.cxx_load_tools()
+	conf.cc_add_flags()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+	conf.visual_studio_add_flags()
+
+@conf
+def no_autodetect(conf):
+	conf.env.NO_MSVC_DETECT = 1
+	configure(conf)
+
+@conf
+def autodetect(conf, arch=False):
+	v = conf.env
+	if v.NO_MSVC_DETECT:
+		return
+
+	compiler, version, path, includes, libdirs, cpu = conf.detect_msvc()
+	if arch:
+		v.DEST_CPU = cpu
+
+	v.PATH = path
+	v.INCLUDES = includes
+	v.LIBPATH = libdirs
+	v.MSVC_COMPILER = compiler
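+	# Versions such as '11.0Exp' carry a three-character suffix that float()
+	# cannot parse; the fallback below strips it.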
+	try:
+		v.MSVC_VERSION = float(version)
+	except ValueError:
+		v.MSVC_VERSION = float(version[:-3])
+
+def _get_prog_names(conf, compiler):
+	if compiler == 'intel':
+		compiler_name = 'ICL'
+		linker_name = 'XILINK'
+		lib_name = 'XILIB'
+	else:
+		# assumes CL.exe
+		compiler_name = 'CL'
+		linker_name = 'LINK'
+		lib_name = 'LIB'
+	return compiler_name, linker_name, lib_name
+
+@conf
+def find_msvc(conf):
+	"""Due to path format limitations, limit operation only to native Win32. Yeah it sucks."""
+	if sys.platform == 'cygwin':
+		conf.fatal('MSVC module does not work under cygwin Python!')
+
+	# the autodetection is supposed to be performed before entering this method
+	v = conf.env
+	path = v.PATH
+	compiler = v.MSVC_COMPILER
+	version = v.MSVC_VERSION
+
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11)
+
+	# compiler
+	cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
+
+	# before setting anything, check if the compiler is really msvc
+	env = dict(conf.environ)
+	if path:
+		env.update(PATH = ';'.join(path))
+	if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
+		conf.fatal('the msvc compiler could not be identified')
+
+	# c/c++ compiler
+	v.CC = v.CXX = cxx
+	v.CC_NAME = v.CXX_NAME = 'msvc'
+
+	# linker
+	if not v.LINK_CXX:
+		conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX')
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.LINK_CXX
+
+	# staticlib linker
+	if not v.AR:
+		stliblink = conf.find_program(lib_name, path_list=path, var='AR')
+		if not stliblink:
+			return
+		v.ARFLAGS = ['/nologo']
+
+	# manifest tool; not required for VS 2003 and below, but required for VS 2005 and later
+	if v.MSVC_MANIFEST:
+		conf.find_program('MT', path_list=path, var='MT')
+		v.MTFLAGS = ['/nologo']
+
+	try:
+		conf.load('winres')
+	except Errors.ConfigurationError:
+		Logs.warn('Resource compiler not found. Compiling resource files is disabled')
+
+@conf
+def visual_studio_add_flags(self):
+	"""visual studio flags found in the system environment"""
+	v = self.env
+	if self.environ.get('INCLUDE'):
+		v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # note the 'S': the INCLUDE env var feeds INCLUDES
+	if self.environ.get('LIB'):
+		v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x])
+
+@conf
+def msvc_common_flags(conf):
+	"""
+	Set up the flags required for executing the msvc compiler
+	"""
+	v = conf.env
+
+	v.DEST_BINFMT = 'pe'
+	v.append_value('CFLAGS', ['/nologo'])
+	v.append_value('CXXFLAGS', ['/nologo'])
+	v.append_value('LINKFLAGS', ['/nologo'])
+	v.DEFINES_ST   = '/D%s'
+
+	v.CC_SRC_F     = ''
+	v.CC_TGT_F     = ['/c', '/Fo']
+	v.CXX_SRC_F    = ''
+	v.CXX_TGT_F    = ['/c', '/Fo']
+
+	if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6):
+		v.CC_TGT_F = ['/FC'] + v.CC_TGT_F
+		v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F
+
+	v.CPPPATH_ST = '/I%s' # template for adding include paths
+
+	v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
+
+	# CRT specific flags
+	v.CFLAGS_CRT_MULTITHREADED     = v.CXXFLAGS_CRT_MULTITHREADED     = ['/MT']
+	v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
+
+	v.CFLAGS_CRT_MULTITHREADED_DBG     = v.CXXFLAGS_CRT_MULTITHREADED_DBG     = ['/MTd']
+	v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd']
+
+	v.LIB_ST            = '%s.lib'
+	v.LIBPATH_ST        = '/LIBPATH:%s'
+	v.STLIB_ST          = '%s.lib'
+	v.STLIBPATH_ST      = '/LIBPATH:%s'
+
+	if v.MSVC_MANIFEST:
+		v.append_value('LINKFLAGS', ['/MANIFEST'])
+
+	v.CFLAGS_cshlib     = []
+	v.CXXFLAGS_cxxshlib = []
+	v.LINKFLAGS_cshlib  = v.LINKFLAGS_cxxshlib = ['/DLL']
+	v.cshlib_PATTERN    = v.cxxshlib_PATTERN = '%s.dll'
+	v.implib_PATTERN    = '%s.lib'
+	v.IMPLIB_ST         = '/IMPLIB:%s'
+
+	v.LINKFLAGS_cstlib  = []
+	v.cstlib_PATTERN    = v.cxxstlib_PATTERN = '%s.lib'
+
+	v.cprogram_PATTERN  = v.cxxprogram_PATTERN = '%s.exe'
+
+	v.def_PATTERN       = '/def:%s'
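+
+	# Pattern examples (sketch): cprogram_PATTERN % 'app' -> 'app.exe',
+	# cshlib_PATTERN % 'foo' -> 'foo.dll', implib_PATTERN % 'foo' -> 'foo.lib'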
+
+
+#######################################################################################################
+##### conf above, build below
+
+@after_method('apply_link')
+@feature('c', 'cxx')
+def apply_flags_msvc(self):
+	"""
+	Add additional flags implied by msvc, such as subsystems and pdb files::
+
+		def build(bld):
+			bld.stlib(source='main.c', target='bar', subsystem='gruik')
+	"""
+	if self.env.CC_NAME != 'msvc' or not getattr(self, 'link_task', None):
+		return
+
+	is_static = isinstance(self.link_task, ccroot.stlink_task)
+
+	subsystem = getattr(self, 'subsystem', '')
+	if subsystem:
+		subsystem = '/subsystem:%s' % subsystem
+		flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
+		self.env.append_value(flags, subsystem)
+
+	if not is_static:
+		for f in self.env.LINKFLAGS:
+			d = f.lower()
+			if d[1:] in ('debug', 'debug:full', 'debug:fastlink'):
+				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
+				self.link_task.outputs.append(pdbnode)
+
+				if getattr(self, 'install_task', None):
+					self.pdb_install_task = self.add_install_files(
+						install_to=self.install_task.install_to, install_from=pdbnode)
+				break
+
+@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib')
+@after_method('apply_link')
+def apply_manifest(self):
+	"""
+	Special linker for MSVC with support for embedding manifests into DLLs
+	and executables compiled by Visual Studio 2005 or later. Without
+	the manifest file, the binaries are unusable.
+	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
+	"""
+	if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None):
+		out_node = self.link_task.outputs[0]
+		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+		self.link_task.outputs.append(man_node)
+		self.env.DO_MANIFEST = True
+
+def make_winapp(self, family):
+	append = self.env.append_unique
+	append('DEFINES', 'WINAPI_FAMILY=%s' % family)
+	append('CXXFLAGS', ['/ZW', '/TP'])
+	for lib_path in self.env.LIBPATH:
+		append('CXXFLAGS','/AI%s'%lib_path)
+
+@feature('winphoneapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_winphone_app(self):
+	"""
+	Insert configuration flags for windows phone applications (adds /ZW, /TP...)
+	"""
+	make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
+	self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
+
+@feature('winapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_windows_app(self):
+	"""
+	Insert configuration flags for windows applications (adds /ZW, /TP...)
+	"""
+	make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP')
diff --git a/third_party/waf/waflib/Tools/nasm.py b/third_party/waf/waflib/Tools/nasm.py
new file mode 100644
index 0000000..9c51c18
--- /dev/null
+++ b/third_party/waf/waflib/Tools/nasm.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2018 (ita)
+
+"""
+Nasm tool (asm processing)
+"""
+
+import os
+import waflib.Tools.asm # leave this
+from waflib.TaskGen import feature
+
+@feature('asm')
+def apply_nasm_vars(self):
+	"""provided for compatibility"""
+	self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
+
+def configure(conf):
+	"""
+	Detect nasm/yasm and set the variable *AS*
+	"""
+	conf.find_program(['nasm', 'yasm'], var='AS')
+	conf.env.AS_TGT_F = ['-o']
+	conf.env.ASLNK_TGT_F = ['-o']
+	conf.load('asm')
+	conf.env.ASMPATH_ST = '-I%s' + os.sep
+	txt = conf.cmd_and_log(conf.env.AS + ['--version'])
+	if 'yasm' in txt.lower():
+		conf.env.ASM_NAME = 'yasm'
+	else:
+		conf.env.ASM_NAME = 'nasm'
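+
+# Typical wscript usage (sketch; a C/C++ toolchain is loaded separately):
+#	def configure(conf):
+#		conf.load('nasm')
+#	def build(bld):
+#		bld.program(source='main.c lowlevel.asm', target='app')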
diff --git a/third_party/waf/waflib/Tools/nobuild.py b/third_party/waf/waflib/Tools/nobuild.py
new file mode 100644
index 0000000..2e4b055
--- /dev/null
+++ b/third_party/waf/waflib/Tools/nobuild.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Override the build commands to write empty files.
+This is useful for profiling and evaluating the Python overhead.
+
+To use::
+
+    def build(bld):
+        ...
+        bld.load('nobuild')
+
+"""
+
+from waflib import Task
+def build(bld):
+	def run(self):
+		for x in self.outputs:
+			x.write('')
+	for (name, cls) in Task.classes.items():
+		cls.run = run
+
diff --git a/third_party/waf/waflib/Tools/perl.py b/third_party/waf/waflib/Tools/perl.py
new file mode 100644
index 0000000..32b03fb
--- /dev/null
+++ b/third_party/waf/waflib/Tools/perl.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# andersg at 0x63.nu 2007
+# Thomas Nagy 2016-2018 (ita)
+
+"""
+Support for Perl extensions. A C/C++ compiler is required::
+
+	def options(opt):
+		opt.load('compiler_c perl')
+	def configure(conf):
+		conf.load('compiler_c perl')
+		conf.check_perl_version((5,6,0))
+		conf.check_perl_ext_devel()
+		conf.check_perl_module('Cairo')
+		conf.check_perl_module('Devel::PPPort 4.89')
+	def build(bld):
+		bld(
+			features     = 'c cshlib perlext',
+			source       = 'Mytest.xs',
+			target       = 'Mytest',
+			install_path = '${ARCHDIR_PERL}/auto')
+		bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
+"""
+
+import os
+from waflib import Task, Options, Utils, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import extension, feature, before_method
+
+@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
+@feature('perlext')
+def init_perlext(self):
+	"""
+	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
+	*lib* prefix from library names.
+	"""
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	if 'PERLEXT' not in self.uselib:
+		self.uselib.append('PERLEXT')
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
+
+@extension('.xs')
+def xsubpp_file(self, node):
+	"""
+	Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
+	"""
+	outnode = node.change_ext('.c')
+	self.create_task('xsubpp', node, outnode)
+	self.source.append(outnode)
+
+class xsubpp(Task.Task):
+	"""
+	Process *.xs* files
+	"""
+	run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
+	color   = 'BLUE'
+	ext_out = ['.h']
+
+@conf
+def check_perl_version(self, minver=None):
+	"""
+	Check if Perl is installed, and set the variable PERL.
+	minver is supposed to be a tuple
+	"""
+	res = True
+	if minver:
+		cver = '.'.join(map(str,minver))
+	else:
+		cver = ''
+
+	self.start_msg('Checking for minimum perl version %s' % cver)
+
+	perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
+	version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
+	if not version:
+		res = False
+		version = "Unknown"
+	elif not minver is None:
+		ver = tuple(map(int, version.split(".")))
+		if ver < minver:
+			res = False
+
+	self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
+	return res
+
+@conf
+def check_perl_module(self, module):
+	"""
+	Check if the specified perl module is installed.
+
+	A minimum version can be given by appending it to the module name,
+	like this::
+
+		def configure(conf):
+			conf.check_perl_module("Some::Module 2.92")
+	"""
+	cmd = self.env.PERL + ['-e', 'use %s' % module]
+	self.start_msg('perl module %s' % module)
+	try:
+		r = self.cmd_and_log(cmd)
+	except Errors.WafError:
+		self.end_msg(False)
+		return None
+	self.end_msg(r or True)
+	return r
+
+@conf
+def check_perl_ext_devel(self):
+	"""
+	Check for configuration needed to build perl extensions.
+
+	Sets various xxx_PERLEXT variables in the environment.
+
+	Also sets the ARCHDIR_PERL variable, useful as an installation path,
+	which can be overridden with the ``--with-perl-archdir`` option.
+	"""
+
+	env = self.env
+	perl = env.PERL
+	if not perl:
+		self.fatal('find perl first')
+
+	def cmd_perl_config(s):
+		return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
+	def cfg_str(cfg):
+		return self.cmd_and_log(cmd_perl_config(cfg))
+	def cfg_lst(cfg):
+		return Utils.to_list(cfg_str(cfg))
+	def find_xsubpp():
+		for var in ('privlib', 'vendorlib'):
+			xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
+			if xsubpp and os.path.isfile(xsubpp[0]):
+				return xsubpp
+		return self.find_program('xsubpp')
+
+	env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
+	env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
+	env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+	env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
+	env.XSUBPP = find_xsubpp()
+
+	if not getattr(Options.options, 'perlarchdir', None):
+		env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
+	else:
+		env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')
+
+	env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')
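+
+	# For reference, cfg_str('$Config{dlext}') effectively runs (sketch):
+	#	perl -MConfig -e 'print "$Config{dlext}"'
+	# which prints e.g. 'so' on Linux, so perlext_PATTERN becomes '%s.so'.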
+
+def options(opt):
+	"""
+	Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
+	"""
+	opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
+	opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
+
diff --git a/third_party/waf/waflib/Tools/python.py b/third_party/waf/waflib/Tools/python.py
new file mode 100644
index 0000000..b2dd1a9
--- /dev/null
+++ b/third_party/waf/waflib/Tools/python.py
@@ -0,0 +1,657 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2015 (ita)
+# Gustavo Carneiro (gjc), 2007
+
+"""
+Support for Python, detect the headers and libraries and provide
+*use* variables to link C/C++ programs against them::
+
+	def options(opt):
+		opt.load('compiler_c python')
+	def configure(conf):
+		conf.load('compiler_c python')
+		conf.check_python_version((2,4,2))
+		conf.check_python_headers()
+	def build(bld):
+		bld.program(features='pyembed', source='a.c', target='myprog')
+		bld.shlib(features='pyext', source='b.c', target='mylib')
+"""
+
+import os, sys
+from waflib import Errors, Logs, Node, Options, Task, Utils
+from waflib.TaskGen import extension, before_method, after_method, feature
+from waflib.Configure import conf
+
+FRAG = '''
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+	void Py_Initialize(void);
+	void Py_Finalize(void);
+#ifdef __cplusplus
+}
+#endif
+int main(int argc, char **argv)
+{
+   (void)argc; (void)argv;
+   Py_Initialize();
+   Py_Finalize();
+   return 0;
+}
+'''
+"""
+Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
+"""
+
+INST = '''
+import sys, py_compile
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
+'''
+"""
+Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
+"""
+
+DISTUTILS_IMP = """
+try:
+	from distutils.sysconfig import get_config_var, get_python_lib
+except ImportError:
+	from sysconfig import get_config_var, get_path
+	def get_python_lib(*k, **kw):
+		keyword='platlib' if kw.get('plat_specific') else 'purelib'
+		if 'prefix' in kw:
+			return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']})
+		return get_path(keyword)
+""".splitlines()
+
+@before_method('process_source')
+@feature('py')
+def feature_py(self):
+	"""
+	Create tasks to byte-compile .py files and install them, if requested
+	"""
+	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+	install_from = getattr(self, 'install_from', None)
+	if install_from and not isinstance(install_from, Node.Node):
+		install_from = self.path.find_dir(install_from)
+	self.install_from = install_from
+
+	ver = self.env.PYTHON_VERSION
+	if not ver:
+		self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+
+	if int(ver.replace('.', '')) > 31:
+		self.install_32 = True
+
+@extension('.py')
+def process_py(self, node):
+	"""
+	Add signature of .py file, so it will be byte-compiled when necessary
+	"""
+	assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path())
+	self.install_from = getattr(self, 'install_from', None)
+	relative_trick = getattr(self, 'relative_trick', True)
+	if self.install_from:
+		assert isinstance(self.install_from, Node.Node), \
+		'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.nice_path(), type(self.install_from))
+
+	# where to install the python file
+	if self.install_path:
+		if self.install_from:
+			self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick)
+		else:
+			self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick)
+
+	lst = []
+	if self.env.PYC:
+		lst.append('pyc')
+	if self.env.PYO:
+		lst.append('pyo')
+
+	if self.install_path:
+		if self.install_from:
+			target_dir = node.path_from(self.install_from) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
+		else:
+			target_dir = node.path_from(self.path) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
+	else:
+		pyd = node.abspath()
+
+	for ext in lst:
+		if self.env.PYTAG and not self.env.NOPYCACHE:
+			# __pycache__ installation for python 3.2 - PEP 3147
+			name = node.name[:-3]
+			pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
+			pyobj.parent.mkdir()
+		else:
+			pyobj = node.change_ext(".%s" % ext)
+
+		tsk = self.create_task(ext, node, pyobj)
+		tsk.pyd = pyd
+
+		if self.install_path:
+			self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick)
+
+class pyc(Task.Task):
+	"""
+	Byte-compiling python files
+	"""
+	color = 'PINK'
+	def __str__(self):
+		node = self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+		ret = self.generator.bld.exec_command(cmd)
+		return ret
+
+class pyo(Task.Task):
+	"""
+	Byte-compiling python files
+	"""
+	color = 'PINK'
+	def __str__(self):
+		node = self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+		ret = self.generator.bld.exec_command(cmd)
+		return ret
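+
+# Both byte-compile tasks effectively run (sketch):
+#	${PYTHON} [${PYFLAGS_OPT}] -c "<INST>" <input.py> <output.pyc/pyo> <install path>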
+
+@feature('pyext')
+@before_method('propagate_uselib_vars', 'apply_link')
+@after_method('apply_bundle')
+def init_pyext(self):
+	"""
+	Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
+	*lib* prefix from library names.
+	"""
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	if 'PYEXT' not in self.uselib:
+		self.uselib.append('PYEXT')
+	# override shlib_PATTERN set by the osx module
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
+	self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN
+
+	try:
+		if not self.install_path:
+			return
+	except AttributeError:
+		self.install_path = '${PYTHONARCHDIR}'
+
+@feature('pyext')
+@before_method('apply_link', 'apply_bundle')
+def set_bundle(self):
+	"""Mac-specific pyext extension that enables bundles from c_osx.py"""
+	if Utils.unversioned_sys_platform() == 'darwin':
+		self.mac_bundle = True
+
+@before_method('propagate_uselib_vars')
+@feature('pyembed')
+def init_pyembed(self):
+	"""
+	Add the PYEMBED variable.
+	"""
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	if 'PYEMBED' not in self.uselib:
+		self.uselib.append('PYEMBED')
+
+@conf
+def get_python_variables(self, variables, imports=None):
+	"""
+	Spawn a new python process to dump configuration variables
+
+	:param variables: variables to print
+	:type variables: list of string
+	:param imports: one import per element
+	:type imports: list of string
+	:return: the variable values
+	:rtype: list of string
+	"""
+	if not imports:
+		try:
+			imports = self.python_imports
+		except AttributeError:
+			imports = DISTUTILS_IMP
+
+	program = list(imports) # copy
+	program.append('')
+	for v in variables:
+		program.append("print(repr(%s))" % v)
+	os_env = dict(os.environ)
+	try:
+		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+	except KeyError:
+		pass
+
+	try:
+		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
+	except Errors.WafError:
+		self.fatal('Could not run %r' % self.env.PYTHON)
+	self.to_log(out)
+	return_values = []
+	for s in out.splitlines():
+		s = s.strip()
+		if not s:
+			continue
+		if s == 'None':
+			return_values.append(None)
+		elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
+			return_values.append(eval(s))
+		elif s[0].isdigit():
+			return_values.append(int(s))
+		else: break
+	return return_values
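+
+# Example (sketch): conf.get_python_variables(["get_config_var('LIBDIR') or ''"])
+# might return something like ['/usr/lib'].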
+
+@conf
+def test_pyembed(self, mode, msg='Testing pyembed configuration'):
+	self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
+		fragment=FRAG, errmsg='Could not build a python embedded interpreter',
+		features='%s %sprogram pyembed' % (mode, mode))
+
+@conf
+def test_pyext(self, mode, msg='Testing pyext configuration'):
+	self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
+		fragment=FRAG, errmsg='Could not build python extensions',
+		features='%s %sshlib pyext' % (mode, mode))
+
+@conf
+def python_cross_compile(self, features='pyembed pyext'):
+	"""
+	For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
+	PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
+
+	The following variables are used:
+	PYTHON_VERSION    required
+	PYTAG             required
+	PYTHON_LDFLAGS    required
+	pyext_PATTERN     required
+	PYTHON_PYEXT_LDFLAGS
+	PYTHON_PYEMBED_LDFLAGS
+	"""
+	features = Utils.to_list(features)
+	if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
+		return False
+
+	for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+		if x not in self.environ:
+			self.fatal('Please set %s in the os environment' % x)
+		else:
+			self.env[x] = self.environ[x]
+
+	xx = self.env.CXX_NAME and 'cxx' or 'c'
+	if 'pyext' in features:
+		flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+		else:
+			self.parse_flags(flags, 'PYEXT')
+		self.test_pyext(xx)
+	if 'pyembed' in features:
+		flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+		else:
+			self.parse_flags(flags, 'PYEMBED')
+		self.test_pyembed(xx)
+	return True
+
+@conf
+def check_python_headers(conf, features='pyembed pyext'):
+	"""
+	Check for headers and libraries necessary to extend or embed python.
+	It may use the *distutils* module, or *sysconfig* on newer Python versions.
+	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
+
+	* PYEXT: for compiling python extensions
+	* PYEMBED: for embedding a python interpreter
+	"""
+	features = Utils.to_list(features)
+	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
+	env = conf.env
+	if not env.CC_NAME and not env.CXX_NAME:
+		conf.fatal('load a compiler first (gcc, g++, ..)')
+
+	# bypass all the code below for cross-compilation
+	if conf.python_cross_compile(features):
+		return
+
+	if not env.PYTHON_VERSION:
+		conf.check_python_version()
+
+	pybin = env.PYTHON
+	if not pybin:
+		conf.fatal('Could not find the python executable')
+
+	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
+	v = 'prefix SO EXT_SUFFIX LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
+	try:
+		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
+	except RuntimeError:
+		conf.fatal("Python development headers not found (-v for details).")
+
+	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
+	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
+
+	dct = dict(zip(v, lst))
+	x = 'MACOSX_DEPLOYMENT_TARGET'
+	if dct[x]:
+		env[x] = conf.environ[x] = str(dct[x])
+	env.pyext_PATTERN = '%s' + (dct['EXT_SUFFIX'] or dct['SO']) # SO is deprecated in 3.5 and removed in 3.11
+
+	# Try to get pythonX.Y-config
+	num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
+	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
+
+	if env.PYTHON_CONFIG:
+		# check python-config output only once
+		if conf.env.HAVE_PYTHON_H:
+			return
+
+		# python2.6-config requires 3 runs
+		all_flags = [['--cflags', '--libs', '--ldflags']]
+		if sys.hexversion < 0x2070000:
+			all_flags = [[k] for k in all_flags[0]]
+
+		xx = env.CXX_NAME and 'cxx' or 'c'
+
+		if 'pyembed' in features:
+			for flags in all_flags:
+				# Python 3.8 has different flags for pyembed, needs --embed
+				embedflags = flags + ['--embed']
+				try:
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags)
+				except conf.errors.ConfigurationError:
+					# However Python < 3.8 doesn't accept --embed, so we need a fallback
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
+
+			try:
+				conf.test_pyembed(xx)
+			except conf.errors.ConfigurationError:
+				# python bug 7352
+				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
+					conf.test_pyembed(xx)
+				else:
+					raise
+
+		if 'pyext' in features:
+			for flags in all_flags:
+				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
+
+			try:
+				conf.test_pyext(xx)
+			except conf.errors.ConfigurationError:
+				# python bug 7352
+				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
+					conf.test_pyext(xx)
+				else:
+					raise
+
+		conf.define('HAVE_PYTHON_H', 1)
+		return
+
+	# No python-config, do something else on windows systems
+	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
+	conf.parse_flags(all_flags, 'PYEMBED')
+
+	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
+	conf.parse_flags(all_flags, 'PYEXT')
+
+	result = None
+	if not dct["LDVERSION"]:
+		dct["LDVERSION"] = env.PYTHON_VERSION
+
+	# further simplification will be complicated
+	for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
+
+		# LIBPATH_PYEMBED is already set; see if it works.
+		if not result and env.LIBPATH_PYEMBED:
+			path = env.LIBPATH_PYEMBED
+			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
+
+		if not result and dct['LIBDIR']:
+			path = [dct['LIBDIR']]
+			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
+
+		if not result and dct['LIBPL']:
+			path = [dct['LIBPL']]
+			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
+
+		if not result:
+			path = [os.path.join(dct['prefix'], "libs")]
+			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY rather than pythonX.Y (win32)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
+
+		if not result:
+			path = [os.path.normpath(os.path.join(dct['INCLUDEPY'], '..', 'libs'))]
+			conf.to_log("\n\n# try again with -L$INCLUDEPY/../libs, and pythonXY rather than pythonX.Y (win32)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $INCLUDEPY/../libs' % name)
+
+		if result:
+			break # do not forget to set LIBPATH_PYEMBED
+
+	if result:
+		env.LIBPATH_PYEMBED = path
+		env.append_value('LIB_PYEMBED', [name])
+	else:
+		conf.to_log("\n\n### LIB NOT FOUND\n")
+
+	# under certain conditions, python extensions must link to
+	# python libraries, not just python embedding programs.
+	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
+		env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
+		env.LIB_PYEXT = env.LIB_PYEMBED
+
+	conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],))
+	env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
+	env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
+
+	# Code using the Python API needs to be compiled with -fno-strict-aliasing
+	if env.CC_NAME == 'gcc':
+		env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
+	if env.CXX_NAME == 'gcc':
+		env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
+
+	if env.CC_NAME == "msvc":
+		try:
+			from distutils.msvccompiler import MSVCCompiler
+		except ImportError:
+			# From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
+			env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
+			env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
+			env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
+		else:
+			dist_compiler = MSVCCompiler()
+			dist_compiler.initialize()
+			env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
+			env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
+			env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
+
+	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter')
+
+@conf
+def check_python_version(conf, minver=None):
+	"""
+	Check if the python interpreter is found matching a given minimum version.
+	minver should be a tuple, e.g. to check for python >= 2.4.2 pass (2,4,2) as minver.
+
+	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (e.g. '2.4')
+	of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
+	are defined, pointing to the site-packages directories appropriate for
+	this python version, where modules/packages/extensions should be
+	installed.
+
+	:param minver: minimum version
+	:type minver: tuple of int
+	"""
+	assert minver is None or isinstance(minver, tuple)
+	pybin = conf.env.PYTHON
+	if not pybin:
+		conf.fatal('could not find the python executable')
+
+	# Get python version string
+	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
+	Logs.debug('python: Running python command %r', cmd)
+	lines = conf.cmd_and_log(cmd).split()
+	assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
+	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
+
+	# Compare python version with the minimum required
+	result = (minver is None) or (pyver_tuple >= minver)
+
+	if result:
+		# define useful environment variables
+		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
+		conf.env.PYTHON_VERSION = pyver
+
+		if 'PYTHONDIR' in conf.env:
+			# Check if --pythondir was specified
+			pydir = conf.env.PYTHONDIR
+		elif 'PYTHONDIR' in conf.environ:
+			# Check environment for PYTHONDIR
+			pydir = conf.environ['PYTHONDIR']
+		else:
+			# Finally, try to guess
+			if Utils.is_win32:
+				(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
+			else:
+				(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
+
+		if 'PYTHONARCHDIR' in conf.env:
+			# Check if --pythonarchdir was specified
+			pyarchdir = conf.env.PYTHONARCHDIR
+		elif 'PYTHONARCHDIR' in conf.environ:
+			# Check environment for PYTHONARCHDIR
+			pyarchdir = conf.environ['PYTHONARCHDIR']
+		else:
+			# Finally, try to guess
+			(pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
+			if not pyarchdir:
+				pyarchdir = pydir
+
+		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
+			conf.define('PYTHONDIR', pydir)
+			conf.define('PYTHONARCHDIR', pyarchdir)
+
+		conf.env.PYTHONDIR = pydir
+		conf.env.PYTHONARCHDIR = pyarchdir
+
+	# Feedback
+	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
+	if minver is None:
+		conf.msg('Checking for python version', pyver_full)
+	else:
+		minver_str = '.'.join(map(str, minver))
+		conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')
+
+	if not result:
+		conf.fatal('The python version is too old, expecting %r' % (minver,))
+
+PYTHON_MODULE_TEMPLATE = '''
+import %s as current_module
+version = getattr(current_module, '__version__', None)
+if version is not None:
+	print(str(version))
+else:
+	print('unknown version')
+'''
+
+@conf
+def check_python_module(conf, module_name, condition=''):
+	"""
+	Check if the selected python interpreter can import the given python module::
+
+		def configure(conf):
+			conf.check_python_module('pygccxml')
+			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")
+
+	:param module_name: module
+	:type module_name: string
+	"""
+	msg = "Checking for python module %r" % module_name
+	if condition:
+		msg = '%s (%s)' % (msg, condition)
+	conf.start_msg(msg)
+	try:
+		ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
+	except Errors.WafError:
+		conf.end_msg(False)
+		conf.fatal('Could not find the python module %r' % module_name)
+
+	ret = ret.strip()
+	if condition:
+		conf.end_msg(ret)
+		if ret == 'unknown version':
+			conf.fatal('Could not check the %s version' % module_name)
+
+		def num(*k):
+			if isinstance(k[0], int):
+				return Utils.loose_version('.'.join([str(x) for x in k]))
+			else:
+				return Utils.loose_version(k[0])
+		d = {'num': num, 'ver': Utils.loose_version(ret)}
+		ev = eval(condition, {}, d)
+		if not ev:
+			conf.fatal('The %s version does not satisfy the requirements' % module_name)
+	else:
+		if ret == 'unknown version':
+			conf.end_msg(True)
+		else:
+			conf.end_msg(ret)
+
+def configure(conf):
+	"""
+	Detect the python interpreter
+	"""
+	v = conf.env
+	if getattr(Options.options, 'pythondir', None):
+		v.PYTHONDIR = Options.options.pythondir
+	if getattr(Options.options, 'pythonarchdir', None):
+		v.PYTHONARCHDIR = Options.options.pythonarchdir
+	if getattr(Options.options, 'nopycache', None):
+		v.NOPYCACHE=Options.options.nopycache
+
+	if not v.PYTHON:
+		v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
+	v.PYTHON = Utils.to_list(v.PYTHON)
+	conf.find_program('python', var='PYTHON')
+
+	v.PYFLAGS = ''
+	v.PYFLAGS_OPT = '-O'
+
+	v.PYC = getattr(Options.options, 'pyc', 1)
+	v.PYO = getattr(Options.options, 'pyo', 1)
+
+	try:
+		v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
+	except Errors.WafError:
+		pass
+
+def options(opt):
+	"""
+	Add python-specific options
+	"""
+	pyopt=opt.add_option_group("Python Options")
+	pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
+					 help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
+	pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
+					 help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
+	pyopt.add_option('--nopycache',dest='nopycache', action='store_true',
+					 help='Do not use __pycache__ directory to install objects [Default:auto]')
+	pyopt.add_option('--python', dest="python",
+					 help='python binary to be used [Default: %s]' % sys.executable)
+	pyopt.add_option('--pythondir', dest='pythondir',
+					 help='Installation path for python modules (py, platform-independent .py and .pyc files)')
+	pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
+					 help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
+
diff --git a/third_party/waf/waflib/Tools/qt5.py b/third_party/waf/waflib/Tools/qt5.py
new file mode 100644
index 0000000..0932e94
--- /dev/null
+++ b/third_party/waf/waflib/Tools/qt5.py
@@ -0,0 +1,890 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Rafaël Kooi, 2023 (RA-Kooi)
+
+"""
+This tool helps with finding Qt5 and Qt6 tools and libraries,
+and also provides syntactic sugar for using Qt5 and Qt6 tools.
+
+The following snippet illustrates the tool usage::
+
+	def options(opt):
+		opt.load('compiler_cxx qt5')
+
+	def configure(conf):
+		conf.load('compiler_cxx qt5')
+
+	def build(bld):
+		bld(
+			features = 'qt5 cxx cxxprogram',
+			uselib   = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
+			source   = 'main.cpp textures.qrc aboutDialog.ui',
+			target   = 'window',
+		)
+
+Alternatively the following snippet illustrates Qt6 tool usage::
+
+    def options(opt):
+        opt.load('compiler_cxx qt5')
+
+    def configure(conf):
+        conf.want_qt6 = True
+        conf.load('compiler_cxx qt5')
+
+    def build(bld):
+        bld(
+            features = 'qt6 cxx cxxprogram',
+            uselib   = 'QT6CORE QT6GUI QT6OPENGL QT6SVG',
+            source   = 'main.cpp textures.qrc aboutDialog.ui',
+            target   = 'window',
+        )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt5" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+  include the .moc file.
+  This is regarded as the best practice (and provides much faster
+  compilations).
+  It also implies that the include paths have been set properly.
+
+- to have the include paths added automatically, use the following::
+
+     from waflib.TaskGen import feature, before_method, after_method
+     @feature('cxx')
+     @after_method('process_source')
+     @before_method('apply_incpaths')
+     def add_includes_paths(self):
+        incs = set(self.to_list(getattr(self, 'includes', '')))
+        for x in self.compiled_tasks:
+            incs.add(x.inputs[0].parent.path_from(self.path))
+        self.includes = sorted(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+For Qt6 replace the QT5_ prefix with QT6_.
+
+The detection uses pkg-config on Linux by default. The list of
+libraries to request from pkg-config is built by scanning the QTLIBS
+directory for the shared/static libraries present; QTLIBS can be passed
+via --qtlibs or the environment variable QT5_LIBDIR (QT6_LIBDIR for Qt6),
+and is otherwise derived by querying qmake for the QT_INSTALL_LIBS
+directory.
+Alternatively, the list of libraries to request via pkg-config can be
+set using the qt5_vars attribute, e.g.::
+
+	conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test']
+
+For Qt6 use the qt6_vars attribute.
+
+This can speed up the configuration phase when the needed libraries are
+known beforehand, improve detection on systems with a sparse Qt5/Qt6
+library installation (e.g. Nix), and improve detection of some
+header-only Qt modules (e.g. Qt5UiPlugin).
+
+To force static library detection use::
+
+	QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
+
+To use Qt6 set the want_qt6 attribute, e.g.::
+
+	conf.want_qt6 = True
+"""
+
+from __future__ import with_statement
+
+try:
+	from xml.sax import make_parser
+	from xml.sax.handler import ContentHandler
+except ImportError:
+	has_xml = False
+	ContentHandler = object
+else:
+	has_xml = True
+
+import os, sys, re
+from waflib.Tools import cxx
+from waflib import Build, Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension, before_method
+from waflib.Configure import conf
+from waflib import Logs
+
+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
+"""
+File extensions associated with .moc files
+"""
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI  = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
+"""
+File extensions of C++ files that may require a .moc processing
+"""
+
+class qxx(Task.classes['cxx']):
+	"""
+	Each C++ file can have zero or several .moc files to create.
+	They are known only when the files are scanned (preprocessor).
+	To avoid scanning the c++ files each time (parsing C/C++), the results
+	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
+	The moc tasks are also created *dynamically* during the build.
+	"""
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.moc_done = 0
+
+	def runnable_status(self):
+		"""
+		Compute the task signature to make sure the scanner was executed. Create the
+		moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
+		then postpone the task execution (there is no need to recompute the task signature).
+		"""
+		if self.moc_done:
+			return Task.Task.runnable_status(self)
+		else:
+			for t in self.run_after:
+				if not t.hasrun:
+					return Task.ASK_LATER
+			self.add_moc_tasks()
+			return Task.Task.runnable_status(self)
+
+	def create_moc_task(self, h_node, m_node):
+		"""
+		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318).
+		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
+		and the moc tasks can be shared in a global cache.
+		"""
+		try:
+			moc_cache = self.generator.bld.moc_cache
+		except AttributeError:
+			moc_cache = self.generator.bld.moc_cache = {}
+
+		try:
+			return moc_cache[h_node]
+		except KeyError:
+			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
+			tsk.set_inputs(h_node)
+			tsk.set_outputs(m_node)
+			tsk.env.append_unique('MOC_FLAGS', '-i')
+
+			if self.generator:
+				self.generator.tasks.append(tsk)
+
+			# direct injection in the build phase (safe because called from the main thread)
+			gen = self.generator.bld.producer
+			gen.outstanding.append(tsk)
+			gen.total += 1
+
+			return tsk
+
+	def add_moc_tasks(self):
+		"""
+		Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
+		"""
+		node = self.inputs[0]
+		bld = self.generator.bld
+
+		# skip on uninstall due to generated files
+		if bld.is_install == Build.UNINSTALL:
+			return
+
+		try:
+			# compute the signature once to know if there is a moc file to create
+			self.signature()
+		except KeyError:
+			# the moc file may be referenced somewhere else
+			pass
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+		include_nodes = [node.parent] + self.generator.includes_nodes
+
+		moctasks = []
+		mocfiles = set()
+		for d in bld.raw_deps.get(self.uid(), []):
+			if not d.endswith('.moc'):
+				continue
+
+			# process that base.moc only once
+			if d in mocfiles:
+				continue
+			mocfiles.add(d)
+
+			# find the source associated with the moc file
+			h_node = None
+			base2 = d[:-4]
+
+			# foo.moc from foo.cpp
+			prefix = node.name[:node.name.rfind('.')]
+			if base2 == prefix:
+				h_node = node
+			else:
+				# this deviates from the standard
+				# if bar.cpp includes foo.moc, then assume it is from foo.h
+				for x in include_nodes:
+					for e in MOC_H:
+						h_node = x.find_node(base2 + e)
+						if h_node:
+							break
+					else:
+						continue
+					break
+			if h_node:
+				m_node = h_node.change_ext('.moc')
+			else:
+				raise Errors.WafError('No source found for %r which is a moc file' % d)
+
+			# create the moc task
+			task = self.create_moc_task(h_node, m_node)
+			moctasks.append(task)
+
+		# simple scheduler dependency: run the moc task before others
+		self.run_after.update(set(moctasks))
+		self.moc_done = 1
+
+class trans_update(Task.Task):
+	"""Updates a .ts files from a list of C++ files"""
+	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
+	color   = 'BLUE'
+
+class XMLHandler(ContentHandler):
+	"""
+	Parses ``.qrc`` files
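+
+	A ``.qrc`` file is a small XML document listing the resources to embed,
+	for example::
+
+		<!DOCTYPE RCC><RCC version="1.0">
+		<qresource>
+			<file>images/icon.png</file>
+		</qresource>
+		</RCC>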
+	"""
+	def __init__(self):
+		ContentHandler.__init__(self)
+		self.buf = []
+		self.files = []
+	def startElement(self, name, attrs):
+		if name == 'file':
+			self.buf = []
+	def endElement(self, name):
+		if name == 'file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self, cars):
+		self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_rcc_task(self, node):
+	"Creates rcc and cxx tasks for ``.qrc`` files"
+	rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
+	self.create_task('rcc', node, rcnode)
+	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
+	try:
+		self.compiled_tasks.append(cpptask)
+	except AttributeError:
+		self.compiled_tasks = [cpptask]
+	return cpptask
+
+@extension(*EXT_UI)
+def create_uic_task(self, node):
+	"Create uic tasks for user interface ``.ui`` definition files"
+
+	"""
+	If UIC file is used in more than one bld, we would have a conflict in parallel execution
+	It is not possible to change the file names (like .self.idx. as for objects) as they have
+	to be referenced by the source file, but we can assume that the transformation will be identical
+	and the tasks can be shared in a global cache.
+	"""
+	try:
+		uic_cache = self.bld.uic_cache
+	except AttributeError:
+		uic_cache = self.bld.uic_cache = {}
+
+	if node not in uic_cache:
+		uictask = uic_cache[node] = self.create_task('ui5', node)
+		uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
+
+@extension('.ts')
+def add_lang(self, node):
+	"""Adds all the .ts file into ``self.lang``"""
+	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('qt5', 'qt6')
+@before_method('process_source')
+def process_mocs(self):
+	"""
+	Processes MOC files included in headers::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
+
+	The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
+	is provided to avoid name clashes when the same headers are used by several targets.
+	"""
+	lst = self.to_nodes(getattr(self, 'moc', []))
+	self.source = self.to_list(getattr(self, 'source', []))
+	for x in lst:
+		prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
+		moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
+		moc_node = x.parent.find_or_declare(moc_target)
+		self.source.append(moc_node)
+
+		self.create_task('moc', x, moc_node)
+
+@feature('qt5', 'qt6')
+@after_method('apply_link')
+def apply_qt5(self):
+	"""
+	Adds MOC_FLAGS which may be necessary for moc::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
+
+	The additional parameters are:
+
+	:param lang: list of translation files (\\*.ts) to process
+	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
+	:type update: bool
+	:param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file
+	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
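+
+	A minimal usage sketch (the translation file names are illustrative)::
+
+		def build(bld):
+			bld.program(
+				features = 'qt5',
+				use      = 'QT5CORE',
+				source   = 'main.cpp',
+				target   = 'app',
+				lang     = ['app_fr', 'app_de'], # looks up app_fr.ts and app_de.ts
+				langname = 'translations',       # bundles the .qm files into a .qrc
+			)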
+	"""
+	if getattr(self, 'lang', None):
+		qmtasks = []
+		for x in self.to_list(self.lang):
+			if isinstance(x, str):
+				x = self.path.find_resource(x + '.ts')
+			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
+
+		if getattr(self, 'update', None) and Options.options.trans_qt5:
+			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
+				a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
+			for x in qmtasks:
+				self.create_task('trans_update', cxxnodes, x.inputs)
+
+		if getattr(self, 'langname', None):
+			qmnodes = [x.outputs[0] for x in qmtasks]
+			rcnode = self.langname
+			if isinstance(rcnode, str):
+				rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
+			t = self.create_task('qm2rcc', qmnodes, rcnode)
+			k = create_rcc_task(self, t.outputs[0])
+			self.link_task.inputs.append(k.outputs[0])
+
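+	# forward the -D and -I flags (or MSVC-style /D and /I) from CXXFLAGS to moc;
+	# e.g. '/DFOO' is normalized below to '-DFOO'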
+	lst = []
+	for flag in self.to_list(self.env.CXXFLAGS):
+		if len(flag) < 2:
+			continue
+		f = flag[0:2]
+		if f in ('-D', '-I', '/D', '/I'):
+			if f[0] == '/':
+				lst.append('-' + flag[1:])
+			else:
+				lst.append(flag)
+	self.env.append_value('MOC_FLAGS', lst)
+
+@extension(*EXT_QT5)
+def cxx_hook(self, node):
+	"""
+	Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
+	"""
+	return self.create_compiled_task('qxx', node)
+
+class rcc(Task.Task):
+	"""
+	Processes ``.qrc`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out = ['.h']
+
+	def rcname(self):
+		return os.path.splitext(self.inputs[0].name)[0]
+
+	def scan(self):
+		"""Parse the *.qrc* files"""
+		if not has_xml:
+			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
+		parser.setContentHandler(curHandler)
+		with open(self.inputs[0].abspath(), 'r') as f:
+			parser.parse(f)
+
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
+		for x in curHandler.files:
+			nd = root.find_resource(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
+		return (nodes, names)
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		return x
+
+
+class moc(Task.Task):
+	"""
+	Creates ``.moc`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		return x
+
+
+class ui5(Task.Task):
+	"""
+	Processes ``.ui`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out = ['.h']
+
+class ts2qm(Task.Task):
+	"""
+	Generates ``.qm`` files from ``.ts`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Generates ``.qrc`` files from ``.qm`` files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT5_ROOT may be used
+	to give the location of the qt5 libraries (absolute path).
+
+	The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
+	"""
+	if 'COMPILER_CXX' not in self.env:
+		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+
+	self.want_qt6 = getattr(self, 'want_qt6', False)
+
+	if self.want_qt6:
+		self.qt_vars = Utils.to_list(getattr(self, 'qt6_vars', []))
+	else:
+		self.qt_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
+
+	self.find_qt5_binaries()
+	self.set_qt5_libs_dir()
+	self.set_qt5_libs_to_check()
+	self.set_qt5_defines()
+	self.find_qt5_libraries()
+	self.add_qt5_rpath()
+	self.simplify_qt5_libs()
+
+	# warn about this during the configuration too
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+	feature = 'qt6' if self.want_qt6 else 'qt5'
+	# Qt6 requires C++17 (https://www.qt.io/blog/qt-6.0-released)
+	stdflag = '-std=c++17' if self.want_qt6 else '-std=c++11'
+
+	# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
+	frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+	uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE'
+	for flag in [[], '-fPIE', '-fPIC', stdflag, [stdflag, '-fPIE'], [stdflag, '-fPIC']]:
+		msg = 'See if Qt files compile '
+		if flag:
+			msg += 'with %s' % flag
+		try:
+			self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flag, fragment=frag, msg=msg)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			break
+	else:
+		self.fatal('Could not build a simple Qt application')
+
+	# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
+	if Utils.unversioned_sys_platform() == 'freebsd':
+		frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+		try:
+			self.check(features=feature + ' cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
+		except self.errors.ConfigurationError:
+			self.check(features=feature + ' cxx cxxprogram', use=uses, uselib_store=feature, libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
+
+@conf
+def find_qt5_binaries(self):
+	"""
+	Detects Qt programs such as qmake, moc, uic, lrelease
+	"""
+	env = self.env
+	opt = Options.options
+
+	qtdir = getattr(opt, 'qtdir', '')
+	qtbin = getattr(opt, 'qtbin', '')
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	paths = []
+
+	if qtdir:
+		qtbin = os.path.join(qtdir, 'bin')
+
+	# the qt directory has been given from QT5_ROOT - deduce the qt binary path
+	if not qtdir:
+		qtdir = self.environ.get('QT' + qt_ver + '_ROOT', '')
+		qtbin = self.environ.get('QT' + qt_ver + '_BIN') or os.path.join(qtdir, 'bin')
+
+	if qtbin:
+		paths = [qtbin]
+
+	# no qtdir, look in the path and in /usr/local/Trolltech
+	if not qtdir:
+		paths = self.environ.get('PATH', '').split(os.pathsep)
+		paths.extend([
+			'/usr/share/qt' + qt_ver + '/bin',
+			'/usr/local/lib/qt' + qt_ver + '/bin'])
+
+		try:
+			lst = Utils.listdir('/usr/local/Trolltech/')
+		except OSError:
+			pass
+		else:
+			if lst:
+				lst.sort()
+				lst.reverse()
+
+				# keep the highest version
+				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+				qtbin = os.path.join(qtdir, 'bin')
+				paths.append(qtbin)
+
+	# at the end, try to find qmake in the paths given
+	# keep the one with the highest version
+	cand = None
+	prev_ver = ['0', '0', '0']
+	qmake_vars = ['qmake-qt' + qt_ver, 'qmake' + qt_ver, 'qmake']
+
+	for qmk in qmake_vars:
+		try:
+			qmake = self.find_program(qmk, path_list=paths)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			try:
+				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+			except self.errors.WafError:
+				pass
+			else:
+				if version:
+					new_ver = version.split('.')
+					# compare components numerically when possible, so that e.g. 5.15 ranks above 5.9
+					try:
+						is_newer = [int(x) for x in new_ver] > [int(x) for x in prev_ver]
+					except ValueError:
+						is_newer = new_ver > prev_ver
+					if new_ver[0] == qt_ver and is_newer:
+						cand = qmake
+						prev_ver = new_ver
+
+	# qmake could not be found easily, rely on qtchooser
+	if not cand:
+		try:
+			self.find_program('qtchooser')
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			cmd = self.env.QTCHOOSER + ['-qt=' + qt_ver, '-run-tool=qmake']
+			try:
+				version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
+			except self.errors.WafError:
+				pass
+			else:
+				cand = cmd
+
+	if cand:
+		self.env.QMAKE = cand
+	else:
+		self.fatal('Could not find qmake for qt' + qt_ver)
+
+	# Once we have qmake, we want to query qmake for the paths where we want to look for tools instead
+	paths = []
+
+	self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
+	paths.append(qtbin)
+
+	if self.want_qt6:
+		self.env.QT_HOST_LIBEXECS = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_LIBEXECS']).strip()
+		paths.append(self.env.QT_HOST_LIBEXECS)
+
+	def find_bin(lst, var):
+		if var in env:
+			return
+		for f in lst:
+			try:
+				ret = self.find_program(f, path_list=paths)
+			except self.errors.ConfigurationError:
+				pass
+			else:
+				env[var]=ret
+				break
+
+	find_bin(['uic-qt' + qt_ver, 'uic'], 'QT_UIC')
+	if not env.QT_UIC:
+		self.fatal('cannot find the uic compiler for qt' + qt_ver)
+
+	self.start_msg('Checking for uic version')
+	uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
+	uicver = ''.join(uicver).strip()
+	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+	self.end_msg(uicver)
+	if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1 or (self.want_qt6 and uicver.find(' 5.') != -1):
+		if self.want_qt6:
+			self.fatal('this uic compiler is for qt3 or qt4 or qt5, add uic for qt6 to your path')
+		else:
+			self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
+
+	find_bin(['moc-qt' + qt_ver, 'moc'], 'QT_MOC')
+	find_bin(['rcc-qt' + qt_ver, 'rcc'], 'QT_RCC')
+	find_bin(['lrelease-qt' + qt_ver, 'lrelease'], 'QT_LRELEASE')
+	find_bin(['lupdate-qt' + qt_ver, 'lupdate'], 'QT_LUPDATE')
+
+	env.UIC_ST = '%s -o %s'
+	env.MOC_ST = '-o'
+	env.ui_PATTERN = 'ui_%s.h'
+	env.QT_LRELEASE_FLAGS = ['-silent']
+	env.MOCCPPPATH_ST = '-I%s'
+	env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def set_qt5_libs_dir(self):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT' + qt_ver + '_LIBDIR')
+
+	if not qtlibs:
+		try:
+			qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
+			qtlibs = os.path.join(qtdir, 'lib')
+
+	self.msg('Found the Qt' + qt_ver + ' library path', qtlibs)
+
+	env.QTLIBS = qtlibs
+
+@conf
+def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if force_static:
+		exts = ('.a', '.lib')
+		prefix = 'STLIB'
+	else:
+		exts = ('.so', '.lib')
+		prefix = 'LIB'
+
+	def lib_names():
+		for x in exts:
+			for k in ('', qt_ver) if Utils.is_win32 else ['']:
+				for p in ('lib', ''):
+					yield (p, name, k, x)
+
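+	# each tuple is joined into a candidate file name below,
+	# e.g. ('lib', 'Qt5Core', '', '.so') becomes 'libQt5Core.so'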
+	for tup in lib_names():
+		k = ''.join(tup)
+		path = os.path.join(qtlibs, k)
+		if os.path.exists(path):
+			if env.DEST_OS == 'win32':
+				libval = ''.join(tup[:-1])
+			else:
+				libval = name
+			env.append_unique(prefix + '_' + uselib, libval)
+			env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
+			env.append_unique('INCLUDES_' + uselib, qtincludes)
+			env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt' + qt_ver, 'Qt')))
+			return k
+	return False
+
+@conf
+def find_qt5_libraries(self):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	qtincludes =  self.environ.get('QT' + qt_ver + '_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+	force_static = self.environ.get('QT' + qt_ver + '_FORCE_STATIC')
+
+	try:
+		if self.environ.get('QT' + qt_ver + '_XCOMPILE'):
+			self.fatal('QT' + qt_ver + '_XCOMPILE disables pkg-config detection')
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt_vars:
+			uselib = i.upper()
+			if Utils.unversioned_sys_platform() == 'darwin':
+				# since at least Qt 4.7.3, each library is located in a separate framework directory
+				fwk = i.replace('Qt' + qt_ver, 'Qt')
+				frameworkName = fwk + '.framework'
+
+				qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_' + uselib, fwk)
+					env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+				env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
+			else:
+				ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
+				if not force_static and not ret:
+					ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
+				self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
+	else:
+		path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % (
+			self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS, qt_ver, qt_ver, qt_ver, qt_ver)
+		for i in self.qt_vars:
+			self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
+
+@conf
+def simplify_qt5_libs(self):
+	"""
+	Since library paths make really long command-lines,
+	and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
+	"""
+	env = self.env
+	def process_lib(vars_, coreval):
+		for d in vars_:
+			var = d.upper()
+			if var == 'QTCORE':
+				continue
+
+			value = env['LIBPATH_'+var]
+			if value:
+				core = env[coreval]
+				accu = []
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var] = accu
+	process_lib(self.qt_vars, 'LIBPATH_QTCORE')
+
+@conf
+def add_qt5_rpath(self):
+	"""
+	Defines rpath entries for Qt libraries
+	"""
+	env = self.env
+	if getattr(Options.options, 'want_rpath', False):
+		def process_rpath(vars_, coreval):
+			for d in vars_:
+				var = d.upper()
+				value = env['LIBPATH_' + var]
+				if value:
+					core = env[coreval]
+					accu = []
+					for lib in value:
+						if var != 'QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_' + var] = accu
+		process_rpath(self.qt_vars, 'LIBPATH_QTCORE')
+
+@conf
+def set_qt5_libs_to_check(self):
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if not self.qt_vars:
+		dirlst = Utils.listdir(self.env.QTLIBS)
+
+		pat = self.env.cxxshlib_PATTERN
+		if Utils.is_win32:
+			pat = pat.replace('.dll', '.lib')
+		if self.environ.get('QT' + qt_ver + '_FORCE_STATIC'):
+			pat = self.env.cxxstlib_PATTERN
+		if Utils.unversioned_sys_platform() == 'darwin':
+			pat = r"%s\.framework"
+
+		# For Qt5 we only want to match Qt5 or Qt, and for Qt6 only Qt6 or Qt.
+		# This speeds up configuration, reduces the chattiness of the output
+		# and helps prevent misconfiguration.
+		if self.want_qt6:
+			re_qt = re.compile(pat % 'Qt6?(?!\\d)(?P<name>\\w+)' + '$')
+		else:
+			re_qt = re.compile(pat % 'Qt5?(?!\\d)(?P<name>\\w+)' + '$')
+
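+		# e.g. 'libQt5Core.so' matches with name='Core' and is added below as 'Qt5Core'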
+		for x in sorted(dirlst):
+			m = re_qt.match(x)
+			if m:
+				self.qt_vars.append("Qt%s%s" % (qt_ver, m.group('name')))
+		if not self.qt_vars:
+			self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, self.env.QTLIBS))
+
+	qtextralibs = getattr(Options.options, 'qtextralibs', None)
+	if qtextralibs:
+		self.qt_vars.extend(qtextralibs.split(','))
+
+@conf
+def set_qt5_defines(self):
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if sys.platform != 'win32':
+		return
+
+	for x in self.qt_vars:
+		y=x.replace('Qt' + qt_ver, 'Qt')[2:].upper()
+		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+	for i in 'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i, type='string', default='', dest=i)
+
+	opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
+	opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
+
diff --git a/third_party/waf/waflib/Tools/ruby.py b/third_party/waf/waflib/Tools/ruby.py
new file mode 100644
index 0000000..8d92a79
--- /dev/null
+++ b/third_party/waf/waflib/Tools/ruby.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# daniel.svensson at purplescout.se 2008
+# Thomas Nagy 2016-2018 (ita)
+
+"""
+Support for Ruby extensions. A C/C++ compiler is required::
+
+	def options(opt):
+		opt.load('compiler_c ruby')
+	def configure(conf):
+		conf.load('compiler_c ruby')
+		conf.check_ruby_version((1,8,0))
+		conf.check_ruby_ext_devel()
+		conf.check_ruby_module('libxml')
+	def build(bld):
+		bld(
+			features = 'c cshlib rubyext',
+			source = 'rb_mytest.c',
+			target = 'mytest_ext',
+			install_path = '${ARCHDIR_RUBY}')
+		bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
+"""
+
+import os
+from waflib import Errors, Options, Task, Utils
+from waflib.TaskGen import before_method, feature, extension
+from waflib.Configure import conf
+
+@feature('rubyext')
+@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
+def init_rubyext(self):
+	"""
+	Add required variables for ruby extensions
+	"""
+	self.install_path = '${ARCHDIR_RUBY}'
+	self.uselib = self.to_list(getattr(self, 'uselib', ''))
+	if 'RUBY' not in self.uselib:
+		self.uselib.append('RUBY')
+	if 'RUBYEXT' not in self.uselib:
+		self.uselib.append('RUBYEXT')
+
+@feature('rubyext')
+@before_method('apply_link', 'propagate_uselib_vars')
+def apply_ruby_so_name(self):
+	"""
+	Strip the *lib* prefix from ruby extensions
+	"""
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN
+
+@conf
+def check_ruby_version(self, minver=()):
+	"""
+	Checks if ruby is installed.
+	If installed, the variable RUBY will be set in the environment.
+	The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.
+	"""
+
+	ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)
+
+	try:
+		version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+	except Errors.WafError:
+		self.fatal('could not determine ruby version')
+	self.env.RUBY_VERSION = version
+
+	try:
+		ver = tuple(map(int, version.split('.')))
+	except ValueError:
+		self.fatal('unsupported ruby version %r' % version)
+
+	cver = ''
+	if minver:
+		cver = '>= ' + '.'.join(str(x) for x in minver)
+		if ver < minver:
+			self.fatal('ruby is too old %r' % ver)
+
+	self.msg('Checking for ruby version %s' % cver, version)
+
+@conf
+def check_ruby_ext_devel(self):
+	"""
+	Check if a ruby extension can be created
+	"""
+	if not self.env.RUBY:
+		self.fatal('ruby detection is required first')
+
+	if not self.env.CC_NAME and not self.env.CXX_NAME:
+		self.fatal('load a c/c++ compiler first')
+
+	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
+
+	def read_out(cmd):
+		return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))
+
+	def read_config(key):
+		return read_out('puts RbConfig::CONFIG[%r]' % key)
+
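+	# RbConfig values come back as lists,
+	# e.g. read_config('archdir') yields something like ['/usr/lib/ruby/2.7.0/x86_64-linux-gnu']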
+	# take a copy so that appending include paths below does not also modify archdir
+	archdir = read_config('archdir')
+	cpppath = list(archdir)
+
+	if version >= (1, 9, 0):
+		ruby_hdrdir = read_config('rubyhdrdir')
+		cpppath += ruby_hdrdir
+		if version >= (2, 0, 0):
+			cpppath += read_config('rubyarchhdrdir')
+		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
+
+	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)
+
+	self.env.LIBPATH_RUBYEXT = read_config('libdir')
+	self.env.LIBPATH_RUBYEXT += archdir
+	self.env.INCLUDES_RUBYEXT = cpppath
+	self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
+	self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
+
+	# the LDSHARED value combines the command and its flags,
+	# so skip ahead to the first actual flag
+	flags = read_config('LDSHARED')
+	while flags and flags[0][0] != '-':
+		flags = flags[1:]
+
+	# we also want to strip out the deprecated ppc flags
+	if len(flags) > 1 and flags[1] == "ppc":
+		flags = flags[2:]
+
+	self.env.LINKFLAGS_RUBYEXT = flags
+	self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
+	self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')
+
+	if Options.options.rubyarchdir:
+		self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
+	else:
+		self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
+
+	if Options.options.rubylibdir:
+		self.env.LIBDIR_RUBY = Options.options.rubylibdir
+	else:
+		self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
+
+@conf
+def check_ruby_module(self, module_name):
+	"""
+	Check if the selected ruby interpreter can require the given ruby module::
+
+		def configure(conf):
+			conf.check_ruby_module('libxml')
+
+	:param module_name: module
+	:type  module_name: string
+	"""
+	self.start_msg('Ruby module %s' % module_name)
+	try:
+		self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
+	except Errors.WafError:
+		self.end_msg(False)
+		self.fatal('Could not find the ruby module %r' % module_name)
+	self.end_msg(True)
+
+@extension('.rb')
+def process(self, node):
+	return self.create_task('run_ruby', node)
+
+class run_ruby(Task.Task):
+	"""
+	Task to run ruby files detected by file extension .rb::
+
+		def options(opt):
+			opt.load('ruby')
+
+		def configure(ctx):
+			ctx.check_ruby_version()
+
+		def build(bld):
+			bld.env.RBFLAGS = '-e puts "hello world"'
+			bld(source='a_ruby_file.rb')
+	"""
+	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
+
+def options(opt):
+	"""
+	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
+	"""
+	opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
+	opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
+	opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
+
diff --git a/third_party/waf/waflib/Tools/suncc.py b/third_party/waf/waflib/Tools/suncc.py
new file mode 100644
index 0000000..33d34fc
--- /dev/null
+++ b/third_party/waf/waflib/Tools/suncc.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+
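+"""
+Support for the Sun/Oracle Studio C compiler. A minimal usage sketch
+(this tool is normally selected through compiler_c on SunOS)::
+
+	def configure(conf):
+		conf.load('suncc')
+"""
+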
+from waflib import Errors
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_scc(conf):
+	"""
+	Detects the Sun C compiler
+	"""
+	v = conf.env
+	cc = conf.find_program('cc', var='CC')
+	try:
+		conf.cmd_and_log(cc + ['-flags'])
+	except Errors.WafError:
+		conf.fatal('%r is not a Sun compiler' % cc)
+	v.CC_NAME = 'sun'
+	conf.get_suncc_version(cc)
+
+@conf
+def scc_common_flags(conf):
+	"""
+	Flags required for executing the Sun C compiler
+	"""
+	v = conf.env
+
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o', '']
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = ''
+	v.CCLNK_TGT_F         = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
+
+	v.cprogram_PATTERN    = '%s'
+
+	v.CFLAGS_cshlib       = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cshlib    = ['-G']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = ['-Bstatic']
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+	conf.find_scc()
+	conf.find_ar()
+	conf.scc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/suncxx.py b/third_party/waf/waflib/Tools/suncxx.py
new file mode 100644
index 0000000..3b384f6
--- /dev/null
+++ b/third_party/waf/waflib/Tools/suncxx.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+
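+"""
+Support for the Sun/Oracle Studio C++ compiler. A minimal usage sketch
+(this tool is normally selected through compiler_cxx on SunOS)::
+
+	def configure(conf):
+		conf.load('suncxx')
+"""
+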
+from waflib import Errors
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_sxx(conf):
+	"""
+	Detects the Sun C++ compiler
+	"""
+	v = conf.env
+	cc = conf.find_program(['CC', 'c++'], var='CXX')
+	try:
+		conf.cmd_and_log(cc + ['-flags'])
+	except Errors.WafError:
+		conf.fatal('%r is not a Sun compiler' % cc)
+	v.CXX_NAME = 'sun'
+	conf.get_suncc_version(cc)
+
+@conf
+def sxx_common_flags(conf):
+	"""
+	Flags required for executing the Sun C++ compiler
+	"""
+	v = conf.env
+
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o', '']
+
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
+
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
+
+	v.cxxprogram_PATTERN  = '%s'
+
+	v.CXXFLAGS_cxxshlib   = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = ['-Bstatic']
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+def configure(conf):
+	conf.find_sxx()
+	conf.find_ar()
+	conf.sxx_common_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/tex.py b/third_party/waf/waflib/Tools/tex.py
new file mode 100644
index 0000000..b4792c3
--- /dev/null
+++ b/third_party/waf/waflib/Tools/tex.py
@@ -0,0 +1,544 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+TeX/LaTeX/PDFLaTeX/XeLaTeX support
+
+Example::
+
+	def configure(conf):
+		conf.load('tex')
+		if not conf.env.LATEX:
+			conf.fatal('The program LaTeX is required')
+
+	def build(bld):
+		bld(
+			features = 'tex',
+			type     = 'latex', # pdflatex or xelatex
+			source   = 'document.ltx', # mandatory, the source
+			outs     = 'ps', # 'pdf' or 'ps pdf'
+			deps     = 'crossreferencing.lst', # to give dependencies directly
+			prompt   = 1, # 0 for the batch mode
+		)
+
+Notes:
+
+- To configure with a special program, use::
+
+     $ PDFLATEX=luatex waf configure
+
+- This tool does not use the target attribute of the task generator
+  (``bld(target=...)``); the target file name is built from the source
+  base name and the output type(s)
+"""
+
+import os, re
+from waflib import Utils, Task, Errors, Logs, Node
+from waflib.TaskGen import feature, before_method
+
+re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
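+"""Regexp for bibunit directives such as \\putbib[units]"""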
+def bibunitscan(self):
+	"""
+	Parses TeX inputs and tries to find the *bibunit* file dependencies
+
+	:return: list of bibunit files
+	:rtype: list of :py:class:`waflib.Node.Node`
+	"""
+	node = self.inputs[0]
+
+	nodes = []
+	if not node:
+		return nodes
+
+	code = node.read()
+	for match in re_bibunit.finditer(code):
+		path = match.group('file')
+		if path:
+			found = None
+			for k in ('', '.bib'):
+				# add another loop for the tex include paths?
+				Logs.debug('tex: trying %s%s', path, k)
+				fi = node.parent.find_resource(path + k)
+				if fi:
+					found = True
+					nodes.append(fi)
+					# no break
+			if not found:
+				Logs.debug('tex: could not find %s', path)
+
+	Logs.debug('tex: found the following bibunit files: %s', nodes)
+	return nodes
+
+exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
+"""List of typical file extensions included in latex files"""
+
+exts_tex = ['.ltx', '.tex']
+"""List of typical file extensions that contain latex"""
+
+re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+"""Regexp for expressions that may include latex files"""
+
+g_bibtex_re = re.compile('bibdata', re.M)
+"""Regexp for bibtex files"""
+
+g_glossaries_re = re.compile('\\@newglossary', re.M)
+"""Regexp for expressions that create glossaries"""
+
+class tex(Task.Task):
+	"""
+	Compiles a tex/latex file.
+
+	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
+	   :top-classes: waflib.Tools.tex.tex
+	"""
+
+	bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
+	bibtex_fun.__doc__ = """
+	Execute the program **bibtex**
+	"""
+
+	makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
+	makeindex_fun.__doc__ = """
+	Execute the program **makeindex**
+	"""
+
+	makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
+	makeglossaries_fun.__doc__ = """
+	Execute the program **makeglossaries**
+	"""
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Executes TeX commands without buffering (latex may prompt for inputs)
+
+		:return: the return code
+		:rtype: int
+		"""
+		if self.env.PROMPT_LATEX:
+			# capture the outputs in configuration tests
+			kw['stdout'] = kw['stderr'] = None
+		return super(tex, self).exec_command(cmd, **kw)
+
+	def scan_aux(self, node):
+		"""
+		Recursive regex-based scanner that finds included auxiliary files.
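+
+		Matches the ``\\@input{file.aux}`` entries that latex writes into
+		*.aux* files, e.g. ``\\@input{chapter1.aux}``.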
+		"""
+		nodes = [node]
+		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)
+
+		def parse_node(node):
+			code = node.read()
+			for match in re_aux.finditer(code):
+				path = match.group('file')
+				found = node.parent.find_or_declare(path)
+				if found and found not in nodes:
+					Logs.debug('tex: found aux node %r', found)
+					nodes.append(found)
+					parse_node(found)
+		parse_node(node)
+		return nodes
+
+	def scan(self):
+		"""
+		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`
+
+		Depending on your needs you might want:
+
+		* to change re_tex::
+
+			from waflib.Tools import tex
+			tex.re_tex = myregex
+
+		* or to change the method scan from the latex tasks::
+
+			from waflib.Task import classes
+			classes['latex'].scan = myscanfunction
+		"""
+		node = self.inputs[0]
+
+		nodes = []
+		names = []
+		seen = []
+		if not node:
+			return (nodes, names)
+
+		def parse_node(node):
+			if node in seen:
+				return
+			seen.append(node)
+			code = node.read()
+			for match in re_tex.finditer(code):
+
+				multibib = match.group('type')
+				if multibib and multibib.startswith('bibliography'):
+					multibib = multibib[len('bibliography'):]
+					if multibib.startswith('style'):
+						continue
+				else:
+					multibib = None
+
+				for path in match.group('file').split(','):
+					if path:
+						add_name = True
+						found = None
+						for k in exts_deps_tex:
+
+							# issue 1067, scan in all texinputs folders
+							for up in self.texinputs_nodes:
+								Logs.debug('tex: trying %s%s', path, k)
+								found = up.find_resource(path + k)
+								if found:
+									break
+
+
+							for tsk in self.generator.tasks:
+								if not found or found in tsk.outputs:
+									break
+							else:
+								nodes.append(found)
+								add_name = False
+								for ext in exts_tex:
+									if found.name.endswith(ext):
+										parse_node(found)
+										break
+
+							# multibib stuff
+							if found and multibib and found.name.endswith('.bib'):
+								try:
+									self.multibibs.append(found)
+								except AttributeError:
+									self.multibibs = [found]
+
+							# no break, people are crazy
+						if add_name:
+							names.append(path)
+		parse_node(node)
+
+		for x in nodes:
+			x.parent.get_bld().mkdir()
+
+		Logs.debug("tex: found the following : %s and names %s", nodes, names)
+		return (nodes, names)
+
+	def check_status(self, msg, retcode):
+		"""
+		Checks an exit status and raises an error with a particular message
+
+		:param msg: message to display if the code is non-zero
+		:type msg: string
+		:param retcode: return code to check
+		:type retcode: int
+		"""
+		if retcode != 0:
+			raise Errors.WafError('%r command exit status %r' % (msg, retcode))
+
+	def info(self, *k, **kw):
+		try:
+			info = self.generator.bld.conf.logger.info
+		except AttributeError:
+			info = Logs.info
+		info(*k, **kw)
+
+	def bibfile(self):
+		"""
+		Parses *.aux* files to find bibfiles to process.
+		If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
+		"""
+		for aux_node in self.aux_nodes:
+			try:
+				ct = aux_node.read()
+			except EnvironmentError:
+				Logs.error('Error reading %s', aux_node.abspath())
+				continue
+
+			if g_bibtex_re.findall(ct):
+				self.info('calling bibtex')
+
+				self.env.env = {}
+				self.env.env.update(os.environ)
+				self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+				self.env.SRCFILE = aux_node.name[:-4]
+				self.check_status('error when calling bibtex', self.bibtex_fun())
+
+		for node in getattr(self, 'multibibs', []):
+			self.env.env = {}
+			self.env.env.update(os.environ)
+			self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+			self.env.SRCFILE = node.name[:-4]
+			self.check_status('error when calling bibtex', self.bibtex_fun())
+
+	def bibunits(self):
+		"""
+		Parses *.aux* file to find bibunit files. If there are bibunit files,
+		runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
+		"""
+		try:
+			bibunits = bibunitscan(self)
+		except OSError:
+			Logs.error('error bibunitscan')
+		else:
+			if bibunits:
+				fn  = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
+				if fn:
+					self.info('calling bibtex on bibunits')
+
+				for f in fn:
+					self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
+					self.env.SRCFILE = f
+					self.check_status('error when calling bibtex', self.bibtex_fun())
+
+	def makeindex(self):
+		"""
+		Searches the filesystem for *.idx* files to process. If present,
+		runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
+		"""
+		self.idx_node = self.inputs[0].change_ext('.idx')
+		try:
+			idx_path = self.idx_node.abspath()
+			os.stat(idx_path)
+		except OSError:
+			self.info('index file %s absent, not calling makeindex', idx_path)
+		else:
+			self.info('calling makeindex')
+
+			self.env.SRCFILE = self.idx_node.name
+			self.env.env = {}
+			self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
+
+	def bibtopic(self):
+		"""
+		Lists additional .aux files from the bibtopic package
+		"""
+		p = self.inputs[0].parent.get_bld()
+		if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
+			self.aux_nodes += p.ant_glob('*[0-9].aux')
+
+	def makeglossaries(self):
+		"""
+		Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
+		"""
+		src_file = self.inputs[0].abspath()
+		base_file = os.path.basename(src_file)
+		base, _ = os.path.splitext(base_file)
+		for aux_node in self.aux_nodes:
+			try:
+				ct = aux_node.read()
+			except EnvironmentError:
+				Logs.error('Error reading %s', aux_node.abspath())
+				continue
+
+			if g_glossaries_re.findall(ct):
+				if not self.env.MAKEGLOSSARIES:
+					raise Errors.WafError("The program 'makeglossaries' is missing!")
+				Logs.warn('calling makeglossaries')
+				self.env.SRCFILE = base
+				self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
+				return
+
+	def texinputs(self):
+		"""
+		Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables
+
+		:rtype: string
+		"""
+		return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep
+
+	def run(self):
+		"""
+		Runs the whole TeX build process
+
+		Multiple passes are required depending on the usage of cross-references,
+		bibliographies, glossaries, indexes and additional contents
+		The appropriate TeX compiler is called until the *.aux* files stop changing.
+		"""
+		env = self.env
+
+		if not env.PROMPT_LATEX:
+			env.append_value('LATEXFLAGS', '-interaction=batchmode')
+			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
+			env.append_value('XELATEXFLAGS', '-interaction=batchmode')
+
+		# important, set the cwd for everybody
+		self.cwd = self.inputs[0].parent.get_bld()
+
+		self.info('first pass on %s', self.__class__.__name__)
+
+		# Hash .aux files before even calling the LaTeX compiler
+		cur_hash = self.hash_aux_nodes()
+
+		self.call_latex()
+
+		# Find the .aux files again since bibtex processing can require it
+		self.hash_aux_nodes()
+
+		self.bibtopic()
+		self.bibfile()
+		self.bibunits()
+		self.makeindex()
+		self.makeglossaries()
+
+		for i in range(10):
+			# There is no need to call latex again if the .aux hash value has not changed
+			prev_hash = cur_hash
+			cur_hash = self.hash_aux_nodes()
+			if not cur_hash:
+				Logs.error('No .aux files to process')
+			if cur_hash and cur_hash == prev_hash:
+				break
+
+			# run the command
+			self.info('calling %s', self.__class__.__name__)
+			self.call_latex()
+
+	def hash_aux_nodes(self):
+		"""
+		Returns a hash of the .aux file contents
+
+		:rtype: string or bytes
+		"""
+		try:
+			self.aux_nodes
+		except AttributeError:
+			try:
+				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
+			except IOError:
+				return None
+		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
+
+	def call_latex(self):
+		"""
+		Runs the TeX compiler once
+		"""
+		self.env.env = {}
+		self.env.env.update(os.environ)
+		self.env.env.update({'TEXINPUTS': self.texinputs()})
+		self.env.SRCFILE = self.inputs[0].abspath()
+		self.check_status('error when calling latex', self.texfun())
+
+class latex(tex):
+	"Compiles LaTeX files"
+	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+
+class pdflatex(tex):
+	"Compiles PdfLaTeX files"
+	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+
+class xelatex(tex):
+	"XeLaTeX files"
+	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
+class dvips(Task.Task):
+	"Converts dvi files to postscript"
+	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
+	color   = 'BLUE'
+	after   = ['latex', 'pdflatex', 'xelatex']
+
+class dvipdf(Task.Task):
+	"Converts dvi files to pdf"
+	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
+	color   = 'BLUE'
+	after   = ['latex', 'pdflatex', 'xelatex']
+
+class pdf2ps(Task.Task):
+	"Converts pdf files to postscript"
+	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
+	color   = 'BLUE'
+	after   = ['latex', 'pdflatex', 'xelatex']
+
+@feature('tex')
+@before_method('process_source')
+def apply_tex(self):
+	"""
+	Creates :py:class:`waflib.Tools.tex.tex` objects, and
+	dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
+	"""
+	if getattr(self, 'type', None) not in ('latex', 'pdflatex', 'xelatex'):
+		self.type = 'pdflatex'
+
+	outs = Utils.to_list(getattr(self, 'outs', []))
+
+	# prompt for incomplete files (else the batchmode is used)
+	try:
+		self.generator.bld.conf
+	except AttributeError:
+		default_prompt = False
+	else:
+		default_prompt = True
+	self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)
+
+	deps_lst = []
+
+	if getattr(self, 'deps', None):
+		deps = self.to_list(self.deps)
+		for dep in deps:
+			if isinstance(dep, str):
+				n = self.path.find_resource(dep)
+				if not n:
+					self.bld.fatal('Could not find %r for %r' % (dep, self))
+				if n not in deps_lst:
+					deps_lst.append(n)
+			elif isinstance(dep, Node.Node):
+				deps_lst.append(dep)
+
+	for node in self.to_nodes(self.source):
+		if self.type == 'latex':
+			task = self.create_task('latex', node, node.change_ext('.dvi'))
+		elif self.type == 'pdflatex':
+			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
+		elif self.type == 'xelatex':
+			task = self.create_task('xelatex', node, node.change_ext('.pdf'))
+
+		task.env = self.env
+
+		# add the manual dependencies
+		if deps_lst:
+			for n in deps_lst:
+				if n not in task.dep_nodes:
+					task.dep_nodes.append(n)
+
+		# texinputs is a nasty beast
+		if hasattr(self, 'texinputs_nodes'):
+			task.texinputs_nodes = self.texinputs_nodes
+		else:
+			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
+			lst = os.environ.get('TEXINPUTS', '')
+			if self.env.TEXINPUTS:
+				lst += os.pathsep + self.env.TEXINPUTS
+			if lst:
+				lst = lst.split(os.pathsep)
+			for x in lst:
+				if x:
+					if os.path.isabs(x):
+						p = self.bld.root.find_node(x)
+						if p:
+							task.texinputs_nodes.append(p)
+						else:
+							Logs.error('Invalid TEXINPUTS folder %s', x)
+					else:
+						Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)
+
+		if self.type == 'latex':
+			if 'ps' in outs:
+				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
+				tsk.env.env = dict(os.environ)
+			if 'pdf' in outs:
+				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
+				tsk.env.env = dict(os.environ)
+		elif self.type == 'pdflatex':
+			if 'ps' in outs:
+				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
+	self.source = []
+
+def configure(self):
+	"""
+	Find the programs tex, latex and others without raising errors.
+	"""
+	v = self.env
+	for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
+		try:
+			self.find_program(p, var=p.upper())
+		except self.errors.ConfigurationError:
+			pass
+	v.DVIPSFLAGS = '-Ppdf'
+
diff --git a/third_party/waf/waflib/Tools/vala.py b/third_party/waf/waflib/Tools/vala.py
new file mode 100644
index 0000000..822ec50
--- /dev/null
+++ b/third_party/waf/waflib/Tools/vala.py
@@ -0,0 +1,355 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+# Radosław Szkodziński, 2010
+
+"""
+At this point, vala is still unstable, so do not expect
+this tool to be too stable either (APIs, etc.)
+"""
+
+import re
+from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
+from waflib.TaskGen import extension, taskgen_method
+from waflib.Configure import conf
+
+class valac(Task.Task):
+	"""
+	Compiles vala files
+	"""
+	#run_str = "${VALAC} ${VALAFLAGS}" # ideally
+	#vars = ['VALAC_VERSION']
+	vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
+	ext_out = ['.h']
+
+	def run(self):
+		cmd = self.env.VALAC + self.env.VALAFLAGS
+		resources = getattr(self, 'vala_exclude', [])
+		cmd.extend([a.abspath() for a in self.inputs if a not in resources])
+		ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())
+
+		if ret:
+			return ret
+
+		if self.generator.dump_deps_node:
+			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
+
+		return ret
+
+@taskgen_method
+def init_vala_task(self):
+	"""
+	Initializes the vala task with the relevant data (acts as a constructor)
+	"""
+	self.profile = getattr(self, 'profile', 'gobject')
+
+	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
+	self.use = Utils.to_list(getattr(self, 'use', []))
+	if packages and not self.use:
+		self.use = packages[:] # copy
+
+	if self.profile == 'gobject':
+		if 'GOBJECT' not in self.use:
+			self.use.append('GOBJECT')
+
+	def addflags(flags):
+		self.env.append_value('VALAFLAGS', flags)
+
+	if self.profile:
+		addflags('--profile=%s' % self.profile)
+
+	valatask = self.valatask
+
+	# output directory
+	if hasattr(self, 'vala_dir'):
+		if isinstance(self.vala_dir, str):
+			valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
+			try:
+				valatask.vala_dir_node.mkdir()
+			except OSError:
+				self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
+		else:
+			valatask.vala_dir_node = self.vala_dir
+	else:
+		valatask.vala_dir_node = self.path.get_bld()
+	addflags('--directory=%s' % valatask.vala_dir_node.abspath())
+
+	if hasattr(self, 'thread'):
+		if self.profile == 'gobject':
+			if 'GTHREAD' not in self.use:
+				self.use.append('GTHREAD')
+		else:
+			# Vala has no threading support for the dova or posix profiles
+			Logs.warn('Profile %s means no threading support', self.profile)
+			self.thread = False
+
+		if self.thread:
+			addflags('--thread')
+
+	self.is_lib = 'cprogram' not in self.features
+	if self.is_lib:
+		addflags('--library=%s' % self.target)
+
+		h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
+		valatask.outputs.append(h_node)
+		addflags('--header=%s' % h_node.name)
+
+		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))
+
+		if getattr(self, 'gir', None):
+			gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
+			addflags('--gir=%s' % gir_node.name)
+			valatask.outputs.append(gir_node)
+
+	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
+	if self.vala_target_glib:
+		addflags('--target-glib=%s' % self.vala_target_glib)
+
+	addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])
+
+	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
+	addflags(['--pkg=%s' % x for x in packages_private])
+
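+	# derives the bindings API version from the wscript's optional API_VERSION,
+	# e.g. '0.8.1' -> '0.8' and '1.2.0' -> '1.0'; defaults to '1.0' when unset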
+	def _get_api_version():
+		api_version = '1.0'
+		if hasattr(Context.g_module, 'API_VERSION'):
+			version = Context.g_module.API_VERSION.split(".")
+			if version[0] == "0":
+				api_version = "0." + version[1]
+			else:
+				api_version = version[0] + ".0"
+		return api_version
+
+	self.includes = Utils.to_list(getattr(self, 'includes', []))
+	valatask.install_path = getattr(self, 'install_path', '')
+
+	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
+	valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
+	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
+	valatask.install_binding = getattr(self, 'install_binding', True)
+
+	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
+	#includes =  []
+
+	if hasattr(self, 'use'):
+		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
+		seen = []
+		while len(local_packages) > 0:
+			package = local_packages.pop()
+			if package in seen:
+				continue
+			seen.append(package)
+
+			# check if the package exists
+			try:
+				package_obj = self.bld.get_tgen_by_name(package)
+			except Errors.WafError:
+				continue
+
+			# in practice the other task is already processed
+			# but this makes it explicit
+			package_obj.post()
+			package_name = package_obj.target
+			task = getattr(package_obj, 'valatask', None)
+			if task:
+				for output in task.outputs:
+					if output.name == package_name + ".vapi":
+						valatask.set_run_after(task)
+						if package_name not in packages:
+							packages.append(package_name)
+						if output.parent not in vapi_dirs:
+							vapi_dirs.append(output.parent)
+						if output.parent not in self.includes:
+							self.includes.append(output.parent)
+
+			if hasattr(package_obj, 'use'):
+				lst = self.to_list(package_obj.use)
+				lst.reverse()
+				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
+
+	addflags(['--pkg=%s' % p for p in packages])
+
+	for vapi_dir in vapi_dirs:
+		if isinstance(vapi_dir, Node.Node):
+			v_node = vapi_dir
+		else:
+			v_node = self.path.find_dir(vapi_dir)
+		if not v_node:
+			Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
+		else:
+			addflags('--vapidir=%s' % v_node.abspath())
+
+	self.dump_deps_node = None
+	if self.is_lib and self.packages:
+		self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
+		valatask.outputs.append(self.dump_deps_node)
+
+	if self.is_lib and valatask.install_binding:
+		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
+		if headers_list:
+			self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)
+
+		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
+		if vapi_list:
+			self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)
+
+		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
+		if gir_list:
+			self.install_gir = self.add_install_files(
+				install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)
+
+	if hasattr(self, 'vala_resources'):
+		nodes = self.to_nodes(self.vala_resources)
+		valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
+		valatask.inputs.extend(nodes)
+		for x in nodes:
+			addflags(['--gresources', x.abspath()])
+
+@extension('.vala', '.gs')
+def vala_file(self, node):
+	"""
+	Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
+	to its inputs. The typical example is::
+
+		def build(bld):
+			bld.program(
+				packages      = 'gtk+-2.0',
+				target        = 'vala-gtk-example',
+				use           = 'GTK GLIB',
+				source        = 'vala-gtk-example.vala foo.vala',
+				vala_defines  = ['DEBUG'] # adds --define=<xyz> values to the command-line
+
+				# the following arguments are for libraries
+				#gir          = 'hello-1.0',
+				#gir_path     = '/tmp',
+				#vapi_path    = '/tmp',
+				#pkg_name     = 'hello'
+				# disable installing of gir, vapi and header
+				#install_binding = False
+
+				# profile     = 'xyz' # adds --profile=<xyz> to enable profiling
+				# thread      = True, # adds --thread, only effective with the 'gobject' profile
+				# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
+			)
+
+
+	:param node: vala file
+	:type node: :py:class:`waflib.Node.Node`
+	"""
+
+	try:
+		valatask = self.valatask
+	except AttributeError:
+		valatask = self.valatask = self.create_task('valac')
+		self.init_vala_task()
+
+	valatask.inputs.append(node)
+	name = node.name[:node.name.rfind('.')] + '.c'
+	c_node = valatask.vala_dir_node.find_or_declare(name)
+	valatask.outputs.append(c_node)
+	self.source.append(c_node)
+
+@extension('.vapi')
+def vapi_file(self, node):
+	try:
+		valatask = self.valatask
+	except AttributeError:
+		valatask = self.valatask = self.create_task('valac')
+		self.init_vala_task()
+	valatask.inputs.append(node)
+
+@conf
+def find_valac(self, valac_name, min_version):
+	"""
+	Find the valac program, and execute it to store the version
+	number in *conf.env.VALAC_VERSION*
+
+	:param valac_name: program name
+	:type valac_name: string or list of string
+	:param min_version: minimum version acceptable
+	:type min_version: tuple of int
+	"""
+	valac = self.find_program(valac_name, var='VALAC')
+	try:
+		output = self.cmd_and_log(valac + ['--version'])
+	except Errors.WafError:
+		valac_version = None
+	else:
+		ver = re.search(r'\d+\.\d+\.\d+', output).group().split('.')
+		valac_version = tuple([int(x) for x in ver])
+
+	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
+	         valac_version, valac_version and valac_version >= min_version)
+	if valac and valac_version < min_version:
+		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))
+
+	self.env.VALAC_VERSION = valac_version
+	return valac
+
+@conf
+def check_vala(self, min_version=(0,8,0), branch=None):
+	"""
+	Check that a vala compiler from the given branch exists and is at
+	least the given version.
+
+	:param min_version: minimum version acceptable (0.8.0)
+	:type min_version: tuple
+	:param branch: first part of the version number, in case a snapshot is used (0, 8)
+	:type branch: tuple of int
+	"""
+	if self.env.VALA_MINVER:
+		min_version = self.env.VALA_MINVER
+	if self.env.VALA_MINVER_BRANCH:
+		branch = self.env.VALA_MINVER_BRANCH
+	if not branch:
+		branch = min_version[:2]
+	try:
+		find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
+	except self.errors.ConfigurationError:
+		find_valac(self, 'valac', min_version)
+
+@conf
+def check_vala_deps(self):
+	"""
+	Load the gobject and gthread packages if they are missing.
+	"""
+	if not self.env.HAVE_GOBJECT:
+		pkg_args = {'package':      'gobject-2.0',
+		            'uselib_store': 'GOBJECT',
+		            'args':         '--cflags --libs'}
+		if getattr(Options.options, 'vala_target_glib', None):
+			pkg_args['atleast_version'] = Options.options.vala_target_glib
+		self.check_cfg(**pkg_args)
+
+	if not self.env.HAVE_GTHREAD:
+		pkg_args = {'package':      'gthread-2.0',
+		            'uselib_store': 'GTHREAD',
+		            'args':         '--cflags --libs'}
+		if getattr(Options.options, 'vala_target_glib', None):
+			pkg_args['atleast_version'] = Options.options.vala_target_glib
+		self.check_cfg(**pkg_args)
+
+def configure(self):
+	"""
+	Use the following to enforce minimum vala version::
+
+		def configure(conf):
+			conf.env.VALA_MINVER = (0, 10, 0)
+			conf.load('vala')
+	"""
+	self.load('gnu_dirs')
+	self.check_vala_deps()
+	self.check_vala()
+	self.add_os_flags('VALAFLAGS')
+	self.env.append_unique('VALAFLAGS', ['-C'])
+
+def options(opt):
+	"""
+	Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
+	"""
+	opt.load('gnu_dirs')
+	valaopts = opt.add_option_group('Vala Compiler Options')
+	valaopts.add_option('--vala-target-glib', default=None,
+		dest='vala_target_glib', metavar='MAJOR.MINOR',
+		help='Target version of glib for Vala GObject code generation')
+
diff --git a/third_party/waf/waflib/Tools/waf_unit_test.py b/third_party/waf/waflib/Tools/waf_unit_test.py
new file mode 100644
index 0000000..8cff89b
--- /dev/null
+++ b/third_party/waf/waflib/Tools/waf_unit_test.py
@@ -0,0 +1,302 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Carlos Rafael Giani, 2006
+# Thomas Nagy, 2010-2018 (ita)
+
+"""
+Unit testing system for C/C++/D and interpreted languages providing test execution:
+
+* in parallel, by using ``waf -j``
+* partial (only the tests that have changed) or full (by using ``waf --alltests``)
+
+The tests are declared by adding the **test** feature to programs::
+
+	def options(opt):
+		opt.load('compiler_cxx waf_unit_test')
+	def configure(conf):
+		conf.load('compiler_cxx waf_unit_test')
+	def build(bld):
+		bld(features='cxx cxxprogram test', source='main.cpp', target='app')
+		# or
+		bld.program(features='test', source='main2.cpp', target='app2')
+
+When the build is executed, the programs declared with the **test** feature are built and executed without arguments.
+The success/failure is detected by looking at the return code. The status and the standard output/error
+are stored on the build context.
+
+The results can be displayed by registering a callback function. Here is how to call
+the predefined callback::
+
+	def build(bld):
+		bld(features='cxx cxxprogram test', source='main.c', target='app')
+		from waflib.Tools import waf_unit_test
+		bld.add_post_fun(waf_unit_test.summary)
+
+By passing --dump-test-scripts the build outputs corresponding python files
+(with extension _run.py) that are useful for debugging purposes.
+"""
+
+import os, shlex, sys
+from waflib.TaskGen import feature, after_method, taskgen_method
+from waflib import Utils, Task, Logs, Options
+from waflib.Tools import ccroot
+testlock = Utils.threading.Lock()
+
+SCRIPT_TEMPLATE = """#! %(python)s
+import subprocess, sys
+cmd = %(cmd)r
+# if you want to debug with gdb:
+#cmd = ['gdb', '-args'] + cmd
+env = %(env)r
+status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
+sys.exit(status)
+"""
+
+@taskgen_method
+def handle_ut_cwd(self, key):
+	"""
+	Task generator method, used internally to limit code duplication.
+	This method may disappear anytime.
+	"""
+	cwd = getattr(self, key, None)
+	if cwd:
+		if isinstance(cwd, str):
+			# we want a Node instance
+			if os.path.isabs(cwd):
+				self.ut_cwd = self.bld.root.make_node(cwd)
+			else:
+				self.ut_cwd = self.path.make_node(cwd)
+
+@feature('test_scripts')
+def make_interpreted_test(self):
+	"""Create interpreted unit tests."""
+	for x in ['test_scripts_source', 'test_scripts_template']:
+		if not hasattr(self, x):
+			Logs.warn('a test_scripts taskgen is missing %s' % x)
+			return
+
+	self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))
+
+	script_nodes = self.to_nodes(self.test_scripts_source)
+	for script_node in script_nodes:
+		tsk = self.create_task('utest', [script_node])
+		tsk.vars = lst + tsk.vars
+		tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())
+
+	self.handle_ut_cwd('test_scripts_cwd')
+
+	env = getattr(self, 'test_scripts_env', None)
+	if env:
+		self.ut_env = env
+	else:
+		self.ut_env = dict(os.environ)
+
+	paths = getattr(self, 'test_scripts_paths', {})
+	for (k,v) in paths.items():
+		p = self.ut_env.get(k, '').split(os.pathsep)
+		if isinstance(v, str):
+			v = v.split(os.pathsep)
+		self.ut_env[k] = os.pathsep.join(p + v)
+	self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
+
+@feature('test')
+@after_method('apply_link', 'process_use')
+def make_test(self):
+	"""Create the unit test task. There can be only one unit test task by task generator."""
+	if not getattr(self, 'link_task', None):
+		return
+
+	tsk = self.create_task('utest', self.link_task.outputs)
+	if getattr(self, 'ut_str', None):
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = tsk.vars + lst
+		self.env.append_value('UT_DEPS', self.ut_str)
+
+	self.handle_ut_cwd('ut_cwd')
+
+	if not hasattr(self, 'ut_paths'):
+		paths = []
+		for x in self.tmp_use_sorted:
+			try:
+				y = self.bld.get_tgen_by_name(x).link_task
+			except AttributeError:
+				pass
+			else:
+				if not isinstance(y, ccroot.stlink_task):
+					paths.append(y.outputs[0].parent.abspath())
+		self.ut_paths = os.pathsep.join(paths) + os.pathsep
+
+	if not hasattr(self, 'ut_env'):
+		self.ut_env = dct = dict(os.environ)
+		def add_path(var):
+			dct[var] = self.ut_paths + dct.get(var,'')
+		if Utils.is_win32:
+			add_path('PATH')
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			add_path('DYLD_LIBRARY_PATH')
+			add_path('LD_LIBRARY_PATH')
+		else:
+			add_path('LD_LIBRARY_PATH')
+
+	if not hasattr(self, 'ut_cmd'):
+		self.ut_cmd = getattr(Options.options, 'testcmd', False)
+
+	self.env.append_value('UT_DEPS', str(self.ut_cmd))
+	self.env.append_value('UT_DEPS', self.ut_paths)
+	self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
+
+@taskgen_method
+def add_test_results(self, tup):
+	"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
+	Logs.debug("ut: %r", tup)
+	try:
+		self.utest_results.append(tup)
+	except AttributeError:
+		self.utest_results = [tup]
+	try:
+		self.bld.utest_results.append(tup)
+	except AttributeError:
+		self.bld.utest_results = [tup]
+
+@Task.deep_inputs
+class utest(Task.Task):
+	"""
+	Execute a unit test
+	"""
+	color = 'PINK'
+	after = ['vnum', 'inst']
+	vars = ['UT_DEPS']
+
+	def runnable_status(self):
+		"""
+		Always execute the task if ``waf --alltests`` was used, and skip
+		all tests if ``waf --notests`` was used
+		"""
+		if getattr(Options.options, 'no_tests', False):
+			return Task.SKIP_ME
+
+		ret = super(utest, self).runnable_status()
+		if ret == Task.SKIP_ME:
+			if getattr(Options.options, 'all_tests', False):
+				return Task.RUN_ME
+		return ret
+
+	def get_test_env(self):
+		"""
+		In general, tests may require any library built anywhere in the project.
+		Override this method if fewer paths are needed
+		"""
+		return self.generator.ut_env
+
+	def post_run(self):
+		super(utest, self).post_run()
+		if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
+			self.generator.bld.task_sigs[self.uid()] = None
+
+	def run(self):
+		"""
+		Execute the test. The execution is always successful, and the results
+		are stored on ``self.generator.bld.utest_results`` for postprocessing.
+
+		Override ``add_test_results`` to interrupt the build
+		"""
+		if hasattr(self.generator, 'ut_run'):
+			return self.generator.ut_run(self)
+
+		self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
+		ut_cmd = getattr(self.generator, 'ut_cmd', False)
+		if ut_cmd:
+			self.ut_exec = shlex.split(ut_cmd % Utils.shell_escape(self.ut_exec))
+
+		return self.exec_command(self.ut_exec)
+
+	def exec_command(self, cmd, **kw):
+		self.generator.bld.log_command(cmd, kw)
+		if getattr(Options.options, 'dump_test_scripts', False):
+			script_code = SCRIPT_TEMPLATE % {
+				'python': sys.executable,
+				'env': self.get_test_env(),
+				'cwd': self.get_cwd().abspath(),
+				'cmd': cmd
+			}
+			script_file = self.inputs[0].abspath() + '_run.py'
+			Utils.writef(script_file, script_code, encoding='utf-8')
+			os.chmod(script_file, Utils.O755)
+			if Logs.verbose > 1:
+				Logs.info('Test debug file written as %r' % script_file)
+
+		proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
+			stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
+		(stdout, stderr) = proc.communicate()
+		self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
+		testlock.acquire()
+		try:
+			return self.generator.add_test_results(tup)
+		finally:
+			testlock.release()
+
+	def get_cwd(self):
+		return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)
+
+def summary(bld):
+	"""
+	Display an execution summary::
+
+		def build(bld):
+			bld(features='cxx cxxprogram test', source='main.c', target='app')
+			from waflib.Tools import waf_unit_test
+			bld.add_post_fun(waf_unit_test.summary)
+	"""
+	lst = getattr(bld, 'utest_results', [])
+	if lst:
+		Logs.pprint('CYAN', 'execution summary')
+
+		total = len(lst)
+		tfail = len([x for x in lst if x[1]])
+
+		Logs.pprint('GREEN', '  tests that pass %d/%d' % (total-tfail, total))
+		for (f, code, out, err) in lst:
+			if not code:
+				Logs.pprint('GREEN', '    %s' % f)
+
+		Logs.pprint('GREEN' if tfail == 0 else 'RED', '  tests that fail %d/%d' % (tfail, total))
+		for (f, code, out, err) in lst:
+			if code:
+				Logs.pprint('RED', '    %s' % f)
+
+def set_exit_code(bld):
+	"""
+	If any of the tests fail, waf will exit with that exit code.
+	This is useful for automated build systems which need to
+	report errors from the tests.
+	You may use it like this::
+
+		def build(bld):
+			bld(features='cxx cxxprogram test', source='main.c', target='app')
+			from waflib.Tools import waf_unit_test
+			bld.add_post_fun(waf_unit_test.set_exit_code)
+	"""
+	lst = getattr(bld, 'utest_results', [])
+	for (f, code, out, err) in lst:
+		if code:
+			msg = []
+			if out:
+				msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
+			if err:
+				msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
+			bld.fatal(os.linesep.join(msg))
+
+
+def options(opt):
+	"""
+	Provide the ``--alltests``, ``--notests``, ``--clear-failed``, ``--testcmd`` and ``--dump-test-scripts`` command-line options.
+	"""
+	opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
+	opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
+	opt.add_option('--clear-failed', action='store_true', default=False,
+		help='Force failed unit tests to run again next time', dest='clear_failed_tests')
+	opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
+		help='Run the unit tests using the given command, for example --testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
+	opt.add_option('--dump-test-scripts', action='store_true', default=False,
+		help='Create python scripts to help debug tests', dest='dump_test_scripts')
+
diff --git a/third_party/waf/waflib/Tools/winres.py b/third_party/waf/waflib/Tools/winres.py
new file mode 100644
index 0000000..73c0e95
--- /dev/null
+++ b/third_party/waf/waflib/Tools/winres.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Brant Young, 2007
+
+"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
+
+import os
+import re
+from waflib import Task
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+from waflib import Utils
+
+@extension('.rc')
+def rc_file(self, node):
+	"""
+	Binds the .rc extension to a winrc task
+	"""
+	obj_ext = '.rc.o'
+	if self.env.WINRC_TGT_F == '/fo':
+		obj_ext = '.res'
+	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
+	try:
+		self.compiled_tasks.append(rctask)
+	except AttributeError:
+		self.compiled_tasks = [rctask]
+
+re_lines = re.compile(
+	r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
+	r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
+	re.IGNORECASE | re.MULTILINE)
+
+class rc_parser(c_preproc.c_parser):
+	"""
+	Calculates dependencies in .rc files
+	"""
+	def filter_comments(self, node):
+		"""
+		Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`
+		"""
+		code = node.read()
+		if c_preproc.use_trigraphs:
+			for (a, b) in c_preproc.trig_def:
+				code = code.replace(a, b)
+		code = c_preproc.re_nl.sub('', code)
+		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+		ret = []
+		for m in re.finditer(re_lines, code):
+			if m.group(2):
+				ret.append((m.group(2), m.group(3)))
+			else:
+				ret.append(('include', m.group(5)))
+		return ret
+
+class winrc(Task.Task):
+	"""
+	Compiles resource files
+	"""
+	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
+	color   = 'BLUE'
+	def scan(self):
+		tmp = rc_parser(self.generator.includes_nodes)
+		tmp.start(self.inputs[0], self.env)
+		return (tmp.nodes, tmp.names)
+
+	def exec_command(self, cmd, **kw):
+		if self.env.WINRC_TGT_F == '/fo':
+			# Since winres include paths may contain spaces, they do not fit in
+			# response files and are best passed as environment variables
+			replace_cmd = []
+			incpaths = []
+			while cmd:
+				# filter include path flags
+				flag = cmd.pop(0)
+				if flag.upper().startswith('/I'):
+					if len(flag) == 2:
+						incpaths.append(cmd.pop(0))
+					else:
+						incpaths.append(flag[2:])
+				else:
+					replace_cmd.append(flag)
+			cmd = replace_cmd
+			if incpaths:
+				# append to existing environment variables in INCLUDE
+				env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+				pre_includes = env.get('INCLUDE', '')
+				env['INCLUDE'] = pre_includes + os.pathsep + os.pathsep.join(incpaths)
+
+		return super(winrc, self).exec_command(cmd, **kw)
+
+	def quote_flag(self, flag):
+		if self.env.WINRC_TGT_F == '/fo':
+			# winres does not support quotes around flags in response files
+			return flag
+
+		return super(winrc, self).quote_flag(flag)
+
+
+def configure(conf):
+	"""
+	Detects the programs RC or windres, depending on the C/C++ compiler in use
+	"""
+	v = conf.env
+	if not v.WINRC:
+		if v.CC_NAME == 'msvc':
+			conf.find_program('RC', var='WINRC', path_list=v.PATH)
+			v.WINRC_TGT_F = '/fo'
+			v.WINRC_SRC_F = ''
+		else:
+			conf.find_program('windres', var='WINRC', path_list=v.PATH)
+			v.WINRC_TGT_F = '-o'
+			v.WINRC_SRC_F = '-i'
+
diff --git a/third_party/waf/waflib/Tools/xlc.py b/third_party/waf/waflib/Tools/xlc.py
new file mode 100644
index 0000000..134dd41
--- /dev/null
+++ b/third_party/waf/waflib/Tools/xlc.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+# Michael Kuhn, 2009
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_xlc(conf):
+	"""
+	Detects the Aix C compiler
+	"""
+	cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
+	conf.get_xlc_version(cc)
+	conf.env.CC_NAME = 'xlc'
+
+@conf
+def xlc_common_flags(conf):
+	"""
+	Flags required for executing the Aix C compiler
+	"""
+	v = conf.env
+
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o']
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
+
+	v.CCLNK_SRC_F         = []
+	v.CCLNK_TGT_F         = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+
+	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
+	v.cprogram_PATTERN    = '%s'
+
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = []
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+def configure(conf):
+	conf.find_xlc()
+	conf.find_ar()
+	conf.xlc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Tools/xlcxx.py b/third_party/waf/waflib/Tools/xlcxx.py
new file mode 100644
index 0000000..76aa59b
--- /dev/null
+++ b/third_party/waf/waflib/Tools/xlcxx.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Ralf Habacker, 2006 (rh)
+# Yinon Ehrlich, 2009
+# Michael Kuhn, 2009
+
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+@conf
+def find_xlcxx(conf):
+	"""
+	Detects the Aix C++ compiler
+	"""
+	cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
+	conf.get_xlc_version(cxx)
+	conf.env.CXX_NAME = 'xlc++'
+
+@conf
+def xlcxx_common_flags(conf):
+	"""
+	Flags required for executing the Aix C++ compiler
+	"""
+	v = conf.env
+
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o']
+
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
+
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
+
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+
+	v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
+	v.cxxprogram_PATTERN  = '%s'
+
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = []
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+def configure(conf):
+	conf.find_xlcxx()
+	conf.find_ar()
+	conf.xlcxx_common_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/Utils.py b/third_party/waf/waflib/Utils.py
new file mode 100644
index 0000000..ea0f7a9
--- /dev/null
+++ b/third_party/waf/waflib/Utils.py
@@ -0,0 +1,1053 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
+
+"""
+Utilities and platform-specific fixes
+
+The portability fixes try to provide a consistent behavior of the Waf API
+through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc)
+"""
+
+from __future__ import with_statement
+
+import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time, shlex
+
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+
+# leave this
+if os.name == 'posix' and sys.version_info[0] < 3:
+	try:
+		import subprocess32 as subprocess
+	except ImportError:
+		import subprocess
+else:
+	import subprocess
+
+try:
+	TimeoutExpired = subprocess.TimeoutExpired
+except AttributeError:
+	class TimeoutExpired(Exception):
+		pass
+
+from collections import deque, defaultdict
+
+try:
+	import _winreg as winreg
+except ImportError:
+	try:
+		import winreg
+	except ImportError:
+		winreg = None
+
+from waflib import Errors
+
+try:
+	from hashlib import md5
+except ImportError:
+	try:
+		from hashlib import sha1 as md5
+	except ImportError:
+		# never fail here, so that another module may provide a fix
+		pass
+else:
+	try:
+		md5().digest()
+	except ValueError:
+		# Fips? #2213
+		from hashlib import sha1 as md5
+
+try:
+	import threading
+except ImportError:
+	if not 'JOBS' in os.environ:
+		# no threading :-(
+		os.environ['JOBS'] = '1'
+
+	class threading(object):
+		"""
+		A fake threading class for platforms lacking the threading module.
+		Use ``waf -j1`` on those platforms
+		"""
+		pass
+	class Lock(object):
+		"""Fake Lock class"""
+		def acquire(self):
+			pass
+		def release(self):
+			pass
+	threading.Lock = threading.Thread = Lock
+
+SIG_NIL = 'SIG_NIL_SIG_NIL_'.encode()
+"""Arbitrary null value for hashes. Modify this value according to the hash function in use"""
+
+O644 = 420
+"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
+
+O755 = 493
+"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
+
+rot_chr = ['\\', '|', '/', '-']
+"List of characters to use when displaying the throbber (progress bar)"
+
+rot_idx = 0
+"Index of the current throbber character (progress bar)"
+
+class ordered_iter_dict(dict):
+	"""Ordered dictionary that provides iteration from the most recently inserted keys first"""
+	def __init__(self, *k, **kw):
+		self.lst = deque()
+		dict.__init__(self, *k, **kw)
+	def clear(self):
+		dict.clear(self)
+		self.lst = deque()
+	def __setitem__(self, key, value):
+		if key in dict.keys(self):
+			self.lst.remove(key)
+		dict.__setitem__(self, key, value)
+		self.lst.append(key)
+	def __delitem__(self, key):
+		dict.__delitem__(self, key)
+		try:
+			self.lst.remove(key)
+		except ValueError:
+			pass
+	def __iter__(self):
+		return reversed(self.lst)
+	def keys(self):
+		return reversed(self.lst)
+
+class lru_node(object):
+	"""
+	Used by :py:class:`waflib.Utils.lru_cache`
+	"""
+	__slots__ = ('next', 'prev', 'key', 'val')
+	def __init__(self):
+		self.next = self
+		self.prev = self
+		self.key = None
+		self.val = None
+
+class lru_cache(object):
+	"""
+	A simple least-recently used cache with lazy allocation
+	"""
+	__slots__ = ('maxlen', 'table', 'head')
+	def __init__(self, maxlen=100):
+		self.maxlen = maxlen
+		"""
+		Maximum amount of elements in the cache
+		"""
+		self.table = {}
+		"""
+		Mapping key-value
+		"""
+		self.head = lru_node()
+		self.head.next = self.head
+		self.head.prev = self.head
+
+	def __getitem__(self, key):
+		node = self.table[key]
+		# assert(key==node.key)
+		if node is self.head:
+			return node.val
+
+		# detach the node found
+		node.prev.next = node.next
+		node.next.prev = node.prev
+
+		# replace the head
+		node.next = self.head.next
+		node.prev = self.head
+		self.head = node.next.prev = node.prev.next = node
+
+		return node.val
+
+	def __setitem__(self, key, val):
+		if key in self.table:
+			# update the value for an existing key
+			node = self.table[key]
+			node.val = val
+			self.__getitem__(key)
+		else:
+			if len(self.table) < self.maxlen:
+				# the very first item is unused until the maximum is reached
+				node = lru_node()
+				node.prev = self.head
+				node.next = self.head.next
+				node.prev.next = node.next.prev = node
+			else:
+				node = self.head = self.head.next
+				try:
+					# that's another key
+					del self.table[node.key]
+				except KeyError:
+					pass
+
+			node.key = key
+			node.val = val
+			self.table[key] = node
+
+class lazy_generator(object):
+	def __init__(self, fun, params):
+		self.fun = fun
+		self.params = params
+
+	def __iter__(self):
+		return self
+
+	def __next__(self):
+		try:
+			it = self.it
+		except AttributeError:
+			it = self.it = self.fun(*self.params)
+		return next(it)
+
+	next = __next__
+
+is_win32 = os.sep == '\\' or sys.platform == 'win32' or os.name == 'nt' # msys2
+"""
+Whether this system is a Windows series
+"""
+
+def readf(fname, m='r', encoding='latin-1'):
+	"""
+	Reads an entire file into a string. See also :py:meth:`waflib.Node.Node.readf`::
+
+		def build(ctx):
+			from waflib import Utils
+			txt = Utils.readf(self.path.find_node('wscript').abspath())
+			txt = ctx.path.find_node('wscript').read()
+
+	:type  fname: string
+	:param fname: Path to file
+	:type  m: string
+	:param m: Open mode
+	:type encoding: string
+	:param encoding: encoding value, only used for python 3
+	:rtype: string
+	:return: Content of the file
+	"""
+
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		m += 'b'
+		with open(fname, m) as f:
+			txt = f.read()
+		if encoding:
+			txt = txt.decode(encoding)
+		else:
+			txt = txt.decode()
+	else:
+		with open(fname, m) as f:
+			txt = f.read()
+	return txt
+
+def writef(fname, data, m='w', encoding='latin-1'):
+	"""
+	Writes an entire file from a string.
+	See also :py:meth:`waflib.Node.Node.writef`::
+
+		def build(ctx):
+			from waflib import Utils
+			txt = Utils.writef(self.path.make_node('i_like_kittens').abspath(), 'some data')
+			self.path.make_node('i_like_kittens').write('some data')
+
+	:type  fname: string
+	:param fname: Path to file
+	:type   data: string
+	:param  data: The contents to write to the file
+	:type  m: string
+	:param m: Open mode
+	:type encoding: string
+	:param encoding: encoding value, only used for python 3
+	"""
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		data = data.encode(encoding)
+		m += 'b'
+	with open(fname, m) as f:
+		f.write(data)
+
+def h_file(fname):
+	"""
+	Computes a hash value for a file by using md5. Use the md5_tstamp
+	extension to get faster build hashes if necessary.
+
+	:type fname: string
+	:param fname: path to the file to hash
+	:return: hash of the file contents
+	:rtype: string or bytes
+	"""
+	m = md5()
+	with open(fname, 'rb') as f:
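+		# the fname parameter is reused as the read buffer; the loop
+		# stops at the first empty read (end of file)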
+		while fname:
+			fname = f.read(200000)
+			m.update(fname)
+	return m.digest()
+
+def readf_win32(f, m='r', encoding='latin-1'):
+	flags = os.O_NOINHERIT | os.O_RDONLY
+	if 'b' in m:
+		flags |= os.O_BINARY
+	if '+' in m:
+		flags |= os.O_RDWR
+	try:
+		fd = os.open(f, flags)
+	except OSError:
+		raise IOError('Cannot read from %r' % f)
+
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		m += 'b'
+		with os.fdopen(fd, m) as f:
+			txt = f.read()
+		if encoding:
+			txt = txt.decode(encoding)
+		else:
+			txt = txt.decode()
+	else:
+		with os.fdopen(fd, m) as f:
+			txt = f.read()
+	return txt
+
+def writef_win32(f, data, m='w', encoding='latin-1'):
+	if sys.hexversion > 0x3000000 and not 'b' in m:
+		data = data.encode(encoding)
+		m += 'b'
+	flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT
+	if 'b' in m:
+		flags |= os.O_BINARY
+	if '+' in m:
+		flags |= os.O_RDWR
+	try:
+		fd = os.open(f, flags)
+	except OSError:
+		raise OSError('Cannot write to %r' % f)
+	with os.fdopen(fd, m) as f:
+		f.write(data)
+
+def h_file_win32(fname):
+	try:
+		fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
+	except OSError:
+		raise OSError('Cannot read from %r' % fname)
+	m = md5()
+	with os.fdopen(fd, 'rb') as f:
+		while fname:
+			fname = f.read(200000)
+			m.update(fname)
+	return m.digest()
+
+# always save these
+readf_unix = readf
+writef_unix = writef
+h_file_unix = h_file
+if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000:
+	# replace the default functions
+	readf = readf_win32
+	writef = writef_win32
+	h_file = h_file_win32
+
+try:
+	x = ''.encode('hex')
+except LookupError:
+	import binascii
+	def to_hex(s):
+		ret = binascii.hexlify(s)
+		if not isinstance(ret, str):
+			ret = ret.decode('utf-8')
+		return ret
+else:
+	def to_hex(s):
+		return s.encode('hex')
+
+to_hex.__doc__ = """
+Return the hexadecimal representation of a string
+
+:param s: string to convert
+:type s: string
+"""
+
+def listdir_win32(s):
+	"""
+	Lists the contents of a folder in a portable manner.
+	On Win32, returns the list of drive letters: ['C:', 'X:', 'Z:'] when an empty string is given.
+
+	:type s: string
+	:param s: a string, which can be empty on Windows
+	"""
+	if not s:
+		try:
+			import ctypes
+		except ImportError:
+			# there is nothing much we can do
+			return [x + ':\\' for x in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
+		else:
+			dlen = 4 # length of "?:\\x00"
+			maxdrives = 26
+			buf = ctypes.create_string_buffer(maxdrives * dlen)
+			ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf))
+			return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ]
+
+	if len(s) == 2 and s[1] == ":":
+		s += os.sep
+
+	if not os.path.isdir(s):
+		e = OSError('%s is not a directory' % s)
+		e.errno = errno.ENOENT
+		raise e
+	return os.listdir(s)
+
+listdir = os.listdir
+if is_win32:
+	listdir = listdir_win32
+
+def num2ver(ver):
+	"""
+	Converts a string, tuple or version number into an integer. The number is supposed to have at most 4 digits::
+
+		from waflib.Utils import num2ver
+		num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
+
+	:type ver: string or tuple of numbers
+	:param ver: a version number
+	"""
+	if isinstance(ver, str):
+		ver = tuple(ver.split('.'))
+	if isinstance(ver, tuple):
+		ret = 0
+		for i in range(4):
+			if i < len(ver):
+				ret += 256**(3 - i) * int(ver[i])
+		return ret
+	return ver
+
+def to_list(val):
+	"""
+	Converts a string argument to a list by splitting it by spaces.
+	Returns the object if not a string::
+
+		from waflib.Utils import to_list
+		lst = to_list('a b c d')
+
+	:param val: list of string or space-separated string
+	:rtype: list
+	:return: Argument converted to list
+	"""
+	if isinstance(val, str):
+		return val.split()
+	else:
+		return val
+
+def console_encoding():
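+	"""
+	Returns the console encoding: the Windows console code page when it
+	can be queried through ctypes, else ``sys.stdout.encoding`` or a
+	platform-dependent default
+	"""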
+	try:
+		import ctypes
+	except ImportError:
+		pass
+	else:
+		try:
+			codepage = ctypes.windll.kernel32.GetConsoleCP()
+		except AttributeError:
+			pass
+		else:
+			if codepage:
+				if 65001 == codepage and sys.version_info < (3, 3):
+					return 'utf-8'
+				return 'cp%d' % codepage
+	return sys.stdout.encoding or ('cp1252' if is_win32 else 'latin-1')
+
+def split_path_unix(path):
+	return path.split('/')
+
+def split_path_cygwin(path):
+	if path.startswith('//'):
+		ret = path.split('/')[2:]
+		ret[0] = '/' + ret[0]
+		return ret
+	return path.split('/')
+
+re_sp = re.compile('[/\\\\]+')
+def split_path_win32(path):
+	if path.startswith('\\\\'):
+		ret = re_sp.split(path)[1:]
+		ret[0] = '\\\\' + ret[0]
+		if ret[0] == '\\\\?':
+			return ret[1:]
+		return ret
+	return re_sp.split(path)
+
+msysroot = None
+def split_path_msys(path):
+	if path.startswith(('/', '\\')) and not path.startswith(('//', '\\\\')):
+		# msys paths can be in the form /usr/bin
+		global msysroot
+		if not msysroot:
+			# msys has python 2.7 or 3, so we can use this
+			msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'latin-1')
+			msysroot = msysroot.strip()
+		path = os.path.normpath(msysroot + os.sep + path)
+	return split_path_win32(path)
+
+if sys.platform == 'cygwin':
+	split_path = split_path_cygwin
+elif is_win32:
+	# Consider this an MSYSTEM environment if $MSYSTEM is set and Python
+	# reports its executable under a unix-like path on a Windows host.
+	if os.environ.get('MSYSTEM') and sys.executable.startswith('/'):
+		split_path = split_path_msys
+	else:
+		split_path = split_path_win32
+else:
+	split_path = split_path_unix
+
+split_path.__doc__ = """
+Splits a path by / or \\; do not confuse this function with ``os.path.split``
+
+:type  path: string
+:param path: path to split
+:return:     list of string
+"""
+
+def check_dir(path):
+	"""
+	Ensures that a directory exists (similar to ``mkdir -p``).
+
+	:type  path: string
+	:param path: Path to directory
+	:raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added.
+	"""
+	if not os.path.isdir(path):
+		try:
+			os.makedirs(path)
+		except OSError as e:
+			if not os.path.isdir(path):
+				raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
+
+def check_exe(name, env=None):
+	"""
+	Ensures that a program exists
+
+	:type name: string
+	:param name: path to the program
+	:param env: configuration object
+	:type env: :py:class:`waflib.ConfigSet.ConfigSet`
+	:return: path of the program or None
+	:raises: ``ValueError`` if an empty string is given
+	"""
+	if not name:
+		raise ValueError('Cannot execute an empty string!')
+	def is_exe(fpath):
+		return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+	fpath, fname = os.path.split(name)
+	if fpath and is_exe(name):
+		return os.path.abspath(name)
+	else:
+		env = env or os.environ
+		for path in env['PATH'].split(os.pathsep):
+			path = path.strip('"')
+			exe_file = os.path.join(path, name)
+			if is_exe(exe_file):
+				return os.path.abspath(exe_file)
+	return None
+
+def def_attrs(cls, **kw):
+	"""
+	Sets default attributes on a class instance
+
+	:type cls: class
+	:param cls: the class to update the given attributes in.
+	:type kw: dict
+	:param kw: dictionary of attributes names and values.
+	"""
+	for k, v in kw.items():
+		if not hasattr(cls, k):
+			setattr(cls, k, v)
+
+def quote_define_name(s):
+	"""
+	Converts a string into an identifier suitable for C defines.
+
+	:type  s: string
+	:param s: String to convert
+	:rtype: string
+	:return: Identifier suitable for C defines
+	"""
+	fu = re.sub('[^a-zA-Z0-9]', '_', s)
+	fu = re.sub('_+', '_', fu)
+	fu = fu.upper()
+	return fu
+
+# shlex.quote didn't exist until python 3.3. Prior to that it was a non-documented
+# function in pipes.
+try:
+	shell_quote = shlex.quote
+except AttributeError:
+	import pipes
+	shell_quote = pipes.quote
+
+def shell_escape(cmd):
+	"""
+	Escapes a command:
+	['ls', '-l', 'arg space'] -> ls -l 'arg space'
+	"""
+	if isinstance(cmd, str):
+		return cmd
+	return ' '.join(shell_quote(x) for x in cmd)
+
+def h_list(lst):
+	"""
+	Hashes lists of ordered data.
+
+	Using hash(tup) for tuples would be much more efficient,
+	but Python now enforces hash randomization
+
+	:param lst: list to hash
+	:type lst: list of strings
+	:return: hash of the list
+	"""
+	return md5(repr(lst).encode()).digest()
+
+if sys.hexversion < 0x3000000:
+	def h_list_python2(lst):
+		return md5(repr(lst)).digest()
+	h_list_python2.__doc__ = h_list.__doc__
+	h_list = h_list_python2
+
+def h_fun(fun):
+	"""
+	Hash functions
+
+	:param fun: function to hash
+	:type  fun: function
+	:return: hash of the function
+	:rtype: string or bytes
+	"""
+	try:
+		return fun.code
+	except AttributeError:
+		if isinstance(fun, functools.partial):
+			code = list(fun.args)
+			# The items() method provides (name, value) tuples for the
+			# keyword arguments of the partial function application
+			#
+			# The sorting outcome will be consistent because:
+			# 1. tuples are compared in order of their elements
+			# 2. optional argument names are unique
+			code.extend(sorted(fun.keywords.items()))
+			code.append(h_fun(fun.func))
+			fun.code = h_list(code)
+			return fun.code
+		try:
+			h = inspect.getsource(fun)
+		except EnvironmentError:
+			h = 'nocode'
+		try:
+			fun.code = h
+		except AttributeError:
+			pass
+		return h
+
+def h_cmd(ins):
+	"""
+	Hashes objects recursively
+
+	:param ins: input object
+	:type ins: string or list or tuple or function
+	:rtype: string or bytes
+	"""
+	# this function is not meant to be particularly fast
+	if isinstance(ins, str):
+		# a command is either a string
+		ret = ins
+	elif isinstance(ins, list) or isinstance(ins, tuple):
+		# or a list of functions/strings
+		ret = str([h_cmd(x) for x in ins])
+	else:
+		# or just a python function
+		ret = str(h_fun(ins))
+	if sys.hexversion > 0x3000000:
+		ret = ret.encode('latin-1', 'xmlcharrefreplace')
+	return ret
+
+reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
+def subst_vars(expr, params):
+	"""
+	Replaces ${VAR} with the value of VAR taken from a dict or a config set::
+
+		from waflib import Utils
+		s = Utils.subst_vars('${PREFIX}/bin', env)
+
+	:type  expr: string
+	:param expr: String to perform substitution on
+	:param params: Dictionary or config set to look up variable values.
+	"""
+	def repl_var(m):
+		if m.group(1):
+			return '\\'
+		if m.group(2):
+			return '$'
+		try:
+			# ConfigSet instances may contain lists
+			return params.get_flat(m.group(3))
+		except AttributeError:
+			return params[m.group(3)]
+		# if you get a TypeError, it means that 'expr' is not a string...
+		# Utils.subst_vars(None, env)  will not work
+	return reg_subst.sub(repl_var, expr)
+
+def destos_to_binfmt(key):
+	"""
+	Returns the binary format based on the unversioned platform name,
+	and defaults to ``elf`` if nothing is found.
+
+	:param key: platform name
+	:type  key: string
+	:return: string representing the binary format
+	"""
+	if key == 'darwin':
+		return 'mac-o'
+	elif key in ('win32', 'cygwin', 'uwin', 'msys'):
+		return 'pe'
+	return 'elf'
+
+def unversioned_sys_platform():
+	"""
+	Returns the unversioned platform name.
+	Some Python platform names contain versions, that depend on
+	the build environment, e.g. linux2, freebsd6, etc.
+	This returns the name without the version number. Exceptions are
+	os2 and win32, which are returned verbatim.
+
+	:rtype: string
+	:return: Unversioned platform name
+	"""
+	s = sys.platform
+	if s.startswith('java'):
+		# The real OS is hidden under the JVM.
+		from java.lang import System
+		s = System.getProperty('os.name')
+		# see http://lopica.sourceforge.net/os.html for a list of possible values
+		if s == 'Mac OS X':
+			return 'darwin'
+		elif s.startswith('Windows '):
+			return 'win32'
+		elif s == 'OS/2':
+			return 'os2'
+		elif s == 'HP-UX':
+			return 'hp-ux'
+		elif s in ('SunOS', 'Solaris'):
+			return 'sunos'
+		else: s = s.lower()
+
+	# powerpc == darwin for our purposes
+	if s == 'powerpc':
+		return 'darwin'
+	if s == 'win32' or s == 'os2':
+		return s
+	if s == 'cli' and os.name == 'nt':
+		# ironpython is only on windows as far as we know
+		return 'win32'
+	return re.split(r'\d+$', s)[0]
+
+def nada(*k, **kw):
+	"""
+	Does nothing
+
+	:return: None
+	"""
+	pass
+
+class Timer(object):
+	"""
+	Simple object for timing the execution of commands.
+	Its string representation is the duration::
+
+		from waflib.Utils import Timer
+		timer = Timer()
+		a_few_operations()
+		s = str(timer)
+	"""
+	def __init__(self):
+		self.start_time = self.now()
+
+	def __str__(self):
+		delta = self.now() - self.start_time
+		if not isinstance(delta, datetime.timedelta):
+			delta = datetime.timedelta(seconds=delta)
+		days = delta.days
+		hours, rem = divmod(delta.seconds, 3600)
+		minutes, seconds = divmod(rem, 60)
+		seconds += delta.microseconds * 1e-6
+		result = ''
+		if days:
+			result += '%dd' % days
+		if days or hours:
+			result += '%dh' % hours
+		if days or hours or minutes:
+			result += '%dm' % minutes
+		return '%s%.3fs' % (result, seconds)
+
+	def now(self):
+		return datetime.datetime.utcnow()
+
+	if hasattr(time, 'perf_counter'):
+		def now(self):
+			return time.perf_counter()
+
+def read_la_file(path):
+	"""
+	Reads property files, used by msvc.py
+
+	:param path: file to read
+	:type path: string
+	"""
+	sp = re.compile(r'^([^=]+)=\'(.*)\'$')
+	dc = {}
+	for line in readf(path).splitlines():
+		try:
+			_, left, right, _ = sp.split(line.strip())
+			dc[left] = right
+		except ValueError:
+			pass
+	return dc
+
+def run_once(fun):
+	"""
+	Decorator: let a function cache its results, use like this::
+
+		@run_once
+		def foo(k):
+			return 345*2343
+
+	.. note:: in practice this can cause memory leaks, prefer a :py:class:`waflib.Utils.lru_cache`
+
+	:param fun: function to execute
+	:type fun: function
+	:return: the return value of the function executed
+	"""
+	cache = {}
+	def wrap(*k):
+		try:
+			return cache[k]
+		except KeyError:
+			ret = fun(*k)
+			cache[k] = ret
+			return ret
+	wrap.__cache__ = cache
+	wrap.__name__ = fun.__name__
+	return wrap
+
+def get_registry_app_path(key, filename):
+	"""
+	Returns the value of a registry key for an executable
+
+	:type key: string
+	:type filename: list of string
+	"""
+	if not winreg:
+		return None
+	try:
+		result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
+	except OSError:
+		pass
+	else:
+		if os.path.isfile(result):
+			return result
+
+def lib64():
+	"""
+	Guess the default ``/usr/lib`` extension for 64-bit applications
+
+	:return: '64' or ''
+	:rtype: string
+	"""
+	# default settings for /usr/lib
+	if os.sep == '/':
+		if platform.architecture()[0] == '64bit':
+			if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'):
+				return '64'
+	return ''
+
+def loose_version(ver_str):
+	# private for the time being!
+	# see #2402
+	lst = re.split(r'([.]|\\d+|[a-zA-Z])', ver_str)
+	ver = []
+	for i, val in enumerate(lst):
+		try:
+			ver.append(int(val))
+		except ValueError:
+			if val != '.':
+				ver.append(val)
+	return ver
+
+def sane_path(p):
+	# private function for the time being!
+	return os.path.abspath(os.path.expanduser(p))
+
+process_pool = []
+"""
+List of processes started to execute sub-process commands
+"""
+
+def get_process():
+	"""
+	Returns a process object that can execute commands as sub-processes
+
+	:rtype: subprocess.Popen
+	"""
+	try:
+		return process_pool.pop()
+	except IndexError:
+		filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py'
+		cmd = [sys.executable, '-c', readf(filepath)]
+		return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0, close_fds=not is_win32)
+
+def run_prefork_process(cmd, kwargs, cargs):
+	"""
+	Delegates process execution to a pre-forked process instance.
+	"""
+	if not kwargs.get('env'):
+		kwargs['env'] = dict(os.environ)
+	try:
+		obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
+	except (TypeError, AttributeError):
+		return run_regular_process(cmd, kwargs, cargs)
+
+	proc = get_process()
+	if not proc:
+		return run_regular_process(cmd, kwargs, cargs)
+
+	proc.stdin.write(obj)
+	proc.stdin.write('\n'.encode())
+	proc.stdin.flush()
+	obj = proc.stdout.readline()
+	if not obj:
+		raise OSError('Preforked sub-process %r died' % proc.pid)
+
+	process_pool.append(proc)
+	lst = cPickle.loads(base64.b64decode(obj))
+	# Jython wrapper failures (bash/execvp)
+	assert len(lst) == 5
+	ret, out, err, ex, trace = lst
+	if ex:
+		if ex == 'OSError':
+			raise OSError(trace)
+		elif ex == 'ValueError':
+			raise ValueError(trace)
+		elif ex == 'TimeoutExpired':
+			exc = TimeoutExpired(cmd, timeout=cargs['timeout'], output=out)
+			exc.stderr = err
+			raise exc
+		else:
+			raise Exception(trace)
+	return ret, out, err
+
+def lchown(path, user=-1, group=-1):
+	"""
+	Change the owner/group of a path, raises an OSError if the
+	ownership change fails.
+
+	:param user: user to change
+	:type user: int or str
+	:param group: group to change
+	:type group: int or str
+	"""
+	if isinstance(user, str):
+		import pwd
+		entry = pwd.getpwnam(user)
+		if not entry:
+			raise OSError('Unknown user %r' % user)
+		user = entry[2]
+	if isinstance(group, str):
+		import grp
+		entry = grp.getgrnam(group)
+		if not entry:
+			raise OSError('Unknown group %r' % group)
+		group = entry[2]
+	return os.lchown(path, user, group)
+
+def run_regular_process(cmd, kwargs, cargs={}):
+	"""
+	Executes a subprocess command by using subprocess.Popen
+	"""
+	proc = subprocess.Popen(cmd, **kwargs)
+	if kwargs.get('stdout') or kwargs.get('stderr'):
+		try:
+			out, err = proc.communicate(**cargs)
+		except TimeoutExpired:
+			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
+				os.killpg(proc.pid, signal.SIGKILL)
+			else:
+				proc.kill()
+			out, err = proc.communicate()
+			exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
+			exc.stderr = err
+			raise exc
+		status = proc.returncode
+	else:
+		out, err = (None, None)
+		try:
+			status = proc.wait(**cargs)
+		except TimeoutExpired as e:
+			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
+				os.killpg(proc.pid, signal.SIGKILL)
+			else:
+				proc.kill()
+			proc.wait()
+			raise e
+	return status, out, err
+
+def run_process(cmd, kwargs, cargs={}):
+	"""
+	Executes a subprocess by using a pre-forked process when possible
+	or falling back to subprocess.Popen. See :py:func:`waflib.Utils.run_prefork_process`
+	and :py:func:`waflib.Utils.run_regular_process`
+	"""
+	if kwargs.get('stdout') and kwargs.get('stderr'):
+		return run_prefork_process(cmd, kwargs, cargs)
+	else:
+		return run_regular_process(cmd, kwargs, cargs)
+
+def alloc_process_pool(n, force=False):
+	"""
+	Allocates an amount of processes to the default pool so its size is at least *n*.
+	It is useful to call this function early so that the pre-forked
+	processes use as little memory as possible.
+
+	:param n: pool size
+	:type n: integer
+	:param force: if True then *n* more processes are added to the existing pool
+	:type force: bool
+	"""
+	# mandatory on python2, unnecessary on python >= 3.2
+	global run_process, get_process, alloc_process_pool
+	if not force:
+		n = max(n - len(process_pool), 0)
+	try:
+		lst = [get_process() for x in range(n)]
+	except OSError:
+		run_process = run_regular_process
+		get_process = alloc_process_pool = nada
+	else:
+		for x in lst:
+			process_pool.append(x)
+
+def atexit_pool():
+	for k in process_pool:
+		try:
+			os.kill(k.pid, 9)
+		except OSError:
+			pass
+		else:
+			k.wait()
+# see #1889
+if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
+	atexit.register(atexit_pool)
+
+if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable:
+	run_process = run_regular_process
+	get_process = alloc_process_pool = nada
+
diff --git a/third_party/waf/waflib/__init__.py b/third_party/waf/waflib/__init__.py
new file mode 100644
index 0000000..079df35
--- /dev/null
+++ b/third_party/waf/waflib/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2018 (ita)
diff --git a/third_party/waf/waflib/ansiterm.py b/third_party/waf/waflib/ansiterm.py
new file mode 100644
index 0000000..027f0ad
--- /dev/null
+++ b/third_party/waf/waflib/ansiterm.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+"""
+Emulate a vt100 terminal in cmd.exe
+
+By wrapping sys.stdout / sys.stderr with Ansiterm,
+the vt100 escape characters will be interpreted and
+the equivalent actions will be performed with Win32
+console commands.
+
+"""
+
+import os, re, sys
+from waflib import Utils
+
+wlock = Utils.threading.Lock()
+
+try:
+	from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
+except ImportError:
+
+	class AnsiTerm(object):
+		def __init__(self, stream):
+			self.stream = stream
+			try:
+				self.errors = self.stream.errors
+			except AttributeError:
+				pass # python 2.5
+			self.encoding = self.stream.encoding
+
+		def write(self, txt):
+			try:
+				wlock.acquire()
+				self.stream.write(txt)
+				self.stream.flush()
+			finally:
+				wlock.release()
+
+		def fileno(self):
+			return self.stream.fileno()
+
+		def flush(self):
+			self.stream.flush()
+
+		def isatty(self):
+			return self.stream.isatty()
+else:
+
+	class COORD(Structure):
+		_fields_ = [("X", c_short), ("Y", c_short)]
+
+	class SMALL_RECT(Structure):
+		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
+
+	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
+
+	class CONSOLE_CURSOR_INFO(Structure):
+		_fields_ = [('dwSize', c_ulong), ('bVisible', c_int)]
+
+	try:
+		_type = unicode
+	except NameError:
+		_type = str
+
+	to_int = lambda number, default: number and int(number) or default
+
+	STD_OUTPUT_HANDLE = -11
+	STD_ERROR_HANDLE = -12
+
+	windll.kernel32.GetStdHandle.argtypes = [c_ulong]
+	windll.kernel32.GetStdHandle.restype = c_ulong
+	windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+	windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long
+	windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
+	windll.kernel32.SetConsoleTextAttribute.restype = c_long
+	windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
+	windll.kernel32.FillConsoleOutputCharacterW.restype = c_long
+	windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
+	windll.kernel32.FillConsoleOutputAttribute.restype = c_long
+	windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
+	windll.kernel32.SetConsoleCursorPosition.restype = c_long
+	windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
+	windll.kernel32.SetConsoleCursorInfo.restype = c_long
+
+	class AnsiTerm(object):
+		"""
+		emulate a vt100 terminal in cmd.exe
+		"""
+		def __init__(self, s):
+			self.stream = s
+			try:
+				self.errors = s.errors
+			except AttributeError:
+				pass # python2.5
+			self.encoding = s.encoding
+			self.cursor_history = []
+
+			handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+			self.hconsole = windll.kernel32.GetStdHandle(handle)
+
+			self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+
+			self._csinfo = CONSOLE_CURSOR_INFO()
+			windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+			# just to double check that the console is usable
+			self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+			r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
+			self._isatty = r == 1
+
+		def screen_buffer_info(self):
+			"""
+			Updates self._sbinfo and returns it
+			"""
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
+			return self._sbinfo
+
+		def clear_line(self, param):
+			mode = param and int(param) or 0
+			sbinfo = self.screen_buffer_info()
+			if mode == 1: # Clear from beginning of line to cursor position
+				line_start = COORD(0, sbinfo.CursorPosition.Y)
+				line_length = sbinfo.Size.X
+			elif mode == 2: # Clear entire line
+				line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
+				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+			else: # Clear from cursor position to end of line
+				line_start = sbinfo.CursorPosition
+				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
+			chars_written = c_ulong()
+			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
+
+		def clear_screen(self, param):
+			mode = to_int(param, 0)
+			sbinfo = self.screen_buffer_info()
+			if mode == 1: # Clear from beginning of screen to cursor position
+				clear_start = COORD(0, 0)
+				clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
+			elif mode == 2: # Clear entire screen and return cursor to home
+				clear_start = COORD(0, 0)
+				clear_length = sbinfo.Size.X * sbinfo.Size.Y
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
+			else: # Clear from cursor position to end of screen
+				clear_start = sbinfo.CursorPosition
+				clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
+			chars_written = c_ulong()
+			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
+			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
+
+		def push_cursor(self, param):
+			sbinfo = self.screen_buffer_info()
+			self.cursor_history.append(sbinfo.CursorPosition)
+
+		def pop_cursor(self, param):
+			if self.cursor_history:
+				old_pos = self.cursor_history.pop()
+				windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
+
+		def set_cursor(self, param):
+			y, sep, x = param.partition(';')
+			x = to_int(x, 1) - 1
+			y = to_int(y, 1) - 1
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, x), sbinfo.Size.X),
+				min(max(0, y), sbinfo.Size.Y)
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def set_column(self, param):
+			x = to_int(param, 1) - 1
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, x), sbinfo.Size.X),
+				sbinfo.CursorPosition.Y
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def move_cursor(self, x_offset=0, y_offset=0):
+			sbinfo = self.screen_buffer_info()
+			new_pos = COORD(
+				min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
+				min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
+			)
+			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+
+		def move_up(self, param):
+			self.move_cursor(y_offset = -to_int(param, 1))
+
+		def move_down(self, param):
+			self.move_cursor(y_offset = to_int(param, 1))
+
+		def move_left(self, param):
+			self.move_cursor(x_offset = -to_int(param, 1))
+
+		def move_right(self, param):
+			self.move_cursor(x_offset = to_int(param, 1))
+
+		def next_line(self, param):
+			sbinfo = self.screen_buffer_info()
+			self.move_cursor(
+				x_offset = -sbinfo.CursorPosition.X,
+				y_offset = to_int(param, 1)
+			)
+
+		def prev_line(self, param):
+			sbinfo = self.screen_buffer_info()
+			self.move_cursor(
+				x_offset = -sbinfo.CursorPosition.X,
+				y_offset = -to_int(param, 1)
+			)
+
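+		# ANSI colour codes order the bits as red=1, green=2, blue=4, while the
+		# Windows console attribute orders them blue=1, green=2, red=4; the low
+		# three bits are therefore mirrored here, e.g. ANSI red (1) -> attribute 4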
+		def rgb2bgr(self, c):
+			return ((c&1) << 2) | (c&2) | ((c&4)>>2)
+
+		def set_color(self, param):
+			cols = param.split(';')
+			sbinfo = self.screen_buffer_info()
+			attr = sbinfo.Attributes
+			for c in cols:
+				c = to_int(c, 0)
+				if 29 < c < 38: # fgcolor
+					attr = (attr & 0xfff0) | self.rgb2bgr(c - 30)
+				elif 39 < c < 48: # bgcolor
+					attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4)
+				elif c == 0: # reset
+					attr = self._orig_sbinfo.Attributes
+				elif c == 1: # strong
+					attr |= 0x08
+				elif c == 4: # blink not available -> bg intensity
+					attr |= 0x80
+				elif c == 7: # negative
+					attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
+
+			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
+
+		def show_cursor(self,param):
+			self._csinfo.bVisible = 1
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+		def hide_cursor(self,param):
+			self._csinfo.bVisible = 0
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+		ansi_command_table = {
+			'A': move_up,
+			'B': move_down,
+			'C': move_right,
+			'D': move_left,
+			'E': next_line,
+			'F': prev_line,
+			'G': set_column,
+			'H': set_cursor,
+			'f': set_cursor,
+			'J': clear_screen,
+			'K': clear_line,
+			'h': show_cursor,
+			'l': hide_cursor,
+			'm': set_color,
+			's': push_cursor,
+			'u': pop_cursor,
+		}
+		# Match either an escape sequence or a run of text containing no escape sequence
+		ansi_tokens = re.compile(r'(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
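+		# For instance, findall on '\x1b[1;34mhello' yields ('1;34', 'm', '') for
+		# the escape sequence and ('', '', 'hello') for the plain text after it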
+		def write(self, text):
+			try:
+				wlock.acquire()
+				if self._isatty:
+					for param, cmd, txt in self.ansi_tokens.findall(text):
+						if cmd:
+							cmd_func = self.ansi_command_table.get(cmd)
+							if cmd_func:
+								cmd_func(self, param)
+						else:
+							self.writeconsole(txt)
+				else:
+					# no support for colors in the console, just output the text:
+					# eclipse or msys may be able to interpret the escape sequences
+					self.stream.write(text)
+			finally:
+				wlock.release()
+
+		def writeconsole(self, txt):
+			chars_written = c_ulong()
+			writeconsole = windll.kernel32.WriteConsoleA
+			if isinstance(txt, _type):
+				writeconsole = windll.kernel32.WriteConsoleW
+
+			# MSDN says that there is a shared buffer of 64 KB for the console
+			# writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
+			done = 0
+			todo = len(txt)
+			chunk = 32<<10
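+			# start with 32 KiB chunks; whenever a WriteConsole call fails, the
+			# chunk size is halved and the write is retried until everything is out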
+			while todo != 0:
+				doing = min(chunk, todo)
+				buf = txt[done:done+doing]
+				r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None)
+				if r == 0:
+					chunk >>= 1
+					continue
+				done += doing
+				todo -= doing
+
+
+		def fileno(self):
+			return self.stream.fileno()
+
+		def flush(self):
+			pass
+
+		def isatty(self):
+			return self._isatty
+
+	if sys.stdout.isatty() or sys.stderr.isatty():
+		handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+		console = windll.kernel32.GetStdHandle(handle)
+		sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+		def get_term_cols():
+			windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
+			# Issue 1401 - the progress bar cannot reach the last character
+			return sbinfo.Size.X - 1
+
+# just try and see
+try:
+	import struct, fcntl, termios
+except ImportError:
+	pass
+else:
+	if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'):
+		FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno()
+		def fun():
+			return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
+		try:
+			fun()
+		except Exception as e:
+			pass
+		else:
+			get_term_cols = fun
+
diff --git a/third_party/waf/waflib/extras/__init__.py b/third_party/waf/waflib/extras/__init__.py
new file mode 100644
index 0000000..c8a3c34
--- /dev/null
+++ b/third_party/waf/waflib/extras/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/third_party/waf/waflib/extras/batched_cc.py b/third_party/waf/waflib/extras/batched_cc.py
new file mode 100644
index 0000000..aad2872
--- /dev/null
+++ b/third_party/waf/waflib/extras/batched_cc.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Instead of compiling object files one by one, C/C++ compilers are often able to compile several at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output in the directory where the compiler is called, and dependencies are more difficult
+to track (the command must not be re-run on all source files when only one of them changes).
+As such, we act as if the files were compiled one by one, but no command is actually run:
+each cc/cpp Task is replaced by a TaskSlave. A new task called TaskMaster collects the
+signatures from each slave and works out the command line to run.
+
+Just import this module to start using it:
+def build(bld):
+	bld.load('batched_cc')
+
+Note that this is provided as an example; unity builds are recommended
+for best performance results (fewer tasks and fewer jobs to execute).
+See waflib/extras/unity.py.
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension, feature, after_method
+from waflib.Tools import c, cxx
+
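+# Slave tasks are grouped per source directory by the extension hook below;
+# a new master (batch) task is started whenever a group exceeds MAX_BATCH slaves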
+MAX_BATCH = 50
+
+c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
+c_fun, _ = Task.compile_fun_noshell(c_str)
+
+cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
+cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
+
+count = 70000
+class batch(Task.Task):
+	color = 'PINK'
+
+	after = ['c', 'cxx']
+	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
+
+	def uid(self):
+		return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
+
+	def __str__(self):
+		return 'Batch compilation for %d slaves' % len(self.slaves)
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.slaves = []
+		self.inputs = []
+		self.hasrun = 0
+
+		global count
+		count += 1
+		self.idx = count
+
+	def add_slave(self, slave):
+		self.slaves.append(slave)
+		self.set_run_after(slave)
+
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		for t in self.slaves:
+			#if t.executed:
+			if t.hasrun != Task.SKIPPED:
+				return Task.RUN_ME
+
+		return Task.SKIP_ME
+
+	def get_cwd(self):
+		return self.slaves[0].outputs[0].parent
+
+	def batch_incpaths(self):
+		st = self.env.CPPPATH_ST
+		return [st % node.abspath() for node in self.generator.includes_nodes]
+
+	def run(self):
+		self.outputs = []
+
+		srclst = []
+		slaves = []
+		for t in self.slaves:
+			if t.hasrun != Task.SKIPPED:
+				slaves.append(t)
+				srclst.append(t.inputs[0].abspath())
+
+		self.env.SRCLST = srclst
+
+		if self.slaves[0].__class__.__name__ == 'c':
+			ret = c_fun(self)
+		else:
+			ret = cxx_fun(self)
+
+		if ret:
+			return ret
+
+		for t in slaves:
+			t.old_post_run()
+
+def hook(cls_type):
+	def n_hook(self, node):
+
+		ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
+		name = node.name
+		k = name.rfind('.')
+		if k >= 0:
+			basename = name[:k] + ext
+		else:
+			basename = name + ext
+
+		outdir = node.parent.get_bld().make_node('%d' % self.idx)
+		outdir.mkdir()
+		out = outdir.find_or_declare(basename)
+
+		task = self.create_task(cls_type, node, out)
+
+		try:
+			self.compiled_tasks.append(task)
+		except AttributeError:
+			self.compiled_tasks = [task]
+
+		if not getattr(self, 'masters', None):
+			self.masters = {}
+			self.allmasters = []
+
+		def fix_path(tsk):
+			if self.env.CC_NAME == 'msvc':
+				tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
+
+		if node.parent not in self.masters:
+			m = self.masters[node.parent] = self.master = self.create_task('batch')
+			fix_path(m)
+			self.allmasters.append(m)
+		else:
+			m = self.masters[node.parent]
+			if len(m.slaves) > MAX_BATCH:
+				m = self.masters[node.parent] = self.master = self.create_task('batch')
+				fix_path(m)
+				self.allmasters.append(m)
+		m.add_slave(task)
+		return task
+	return n_hook
+
+extension('.c')(hook('c'))
+extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
+
+@feature('cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
+@after_method('apply_link')
+def link_after_masters(self):
+	if getattr(self, 'allmasters', None):
+		for m in self.allmasters:
+			self.link_task.set_run_after(m)
+
+# Modify the c and cxx task classes - in theory it would be best to
+# create subclasses and to re-map the c/c++ extensions
+for x in ('c', 'cxx'):
+	t = Task.classes[x]
+	def run(self):
+		pass
+
+	def post_run(self):
+		pass
+
+	setattr(t, 'oldrun', getattr(t, 'run', None))
+	setattr(t, 'run', run)
+	setattr(t, 'old_post_run', t.post_run)
+	setattr(t, 'post_run', post_run)
+
diff --git a/third_party/waf/waflib/extras/biber.py b/third_party/waf/waflib/extras/biber.py
new file mode 100644
index 0000000..fd9db4e
--- /dev/null
+++ b/third_party/waf/waflib/extras/biber.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+LaTeX processing using "biber"
+"""
+
+import os
+from waflib import Task, Logs
+
+from waflib.Tools import tex as texmodule
+
+class tex(texmodule.tex):
+	biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
+	biber_fun.__doc__ = """
+	Execute the program **biber**
+	"""
+
+	def bibfile(self):
+		return None
+
+	def bibunits(self):
+		self.env.env = {}
+		self.env.env.update(os.environ)
+		self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+		self.env.SRCFILE = self.aux_nodes[0].name[:-4]
+
+		if not self.env['PROMPT_LATEX']:
+			self.env.append_unique('BIBERFLAGS', '--quiet')
+
+		path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
+		if os.path.isfile(path):
+			Logs.warn('calling biber')
+			self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
+		else:
+			super(tex, self).bibfile()
+			super(tex, self).bibunits()
+
+class latex(tex):
+	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+class pdflatex(tex):
+	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+class xelatex(tex):
+	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
+def configure(self):
+	"""
+	Almost the same as in tex.py, but try to detect 'biber'
+	"""
+	v = self.env
+	for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+		try:
+			self.find_program(p, var=p.upper())
+		except self.errors.ConfigurationError:
+			pass
+	v['DVIPSFLAGS'] = '-Ppdf'
+
diff --git a/third_party/waf/waflib/extras/bjam.py b/third_party/waf/waflib/extras/bjam.py
new file mode 100644
index 0000000..8e04d3a
--- /dev/null
+++ b/third_party/waf/waflib/extras/bjam.py
@@ -0,0 +1,128 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import sep, readlink
+from waflib import Logs
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+
+def options(opt):
+	grp = opt.add_option_group('Bjam Options')
+	grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
+	grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
+	grp.add_option('--bjam_config', default=None)
+	grp.add_option('--bjam_toolset', default=None)
+
+def configure(cnf):
+	if not cnf.env.BJAM_SRC:
+		cnf.env.BJAM_SRC = cnf.options.bjam_src
+	if not cnf.env.BJAM_UNAME:
+		cnf.env.BJAM_UNAME = cnf.options.bjam_uname
+	try:
+		cnf.find_program('bjam', path_list=[
+			cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
+		])
+	except Exception:
+		cnf.env.BJAM = None
+	if not cnf.env.BJAM_CONFIG:
+		cnf.env.BJAM_CONFIG = cnf.options.bjam_config
+	if not cnf.env.BJAM_TOOLSET:
+		cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
+
+@feature('bjam')
+@after_method('process_rule')
+def process_bjam(self):
+	if not self.bld.env.BJAM:
+		self.create_task('bjam_creator')
+	self.create_task('bjam_build')
+	self.create_task('bjam_installer')
+	if getattr(self, 'always', False):
+		always_run(bjam_creator)
+		always_run(bjam_build)
+	always_run(bjam_installer)
+
+class bjam_creator(Task):
+	ext_out = 'bjam_exe'
+	vars=['BJAM_SRC', 'BJAM_UNAME']
+	def run(self):
+		env = self.env
+		gen = self.generator
+		bjam = gen.bld.root.find_dir(env.BJAM_SRC)
+		if not bjam:
+			Logs.error('Cannot find the bjam source')
+			return -1
+		bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
+		bjam_exe = bjam.find_resource(bjam_exe_relpath)
+		if bjam_exe:
+			env.BJAM = bjam_exe.srcpath()
+			return 0
+		bjam_cmd = ['./build.sh']
+		Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
+		result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
+		if not result == 0:
+			Logs.error('bjam failed')
+			return -1
+		bjam_exe = bjam.find_resource(bjam_exe_relpath)
+		if bjam_exe:
+			env.BJAM = bjam_exe.srcpath()
+			return 0
+		Logs.error('bjam failed')
+		return -1
+
+class bjam_build(Task):
+	ext_in = 'bjam_exe'
+	ext_out = 'install'
+	vars = ['BJAM_TOOLSET']
+	def run(self):
+		env = self.env
+		gen = self.generator
+		path = gen.path
+		bld = gen.bld
+		if hasattr(gen, 'root'):
+			build_root = path.find_node(gen.root)
+		else:
+			build_root = path
+		jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
+		if jam:
+			Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
+			jam_rel = jam.relpath_gen(build_root)
+		else:
+			Logs.warn('No build configuration in build_config/user-config.jam. Using default')
+			jam_rel = None
+		bjam_exe = bld.srcnode.find_node(env.BJAM)
+		if not bjam_exe:
+			Logs.error('env.BJAM is not set')
+			return -1
+		bjam_exe_rel = bjam_exe.relpath_gen(build_root)
+		cmd = ([bjam_exe_rel] +
+			(['--user-config=' + jam_rel] if jam_rel else []) +
+			['--stagedir=' + path.get_bld().path_from(build_root)] +
+			['--debug-configuration'] +
+			['--with-' + lib for lib in self.generator.target] +
+			(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
+			['link=' + 'shared'] +
+			['variant=' + 'release']
+		)
+		Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
+		ret = self.exec_command(cmd, cwd=build_root.srcpath())
+		if ret != 0:
+			return ret
+		self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
+		return 0
+
+class bjam_installer(Task):
+	ext_in = 'install'
+	def run(self):
+		gen = self.generator
+		path = gen.path
+		for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
+			files = []
+			for n in path.get_bld().ant_glob(pat):
+				try:
+					t = readlink(n.srcpath())
+					gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
+				except OSError:
+					files.append(n)
+			gen.bld.install_files(idir, files, postpone=False)
+		return 0
+
diff --git a/third_party/waf/waflib/extras/blender.py b/third_party/waf/waflib/extras/blender.py
new file mode 100644
index 0000000..e5efc28
--- /dev/null
+++ b/third_party/waf/waflib/extras/blender.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Michal Proszek, 2014 (poxip)
+
+"""
+Detect the Blender version and path,
+and install the extension:
+
+	def options(opt):
+		opt.load('blender')
+	def configure(cnf):
+		cnf.load('blender')
+	def build(bld):
+		bld(name='io_mesh_raw',
+			feature='blender',
+			files=['file1.py', 'file2.py']
+		)
+If the name variable is empty, files are installed in scripts/addons; otherwise in scripts/addons/name.
+Use ./waf configure --system to set the installation directory to the system path.
+"""
+import os
+import re
+from getpass import getuser
+
+from waflib import Utils
+from waflib.TaskGen import feature
+from waflib.Configure import conf
+
+def options(opt):
+	opt.add_option(
+		'-s', '--system',
+		dest='directory_system',
+		default=False,
+		action='store_true',
+		help='determines installation directory (default: user)'
+	)
+
+@conf
+def find_blender(ctx):
+	'''Find the blender program, store its version number in BLENDER_VERSION and return it'''
+	blender = ctx.find_program('blender')
+	output = ctx.cmd_and_log(blender + ['--version'])
+	m = re.search(r'Blender\s*((\d+(\.|))*)', output)
+	if not m:
+		ctx.fatal('Could not retrieve blender version')
+
+	try:
+		blender_version = m.group(1)
+	except IndexError:
+		ctx.fatal('Could not retrieve blender version')
+
+	ctx.env['BLENDER_VERSION'] = blender_version
+	return blender
+
+@conf
+def configure_paths(ctx):
+	"""Setup blender paths"""
+	# Get the username
+	user = getuser()
+	_platform = Utils.unversioned_sys_platform()
+	config_path = {'user': '', 'system': ''}
+	if _platform.startswith('linux'):
+		config_path['user'] = '/home/%s/.config/blender/' % user
+		config_path['system'] = '/usr/share/blender/'
+	elif _platform == 'darwin':
+		# MAC OS X
+		config_path['user'] = \
+			'/Users/%s/Library/Application Support/Blender/' % user
+		config_path['system'] = '/Library/Application Support/Blender/'
+	elif Utils.is_win32:
+		# Windows
+		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
+		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
+
+		config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
+		config_path['system'] = \
+			'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
+	else:
+		ctx.fatal(
+			'Unsupported platform. '
+			'Available platforms: Linux, OSX, MS-Windows.'
+		)
+
+	blender_version = ctx.env['BLENDER_VERSION']
+
+	config_path['user'] += blender_version + '/'
+	config_path['system'] += blender_version + '/'
+
+	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
+	if ctx.options.directory_system:
+		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
+
+	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
+		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
+	)
+	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
+
+def configure(ctx):
+	ctx.find_blender()
+	ctx.configure_paths()
+
+@feature('blender_list')
+def blender(self):
+	# Two ways to install a blender extension: as a module or just .py files
+	dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
+	Utils.check_dir(dest_dir)
+	self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
+
diff --git a/third_party/waf/waflib/extras/boo.py b/third_party/waf/waflib/extras/boo.py
new file mode 100644
index 0000000..06623d4
--- /dev/null
+++ b/third_party/waf/waflib/extras/boo.py
@@ -0,0 +1,81 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Yannick LM 2011
+
+"""
+Support for the boo programming language, for example::
+
+	bld(features = "boo",       # necessary feature
+		source   = "src.boo",   # list of boo files
+		gen      = "world.dll", # target
+		type     = "library",   # library/exe ("-target:xyz" flag)
+		name     = "world"      # necessary if the target is referenced by 'use'
+	)
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method, before_method, extension
+
+@extension('.boo')
+def boo_hook(self, node):
+	# Nothing here yet ...
+	# TODO filter the non-boo source files in 'apply_booc' and remove this method
+	pass
+
+@feature('boo')
+@before_method('process_source')
+def apply_booc(self):
+	"""Create a booc task """
+	src_nodes = self.to_nodes(self.source)
+	out_node = self.path.find_or_declare(self.gen)
+
+	self.boo_task = self.create_task('booc', src_nodes, [out_node])
+
+	# Set variables used by the 'booc' task
+	self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
+
+	# type is "exe" by default
+	type = getattr(self, "type", "exe")
+	self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
+
+@feature('boo')
+@after_method('apply_booc')
+def use_boo(self):
+	""""
+	boo applications honor the **use** keyword::
+	"""
+	dep_names = self.to_list(getattr(self, 'use', []))
+	for dep_name in dep_names:
+		dep_task_gen = self.bld.get_tgen_by_name(dep_name)
+		if not dep_task_gen:
+			continue
+		dep_task_gen.post()
+		dep_task = getattr(dep_task_gen, 'boo_task', None)
+		if not dep_task:
+			# Try a cs task:
+			dep_task = getattr(dep_task_gen, 'cs_task', None)
+			if not dep_task:
+				# Try a link task:
+				dep_task = getattr(dep_task_gen, 'link_task', None)
+				if not dep_task:
+					# Abort ...
+					continue
+		self.boo_task.set_run_after(dep_task) # order
+		self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
+		self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
+
+class booc(Task.Task):
+	"""Compiles .boo files """
+	color   = 'YELLOW'
+	run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
+
+@conf
+def check_booc(self):
+	self.find_program('booc', 'BOOC')
+	self.env.BOO_FLAGS = ['-nologo']
+
+def configure(self):
+	"""Check that booc is available """
+	self.check_booc()
+
diff --git a/third_party/waf/waflib/extras/boost.py b/third_party/waf/waflib/extras/boost.py
new file mode 100644
index 0000000..93b312a
--- /dev/null
+++ b/third_party/waf/waflib/extras/boost.py
@@ -0,0 +1,526 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('compiler_cxx boost')
+
+	def configure(conf):
+		conf.load('compiler_cxx boost')
+		conf.check_boost(lib='system filesystem')
+
+	def build(bld):
+		bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated in order to specify the location of the boost includes/libraries.
+The `check_boost` configuration function allows you to specify the boost libraries to use.
+It can also provide default values for command-line arguments such as --boost-mt.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+   Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC,
+   so before calling `conf.check_boost` you might want to disable it by adding
+		conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ - boost might also be compiled with /MT, which links the runtime statically.
+   If you have problems with redefined symbols, try
+		self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+		self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensure correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method
+
+BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+
+BOOST_ERROR_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+
+PTHREAD_CODE = '''
+#include <pthread.h>
+static void* f(void*) { return 0; }
+int main() {
+	pthread_t th;
+	pthread_attr_t attr;
+	pthread_attr_init(&attr);
+	pthread_create(&th, &attr, &f, 0);
+	pthread_join(th, 0);
+	pthread_cleanup_push(0, 0);
+	pthread_cleanup_pop(0);
+	pthread_attr_destroy(&attr);
+}
+'''
+
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+BOOST_LOG_CODE = '''
+#include <boost/log/trivial.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+int main() {
+	using namespace boost::log;
+	add_common_attributes();
+	add_console_log(std::clog, keywords::format = "%Message%");
+	BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
+}
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+	'borland':  'bcb',
+	'clang':	detect_clang,
+	'como':	 'como',
+	'cw':	   'cw',
+	'darwin':   'xgcc',
+	'edg':	  'edg',
+	'g++':	  detect_mingw,
+	'gcc':	  detect_mingw,
+	'icpc':	 detect_intel,
+	'intel':	detect_intel,
+	'kcc':	  'kcc',
+	'kylix':	'bck',
+	'mipspro':  'mp',
+	'mingw':	'mgw',
+	'msvc':	 'vc',
+	'qcc':	  'qcc',
+	'sun':	  'sw',
+	'sunc++':   'sw',
+	'tru64cxx': 'tru',
+	'vacpp':	'xlc'
+}
+
+
+def options(opt):
+	opt = opt.add_option_group('Boost Options')
+	opt.add_option('--boost-includes', type='string',
+				   default='', dest='boost_includes',
+				   help='''path to the directory where the boost includes are,
+				   e.g., /path/to/boost_1_55_0/stage/include''')
+	opt.add_option('--boost-libs', type='string',
+				   default='', dest='boost_libs',
+				   help='''path to the directory where the boost libs are,
+				   e.g., path/to/boost_1_55_0/stage/lib''')
+	opt.add_option('--boost-mt', action='store_true',
+				   default=False, dest='boost_mt',
+				   help='select multi-threaded libraries')
+	opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+				   help='''select libraries with tags (gd for debug, static is automatically added),
+				   see doc Boost, Getting Started, chapter 6.1''')
+	opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+				   help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+	opt.add_option('--boost-toolset', type='string',
+				   default='', dest='boost_toolset',
+				   help='force a toolset e.g. msvc, vc90, \
+						gcc, mingw, mgw45 (default: auto)')
+	py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+	opt.add_option('--boost-python', type='string',
+				   default=py_version, dest='boost_python',
+				   help='select the lib python with this version \
+						(default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+	if not d:
+		return None
+	dnode = self.root.find_dir(d)
+	if dnode:
+		return dnode.find_node(BOOST_VERSION_FILE)
+	return None
+
+@conf
+def boost_get_version(self, d):
+	"""silently retrieve the boost version number"""
+	node = self.__boost_get_version_file(d)
+	if node:
+		try:
+			txt = node.read()
+		except EnvironmentError:
+			Logs.error("Could not read the file %r", node.abspath())
+		else:
+			re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+			m1 = re_but1.search(txt)
+			re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+			m2 = re_but2.search(txt)
+			if m1 and m2:
+				return (m1.group(1), m2.group(1))
+	return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+	includes = k and k[0] or kw.get('includes')
+	if includes and self.__boost_get_version_file(includes):
+		return includes
+	for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
+		if self.__boost_get_version_file(d):
+			return d
+	if includes:
+		self.end_msg('headers not found in %s' % includes)
+		self.fatal('The configuration failed')
+	else:
+		self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+		self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+	toolset = cc
+	if not cc:
+		build_platform = Utils.unversioned_sys_platform()
+		if build_platform in BOOST_TOOLSETS:
+			cc = build_platform
+		else:
+			cc = self.env.CXX_NAME
+	if cc in BOOST_TOOLSETS:
+		toolset = BOOST_TOOLSETS[cc]
+	return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+	''' return the lib path and all the files in it '''
+	if 'files' in kw:
+		return self.root.find_dir('.'), Utils.to_list(kw['files'])
+	libs = k and k[0] or kw.get('libs')
+	if libs:
+		path = self.root.find_dir(libs)
+		files = path.ant_glob('*boost_*')
+	if not libs or not files:
+		for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
+			if not d:
+				continue
+			path = self.root.find_dir(d)
+			if path:
+				files = path.ant_glob('*boost_*')
+				if files:
+					break
+			path = self.root.find_dir(d + '64')
+			if path:
+				files = path.ant_glob('*boost_*')
+				if files:
+					break
+	if not path:
+		if libs:
+			self.end_msg('libs not found in %s' % libs)
+			self.fatal('The configuration failed')
+		else:
+			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+			self.fatal('The configuration failed')
+
+	self.to_log('Found the boost path in %r with the libraries:' % path)
+	for x in files:
+		self.to_log('    %r' % x)
+	return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+	'''
+	return the lib path and the required libs
+	according to the parameters
+	'''
+	path, files = self.__boost_get_libs_path(**kw)
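+	# sort the candidates so that longer, more specific file names are tried first
+	# by the strict patterns below (e.g. boost_thread-mt before boost_thread)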
+	files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
+	toolset = self.boost_get_toolset(kw.get('toolset', ''))
+	toolset_pat = '(-%s[0-9]{0,3})' % toolset
+	version = '-%s' % self.env.BOOST_VERSION
+
+	def find_lib(re_lib, files):
+		for file in files:
+			if re_lib.search(file.name):
+				self.to_log('Found boost lib %s' % file)
+				return file
+		return None
+
+	# extensions from Tools.ccroot.lib_patterns
+	wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$")
+	def format_lib_name(name):
+		if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+			name = name[3:]
+		return wo_ext.sub("", name)
+
+	def match_libs(lib_names, is_static):
+		libs = []
+		lib_names = Utils.to_list(lib_names)
+		if not lib_names:
+			return libs
+		t = []
+		if kw.get('mt', False):
+			t.append('-mt')
+		if kw.get('abi'):
+			t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
+		elif is_static:
+			t.append('-s')
+		tags_pat = t and ''.join(t) or ''
+		ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
+		ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
+
+		for lib in lib_names:
+			if lib == 'python':
+				# for instance, with python='27',
+				# accepts '-py27', '-py2', '27', '-2.7' and '2'
+				# but will reject '-py3', '-py26', '26' and '3'
+				tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
+			else:
+				tags = tags_pat
+			# Try the patterns from the strictest match to the least strict one
+			for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
+							'boost_%s%s%s%s$' % (lib, tags, version, ext),
+							# Give up trying to find the right version
+							'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
+							'boost_%s%s%s$' % (lib, tags, ext),
+							'boost_%s%s$' % (lib, ext),
+							'boost_%s' % lib]:
+				self.to_log('Trying pattern %s' % pattern)
+				file = find_lib(re.compile(pattern), files)
+				if file:
+					libs.append(format_lib_name(file.name))
+					break
+			else:
+				self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+				self.fatal('The configuration failed')
+		return libs
+
+	return  path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
+
+@conf
+def _check_pthread_flag(self, *k, **kw):
+	'''
+	Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
+
+	Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
+	boost/thread.hpp will trigger a #error if -pthread isn't used:
+	  boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
+	  is not turned on. Please set the correct command line options for
+	  threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
+
+	Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
+	'''
+
+	var = kw.get('uselib_store', 'BOOST')
+
+	self.start_msg('Checking the flags needed to use pthreads')
+
+	# The ordering *is* (sometimes) important.  Some notes on the
+	# individual items follow:
+	# (none): in case threads are in libc; should be tried before -Kthread and
+	#       other compiler flags to prevent continual compiler warnings
+	# -lpthreads: AIX (must check this before -lpthread)
+	# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
+	# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
+	# -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
+	# -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
+	# -pthreads: Solaris/GCC
+	# -mthreads: MinGW32/GCC, Lynx/GCC
+	# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
+	#      doesn't hurt to check since this sometimes defines pthreads too;
+	#      also defines -D_REENTRANT)
+	#      ... -mt is also the pthreads flag for HP/aCC
+	# -lpthread: GNU Linux, etc.
+	# --thread-safe: KAI C++
+	if Utils.unversioned_sys_platform() == "sunos":
+		# On Solaris (at least, for some versions), libc contains stubbed
+		# (non-functional) versions of the pthreads routines, so link-based
+		# tests will erroneously succeed.  (We need to link with -pthreads/-mt/
+		# -lpthread.)  (The stubs are missing pthread_cleanup_push, or rather
+		# a function called by this macro, so we could check for that, but
+		# who knows whether they'll stub that too in a future libc.)  So,
+		# we'll just look for -pthreads and -lpthread first:
+		boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
+	else:
+		boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
+							   "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
+
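+	# each candidate flag is tried inside a stash()/revert() transaction: the flag
+	# is appended, a test program is built, and the environment is rolled back on failure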
+	for boost_pthread_flag in boost_pthread_flags:
+		try:
+			self.env.stash()
+			self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
+			self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
+			self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
+
+			self.end_msg(boost_pthread_flag)
+			return
+		except self.errors.ConfigurationError:
+			self.env.revert()
+	self.end_msg('None')
+
+@conf
+def check_boost(self, *k, **kw):
+	"""
+	Initialize boost libraries to be used.
+
+	Keywords: you can pass the same parameters as with the command line (without "--boost-").
+	Note that the command line takes priority, and should preferably be used.
+	"""
+	if not self.env['CXX']:
+		self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+	params = {
+		'lib': k and k[0] or kw.get('lib'),
+		'stlib': kw.get('stlib')
+	}
+	for key, value in self.options.__dict__.items():
+		if not key.startswith('boost_'):
+			continue
+		key = key[len('boost_'):]
+		params[key] = value and value or kw.get(key, '')
+
+	var = kw.get('uselib_store', 'BOOST')
+
+	self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
+	if self.env.DPKG_ARCHITECTURE:
+		deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
+		BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())
+
+	self.start_msg('Checking boost includes')
+	self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+	versions = self.boost_get_version(inc)
+	self.env.BOOST_VERSION = versions[0]
+	self.env.BOOST_VERSION_NUMBER = int(versions[1])
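+	# BOOST_VERSION encodes the version as major * 100000 + minor * 100 + patch,
+	# so for instance 106501 is displayed as 1.65.1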
+	self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+							   int(versions[1]) / 100 % 1000,
+							   int(versions[1]) % 100))
+	if Logs.verbose:
+		Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])
+
+	if not params['lib'] and not params['stlib']:
+		return
+	if 'static' in kw or 'static' in params:
+		Logs.warn('boost: static parameter is deprecated, use stlib instead.')
+	self.start_msg('Checking boost libs')
+	path, libs, stlibs = self.boost_get_libs(**params)
+	self.env['LIBPATH_%s' % var] = [path]
+	self.env['STLIBPATH_%s' % var] = [path]
+	self.env['LIB_%s' % var] = libs
+	self.env['STLIB_%s' % var] = stlibs
+	self.end_msg('ok')
+	if Logs.verbose:
+		Logs.pprint('CYAN', '	path : %s' % path)
+		Logs.pprint('CYAN', '	shared libs : %s' % libs)
+		Logs.pprint('CYAN', '	static libs : %s' % stlibs)
+
+	def has_shlib(lib):
+		return params['lib'] and lib in params['lib']
+	def has_stlib(lib):
+		return params['stlib'] and lib in params['stlib']
+	def has_lib(lib):
+		return has_shlib(lib) or has_stlib(lib)
+	if has_lib('thread'):
+		# not inside try_link to make check visible in the output
+		self._check_pthread_flag(*k, **kw)
+
+	def try_link():
+		if has_lib('system'):
+			self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
+		if has_lib('thread'):
+			self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
+		if has_lib('log'):
+			if not has_lib('thread'):
+				self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+			if has_shlib('log'):
+				self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
+			self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+
+	if params.get('linkage_autodetect', False):
+		self.start_msg("Attempting to detect boost linkage flags")
+		toolset = self.boost_get_toolset(kw.get('toolset', ''))
+		if toolset in ('vc',):
+			# disable the auto-linking feature, which causes error LNK1181
+			# when the libraries it requests cannot be found at link time
+			self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+			# if no dlls are present, we guess the .lib files are not stubs
+			has_dlls = False
+			for x in Utils.listdir(path):
+				if x.endswith(self.env.cxxshlib_PATTERN % ''):
+					has_dlls = True
+					break
+			if not has_dlls:
+				self.env['STLIBPATH_%s' % var] = [path]
+				self.env['STLIB_%s' % var] = libs
+				del self.env['LIB_%s' % var]
+				del self.env['LIBPATH_%s' % var]
+
+			# we attempt to play with some known-to-work CXXFLAGS combinations
+			for cxxflags in (['/MD', '/EHsc'], []):
+				self.env.stash()
+				self.env["CXXFLAGS_%s" % var] += cxxflags
+				try:
+					try_link()
+				except Errors.ConfigurationError as e:
+					self.env.revert()
+					exc = e
+				else:
+					self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+					exc = None
+					self.env.commit()
+					break
+
+			if exc is not None:
+				self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
+				self.fatal('The configuration failed')
+		else:
+			self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+			self.fatal('The configuration failed')
+	else:
+		self.start_msg('Checking for boost linkage')
+		try:
+			try_link()
+		except Errors.ConfigurationError as e:
+			self.end_msg("Could not link against boost libraries using supplied options")
+			self.fatal('The configuration failed')
+		self.end_msg('ok')
+
+
+@feature('cxx')
+@after_method('apply_link')
+def install_boost(self):
+	if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
+		return
+	install_boost.done = True
+	inst_to = getattr(self, 'install_path', '${BINDIR}')
+	for lib in self.env.LIB_BOOST:
+		try:
+			file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
+			self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
+		except Exception:
+			continue
+install_boost.done = False
diff --git a/third_party/waf/waflib/extras/build_file_tracker.py b/third_party/waf/waflib/extras/build_file_tracker.py
new file mode 100644
index 0000000..c4f26fd
--- /dev/null
+++ b/third_party/waf/waflib/extras/build_file_tracker.py
@@ -0,0 +1,28 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015
+
+"""
+Force files to depend on the timestamps of those located in the build directory. You may
+want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
+
+Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
+or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
+or to hash the file in the build directory with its timestamp
+"""
+
+import os
+from waflib import Node, Utils
+
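+# Replacement for Node.get_bld_sig: nodes in the build directory contribute their
+# stored task signature plus their mtime, so that touching an output is enough
+# to trigger the partial rebuilds described above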
+def get_bld_sig(self):
+	if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+		return Utils.h_file(self.abspath())
+
+	try:
+		# add the creation time to the signature
+		return self.sig + str(os.stat(self.abspath()).st_mtime)
+	except AttributeError:
+		return None
+
+Node.Node.get_bld_sig = get_bld_sig
+
diff --git a/third_party/waf/waflib/extras/build_logs.py b/third_party/waf/waflib/extras/build_logs.py
new file mode 100644
index 0000000..cdf8ed0
--- /dev/null
+++ b/third_party/waf/waflib/extras/build_logs.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2013 (ita)
+
+"""
+A system for recording all outputs to a log file. Just add the following to your wscript file::
+
+  def init(ctx):
+    ctx.load('build_logs')
+"""
+
+import atexit, sys, time, os, shutil, threading
+from waflib import ansiterm, Logs, Context
+
+# adding the logs under the build/ directory would clash with the clean command
+try:
+	up = os.path.dirname(Context.g_module.__file__)
+except AttributeError:
+	up = '.'
+LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
+
+wlock = threading.Lock()
+class log_to_file(object):
+	def __init__(self, stream, fileobj, filename):
+		self.stream = stream
+		self.encoding = self.stream.encoding
+		self.fileobj = fileobj
+		self.filename = filename
+		self.is_valid = True
+	def replace_colors(self, data):
+		for x in Logs.colors_lst.values():
+			if isinstance(x, str):
+				data = data.replace(x, '')
+		return data
+	def write(self, data):
+		try:
+			wlock.acquire()
+			self.stream.write(data)
+			self.stream.flush()
+			if self.is_valid:
+				self.fileobj.write(self.replace_colors(data))
+		finally:
+			wlock.release()
+	def fileno(self):
+		return self.stream.fileno()
+	def flush(self):
+		self.stream.flush()
+		if self.is_valid:
+			self.fileobj.flush()
+	def isatty(self):
+		return self.stream.isatty()
+
+def init(ctx):
+	global LOGFILE
+	filename = os.path.abspath(LOGFILE)
+	try:
+		os.makedirs(os.path.dirname(os.path.abspath(filename)))
+	except OSError:
+		pass
+
+	if hasattr(os, 'O_NOINHERIT'):
+		fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
+		fileobj = os.fdopen(fd, 'w')
+	else:
+		fileobj = open(LOGFILE, 'w')
+	old_stderr = sys.stderr
+
+	# sys.stdout has already been replaced, so __stdout__ will be faster
+	#sys.stdout = log_to_file(sys.stdout, fileobj, filename)
+	#sys.stderr = log_to_file(sys.stderr, fileobj, filename)
+	def wrap(stream):
+		if stream.isatty():
+			return ansiterm.AnsiTerm(stream)
+		return stream
+	sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
+	sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
+
+	# now mess with the logging module...
+	for x in Logs.log.handlers:
+		try:
+			stream = x.stream
+		except AttributeError:
+			pass
+		else:
+			if id(stream) == id(old_stderr):
+				x.stream = sys.stderr
+
+def exit_cleanup():
+	try:
+		fileobj = sys.stdout.fileobj
+	except AttributeError:
+		pass
+	else:
+		sys.stdout.is_valid = False
+		sys.stderr.is_valid = False
+		fileobj.close()
+		filename = sys.stdout.filename
+
+		Logs.info('Output logged to %r', filename)
+
+		# then copy the log file to "latest.log" if possible
+		up = os.path.dirname(os.path.abspath(filename))
+		try:
+			shutil.copy(filename, os.path.join(up, 'latest.log'))
+		except OSError:
+			# this may fail on windows due to processes spawned
+			pass
+
+atexit.register(exit_cleanup)
+
diff --git a/third_party/waf/waflib/extras/buildcopy.py b/third_party/waf/waflib/extras/buildcopy.py
new file mode 100644
index 0000000..eaff7e6
--- /dev/null
+++ b/third_party/waf/waflib/extras/buildcopy.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2017 (xbreak)
+"""
+Create task that copies source files to the associated build node.
+This is useful, for example, to construct a complete Python package that can be unit tested
+without installation.
+
+Source files to be copied can be specified either in `buildcopy_source` attribute, or
+`source` attribute. If both are specified `buildcopy_source` has priority.
+
+Examples::
+
+	def build(bld):
+		bld(name             = 'bar',
+			features         = 'py buildcopy',
+			source           = bld.path.ant_glob('src/bar/*.py'))
+
+		bld(name             = 'py baz',
+			features         = 'buildcopy',
+			buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
+
+"""
+import os, shutil
+from waflib import Errors, Task, TaskGen, Utils, Node, Logs
+
+@TaskGen.before_method('process_source')
+@TaskGen.feature('buildcopy')
+def make_buildcopy(self):
+	"""
+	Creates the buildcopy task.
+	"""
+	def to_src_nodes(lst):
+		"""Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
+		preference to nodes in build.
+		"""
+		if isinstance(lst, Node.Node):
+			if not lst.is_src():
+				raise Errors.WafError('buildcopy: node %s is not in src'%lst)
+			if not os.path.isfile(lst.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
+			return lst
+
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		node = self.bld.path.get_src().search_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+			return node
+
+		node = self.bld.path.get_src().find_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+			return node
+		raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
+
+	nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
+	if not nodes:
+		Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)',
+			self)
+		return
+	node_pairs = [(n, n.get_bld()) for n in nodes]
+	self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
+
+class buildcopy(Task.Task):
+	"""
+	Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
+
+	Attribute `node_pairs` should contain a list of tuples describing source and target:
+
+		node_pairs = [(in, out), ...]
+
+	"""
+	color = 'PINK'
+
+	def keyword(self):
+		return 'Copying'
+
+	def run(self):
+		for f,t in self.node_pairs:
+			t.parent.mkdir()
+			shutil.copy2(f.abspath(), t.abspath())
diff --git a/third_party/waf/waflib/extras/c_bgxlc.py b/third_party/waf/waflib/extras/c_bgxlc.py
new file mode 100644
index 0000000..6e3eaf7
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_bgxlc.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+IBM XL Compiler for Blue Gene
+"""
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_bgxlc')
+
+@conf
+def find_bgxlc(conf):
+	cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
+	conf.get_xlc_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'bgxlc'
+
+def configure(conf):
+	conf.find_bgxlc()
+	conf.find_ar()
+	conf.xlc_common_flags()
+	conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
+	conf.env.LINKFLAGS_cprogram = []
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/extras/c_dumbpreproc.py b/third_party/waf/waflib/extras/c_dumbpreproc.py
new file mode 100644
index 0000000..1fdd5c3
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_dumbpreproc.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Dumb C/C++ preprocessor for finding dependencies
+
+It will look at all include files it can find after removing the comments, so the following
+will always add the dependency on both "a.h" and "b.h"::
+
+	#include "a.h"
+	#ifdef B
+		#include "b.h"
+	#endif
+	int main() {
+		return 0;
+	}
+
+To use::
+
+	def configure(conf):
+		conf.load('compiler_c')
+		conf.load('c_dumbpreproc')
+"""
+
+import re
+from waflib.Tools import c_preproc
+
+re_inc = re.compile(
+	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
+	re.IGNORECASE | re.MULTILINE)
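+# re_inc matches both plain and digraph include directives; for example,
+# '#include "a.h"' yields the groups ('include', 'a.h')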
+
+def lines_includes(node):
+	code = node.read()
+	if c_preproc.use_trigraphs:
+		for (a, b) in c_preproc.trig_def:
+			code = code.replace(a, b)
+	code = c_preproc.re_nl.sub('', code)
+	code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
+
+parser = c_preproc.c_parser
+class dumb_parser(parser):
+	def addlines(self, node):
+		if node in self.nodes[:-1]:
+			return
+		self.currentnode_stack.append(node.parent)
+
+		# Avoid reading the same files again
+		try:
+			lines = self.parse_cache[node]
+		except KeyError:
+			lines = self.parse_cache[node] = lines_includes(node)
+
+		self.lines = lines + [(c_preproc.POPFILE, '')] +  self.lines
+
+	def start(self, node, env):
+		try:
+			self.parse_cache = node.ctx.parse_cache
+		except AttributeError:
+			self.parse_cache = node.ctx.parse_cache = {}
+
+		self.addlines(node)
+		while self.lines:
+			(x, y) = self.lines.pop(0)
+			if x == c_preproc.POPFILE:
+				self.currentnode_stack.pop()
+				continue
+			self.tryfind(y, env=env)
+
+c_preproc.c_parser = dumb_parser
+
diff --git a/third_party/waf/waflib/extras/c_emscripten.py b/third_party/waf/waflib/extras/c_emscripten.py
new file mode 100644
index 0000000..e1ac494
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_emscripten.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+
+import subprocess, shlex, sys
+
+from waflib.Tools import ccroot, gcc, gxx
+from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
+
+from waflib.Tools.compiler_c import c_compiler
+from waflib.Tools.compiler_cxx import cxx_compiler
+
+for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
+	c_compiler[supported_os].append('c_emscripten')
+	cxx_compiler[supported_os].append('c_emscripten')
+
+
+@conf
+def get_emscripten_version(conf, cc):
+	"""
+	Emscripten does not support reading the source from stdin ('-') the way clang/gcc do
+	"""
+
+	dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
+	dummy.write("")
+	cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
+	env = conf.env.env or None
+	try:
+		p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
+		out = p.communicate()[0]
+	except Exception as e:
+		conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
+
+	if not isinstance(out, str):
+		out = out.decode(sys.stdout.encoding or 'latin-1')
+
+	k = {}
+	out = out.splitlines()
+	for line in out:
+		lst = shlex.split(line)
+		if len(lst)>2:
+			key = lst[1]
+			val = lst[2]
+			k[key] = val
+
+	if not ('__clang__' in k and 'EMSCRIPTEN' in k):
+		conf.fatal('Could not determine the emscripten compiler version.')
+
+	conf.env.DEST_OS = 'generic'
+	conf.env.DEST_BINFMT = 'elf'
+	conf.env.DEST_CPU = 'asm-js'
+	conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+	return k
+
+@conf
+def find_emscripten(conf):
+	cc = conf.find_program(['emcc'], var='CC')
+	conf.get_emscripten_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'emscripten'
+	cxx = conf.find_program(['em++'], var='CXX')
+	conf.env.CXX = cxx
+	conf.env.CXX_NAME = 'emscripten'
+	conf.find_program(['emar'], var='AR')
+
+def configure(conf):
+	conf.find_emscripten()
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gxx_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+	conf.env.ARFLAGS = ['rcs']
+	conf.env.cshlib_PATTERN = '%s.js'
+	conf.env.cxxshlib_PATTERN = '%s.js'
+	conf.env.cstlib_PATTERN = '%s.a'
+	conf.env.cxxstlib_PATTERN = '%s.a'
+	conf.env.cprogram_PATTERN = '%s.html'
+	conf.env.cxxprogram_PATTERN = '%s.html'
+	conf.env.CXX_TGT_F           = ['-c', '-o', '']
+	conf.env.CC_TGT_F            = ['-c', '-o', '']
+	conf.env.CXXLNK_TGT_F        = ['-o', '']
+	conf.env.CCLNK_TGT_F         = ['-o', '']
+	conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
diff --git a/third_party/waf/waflib/extras/c_nec.py b/third_party/waf/waflib/extras/c_nec.py
new file mode 100644
index 0000000..96bfae4
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_nec.py
@@ -0,0 +1,74 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+NEC SX Compiler for SX vector systems
+"""
+
+import re
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_nec')
+
+@conf
+def find_sxc(conf):
+	cc = conf.find_program(['sxcc'], var='CC')
+	conf.get_sxc_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'sxcc'
+
+@conf
+def get_sxc_version(conf, fc):
+	version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-V']
+	p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
+	out, err = p.communicate()
+
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the NEC C compiler version.')
+	k = match.groupdict()
+	conf.env['C_VERSION'] = (k['major'], k['minor'])
+
+@conf
+def sxc_common_flags(conf):
+	v=conf.env
+	v['CC_SRC_F']=[]
+	v['CC_TGT_F']=['-c','-o']
+	if not v['LINK_CC']:
+		v['LINK_CC']=v['CC']
+	v['CCLNK_SRC_F']=[]
+	v['CCLNK_TGT_F']=['-o']
+	v['CPPPATH_ST']='-I%s'
+	v['DEFINES_ST']='-D%s'
+	v['LIB_ST']='-l%s'
+	v['LIBPATH_ST']='-L%s'
+	v['STLIB_ST']='-l%s'
+	v['STLIBPATH_ST']='-L%s'
+	v['RPATH_ST']=''
+	v['SONAME_ST']=[]
+	v['SHLIB_MARKER']=[]
+	v['STLIB_MARKER']=[]
+	v['LINKFLAGS_cprogram']=['']
+	v['cprogram_PATTERN']='%s'
+	v['CFLAGS_cshlib']=['-fPIC']
+	v['LINKFLAGS_cshlib']=['']
+	v['cshlib_PATTERN']='lib%s.so'
+	v['LINKFLAGS_cstlib']=[]
+	v['cstlib_PATTERN']='lib%s.a'
+
+def configure(conf):
+	conf.find_sxc()
+	conf.find_program('sxar', var='AR')
+	conf.sxc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/cabal.py b/third_party/waf/waflib/extras/cabal.py
new file mode 100644
index 0000000..e10a0d1
--- /dev/null
+++ b/third_party/waf/waflib/extras/cabal.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Anton Feldmann, 2012
+# "Base for cabal"
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension
+from waflib.Utils import threading
+from shutil import rmtree
+
+lock = threading.Lock()
+registering = False
+
+def configure(self):
+    self.find_program('cabal', var='CABAL')
+    self.find_program('ghc-pkg', var='GHCPKG')
+    pkgconfd = self.bldnode.abspath() + '/package.conf.d'
+    self.env.PREFIX = self.bldnode.abspath() + '/dist'
+    self.env.PKGCONFD = pkgconfd
+    if self.root.find_node(pkgconfd + '/package.cache'):
+        self.msg('Using existing package database', pkgconfd, color='CYAN')
+    else:
+        pkgdir = self.root.find_dir(pkgconfd)
+        if pkgdir:
+            self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')
+            rmtree(pkgdir.abspath())
+            pkgdir = None
+
+        self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
+        self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
+
+@extension('.cabal')
+def process_cabal(self, node):
+    out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
+    package_node = node.change_ext('.package')
+    package_node = out_dir_node.find_or_declare(package_node.name)
+    build_node   = node.parent.get_bld()
+    build_path   = build_node.abspath()
+    config_node  = build_node.find_or_declare('setup-config')
+    inplace_node = build_node.find_or_declare('package.conf.inplace')
+
+    config_task = self.create_task('cabal_configure', node)
+    config_task.cwd = node.parent.abspath()
+    config_task.depends_on = getattr(self, 'depends_on', '')
+    config_task.build_path = build_path
+    config_task.set_outputs(config_node)
+
+    build_task = self.create_task('cabal_build', config_node)
+    build_task.cwd = node.parent.abspath()
+    build_task.build_path = build_path
+    build_task.set_outputs(inplace_node)
+
+    copy_task = self.create_task('cabal_copy', inplace_node)
+    copy_task.cwd = node.parent.abspath()
+    copy_task.depends_on = getattr(self, 'depends_on', '')
+    copy_task.build_path = build_path
+
+    last_task = copy_task
+    task_list = [config_task, build_task, copy_task]
+
+    if (getattr(self, 'register', False)):
+        register_task = self.create_task('cabal_register', inplace_node)
+        register_task.cwd = node.parent.abspath()
+        register_task.set_run_after(copy_task)
+        register_task.build_path = build_path
+
+        pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
+        pkgreg_task.cwd = node.parent.abspath()
+        pkgreg_task.set_run_after(register_task)
+        pkgreg_task.build_path = build_path
+
+        last_task = pkgreg_task
+        task_list += [register_task, pkgreg_task]
+
+    touch_task = self.create_task('cabal_touch', inplace_node)
+    touch_task.set_run_after(last_task)
+    touch_task.set_outputs(package_node)
+    touch_task.build_path = build_path
+
+    task_list += [touch_task]
+
+    return task_list
+
+def get_all_src_deps(node):
+    hs_deps = node.ant_glob('**/*.hs')
+    hsc_deps = node.ant_glob('**/*.hsc')
+    lhs_deps = node.ant_glob('**/*.lhs')
+    c_deps = node.ant_glob('**/*.c')
+    cpp_deps = node.ant_glob('**/*.cpp')
+    proto_deps = node.ant_glob('**/*.proto')
+    return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
+
+class Cabal(Task.Task):
+    def scan(self):
+        return (get_all_src_deps(self.generator.path), ())
+
+class cabal_configure(Cabal):
+    run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
+    shell = True
+
+    def scan(self):
+        out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
+        deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
+        return (deps, ())
+
+class cabal_build(Cabal):
+    run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
+    shell = True
+
+class cabal_copy(Cabal):
+    run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
+    shell = True
+
+class cabal_register(Cabal):
+    run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
+    shell = True
+
+class ghcpkg_register(Cabal):
+    run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
+    shell = True
+
+    def runnable_status(self):
+        global lock, registering
+
+        with lock:
+            val = registering
+
+        if val:
+            return Task.ASK_LATER
+
+        ret = Task.Task.runnable_status(self)
+        if ret == Task.RUN_ME:
+            with lock:
+                registering = True
+
+        return ret
+
+    def post_run(self):
+        global lock, registering
+
+        with lock:
+            registering = False
+
+        return Task.Task.post_run(self)
+
+class cabal_touch(Cabal):
+    run_str = 'touch ${TGT}'
+
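+# Minimal wscript sketch (an assumption, not from upstream docs): listing a
+# .cabal file as a source triggers the configure/build/copy task chain built
+# in process_cabal(); 'register=True' adds the cabal/ghc-pkg register steps:
+#
+#   def configure(conf):
+#       conf.load('cabal')
+#
+#   def build(bld):
+#       bld(source='mypackage.cabal', register=True)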
diff --git a/third_party/waf/waflib/extras/cfg_altoptions.py b/third_party/waf/waflib/extras/cfg_altoptions.py
new file mode 100644
index 0000000..47b1189
--- /dev/null
+++ b/third_party/waf/waflib/extras/cfg_altoptions.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to extend c_config.check_cfg()
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool makes it possible to work around the absence of ``*-config``
+programs on a system: it keeps the same clean configuration syntax, but
+infers the values or lets them be overridden through the options interface.
+
+Note that pkg-config also honours the ``PKG_CONFIG_PATH`` environment
+variable, so custom ``.pc`` files can be placed in a dedicated folder;
+this tool could also be implemented on top of that mechanism.
+
+Usage::
+
+   def options(opt):
+     opt.load('cfg_altoptions')
+     opt.add_package_option('package')
+
+   def configure(conf):
+     conf.load('cfg_altoptions')
+     conf.check_cfg(...)
+
+Known issues:
+
+- Behavior with different build contexts...
+
+"""
+
+import os
+import functools
+from waflib import Configure, Options, Errors
+
+def name_to_dest(x):
+	return x.lower().replace('-', '_')
+
+
+def options(opt):
+	def x(opt, param):
+		dest = name_to_dest(param)
+		gr = opt.get_option_group("configure options")
+		gr.add_option('--%s-root' % dest,
+		 help="path containing include and lib subfolders for %s" \
+		  % param,
+		)
+
+	opt.add_package_option = functools.partial(x, opt)
+
+
+check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
+
+@Configure.conf
+def check_cfg(conf, *k, **kw):
+	if k:
+		lst = k[0].split()
+		kw['package'] = lst[0]
+		kw['args'] = ' '.join(lst[1:])
+
+	if not 'package' in kw:
+		return check_cfg_old(conf, **kw)
+
+	package = kw['package']
+
+	package_lo = name_to_dest(package)
+	package_hi = package.upper().replace('-', '_') # TODO FIXME
+	package_hi = kw.get('uselib_store', package_hi)
+
+	def check_folder(path, name):
+		if not os.path.isdir(path):
+			raise Errors.ConfigurationError(
+				"%s_%s (%s) is not a folder!" \
+				% (package_lo, name, path))
+		return path
+
+	root = getattr(Options.options, '%s_root' % package_lo, None)
+
+	if root is None:
+		return check_cfg_old(conf, **kw)
+	else:
+		def add_manual_var(k, v):
+			conf.start_msg('Adding for %s a manual var' % (package))
+			conf.env["%s_%s" % (k, package_hi)] = v
+			conf.end_msg("%s = %s" % (k, v))
+
+
+		check_folder(root, 'root')
+
+		pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
+		add_manual_var('INCLUDES', [pkg_inc])
+		pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
+		add_manual_var('LIBPATH', [pkg_lib])
+		add_manual_var('LIB', [package])
+
+		for x in kw.get('manual_deps', []):
+			for k, v in sorted(conf.env.get_merged_dict().items()):
+				if k.endswith('_%s' % x):
+					k = k.replace('_%s' % x, '')
+					conf.start_msg('Adding for %s a manual dep' \
+					 %(package))
+					conf.env["%s_%s" % (k, package_hi)] += v
+					conf.end_msg('%s += %s' % (k, v))
+
+		return True
+
diff --git a/third_party/waf/waflib/extras/clang_compilation_database.py b/third_party/waf/waflib/extras/clang_compilation_database.py
new file mode 100644
index 0000000..bd29db9
--- /dev/null
+++ b/third_party/waf/waflib/extras/clang_compilation_database.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Christoph Koke, 2013
+# Alibek Omarov, 2019
+
+"""
+Writes the c and cpp compile commands into build/compile_commands.json
+see http://clang.llvm.org/docs/JSONCompilationDatabase.html
+
+Usage:
+
+	Load this tool in `options` to be able to generate the database
+	on request from the command line, as well as before each build:
+
+	def options(opt):
+		opt.load('clang_compilation_database')
+
+	$ waf clangdb
+
+	Alternatively, load it only in `configure` to always generate the
+	database before a build:
+
+	def configure(conf):
+		conf.load('compiler_cxx')
+		...
+		conf.load('clang_compilation_database')
+
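+# Each entry written by this tool follows the JSON Compilation Database
+# format linked above, for example (illustrative values):
+#   { "directory": "/path/to/build",
+#     "arguments": ["/usr/bin/clang++", "-c", "main.cpp", "-o", "main.cpp.1.o"],
+#     "file": "main.cpp" }
+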
+from waflib import Logs, TaskGen, Task, Build, Scripting
+
+Task.Task.keep_last_cmd = True
+
+class ClangDbContext(Build.BuildContext):
+	'''generates compile_commands.json by request'''
+	cmd = 'clangdb'
+
+	def write_compilation_database(self):
+		"""
+		Write the clang compilation database as JSON
+		"""
+		database_file = self.bldnode.make_node('compile_commands.json')
+		Logs.info('Build commands will be stored in %s', database_file.path_from(self.path))
+		try:
+			root = database_file.read_json()
+		except IOError:
+			root = []
+		clang_db = dict((x['file'], x) for x in root)
+		for task in self.clang_compilation_database_tasks:
+			try:
+				cmd = task.last_cmd
+			except AttributeError:
+				continue
+			f_node = task.inputs[0]
+			filename = f_node.path_from(task.get_cwd())
+			entry = {
+				"directory": task.get_cwd().abspath(),
+				"arguments": cmd,
+				"file": filename,
+			}
+			clang_db[filename] = entry
+		root = list(clang_db.values())
+		database_file.write_json(root)
+
+	def execute(self):
+		"""
+		Build dry run
+		"""
+		self.restore()
+		self.cur_tasks = []
+		self.clang_compilation_database_tasks = []
+
+		if not self.all_envs:
+			self.load_envs()
+
+		self.recurse([self.run_dir])
+		self.pre_build()
+
+		# we only need last_cmd to be populated, so override
+		# exec_command temporarily
+		def exec_command(self, *k, **kw):
+			return 0
+
+		for g in self.groups:
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+				if isinstance(tg, Task.Task):
+					lst = [tg]
+				else: lst = tg.tasks
+				for tsk in lst:
+					if tsk.__class__.__name__ == "swig":
+						tsk.runnable_status()
+						if hasattr(tsk, 'more_tasks'):
+							lst.extend(tsk.more_tasks)
+					# Not all dynamic tasks can be processed, in some cases
+					# one may have to call the method "run()" like this:
+					#elif tsk.__class__.__name__ == 'src2c':
+					#	tsk.run()
+					#	if hasattr(tsk, 'more_tasks'):
+					#		lst.extend(tsk.more_tasks)
+
+					tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
+					if isinstance(tsk, tup):
+						self.clang_compilation_database_tasks.append(tsk)
+						tsk.nocache = True
+						old_exec = tsk.exec_command
+						tsk.exec_command = exec_command
+						tsk.run()
+						tsk.exec_command = old_exec
+
+		self.write_compilation_database()
+
+EXECUTE_PATCHED = False
+def patch_execute():
+	global EXECUTE_PATCHED
+
+	if EXECUTE_PATCHED:
+		return
+
+	def new_execute_build(self):
+		"""
+		Invoke clangdb command before build
+		"""
+		if self.cmd.startswith('build'):
+			Scripting.run_command(self.cmd.replace('build','clangdb'))
+
+		old_execute_build(self)
+
+	old_execute_build = getattr(Build.BuildContext, 'execute_build', None)
+	setattr(Build.BuildContext, 'execute_build', new_execute_build)
+	EXECUTE_PATCHED = True
+
+patch_execute()
diff --git a/third_party/waf/waflib/extras/clang_cross.py b/third_party/waf/waflib/extras/clang_cross.py
new file mode 100644
index 0000000..1b51e28
--- /dev/null
+++ b/third_party/waf/waflib/extras/clang_cross.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Krzysztof Kosiński 2014
+# DragoonX6 2018
+
+"""
+Detect the Clang C compiler.
+This version attempts to support Clang's -target and -sysroot flags.
+"""
+
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+import waflib.Context
+import waflib.extras.clang_cross_common
+
+def options(opt):
+	"""
+	Target triplet for clang::
+			$ waf configure --clang-target-triple=x86_64-pc-linux-gnu
+	"""
+	cc_compiler_opts = opt.add_option_group('Configuration options')
+	cc_compiler_opts.add_option('--clang-target-triple', default=None,
+		help='Target triple for clang',
+		dest='clang_target_triple')
+	cc_compiler_opts.add_option('--clang-sysroot', default=None,
+		help='Sysroot for clang',
+		dest='clang_sysroot')
+
+@conf
+def find_clang(conf):
+	"""
+	Finds the program clang and executes it to ensure it really is clang
+	"""
+
+	import os
+
+	cc = conf.find_program('clang', var='CC')
+
+	if conf.options.clang_target_triple != None:
+		conf.env.append_value('CC', ['-target', conf.options.clang_target_triple])
+
+	if conf.options.clang_sysroot != None:
+		sysroot = str()
+
+		if os.path.isabs(conf.options.clang_sysroot):
+			sysroot = conf.options.clang_sysroot
+		else:
+			sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot))
+
+		conf.env.append_value('CC', ['--sysroot', sysroot])
+
+	conf.get_cc_version(cc, clang=True)
+	conf.env.CC_NAME = 'clang'
+
+@conf
+def clang_modifier_x86_64_w64_mingw32(conf):
+	conf.gcc_modifier_win32()
+
+@conf
+def clang_modifier_i386_w64_mingw32(conf):
+	conf.gcc_modifier_win32()
+
+@conf
+def clang_modifier_x86_64_windows_msvc(conf):
+	conf.clang_modifier_msvc()
+
+	# Allow the user to override any flags if they so desire.
+	clang_modifier_user_func = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None)
+	if clang_modifier_user_func:
+		clang_modifier_user_func()
+
+@conf
+def clang_modifier_i386_windows_msvc(conf):
+	conf.clang_modifier_msvc()
+
+	# Allow the user to override any flags if they so desire.
+	clang_modifier_user_func = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None)
+	if clang_modifier_user_func:
+		clang_modifier_user_func()
+
+def configure(conf):
+	conf.find_clang()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	# Allow the user to provide flags for the target platform.
+	conf.gcc_modifier_platform()
+	# And allow more fine grained control based on the compiler's triplet.
+	conf.clang_modifier_target_triple()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
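+
+# Example invocation (hypothetical triple and sysroot path):
+#   ./waf configure --clang-target-triple=aarch64-linux-gnu \
+#                   --clang-sysroot=/opt/sysroots/aarch64
+# which appends '-target aarch64-linux-gnu' and '--sysroot /opt/sysroots/aarch64'
+# to the CC command line before version detection runs.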
diff --git a/third_party/waf/waflib/extras/clang_cross_common.py b/third_party/waf/waflib/extras/clang_cross_common.py
new file mode 100644
index 0000000..b76a070
--- /dev/null
+++ b/third_party/waf/waflib/extras/clang_cross_common.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# DragoonX6 2018
+
+"""
+Common routines for cross_clang.py and cross_clangxx.py
+"""
+
+from waflib.Configure import conf
+import waflib.Context
+
+def normalize_target_triple(target_triple):
+	target_triple = target_triple[:-1]
+	normalized_triple = target_triple.replace('--', '-unknown-')
+
+	if normalized_triple.startswith('-'):
+		normalized_triple = 'unknown' + normalized_triple
+
+	if normalized_triple.endswith('-'):
+		normalized_triple += 'unknown'
+
+	# Normalize MinGW builds to *arch*-w64-mingw32
+	if normalized_triple.endswith('windows-gnu'):
+		normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-w64-mingw32'
+
+	# Strip the vendor when doing msvc builds, since it's unused anyway.
+	if normalized_triple.endswith('windows-msvc'):
+		normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-windows-msvc'
+
+	return normalized_triple.replace('-', '_')
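+
+# Worked examples (the character stripped by [:-1] is the trailing newline
+# that 'clang -dumpmachine' prints):
+#   'x86_64-pc-linux-gnu\n'    -> 'x86_64_pc_linux_gnu'
+#   'x86_64-w64-windows-gnu\n' -> 'x86_64_w64_mingw32'
+#   'i686--linux-gnu\n'        -> 'i686_unknown_linux_gnu'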
+
+@conf
+def clang_modifier_msvc(conf):
+	"""
+	Really basic setup to use clang in MSVC mode.
+	Deliberately minimal: even though clang is MSVC-compatible in this mode,
+	that does not mean MSVC itself is in use. This is best left to the user;
+	MSVC mode could be assumed with the clang-cl frontend, but this module
+	only concerns itself with the gcc-like frontend.
+	"""
+	import os
+	v = conf.env
+	v.cprogram_PATTERN = '%s.exe'
+
+	v.cshlib_PATTERN   = '%s.dll'
+	v.implib_PATTERN   = '%s.lib'
+	v.IMPLIB_ST        = '-Wl,-IMPLIB:%s'
+	v.SHLIB_MARKER     = []
+
+	v.CFLAGS_cshlib    = []
+	v.LINKFLAGS_cshlib = ['-Wl,-DLL']
+	v.cstlib_PATTERN   = '%s.lib'
+	v.STLIB_MARKER     = []
+
+	del(v.AR)
+	conf.find_program(['llvm-lib', 'lib'], var='AR')
+	v.ARFLAGS          = ['-nologo']
+	v.AR_TGT_F         = ['-out:']
+
+	# Default to the linker supplied with llvm instead of link.exe or ld
+	v.LINK_CC          = v.CC + ['-fuse-ld=lld', '-nostdlib']
+	v.CCLNK_TGT_F      = ['-o']
+	v.def_PATTERN      = '-Wl,-def:%s'
+
+	v.LINKFLAGS = []
+
+	v.LIB_ST            = '-l%s'
+	v.LIBPATH_ST        = '-Wl,-LIBPATH:%s'
+	v.STLIB_ST          = '-l%s'
+	v.STLIBPATH_ST      = '-Wl,-LIBPATH:%s'
+
+	CFLAGS_CRT_COMMON = [
+		'-Xclang', '--dependent-lib=oldnames',
+		'-Xclang', '-fno-rtti-data',
+		'-D_MT'
+	]
+
+	v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [
+		'-Xclang', '-flto-visibility-public-std',
+		'-Xclang', '--dependent-lib=libcmt',
+	]
+	v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED
+
+	v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [
+		'-D_DEBUG',
+		'-Xclang', '-flto-visibility-public-std',
+		'-Xclang', '--dependent-lib=libcmtd',
+	]
+	v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG
+
+	v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [
+		'-D_DLL',
+		'-Xclang', '--dependent-lib=msvcrt'
+	]
+	v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL
+
+	v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [
+		'-D_DLL',
+		'-D_DEBUG',
+		'-Xclang', '--dependent-lib=msvcrtd',
+	]
+	v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG
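+
+	# Usage sketch (an assumption, mirroring how waf uselib variables are
+	# consumed): a target opts into one of the CRT variants defined above
+	# by naming it, e.g.
+	#   bld.program(source='main.c', target='app', use='CRT_MULTITHREADED_DLL')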
+
+@conf
+def clang_modifier_target_triple(conf, cpp=False):
+	compiler = conf.env.CXX if cpp else conf.env.CC
+	output = conf.cmd_and_log(compiler + ['-dumpmachine'], output=waflib.Context.STDOUT)
+
+	modifier = ('clangxx' if cpp else 'clang') + '_modifier_'
+	clang_modifier_func = getattr(conf, modifier + normalize_target_triple(output), None)
+	if clang_modifier_func:
+		clang_modifier_func()
diff --git a/third_party/waf/waflib/extras/clangxx_cross.py b/third_party/waf/waflib/extras/clangxx_cross.py
new file mode 100644
index 0000000..0ad38ad
--- /dev/null
+++ b/third_party/waf/waflib/extras/clangxx_cross.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2018 (ita)
+# DragoonX6 2018
+
+"""
+Detect the Clang++ C++ compiler.
+This version attempts to support Clang++'s -target and -sysroot flags.
+"""
+
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+import waflib.extras.clang_cross_common
+
+def options(opt):
+	"""
+	Target triplet for clang++::
+			$ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu
+	"""
+	cxx_compiler_opts = opt.add_option_group('Configuration options')
+	cxx_compiler_opts.add_option('--clangxx-target-triple', default=None,
+		help='Target triple for clang++',
+		dest='clangxx_target_triple')
+	cxx_compiler_opts.add_option('--clangxx-sysroot', default=None,
+		help='Sysroot for clang++',
+		dest='clangxx_sysroot')
+
+@conf
+def find_clangxx(conf):
+	"""
+	Finds the program clang++ and executes it to ensure it really is clang++
+	"""
+
+	import os
+
+	cxx = conf.find_program('clang++', var='CXX')
+
+	if conf.options.clangxx_target_triple != None:
+		conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple])
+
+	if conf.options.clangxx_sysroot != None:
+		sysroot = str()
+
+		if os.path.isabs(conf.options.clangxx_sysroot):
+			sysroot = conf.options.clangxx_sysroot
+		else:
+			sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot))
+
+		conf.env.append_value('CXX', ['--sysroot', sysroot])
+
+	conf.get_cc_version(cxx, clang=True)
+	conf.env.CXX_NAME = 'clang'
+
+@conf
+def clangxx_modifier_x86_64_w64_mingw32(conf):
+	conf.gcc_modifier_win32()
+
+@conf
+def clangxx_modifier_i386_w64_mingw32(conf):
+	conf.gcc_modifier_win32()
+
+@conf
+def clangxx_modifier_msvc(conf):
+	v = conf.env
+	v.cxxprogram_PATTERN = v.cprogram_PATTERN
+	v.cxxshlib_PATTERN   = v.cshlib_PATTERN
+
+	v.CXXFLAGS_cxxshlib  = []
+	v.LINKFLAGS_cxxshlib = v.LINKFLAGS_cshlib
+	v.cxxstlib_PATTERN   = v.cstlib_PATTERN
+
+	v.LINK_CXX           = v.CXX + ['-fuse-ld=lld', '-nostdlib']
+	v.CXXLNK_TGT_F       = v.CCLNK_TGT_F
+
+@conf
+def clangxx_modifier_x86_64_windows_msvc(conf):
+	conf.clang_modifier_msvc()
+	conf.clangxx_modifier_msvc()
+
+	# Allow the user to override any flags if they so desire.
+	clang_modifier_user_func = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None)
+	if clang_modifier_user_func:
+		clang_modifier_user_func()
+
+@conf
+def clangxx_modifier_i386_windows_msvc(conf):
+	conf.clang_modifier_msvc()
+	conf.clangxx_modifier_msvc()
+
+	# Allow the user to override any flags if they so desire.
+	clang_modifier_user_func = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None)
+	if clang_modifier_user_func:
+		clang_modifier_user_func()
+
+def configure(conf):
+	conf.find_clangxx()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	# Allow the user to provide flags for the target platform.
+	conf.gxx_modifier_platform()
+	# And allow more fine grained control based on the compiler's triplet.
+	conf.clang_modifier_target_triple(cpp=True)
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/classic_runner.py b/third_party/waf/waflib/extras/classic_runner.py
new file mode 100644
index 0000000..b08c794
--- /dev/null
+++ b/third_party/waf/waflib/extras/classic_runner.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2021 (ita)
+
+from waflib import Utils, Runner
+
+"""
+Re-enable the classic threading system from waf 1.x
+
+def configure(conf):
+	conf.load('classic_runner')
+"""
+
+class TaskConsumer(Utils.threading.Thread):
+	"""
+	Task consumers belong to a pool of workers
+
+	They wait for tasks in the queue and then use ``task.process(...)``
+	"""
+	def __init__(self, spawner):
+		Utils.threading.Thread.__init__(self)
+		"""
+		Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
+		"""
+		self.spawner = spawner
+		self.daemon = True
+		self.start()
+
+	def run(self):
+		"""
+		Loop over the tasks to execute
+		"""
+		try:
+			self.loop()
+		except Exception:
+			pass
+
+	def loop(self):
+		"""
+		Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
+		:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
+		"""
+		master = self.spawner.master
+		while 1:
+			if not master.stop:
+				try:
+					tsk = master.ready.get()
+					if tsk:
+						tsk.log_display(tsk.generator.bld)
+						master.process_task(tsk)
+					else:
+						break
+				finally:
+					master.out.put(tsk)
+
+class Spawner(object):
+	"""
+	Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
+	spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
+	:py:class:`waflib.Task.Task` instance.
+	"""
+	def __init__(self, master):
+		self.master = master
+		""":py:class:`waflib.Runner.Parallel` producer instance"""
+
+		self.pool = [TaskConsumer(self) for i in range(master.numjobs)]
+
+Runner.Spawner = Spawner
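+
+# Design note: rebinding Runner.Spawner replaces the spawner class globally,
+# so loading this tool from any wscript switches the whole build back to the
+# waf 1.x scheme of one long-lived consumer thread per job slot (-jN).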
diff --git a/third_party/waf/waflib/extras/codelite.py b/third_party/waf/waflib/extras/codelite.py
new file mode 100644
index 0000000..523302c
--- /dev/null
+++ b/third_party/waf/waflib/extras/codelite.py
@@ -0,0 +1,875 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# CodeLite Project
+# Christian Klein (chrikle@berlios.de)
+# Created: Jan 2012
+# msvs.py served as the template for this file,
+# in the hope that it works properly
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+ 
+
+To add this tool to your project:
+def options(conf):
+        opt.load('codelite')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure codelite
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import codelite
+class vsnode_target(codelite.vsnode_target):
+        def get_build_command(self, props):
+                # likely to be required
+                return "waf.bat build"
+        def collect_source(self):
+                # likely to be required
+                ...
+class codelite_bar(codelite.codelite_generator):
+        def init(self):
+                codelite.codelite_generator.init(self)
+                self.vsnode_target = vsnode_target
+
+The codelite class re-uses the same build() function for reading the targets
+(task generators); you may therefore specify codelite settings on the context object:
+
+def build(bld):
+        bld.codelite_solution_name = 'foo.workspace'
+        bld.waf_command = 'waf.bat'
+        bld.projects_dir = bld.srcnode.make_node('')
+        bld.projects_dir.mkdir()
+
+
+ASSUMPTIONS:
+* a project can be either a directory or a target, project files are written only for targets that have source files
+* each project is a .project file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Project Name="${project.name}" InternalType="Library">
+  <Plugins>
+    <Plugin Name="qmake">
+      <![CDATA[00010001N0005Release000000000000]]>
+    </Plugin>
+  </Plugins>
+  <Description/>
+  <Dependencies/>
+  <VirtualDirectory Name="src">
+  ${for x in project.source}  
+  ${if (project.get_key(x)=="sourcefile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}  
+  </VirtualDirectory>
+  <VirtualDirectory Name="include">  
+  ${for x in project.source}
+  ${if (project.get_key(x)=="headerfile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}
+  </VirtualDirectory>  
+  <Settings Type="Dynamic Library">
+    <GlobalSettings>
+      <Compiler Options="" C_Options="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="">
+        <LibraryPath Value="."/>
+      </Linker>
+      <ResourceCompiler Options=""/>
+    </GlobalSettings>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes">
+        <LibraryPath Value=""/>
+      </Linker>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="yes">
+        ${py:b = project.build_properties[0]}
+        <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
+        <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
+        <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand> 
+        <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>        
+        <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
+        <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
+        <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName>None</ThirdPartyToolName>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes"/>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[
+      
+      
+      
+      ]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="no">
+        <RebuildCommand/>
+        <CleanCommand/>
+        <BuildCommand/>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName/>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+  </Settings>
+</CodeLite_Project>'''
+
+
+
+
+SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
+${for p in project.all_projects}
+  <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
+${endfor}
+  <BuildMatrix>
+    <WorkspaceConfiguration Name="Release" Selected="yes">
+${for p in project.all_projects}
+      <Project Name="${p.name}" ConfigName="Release"/>        
+${endfor}
+    </WorkspaceConfiguration>        
+  </BuildMatrix>
+</CodeLite_Workspace>'''
+
+
+
+COMPILE_TEMPLATE = '''def f(project):
+        lst = []
+        def xml_escape(value):
+                return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+        %s
+
+        #f = open('cmd.txt', 'w')
+        #f.write(str(lst))
+        #f.close()
+        return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+        """
+        Compile a template expression into a python function (like jsps, but way shorter)
+        """
+        extr = []
+        def repl(match):
+                g = match.group
+                if g('dollar'):
+                        return "$"
+                elif g('backslash'):
+                        return "\\"
+                elif g('subst'):
+                        extr.append(g('code'))
+                        return "<<|@|>>"
+                return None
+
+        line2 = reg_act.sub(repl, line)
+        params = line2.split('<<|@|>>')
+        assert(extr)
+
+
+        indent = 0
+        buf = []
+        app = buf.append
+
+        def app(txt):
+                buf.append(indent * '\t' + txt)
+
+        for x in range(len(extr)):
+                if params[x]:
+                        app("lst.append(%r)" % params[x])
+
+                f = extr[x]
+                if f.startswith(('if', 'for')):
+                        app(f + ':')
+                        indent += 1
+                elif f.startswith('py:'):
+                        app(f[3:])
+                elif f.startswith(('endif', 'endfor')):
+                        indent -= 1
+                elif f.startswith(('else', 'elif')):
+                        indent -= 1
+                        app(f + ':')
+                        indent += 1
+                elif f.startswith('xml:'):
+                        app('lst.append(xml_escape(%s))' % f[4:])
+                else:
+                        #app('lst.append((%s) or "cannot find %s")' % (f, f))
+                        app('lst.append(%s)' % f)
+
+        if extr:
+                if params[-1]:
+                        app("lst.append(%r)" % params[-1])
+
+        fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+        #print(fun)
+        return Task.funex(fun)
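+
+# Worked example (hypothetical template): compile_template('Name=${project.name}')
+# returns a function f(project) producing 'Name=' + project.name; the
+# ${if ...}/${endif} and ${for ...}/${endfor} forms become indented Python
+# blocks in the generated function, and ${xml:expr} results are XML-escaped.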
+
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
+def rm_blank_lines(txt):
+        txt = re_blank.sub('\r\n', txt)
+        return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+        BOM = bytes(BOM, 'latin-1') # python 3
+except (TypeError, NameError):
+        pass
+
+def stealth_write(self, data, flags='wb'):
+        try:
+                unicode
+        except NameError:
+                data = data.encode('utf-8') # python 3
+        else:
+                data = data.decode(sys.getfilesystemencoding(), 'replace')
+                data = data.encode('utf-8')
+
+        if self.name.endswith('.project'):
+                data = BOM + data
+
+        try:
+                txt = self.read(flags='rb')
+                if txt != data:
+                        raise ValueError('must write')
+        except (IOError, ValueError):
+                self.write(data, flags=flags)
+        else:
+                Logs.debug('codelite: skipping %r', self)
+Node.Node.stealth_write = stealth_write
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+        return re_quote.sub("_", s)
+
+def xml_escape(value):
+        return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+        """
+        simple utility function
+        """
+        if isinstance(v, dict):
+                keys = list(v.keys())
+                keys.sort()
+                tmp = str([(k, v[k]) for k in keys])
+        else:
+                tmp = str(v)
+        d = Utils.md5(tmp.encode()).hexdigest().upper()
+        if prefix:
+                d = '%s%s' % (prefix, d[8:])
+        gid = uuid.UUID(d, version = 4)
+        return str(gid).upper()
+
+def diff(node, fromnode):
+        # difference between two nodes, but with "(..)" instead of ".."
+        c1 = node
+        c2 = fromnode
+
+        c1h = c1.height()
+        c2h = c2.height()
+
+        lst = []
+        up = 0
+
+        while c1h > c2h:
+                lst.append(c1.name)
+                c1 = c1.parent
+                c1h -= 1
+
+        while c2h > c1h:
+                up += 1
+                c2 = c2.parent
+                c2h -= 1
+
+        while id(c1) != id(c2):
+                lst.append(c1.name)
+                up += 1
+
+                c1 = c1.parent
+                c2 = c2.parent
+
+        for i in range(up):
+                lst.append('(..)')
+        lst.reverse()
+        return tuple(lst)
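+
+# Example: diff(node '/src/a/b.c', fromnode '/src') yields ('a', 'b.c');
+# when the first node is not under the second, each upward step appears
+# as a '(..)' segment instead of '..'.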
+
+class build_property(object):
+        pass
+
+class vsnode(object):
+        """
+        Abstract class representing visual studio elements
+        We assume that all visual studio nodes have a uuid and a parent
+        """
+        def __init__(self, ctx):
+                self.ctx = ctx # codelite context
+                self.name = '' # string, mandatory
+                self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+                self.uuid = '' # string, mandatory
+                self.parent = None # parent node for visual studio nesting
+
+        def get_waf(self):
+                """
+                Override in subclasses...
+                """
+                return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
+
+        def ptype(self):
+                """
+                Return a special uuid for projects written in the solution file
+                """
+                pass
+
+        def write(self):
+                """
+                Write the project file, by default, do nothing
+                """
+                pass
+
+        def make_uuid(self, val):
+                """
+                Alias for creating uuid values easily (the templates cannot access global variables)
+                """
+                return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+        """
+        Nodes representing visual studio folders (which do not match the filesystem tree!)
+        """
+        VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+        def __init__(self, ctx, uuid, name, vspath=''):
+                vsnode.__init__(self, ctx)
+                self.title = self.name = name
+                self.uuid = uuid
+                self.vspath = vspath or name
+
+        def ptype(self):
+                return self.VS_GUID_SOLUTIONFOLDER
+
+class vsnode_project(vsnode):
+        """
+        Abstract class representing visual studio project elements
+        A project is assumed to be writable, and has a node representing the file to write to
+        """
+        VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+        def ptype(self):
+                return self.VS_GUID_VCPROJ
+
+        def __init__(self, ctx, node):
+                vsnode.__init__(self, ctx)
+                self.path = node
+                self.uuid = make_uuid(node.abspath())
+                self.name = node.name
+                self.title = self.path.abspath()
+                self.source = [] # list of node objects
+                self.build_properties = [] # list of properties (nmake commands, output dir, etc)
+
+        def dirs(self):
+                """
+                Get the list of parent folders of the source files (header files included)
+                for writing the filters
+                """
+                lst = []
+                def add(x):
+                        if x.height() > self.tg.path.height() and x not in lst:
+                                lst.append(x)
+                                add(x.parent)
+                for x in self.source:
+                        add(x.parent)
+                return lst
+
+        def write(self):
+                Logs.debug('codelite: creating %r', self.path)
+                #print "self.name:",self.name
+
+                # first write the project file
+                template1 = compile_template(PROJECT_TEMPLATE)
+                proj_str = template1(self)
+                proj_str = rm_blank_lines(proj_str)
+                self.path.stealth_write(proj_str)
+
+                # then write the filter
+                #template2 = compile_template(FILTER_TEMPLATE)
+                #filter_str = template2(self)
+                #filter_str = rm_blank_lines(filter_str)
+                #tmp = self.path.parent.make_node(self.path.name + '.filters')
+                #tmp.stealth_write(filter_str)
+
+        def get_key(self, node):
+                """
+                required for writing the source files
+                """
+                name = node.name
+                if name.endswith(('.cpp', '.c')):
+                        return 'sourcefile'
+                return 'headerfile'
+
+        def collect_properties(self):
+                """
+                Returns a list of triplet (configuration, platform, output_directory)
+                """
+                ret = []
+                for c in self.ctx.configurations:
+                        for p in self.ctx.platforms:
+                                x = build_property()
+                                x.outdir = ''
+
+                                x.configuration = c
+                                x.platform = p
+
+                                x.preprocessor_definitions = ''
+                                x.includes_search_path = ''
+
+                                # can specify "deploy_dir" too
+                                ret.append(x)
+                self.build_properties = ret
+
+        def get_build_params(self, props):
+                opt = ''
+                return (self.get_waf(), opt)
+
+        def get_build_command(self, props):
+                return "%s build %s" % self.get_build_params(props)
+
+        def get_clean_command(self, props):
+                return "%s clean %s" % self.get_build_params(props)
+
+        def get_rebuild_command(self, props):
+                return "%s clean build %s" % self.get_build_params(props)
+                
+        def get_install_command(self, props):
+                return "%s install %s" % self.get_build_params(props)
+        def get_build_and_install_command(self, props):
+                return "%s build install %s" % self.get_build_params(props)
+                
+        def get_build_and_install_all_command(self, props):
+                return "%s build install" % self.get_build_params(props)[0]
+                
+        def get_clean_all_command(self, props):
+                return "%s clean" % self.get_build_params(props)[0]
+        
+        def get_build_all_command(self, props):
+                return "%s build" % self.get_build_params(props)[0]
+                
+        def get_rebuild_all_command(self, props):
+                return "%s clean build" % self.get_build_params(props)[0]
+
+        def get_filter_name(self, node):
+                lst = diff(node, self.tg.path)
+                return '\\'.join(lst) or '.'
+
+class vsnode_alias(vsnode_project):
+        def __init__(self, ctx, node, name):
+                vsnode_project.__init__(self, ctx, node)
+                self.name = name
+                self.output_file = ''
+
+class vsnode_build_all(vsnode_alias):
+        """
+        Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
+        This is the only alias enabled by default
+        """
+        def __init__(self, ctx, node, name='build_all_projects'):
+                vsnode_alias.__init__(self, ctx, node, name)
+                self.is_active = True
+
+class vsnode_install_all(vsnode_alias):
+        """
+        Fake target used to emulate the behaviour of "make install"
+        """
+        def __init__(self, ctx, node, name='install_all_projects'):
+                vsnode_alias.__init__(self, ctx, node, name)
+
+        def get_build_command(self, props):
+                return "%s build install %s" % self.get_build_params(props)
+
+        def get_clean_command(self, props):
+                return "%s clean %s" % self.get_build_params(props)
+
+        def get_rebuild_command(self, props):
+                return "%s clean build install %s" % self.get_build_params(props)
+
+class vsnode_project_view(vsnode_alias):
+        """
+        Fake target used to emulate a file system view
+        """
+        def __init__(self, ctx, node, name='project_view'):
+                vsnode_alias.__init__(self, ctx, node, name)
+                self.tg = self.ctx() # fake one, cannot remove
+                self.exclude_files = Node.exclude_regs + '''
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
+**/*.sdf
+**/*.suo
+**/*.ncb
+**/%s
+                ''' % Options.lockfile
+
+        def collect_source(self):
+                # this is likely to be slow
+                self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
+
+        def get_build_command(self, props):
+                params = self.get_build_params(props) + (self.ctx.cmd,)
+                return "%s %s %s" % params
+
+        def get_clean_command(self, props):
+                return ""
+
+        def get_rebuild_command(self, props):
+                return self.get_build_command(props)
+
+class vsnode_target(vsnode_project):
+        """
+        CodeLite project representing a target (program, library, etc.), bound
+        to a task generator
+        """
+        def __init__(self, ctx, tg):
+                """
+                A project is more or less equivalent to a file/folder
+                """
+                base = getattr(ctx, 'projects_dir', None) or tg.path
+                node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
+                vsnode_project.__init__(self, ctx, node)
+                self.name = quote(tg.name)
+                self.tg     = tg  # task generator
+
+        def get_build_params(self, props):
+                """
+                Override the default to add the target name
+                """
+                opt = ''
+                if getattr(self, 'tg', None):
+                        opt += " --targets=%s" % self.tg.name
+                return (self.get_waf(), opt)
+
+        def collect_source(self):
+                tg = self.tg
+                source_files = tg.to_nodes(getattr(tg, 'source', []))
+                include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
+                include_files = []
+                for x in include_dirs:
+                        if isinstance(x, str):
+                                x = tg.path.find_node(x)
+                        if x:
+                                lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
+                                include_files.extend(lst)
+
+                # remove duplicates
+                self.source.extend(list(set(source_files + include_files)))
+                self.source.sort(key=lambda x: x.abspath())
+
+        def collect_properties(self):
+                """
+                CodeLite projects are associated with platforms and configurations (for building especially)
+                """
+                super(vsnode_target, self).collect_properties()
+                for x in self.build_properties:
+                        x.outdir = self.path.parent.abspath()
+                        x.preprocessor_definitions = ''
+                        x.includes_search_path = ''
+
+                        try:
+                                tsk = self.tg.link_task
+                        except AttributeError:
+                                pass
+                        else:                                
+                                x.output_file = tsk.outputs[0].abspath()
+                                x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
+                                x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
+class codelite_generator(BuildContext):
+        '''generates a CodeLite workspace'''
+        cmd = 'codelite'
+        fun = 'build'
+
+        def init(self):
+                """
+                Some data that needs to be present
+                """
+                if not getattr(self, 'configurations', None):
+                        self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
+                if not getattr(self, 'platforms', None):
+                        self.platforms = ['Win32']
+                if not getattr(self, 'all_projects', None):
+                        self.all_projects = []
+                if not getattr(self, 'project_extension', None):
+                        self.project_extension = '.project'
+                if not getattr(self, 'projects_dir', None):
+                        self.projects_dir = self.srcnode.make_node('')
+                        self.projects_dir.mkdir()
+
+                # bind the classes to the object, so that subclass can provide custom generators
+                if not getattr(self, 'vsnode_vsdir', None):
+                        self.vsnode_vsdir = vsnode_vsdir
+                if not getattr(self, 'vsnode_target', None):
+                        self.vsnode_target = vsnode_target
+                if not getattr(self, 'vsnode_build_all', None):
+                        self.vsnode_build_all = vsnode_build_all
+                if not getattr(self, 'vsnode_install_all', None):
+                        self.vsnode_install_all = vsnode_install_all
+                if not getattr(self, 'vsnode_project_view', None):
+                        self.vsnode_project_view = vsnode_project_view
+
+                self.numver = '11.00'
+                self.vsver  = '2010'
+
+        def execute(self):
+                """
+                Entry point
+                """
+                self.restore()
+                if not self.all_envs:
+                        self.load_envs()
+                self.recurse([self.run_dir])
+
+                # user initialization
+                self.init()
+
+                # two phases for creating the solution
+                self.collect_projects() # add project objects into "self.all_projects"
+                self.write_files() # write the corresponding project and solution files
+
+        def collect_projects(self):
+                """
+                Fill the list self.all_projects with project objects
+                Fill the list of build targets
+                """
+                self.collect_targets()
+                #self.add_aliases()
+                #self.collect_dirs()
+                default_project = getattr(self, 'default_project', None)
+                def sortfun(x):
+                        if x.name == default_project:
+                                return ''
+                        return getattr(x, 'path', None) and x.path.abspath() or x.name
+                self.all_projects.sort(key=sortfun)
+
+        def write_files(self):
+                """
+                Write the project and solution files from the data collected
+                so far. It is unlikely that you will want to change this
+                """
+                for p in self.all_projects:
+                        p.write()
+
+                # and finally write the solution file
+                node = self.get_solution_node()
+                node.parent.mkdir()
+                Logs.warn('Creating %r', node)
+                #a = dir(self.root)
+                #for b in a:
+                #        print b
+                #print self.group_names
+                #print "Hallo2:   ",self.root.listdir()
+                #print getattr(self, 'codelite_solution_name', None)
+                template1 = compile_template(SOLUTION_TEMPLATE)
+                sln_str = template1(self)
+                sln_str = rm_blank_lines(sln_str)
+                node.stealth_write(sln_str)
+
+        def get_solution_node(self):
+                """
+                The solution filename is required when writing the .vcproj files
+                return self.solution_node and if it does not exist, make one
+                """
+                try:
+                        return self.solution_node
+                except AttributeError:
+                        pass
+
+                codelite_solution_name = getattr(self, 'codelite_solution_name', None)
+                if not codelite_solution_name:
+                        codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
+                        setattr(self, 'codelite_solution_name', codelite_solution_name)
+                if os.path.isabs(codelite_solution_name):
+                        self.solution_node = self.root.make_node(codelite_solution_name)
+                else:
+                        self.solution_node = self.srcnode.make_node(codelite_solution_name)
+                return self.solution_node
+
+        def project_configurations(self):
+                """
+                Helper that returns all the pairs (config,platform)
+                """
+                ret = []
+                for c in self.configurations:
+                        for p in self.platforms:
+                                ret.append((c, p))
+                return ret
+
+        def collect_targets(self):
+                """
+                Process the list of task generators
+                """
+                for g in self.groups:
+                        for tg in g:
+                                if not isinstance(tg, TaskGen.task_gen):
+                                        continue
+
+                                if not hasattr(tg, 'codelite_includes'):
+                                        tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
+                                tg.post()
+                                if not getattr(tg, 'link_task', None):
+                                        continue
+
+                                p = self.vsnode_target(self, tg)
+                                p.collect_source() # delegate this processing
+                                p.collect_properties()                               
+                                self.all_projects.append(p)
+
+        def add_aliases(self):
+                """
+                Add a specific target that emulates the "make all" behaviour of Visual Studio when pressing F7.
+                We also add an alias for "make install" (disabled by default)
+                """
+                base = getattr(self, 'projects_dir', None) or self.tg.path
+
+                node_project = base.make_node('build_all_projects' + self.project_extension) # Node
+                p_build = self.vsnode_build_all(self, node_project)
+                p_build.collect_properties()
+                self.all_projects.append(p_build)
+
+                node_project = base.make_node('install_all_projects' + self.project_extension) # Node
+                p_install = self.vsnode_install_all(self, node_project)
+                p_install.collect_properties()
+                self.all_projects.append(p_install)
+
+                node_project = base.make_node('project_view' + self.project_extension) # Node
+                p_view = self.vsnode_project_view(self, node_project)
+                p_view.collect_source()
+                p_view.collect_properties()
+                self.all_projects.append(p_view)
+
+                n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
+                p_build.parent = p_install.parent = p_view.parent = n
+                self.all_projects.append(n)
+
+        def collect_dirs(self):
+                """
+                Create the folder structure in the CodeLite project view
+                """
+                seen = {}
+                def make_parents(proj):
+                        # look at a project, try to make a parent
+                        if getattr(proj, 'parent', None):
+                                # aliases already have parents
+                                return
+                        x = proj.iter_path
+                        if x in seen:
+                                proj.parent = seen[x]
+                                return
+
+                        # There is no vsnode_vsdir for x yet,
+                        # so create a project representing the folder "x"
+                        n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
+                        n.iter_path = x.parent
+                        self.all_projects.append(n)
+
+                        # recurse up to the project directory
+                        if x.height() > self.srcnode.height() + 1:
+                                make_parents(n)
+
+                for p in self.all_projects[:]: # iterate over a copy of all projects
+                        if not getattr(p, 'tg', None):
+                                # but only projects that have a task generator
+                                continue
+
+                        # make a folder for each task generator
+                        p.iter_path = p.tg.path
+                        make_parents(p)
+
diff --git a/third_party/waf/waflib/extras/color_gcc.py b/third_party/waf/waflib/extras/color_gcc.py
new file mode 100644
index 0000000..0972903
--- /dev/null
+++ b/third_party/waf/waflib/extras/color_gcc.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter with one which understands GCC output and colorizes it.
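+#
+# Usage (a minimal sketch, assuming a standard wscript; loading the tool in
+# the options phase installs the colorizing formatter):
+#
+#	def options(opt):
+#		opt.load('color_gcc')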
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+from waflib import Logs
+
+class ColorGCCFormatter(Logs.formatter):
+	def __init__(self, colors):
+		self.colors = colors
+		Logs.formatter.__init__(self)
+	def format(self, rec):
+		frame = sys._getframe()
+		while frame:
+			func = frame.f_code.co_name
+			if func == 'exec_command':
+				cmd = frame.f_locals.get('cmd')
+				if isinstance(cmd, list) and (len(cmd) > 0) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
+					lines = []
+					for line in rec.msg.splitlines():
+						if 'warning: ' in line:
+							lines.append(self.colors.YELLOW + line)
+						elif 'error: ' in line:
+							lines.append(self.colors.RED + line)
+						elif 'note: ' in line:
+							lines.append(self.colors.CYAN + line)
+						else:
+							lines.append(line)
+					rec.msg = "\n".join(lines)
+			frame = frame.f_back
+		return Logs.formatter.format(self, rec)
+
+def options(opt):
+	Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
+
diff --git a/third_party/waf/waflib/extras/color_msvc.py b/third_party/waf/waflib/extras/color_msvc.py
new file mode 100644
index 0000000..60bacb7
--- /dev/null
+++ b/third_party/waf/waflib/extras/color_msvc.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter with one which understands MSVC output and colorizes it.
+# Modified from color_gcc.py
+
+__author__ = __maintainer__ = "Alibek Omarov <a1ba.omarov@gmail.com>"
+__copyright__ = "Alibek Omarov, 2019"
+
+import sys
+from waflib import Logs
+
+class ColorMSVCFormatter(Logs.formatter):
+	def __init__(self, colors):
+		self.colors = colors
+		Logs.formatter.__init__(self)
+	
+	def parseMessage(self, line, color):
+		# Split a message of the form 'disk:filepath: type: message'
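+		# e.g. an illustrative 'C:\src\foo.c(12): error C2065: ...' splits into
+		# 'C', '\src\foo.c(12)', ' error C2065' and the remainder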
+		arr = line.split(':', 3)
+		if len(arr) < 4:
+			return line
+		
+		colored = self.colors.BOLD + arr[0] + ':' + arr[1] + ':' + self.colors.NORMAL
+		colored += color + arr[2] + ':' + self.colors.NORMAL
+		colored += arr[3]
+		return colored
+	
+	def format(self, rec):
+		frame = sys._getframe()
+		while frame:
+			func = frame.f_code.co_name
+			if func == 'exec_command':
+				cmd = frame.f_locals.get('cmd')
+				if isinstance(cmd, list):
+					# Normalize the case: the file may be CL.EXE or cl.exe
+					argv0 = cmd[0].lower()
+					if 'cl.exe' in argv0:
+						lines = []
+						# This will not work with "localized" versions
+						# of MSVC
+						for line in rec.msg.splitlines():
+							if ': warning ' in line:
+								lines.append(self.parseMessage(line, self.colors.YELLOW))
+							elif ': error ' in line:
+								lines.append(self.parseMessage(line, self.colors.RED))
+							elif ': fatal error ' in line:
+								lines.append(self.parseMessage(line, self.colors.RED + self.colors.BOLD))
+							elif ': note: ' in line:
+								lines.append(self.parseMessage(line, self.colors.CYAN))
+							else:
+								lines.append(line)
+						rec.msg = "\n".join(lines)
+			frame = frame.f_back
+		return Logs.formatter.format(self, rec)
+
+def options(opt):
+	Logs.log.handlers[0].setFormatter(ColorMSVCFormatter(Logs.colors))
+
diff --git a/third_party/waf/waflib/extras/color_rvct.py b/third_party/waf/waflib/extras/color_rvct.py
new file mode 100644
index 0000000..f89ccbd
--- /dev/null
+++ b/third_party/waf/waflib/extras/color_rvct.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter with one which understands RVCT output and colorizes it.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+import atexit
+from waflib import Logs
+
+errors = []
+
+def show_errors():
+	for i, e in enumerate(errors):
+		if i > 5:
+			break
+		print("Error: %s" % e)
+
+atexit.register(show_errors)
+
+class RvctFormatter(Logs.formatter):
+	def __init__(self, colors):
+		Logs.formatter.__init__(self)
+		self.colors = colors
+	def format(self, rec):
+		frame = sys._getframe()
+		while frame:
+			func = frame.f_code.co_name
+			if func == 'exec_command':
+				cmd = frame.f_locals.get('cmd')
+				if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
+					lines = []
+					for line in rec.msg.splitlines():
+						if 'Warning: ' in line:
+							lines.append(self.colors.YELLOW + line)
+						elif 'Error: ' in line:
+							lines.append(self.colors.RED + line)
+							errors.append(line)
+						elif 'note: ' in line:
+							lines.append(self.colors.CYAN + line)
+						else:
+							lines.append(line)
+					rec.msg = "\n".join(lines)
+			frame = frame.f_back
+		return Logs.formatter.format(self, rec)
+
+def options(opt):
+	Logs.log.handlers[0].setFormatter(RvctFormatter(Logs.colors))
+
diff --git a/third_party/waf/waflib/extras/compat15.py b/third_party/waf/waflib/extras/compat15.py
new file mode 100644
index 0000000..0e74df8
--- /dev/null
+++ b/third_party/waf/waflib/extras/compat15.py
@@ -0,0 +1,406 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This file is provided to enable compatibility with waf 1.5
+It was enabled by default in waf 1.6, but it is not used in waf 1.7
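+
+A legacy wscript that this shim keeps working might look like the following
+(an illustrative sketch; "Utils" and "set_options" are the waf 1.5 spellings)::
+
+	import Utils                      # remapped below via sys.modules
+	def set_options(opt):             # renamed to "options" by load_module()
+		opt.tool_options('compiler_cc')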
+"""
+
+import sys
+from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
+
+# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
+sys.modules['Environment'] = ConfigSet
+ConfigSet.Environment = ConfigSet.ConfigSet
+
+sys.modules['Logs'] = Logs
+sys.modules['Options'] = Options
+sys.modules['Scripting'] = Scripting
+sys.modules['Task'] = Task
+sys.modules['Build'] = Build
+sys.modules['Configure'] = Configure
+sys.modules['Node'] = Node
+sys.modules['Runner'] = Runner
+sys.modules['TaskGen'] = TaskGen
+sys.modules['Utils'] = Utils
+sys.modules['Constants'] = Context
+Context.SRCDIR = ''
+Context.BLDDIR = ''
+
+from waflib.Tools import c_preproc
+sys.modules['preproc'] = c_preproc
+
+from waflib.Tools import c_config
+sys.modules['config_c'] = c_config
+
+ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant = Utils.nada
+
+Utils.pproc = Utils.subprocess
+
+Build.BuildContext.add_subdirs = Build.BuildContext.recurse
+Build.BuildContext.new_task_gen = Build.BuildContext.__call__
+Build.BuildContext.is_install = 0
+Node.Node.relpath_gen = Node.Node.path_from
+
+Utils.get_term_cols = Logs.get_term_cols
+
+def cmd_output(cmd, **kw):
+
+	silent = False
+	if 'silent' in kw:
+		silent = kw['silent']
+		del(kw['silent'])
+
+	if 'e' in kw:
+		tmp = kw['e']
+		del(kw['e'])
+		kw['env'] = tmp
+
+	kw['shell'] = isinstance(cmd, str)
+	kw['stdout'] = Utils.subprocess.PIPE
+	if silent:
+		kw['stderr'] = Utils.subprocess.PIPE
+
+	try:
+		p = Utils.subprocess.Popen(cmd, **kw)
+		output = p.communicate()[0]
+	except OSError as e:
+		raise ValueError(str(e))
+
+	if p.returncode:
+		if not silent:
+			msg = "command execution failed: %s -> %r" % (cmd, str(output))
+			raise ValueError(msg)
+		output = ''
+	return output
+Utils.cmd_output = cmd_output
+
+def name_to_obj(self, s, env=None):
+	if Logs.verbose:
+		Logs.warn('compat: replace "name_to_obj(name, env)" with "get_tgen_by_name(name)"')
+	return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj = name_to_obj
+
+def env_of_name(self, name):
+	try:
+		return self.all_envs[name]
+	except KeyError:
+		Logs.error('no such environment: '+name)
+		return None
+Build.BuildContext.env_of_name = env_of_name
+
+
+def set_env_name(self, name, env):
+	self.all_envs[name] = env
+	return env
+Configure.ConfigurationContext.set_env_name = set_env_name
+
+def retrieve(self, name, fromenv=None):
+	try:
+		env = self.all_envs[name]
+	except KeyError:
+		env = ConfigSet.ConfigSet()
+		self.prepare_env(env)
+		self.all_envs[name] = env
+	else:
+		if fromenv:
+			Logs.warn('The environment %s may have been configured already', name)
+	return env
+Configure.ConfigurationContext.retrieve = retrieve
+
+Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
+Configure.conftest = Configure.conf
+Configure.ConfigurationError = Errors.ConfigurationError
+Utils.WafError = Errors.WafError
+
+Options.OptionsContext.sub_options = Options.OptionsContext.recurse
+Options.OptionsContext.tool_options = Context.Context.load
+Options.Handler = Options.OptionsContext
+
+Task.simple_task_type = Task.task_type_from_func = Task.task_factory
+Task.Task.classes = Task.classes
+
+def setitem(self, key, value):
+	if key.startswith('CCFLAGS'):
+		key = key[1:]
+	self.table[key] = value
+ConfigSet.ConfigSet.__setitem__ = setitem
+
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+	if getattr(self, 'importpaths', []):
+		self.includes = self.importpaths
+
+from waflib import Context
+eld = Context.load_tool
+def load_tool(*k, **kw):
+	ret = eld(*k, **kw)
+	if 'set_options' in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "set_options" to "options"')
+		ret.options = ret.set_options
+	if 'detect' in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "detect" to "configure"')
+		ret.configure = ret.detect
+	return ret
+Context.load_tool = load_tool
+
+def get_curdir(self):
+	return self.path.abspath()
+Context.Context.curdir = property(get_curdir, Utils.nada)
+
+def get_srcdir(self):
+	return self.srcnode.abspath()
+Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
+
+def get_blddir(self):
+	return self.bldnode.abspath()
+Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
+
+Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
+Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
+
+rev = Context.load_module
+def load_module(path, encoding=None):
+	ret = rev(path, encoding)
+	if 'set_options' in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "set_options" to "options" (%r)', path)
+		ret.options = ret.set_options
+	if 'srcdir' in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
+		ret.top = ret.srcdir
+	if 'blddir' in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "blddir" to "out" (%r)', path)
+		ret.out = ret.blddir
+	Utils.g_module = Context.g_module
+	Options.launch_dir = Context.launch_dir
+	return ret
+Context.load_module = load_module
+
+old_post = TaskGen.task_gen.post
+def post(self):
+	self.features = self.to_list(self.features)
+	if 'cc' in self.features:
+		if Logs.verbose:
+			Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+		self.features.remove('cc')
+		self.features.append('c')
+	if 'cstaticlib' in self.features:
+		if Logs.verbose:
+			Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+		self.features.remove('cstaticlib')
+		self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
+	if getattr(self, 'ccflags', None):
+		if Logs.verbose:
+			Logs.warn('compat: "ccflags" was renamed to "cflags"')
+		self.cflags = self.ccflags
+	return old_post(self)
+TaskGen.task_gen.post = post
+
+def waf_version(*k, **kw):
+	Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version = waf_version
+
+
+import os
+@TaskGen.feature('c', 'cxx', 'd')
+@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
+@TaskGen.after('apply_link', 'process_source')
+def apply_uselib_local(self):
+	"""
+	process the uselib_local attribute
+	execute after apply_link because of the execution order set on 'link_task'
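+
+	Illustrative use (waf 1.5 style, now spelled "use")::
+
+		bld(features='c cprogram', source='main.c', target='app', uselib_local='mylib')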
+	"""
+	env = self.env
+	from waflib.Tools.ccroot import stlink_task
+
+	# 1. the case of the libs defined in the project (visit ancestors first)
+	# the ancestors external libraries (uselib) will be prepended
+	self.uselib = self.to_list(getattr(self, 'uselib', []))
+	self.includes = self.to_list(getattr(self, 'includes', []))
+	names = self.to_list(getattr(self, 'uselib_local', []))
+	get = self.bld.get_tgen_by_name
+	seen = set()
+	seen_uselib = set()
+	tmp = Utils.deque(names) # consume a copy of the list of names
+	if tmp:
+		if Logs.verbose:
+			Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+	while tmp:
+		lib_name = tmp.popleft()
+		# visit dependencies only once
+		if lib_name in seen:
+			continue
+
+		y = get(lib_name)
+		y.post()
+		seen.add(lib_name)
+
+		# object has ancestors to process (shared libraries): add them to the end of the list
+		if getattr(y, 'uselib_local', None):
+			for x in self.to_list(getattr(y, 'uselib_local', [])):
+				obj = get(x)
+				obj.post()
+				if getattr(obj, 'link_task', None):
+					if not isinstance(obj.link_task, stlink_task):
+						tmp.append(x)
+
+		# link task and flags
+		if getattr(y, 'link_task', None):
+
+			link_name = y.target[y.target.rfind(os.sep) + 1:]
+			if isinstance(y.link_task, stlink_task):
+				env.append_value('STLIB', [link_name])
+			else:
+				# some linkers can link against programs
+				env.append_value('LIB', [link_name])
+
+			# the order
+			self.link_task.set_run_after(y.link_task)
+
+			# for the recompilation
+			self.link_task.dep_nodes += y.link_task.outputs
+
+			# add the link path too
+			tmp_path = y.link_task.outputs[0].parent.bldpath()
+			if not tmp_path in env['LIBPATH']:
+				env.prepend_value('LIBPATH', [tmp_path])
+
+		# add ancestors uselib too - but only propagate those that have no staticlib defined
+		for v in self.to_list(getattr(y, 'uselib', [])):
+			if v not in seen_uselib:
+				seen_uselib.add(v)
+				if not env['STLIB_' + v]:
+					if not v in self.uselib:
+						self.uselib.insert(0, v)
+
+		# if the library task generator provides 'export_includes', add to the include path
+		# the export_includes must be a list of paths relative to the other library
+		if getattr(y, 'export_includes', None):
+			self.includes.extend(y.to_incnodes(y.export_includes))
+
+@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+	"add the .o files produced by some other object files in the same manner as uselib_local"
+	names = getattr(self, 'add_objects', [])
+	if not names:
+		return
+	names = self.to_list(names)
+
+	get = self.bld.get_tgen_by_name
+	seen = []
+	while names:
+		x = names[0]
+
+		# visit dependencies only once
+		if x in seen:
+			names = names[1:]
+			continue
+
+		# object does not exist ?
+		y = get(x)
+
+		# object has ancestors to process first ? update the list of names
+		if getattr(y, 'add_objects', None):
+			added = 0
+			lst = y.to_list(y.add_objects)
+			lst.reverse()
+			for u in lst:
+				if u in seen:
+					continue
+				added = 1
+				names = [u]+names
+			if added:
+				continue # list of names modified, loop
+
+		# safe to process the current object
+		y.post()
+		seen.append(x)
+
+		for t in getattr(y, 'compiled_tasks', []):
+			self.link_task.inputs.extend(t.outputs)
+
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+	if not hasattr(self, 'obj_files'):
+		return
+	for x in self.obj_files:
+		node = self.path.find_resource(x)
+		self.link_task.inputs.append(node)
+
+@TaskGen.taskgen_method
+def add_obj_file(self, file):
+	"""Small example on how to link object files as if they were source
+	obj = bld.create_obj('cc')
+	obj.add_obj_file('foo.o')"""
+	if not hasattr(self, 'obj_files'):
+		self.obj_files = []
+	if not 'process_obj_files' in self.meths:
+		self.meths.append('process_obj_files')
+	self.obj_files.append(file)
+
+
+old_define = Configure.ConfigurationContext.__dict__['define']
+
+@Configure.conf
+def define(self, key, val, quote=True, comment=''):
+	old_define(self, key, val, quote, comment)
+	if key.startswith('HAVE_'):
+		self.env[key] = 1
+
+old_undefine = Configure.ConfigurationContext.__dict__['undefine']
+
+@Configure.conf
+def undefine(self, key, comment=''):
+	old_undefine(self, key, comment)
+	if key.startswith('HAVE_'):
+		self.env[key] = 0
+
+# some people might want to use export_incdirs, but it was renamed
+def set_incdirs(self, val):
+	Logs.warn('compat: replace "export_incdirs" with "export_includes"')
+	self.export_includes = val
+TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
+
+def install_dir(self, path):
+	if not path:
+		return []
+
+	destpath = Utils.subst_vars(path, self.env)
+
+	if self.is_install > 0:
+		Logs.info('* creating %s', destpath)
+		Utils.check_dir(destpath)
+	elif self.is_install < 0:
+		Logs.info('* removing %s', destpath)
+		try:
+			os.remove(destpath)
+		except OSError:
+			pass
+Build.BuildContext.install_dir = install_dir
+
+# before/after names
+repl = {'apply_core': 'process_source',
+	'apply_lib_vars': 'process_source',
+	'apply_obj_vars': 'propagate_uselib_vars',
+	'exec_rule': 'process_rule'
+}
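+# e.g. a waf 1.5 decorator @after('apply_core') is rewritten by the wrappers
+# below to TaskGen.after_method('process_source')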
+def after(*k):
+	k = [repl.get(key, key) for key in k]
+	return TaskGen.after_method(*k)
+TaskGen.after = after
+
+def before(*k):
+	k = [repl.get(key, key) for key in k]
+	return TaskGen.before_method(*k)
+TaskGen.before = before
+
diff --git a/third_party/waf/waflib/extras/cppcheck.py b/third_party/waf/waflib/extras/cppcheck.py
new file mode 100644
index 0000000..13ff424
--- /dev/null
+++ b/third_party/waf/waflib/extras/cppcheck.py
@@ -0,0 +1,591 @@
+#! /usr/bin/env python
+# -*- encoding: utf-8 -*-
+# Michel Mooij, michel.mooij7@gmail.com
+
+"""
+Tool Description
+================
+This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
+checking tool 'cppcheck'.
+
+See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
+itself.
+Note that many Linux distributions already provide a ready-to-install version
+of cppcheck. On Fedora, for instance, it can be installed using yum:
+
+	'sudo yum install cppcheck'
+
+
+Usage
+=====
+In order to use this waftool simply add it to the 'options' and 'configure'
+functions of your main waf script as shown in the example below:
+
+	def options(opt):
+		opt.load('cppcheck', tooldir='./waftools')
+
+	def configure(conf):
+		conf.load('cppcheck')
+
+Note that the example shown above assumes that the cppcheck waftool is located
+in the subdirectory named 'waftools'.
+
+When configured as shown in the example above, cppcheck will automatically
+perform a source code analysis on all C/C++ build tasks that have been
+defined in your waf build system.
+
+The example shown below for a C program will be used as input for cppcheck when
+building the task.
+
+	def build(bld):
+		bld.program(name='foo', source='foobar.c')
+
+The result of the source code analysis will be stored both as xml and html
+files in the build location for the task. Should any error be detected by
+cppcheck, the build will be aborted and a link to the html report will be shown.
+By default, one index.html file is created for each task generator. A global
+index.html file can be obtained by setting the following variable
+in the configuration section:
+
+	conf.env.CPPCHECK_SINGLE_HTML = False
+
+When needed, source code checking by cppcheck can be disabled per task, or per
+detected error or warning for a particular task. It can also be disabled for
+all tasks.
+
+In order to exclude a task from source code checking, add the skip option to the
+task as shown below:
+
+	def build(bld):
+		bld.program(
+				name='foo',
+				source='foobar.c',
+				cppcheck_skip=True
+		)
+
+When needed, problems detected by cppcheck may be suppressed using a file
+containing a list of suppression rules. The relative or absolute path to this
+file can be added to the build task as shown in the example below:
+
+		bld.program(
+				name='bar',
+				source='foobar.c',
+				cppcheck_suppress='bar.suppress'
+		)
+
+A cppcheck suppress file should contain one suppress rule per line. Each of
+these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
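+
+For instance, such a file could look like this (an illustrative sketch; the
+general cppcheck rule format is 'errorId:fileName:lineNumber'):
+
+	memleak:src/foo.c
+	nullPointer:src/bar.c:12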
+
+Dependencies
+================
+This waftool depends on the python pygments module, it is used for source code
+syntax highlighting when creating the html reports. see http://pygments.org/ for
+more information on this package.
+
+Remarks
+================
+The generation of the html report is originally based on the cppcheck-htmlreport.py
+script that ships with the cppcheck tool.
+"""
+
+import sys
+import xml.etree.ElementTree as ElementTree
+from waflib import Task, TaskGen, Logs, Context, Options
+
+PYGMENTS_EXC_MSG = '''
+The required module 'pygments' could not be found. Please install it using your
+platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
+see 'http://pygments.org/download/' for installation instructions.
+'''
+
+try:
+	import pygments
+	from pygments import formatters, lexers
+except ImportError as e:
+	Logs.warn(PYGMENTS_EXC_MSG)
+	raise e
+
+
+def options(opt):
+	opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
+		default=False, action='store_true',
+		help='do not check C/C++ sources (default=False)')
+
+	opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
+		default=False, action='store_true',
+		help='continue in case of errors (default=False)')
+
+	opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
+		default='warning,performance,portability,style,unusedFunction', action='store',
+		help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
+
+	opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
+		default='warning,performance,portability,style', action='store',
+		help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
+
+	opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
+		default='c99', action='store',
+		help='cppcheck standard to use when checking C (default=c99)')
+
+	opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
+		default='c++03', action='store',
+		help='cppcheck standard to use when checking C++ (default=c++03)')
+
+	opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
+		default=False, action='store_true',
+		help='forced check for missing builtin include files, e.g. stdio.h (default=False)')
+
+	opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
+		default='20', action='store',
+		help='maximum preprocessor (--max-configs) define iterations (default=20)')
+
+	opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
+		default='1', action='store',
+		help='number of jobs (-j) to do the checking work (default=1)')
+
+def configure(conf):
+	if conf.options.cppcheck_skip:
+		conf.env.CPPCHECK_SKIP = [True]
+	conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
+	conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
+	conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
+	conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
+	conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
+	conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
+	if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
+		Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
+	conf.find_program('cppcheck', var='CPPCHECK')
+
+	# set to True to get a single index.html file
+	conf.env.CPPCHECK_SINGLE_HTML = False
+
+@TaskGen.feature('c')
+@TaskGen.feature('cxx')
+def cppcheck_execute(self):
+	if hasattr(self.bld, 'conf'):
+		return
+	if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
+		return
+	if getattr(self, 'cppcheck_skip', False):
+		return
+	task = self.create_task('cppcheck')
+	task.cmd = _tgen_create_cmd(self)
+	task.fatal = []
+	if not Options.options.cppcheck_err_resume:
+		task.fatal.append('error')
+
+
+def _tgen_create_cmd(self):
+	features = getattr(self, 'features', [])
+	std_c = self.env.CPPCHECK_STD_C
+	std_cxx = self.env.CPPCHECK_STD_CXX
+	max_configs = self.env.CPPCHECK_MAX_CONFIGS
+	bin_enable = self.env.CPPCHECK_BIN_ENABLE
+	lib_enable = self.env.CPPCHECK_LIB_ENABLE
+	jobs = self.env.CPPCHECK_JOBS
+
+	cmd  = self.env.CPPCHECK
+	args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
+	args.append('--max-configs=%s' % max_configs)
+	args.append('-j %s' % jobs)
+
+	if 'cxx' in features:
+		args.append('--language=c++')
+		args.append('--std=%s' % std_cxx)
+	else:
+		args.append('--language=c')
+		args.append('--std=%s' % std_c)
+
+	if Options.options.cppcheck_check_config:
+		args.append('--check-config')
+
+	if set(['cprogram','cxxprogram']) & set(features):
+		args.append('--enable=%s' % bin_enable)
+	else:
+		args.append('--enable=%s' % lib_enable)
+
+	for src in self.to_list(getattr(self, 'source', [])):
+		if not isinstance(src, str):
+			src = repr(src)
+		args.append(src)
+	for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
+		if not isinstance(inc, str):
+			inc = repr(inc)
+		args.append('-I%s' % inc)
+	for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
+		if not isinstance(inc, str):
+			inc = repr(inc)
+		args.append('-I%s' % inc)
+	return cmd + args
+
+
+class cppcheck(Task.Task):
+	quiet = True
+
+	def run(self):
+		stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
+		self._save_xml_report(stderr)
+		defects = self._get_defects(stderr)
+		index = self._create_html_report(defects)
+		self._errors_evaluate(defects, index)
+		return 0
+
+	def _save_xml_report(self, s):
+		'''use cppcheck xml result string, add the command string used to invoke cppcheck
+		and save as xml file.
+		'''
+		header = '%s\n' % s.splitlines()[0]
+		root = ElementTree.fromstring(s)
+		cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
+		cmd.text = str(self.cmd)
+		body = ElementTree.tostring(root).decode('us-ascii')
+		body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
+		if self.env.CPPCHECK_SINGLE_HTML:
+			body_html_name = 'cppcheck.xml'
+		node = self.generator.path.get_bld().find_or_declare(body_html_name)
+		node.write(header + body)
+
+	def _get_defects(self, xml_string):
+		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
+		a list of defects.
+		'''
+		defects = []
+		for error in ElementTree.fromstring(xml_string).iter('error'):
+			defect = {}
+			defect['id'] = error.get('id')
+			defect['severity'] = error.get('severity')
+			defect['msg'] = str(error.get('msg')).replace('<','&lt;')
+			defect['verbose'] = error.get('verbose')
+			for location in error.findall('location'):
+				defect['file'] = location.get('file')
+				defect['line'] = str(int(location.get('line')) - 1)
+			defects.append(defect)
+		return defects
+
+	def _create_html_report(self, defects):
+		files, css_style_defs = self._create_html_files(defects)
+		index = self._create_html_index(files)
+		self._create_css_file(css_style_defs)
+		return index
+
+	def _create_html_files(self, defects):
+		sources = {}
+		defects = [defect for defect in defects if 'file' in defect]
+		for defect in defects:
+			name = defect['file']
+			if not name in sources:
+				sources[name] = [defect]
+			else:
+				sources[name].append(defect)
+
+		files = {}
+		css_style_defs = None
+		bpath = self.generator.path.get_bld().abspath()
+		names = list(sources.keys())
+		for i in range(0,len(names)):
+			name = names[i]
+			if self.env.CPPCHECK_SINGLE_HTML:
+				htmlfile = 'cppcheck/%i.html' % (i)
+			else:
+				htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
+			errors = sources[name]
+			files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
+			css_style_defs = self._create_html_file(name, htmlfile, errors)
+		return files, css_style_defs
+
+	def _create_html_file(self, sourcefile, htmlfile, errors):
+		name = self.generator.get_name()
+		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+		title = root.find('head/title')
+		title.text = 'cppcheck - report - %s' % name
+
+		body = root.find('body')
+		for div in body.findall('div'):
+			if div.get('id') == 'page':
+				page = div
+				break
+		for div in page.findall('div'):
+			if div.get('id') == 'header':
+				h1 = div.find('h1')
+				h1.text = 'cppcheck report - %s' % name
+			if div.get('id') == 'menu':
+				indexlink = div.find('a')
+				if self.env.CPPCHECK_SINGLE_HTML:
+					indexlink.attrib['href'] = 'index.html'
+				else:
+					indexlink.attrib['href'] = 'index-%s.html' % name
+			if div.get('id') == 'content':
+				content = div
+				srcnode = self.generator.bld.root.find_node(sourcefile)
+				hl_lines = [e['line'] for e in errors if 'line' in e]
+				formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
+				formatter.errors = [e for e in errors if 'line' in e]
+				css_style_defs = formatter.get_style_defs('.highlight')
+				lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
+				s = pygments.highlight(srcnode.read(), lexer, formatter)
+				table = ElementTree.fromstring(s)
+				content.append(table)
+
+		s = ElementTree.tostring(root, method='html').decode('us-ascii')
+		s = CPPCHECK_HTML_TYPE + s
+		node = self.generator.path.get_bld().find_or_declare(htmlfile)
+		node.write(s)
+		return css_style_defs
+
+	def _create_html_index(self, files):
+		name = self.generator.get_name()
+		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+		title = root.find('head/title')
+		title.text = 'cppcheck - report - %s' % name
+
+		body = root.find('body')
+		for div in body.findall('div'):
+			if div.get('id') == 'page':
+				page = div
+				break
+		for div in page.findall('div'):
+			if div.get('id') == 'header':
+				h1 = div.find('h1')
+				h1.text = 'cppcheck report - %s' % name
+			if div.get('id') == 'content':
+				content = div
+				self._create_html_table(content, files)
+			if div.get('id') == 'menu':
+				indexlink = div.find('a')
+				if self.env.CPPCHECK_SINGLE_HTML:
+					indexlink.attrib['href'] = 'index.html'
+				else:
+					indexlink.attrib['href'] = 'index-%s.html' % name
+
+		s = ElementTree.tostring(root, method='html').decode('us-ascii')
+		s = CPPCHECK_HTML_TYPE + s
+		index_html_name = 'cppcheck/index-%s.html' % name
+		if self.env.CPPCHECK_SINGLE_HTML:
+			index_html_name = 'cppcheck/index.html'
+		node = self.generator.path.get_bld().find_or_declare(index_html_name)
+		node.write(s)
+		return node
+
+	def _create_html_table(self, content, files):
+		table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
+		for name, val in files.items():
+			f = val['htmlfile']
+			s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
+			row = ElementTree.fromstring(s)
+			table.append(row)
+
+			errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
+			for e in errors:
+				if not 'line' in e:
+					s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
+				else:
+					attr = ''
+					if e['severity'] == 'error':
+						attr = 'class="error"'
+					s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
+					s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
+				row = ElementTree.fromstring(s)
+				table.append(row)
+		content.append(table)
+
+	def _create_css_file(self, css_style_defs):
+		css = str(CPPCHECK_CSS_FILE)
+		if css_style_defs:
+			css = "%s\n%s\n" % (css, css_style_defs)
+		node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
+		node.write(css)
+
+	def _errors_evaluate(self, errors, http_index):
+		name = self.generator.get_name()
+		fatal = self.fatal
+		severity = [err['severity'] for err in errors]
+		problems = [err for err in errors if err['severity'] != 'information']
+
+		if set(fatal) & set(severity):
+			exc  = "\n"
+			exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
+			exc += "\n    file://%r" % (http_index)
+			exc += "\n"
+			self.generator.bld.fatal(exc)
+
+		elif len(problems):
+			msg =  "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
+			msg += "\n    file://%r" % http_index
+			msg += "\n"
+			Logs.error(msg)
+
+
+class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
+	errors = []
+
+	def wrap(self, source, outfile):
+		line_no = 1
+		for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
+			# If this is a source code line we want to add a span tag at the end.
+			if i == 1:
+				for error in self.errors:
+					if int(error['line']) == line_no:
+						t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
+				line_no += 1
+			yield i, t
+
+
+CPPCHECK_HTML_TYPE = \
+'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
+
+CPPCHECK_HTML_FILE = """
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
+<html>
+	<head>
+		<title>cppcheck - report - XXX</title>
+		<link href="style.css" rel="stylesheet" type="text/css" />
+		<style type="text/css">
+		</style>
+	</head>
+	<body class="body">
+		<div id="page-header">&nbsp;</div>
+		<div id="page">
+			<div id="header">
+				<h1>cppcheck report - XXX</h1>
+			</div>
+			<div id="menu">
+				<a href="index.html">Defect list</a>
+			</div>
+			<div id="content">
+			</div>
+			<div id="footer">
+				<div>cppcheck - a tool for static C/C++ code analysis</div>
+				<div>
+				Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
+				Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
+				IRC: #cppcheck at irc.freenode.net
+				</div>
+				&nbsp;
+			</div>
+		&nbsp;
+		</div>
+		<div id="page-footer">&nbsp;</div>
+	</body>
+</html>
+"""
+
+CPPCHECK_HTML_TABLE = """
+<table>
+	<tr>
+		<th>Line</th>
+		<th>Id</th>
+		<th>Severity</th>
+		<th>Message</th>
+	</tr>
+</table>
+"""
+
+CPPCHECK_HTML_ERROR = \
+'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
+
+CPPCHECK_CSS_FILE = """
+body.body {
+	font-family: Arial;
+	font-size: 13px;
+	background-color: black;
+	padding: 0px;
+	margin: 0px;
+}
+
+.error {
+	font-family: Arial;
+	font-size: 13px;
+	background-color: #ffb7b7;
+	padding: 0px;
+	margin: 0px;
+}
+
+th, td {
+	min-width: 100px;
+	text-align: left;
+}
+
+#page-header {
+	clear: both;
+	width: 1200px;
+	margin: 20px auto 0px auto;
+	height: 10px;
+	border-bottom-width: 2px;
+	border-bottom-style: solid;
+	border-bottom-color: #aaaaaa;
+}
+
+#page {
+	width: 1160px;
+	margin: auto;
+	border-left-width: 2px;
+	border-left-style: solid;
+	border-left-color: #aaaaaa;
+	border-right-width: 2px;
+	border-right-style: solid;
+	border-right-color: #aaaaaa;
+	background-color: White;
+	padding: 20px;
+}
+
+#page-footer {
+	clear: both;
+	width: 1200px;
+	margin: auto;
+	height: 10px;
+	border-top-width: 2px;
+	border-top-style: solid;
+	border-top-color: #aaaaaa;
+}
+
+#header {
+	width: 100%;
+	height: 70px;
+	background-image: url(logo.png);
+	background-repeat: no-repeat;
+	background-position: left top;
+	border-bottom-style: solid;
+	border-bottom-width: thin;
+	border-bottom-color: #aaaaaa;
+}
+
+#menu {
+	margin-top: 5px;
+	text-align: left;
+	float: left;
+	width: 100px;
+	height: 300px;
+}
+
+#menu > a {
+	margin-left: 10px;
+	display: block;
+}
+
+#content {
+	float: left;
+	width: 1020px;
+	margin: 5px;
+	padding: 0px 10px 10px 10px;
+	border-left-style: solid;
+	border-left-width: thin;
+	border-left-color: #aaaaaa;
+}
+
+#footer {
+	padding-bottom: 5px;
+	padding-top: 5px;
+	border-top-style: solid;
+	border-top-width: thin;
+	border-top-color: #aaaaaa;
+	clear: both;
+	font-size: 10px;
+}
+
+#footer > div {
+	float: left;
+	width: 33%;
+}
+
+"""
+
diff --git a/third_party/waf/waflib/extras/cpplint.py b/third_party/waf/waflib/extras/cpplint.py
new file mode 100644
index 0000000..afc09c9
--- /dev/null
+++ b/third_party/waf/waflib/extras/cpplint.py
@@ -0,0 +1,209 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2014
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the cpplint tool to the waf file:
+$ ./waf-light --tools=compat15,cpplint
+
+This tool also requires cpplint for Python.
+If you have pip, you can install it like this: pip install cpplint
+
+When using this tool, the wscript will look like:
+
+    def options(opt):
+        opt.load('compiler_cxx cpplint')
+
+    def configure(conf):
+        conf.load('compiler_cxx cpplint')
+        # optional, you can also specify them on the command line
+        conf.env.CPPLINT_FILTERS = ','.join((
+            '-whitespace/newline',      # c++11 lambda
+            '-readability/braces',      # c++11 constructor
+            '-whitespace/braces',       # c++11 constructor
+            '-build/storage_class',     # c++11 for-range
+            '-whitespace/blank_line',   # user pref
+            '-whitespace/labels'        # user pref
+            ))
+
+    def build(bld):
+        bld(features='cpplint', source='main.cpp', target='app')
+        # add include files, because they aren't usually built
+        bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
+'''
+
+from __future__ import absolute_import
+import sys, re
+import logging
+from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils
+
+
+critical_errors = 0
+CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
+RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
+CPPLINT_RE = {
+    'waf': RE_EMACS,
+    'emacs': RE_EMACS,
+    'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+    'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+}
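+# An illustrative line in the default 'waf'/'emacs' format matched by RE_EMACS:
+#   src/main.cpp:42:  Missing space around operator  [whitespace/operators] [3]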
+CPPLINT_STR = ('${CPPLINT} '
+               '--verbose=${CPPLINT_LEVEL} '
+               '--output=${CPPLINT_OUTPUT} '
+               '--filter=${CPPLINT_FILTERS} '
+               '--root=${CPPLINT_ROOT} '
+               '--linelength=${CPPLINT_LINE_LENGTH} ')
+
+
+def options(opt):
+    opt.add_option('--cpplint-filters', type='string',
+                   default='', dest='CPPLINT_FILTERS',
+                   help='add filters to cpplint')
+    opt.add_option('--cpplint-length', type='int',
+                   default=80, dest='CPPLINT_LINE_LENGTH',
+                   help='specify the line length (default: 80)')
+    opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
+                   help='specify the log level (default: 1)')
+    opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
+                   help='break the build if error >= level (default: 5)')
+    opt.add_option('--cpplint-root', type='string',
+                   default='', dest='CPPLINT_ROOT',
+                   help='root directory used to derive header guard')
+    opt.add_option('--cpplint-skip', action='store_true',
+                   default=False, dest='CPPLINT_SKIP',
+                   help='skip cpplint during build')
+    opt.add_option('--cpplint-output', type='string',
+                   default='waf', dest='CPPLINT_OUTPUT',
+                   help='select output format (waf, emacs, vs7, eclipse)')
+
+
+def configure(conf):
+    try:
+        conf.find_program('cpplint', var='CPPLINT')
+    except Errors.ConfigurationError:
+        conf.env.CPPLINT_SKIP = True
+
+
+class cpplint_formatter(Logs.formatter, object):
+    def __init__(self, fmt):
+        logging.Formatter.__init__(self, CPPLINT_FORMAT)
+        self.fmt = fmt
+
+    def format(self, rec):
+        if self.fmt == 'waf':
+            result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
+            rec.msg = CPPLINT_FORMAT % result
+        if rec.levelno <= logging.INFO:
+            rec.c1 = Logs.colors.CYAN
+        return super(cpplint_formatter, self).format(rec)
+
+
+class cpplint_handler(Logs.log_handler, object):
+    def __init__(self, stream=sys.stderr, **kw):
+        super(cpplint_handler, self).__init__(stream, **kw)
+        self.stream = stream
+
+    def emit(self, rec):
+        rec.stream = self.stream
+        self.emit_override(rec)
+        self.flush()
+
+
+class cpplint_wrapper(object):
+    def __init__(self, logger, threshold, fmt):
+        self.logger = logger
+        self.threshold = threshold
+        self.fmt = fmt
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if isinstance(exc_value, Utils.subprocess.CalledProcessError):
+            messages = [m for m in exc_value.output.splitlines() 
+                        if 'Done processing' not in m 
+                        and 'Total errors found' not in m]
+            for message in messages:
+                self.write(message)
+            return True
+
+    def write(self, message):
+        global critical_errors
+        result = CPPLINT_RE[self.fmt].match(message)
+        if not result:
+            return
+        level = int(result.groupdict()['confidence'])
+        if level >= self.threshold:
+            critical_errors += 1
+        if level <= 2:
+            self.logger.info(message)
+        elif level <= 4:
+            self.logger.warning(message)
+        else:
+            self.logger.error(message)
+
+
+cpplint_logger = None
+def get_cpplint_logger(fmt):
+    global cpplint_logger
+    if cpplint_logger:
+        return cpplint_logger
+    cpplint_logger = logging.getLogger('cpplint')
+    hdlr = cpplint_handler()
+    hdlr.setFormatter(cpplint_formatter(fmt))
+    cpplint_logger.addHandler(hdlr)
+    cpplint_logger.setLevel(logging.DEBUG)
+    return cpplint_logger
+
+
+class cpplint(Task.Task):
+    color = 'PINK'
+
+    def __init__(self, *k, **kw):
+        super(cpplint, self).__init__(*k, **kw)
+
+    def run(self):
+        global critical_errors
+        with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
+            params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
+            if params['CPPLINT_OUTPUT'] == 'waf':
+                params['CPPLINT_OUTPUT'] = 'emacs'
+            params['CPPLINT'] = self.env.get_flat('CPPLINT')
+            cmd = Utils.subst_vars(CPPLINT_STR, params)
+            env = self.env.env or None
+            Utils.subprocess.check_output(cmd + self.inputs[0].abspath(),
+                                          stderr=Utils.subprocess.STDOUT,
+                                          env=env, shell=True)
+        return critical_errors
+
+@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
+def cpplint_includes(self, node):
+    pass
+
+@TaskGen.feature('cpplint')
+@TaskGen.before_method('process_source')
+def post_cpplint(self):
+    if not self.env.CPPLINT_INITIALIZED:
+        for key, value in Options.options.__dict__.items():
+            if not key.startswith('CPPLINT_') or self.env[key]:
+                continue
+            self.env[key] = value
+        self.env.CPPLINT_INITIALIZED = True
+
+    if self.env.CPPLINT_SKIP:
+        return
+
+    if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
+        return
+
+    for src in self.to_list(getattr(self, 'source', [])):
+        if isinstance(src, Node.Node):
+            node = src
+        else:
+            node = self.path.find_or_declare(src)
+        if not node:
+            self.bld.fatal('Could not find %r' % src)
+        self.create_task('cpplint', node)
diff --git a/third_party/waf/waflib/extras/cross_gnu.py b/third_party/waf/waflib/extras/cross_gnu.py
new file mode 100644
index 0000000..309f53b
--- /dev/null
+++ b/third_party/waf/waflib/extras/cross_gnu.py
@@ -0,0 +1,227 @@
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+This tool allows the use of environment variables to define cross-compilation
+variables intended for build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+Otherwise, one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
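+
+For example (an illustrative sketch): with CHOST=arm-linux-gnueabi, the C
+compiler may be given as arm_linux_gnueabi_CC, as HOST_CC, or derived as
+arm-linux-gnueabi-gcc.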
+
+Usage:
+
+- In your build script::
+
+	def configure(cfg):
+		...
+		for variant in x_variants:
+			setenv(variant)
+			conf.load('cross_gnu')
+			conf.xcheck_host_var('POUET')
+			...
+
+
+- Then::
+
+	CHOST=arm-hardfloat-linux-gnueabi waf configure
+	env arm_hardfloat_linux_gnueabi_CC="clang -..." waf configure
+	CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
+	HOST_CC="clang -..." waf configure
+
+This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
+
+.. code:: python
+
+		from waflib import Configure
+
+		#from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
+		import waf_variants
+
+		variants='pc fw/variant1 fw/variant2'.split()
+
+		top = "."
+		out = "../build"
+
+		PIC = '33FJ128GP804' #dsPICxxx
+
+		@Configure.conf
+		def gcc_modifier_xc16(cfg):
+				v = cfg.env
+				v.cprogram_PATTERN = '%s.elf'
+				v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
+						'--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
+						'--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
+				v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
+						'-msfr-warn=off','-mno-override-inline','-finline','-Winline']
+
+		def configure(cfg):
+				if 'fw' in cfg.variant: #firmware
+						cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
+						cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
+						...
+				else: #configure for pc SW
+						...
+
+		def build(bld):
+				if 'fw' in bld.variant: #firmware
+						bld.program(source='maintst.c', target='maintst');
+						bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
+				else: #build for pc SW
+						...
+
+"""
+
+import os
+from waflib import Utils, Configure
+from waflib.Tools import ccroot, gcc
+
+try:
+	from shlex import quote
+except ImportError:
+	from pipes import quote
+
+def get_chost_stuff(conf):
+	"""
+	Get the CHOST environment variable contents
+	"""
+	chost = None
+	chost_envar = None
+	if conf.env.CHOST:
+		chost = conf.env.CHOST[0]
+		chost_envar = chost.replace('-', '_')
+	return chost, chost_envar
+
+
+@Configure.conf
+def xcheck_var(conf, name, wafname=None, cross=False):
+	wafname = wafname or name
+
+	if wafname in conf.env:
+		value = conf.env[wafname]
+		if isinstance(value, str):
+			value = [value]
+	else:
+		envar = os.environ.get(name)
+		if not envar:
+			return
+		value = Utils.to_list(envar) if envar != '' else [envar]
+
+	conf.env[wafname] = value
+	if cross:
+		pretty = 'cross-compilation %s' % wafname
+	else:
+		pretty = wafname
+	conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
+
+@Configure.conf
+def xcheck_host_prog(conf, name, tool, wafname=None):
+	wafname = wafname or name
+
+	chost, chost_envar = get_chost_stuff(conf)
+
+	specific = None
+	if chost:
+		specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+	if specific:
+		value = Utils.to_list(specific)
+		conf.env[wafname] += value
+		conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
+		 " ".join(quote(x) for x in value))
+		return
+	else:
+		envar = os.environ.get('HOST_%s' % name)
+		if envar is not None:
+			value = Utils.to_list(envar)
+			conf.env[wafname] = value
+			conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+			 " ".join(quote(x) for x in value))
+			return
+
+	if conf.env[wafname]:
+		return
+
+	value = None
+	if chost:
+		value = '%s-%s' % (chost, tool)
+
+	if value:
+		conf.env[wafname] = value
+		conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
+
+@Configure.conf
+def xcheck_host_envar(conf, name, wafname=None):
+	wafname = wafname or name
+
+	chost, chost_envar = get_chost_stuff(conf)
+
+	specific = None
+	if chost:
+		specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+	if specific:
+		value = Utils.to_list(specific)
+		conf.env[wafname] += value
+		conf.msg('Will use cross-compilation %s from %s_%s' \
+		 % (name, chost_envar, name),
+		 " ".join(quote(x) for x in value))
+		return
+
+
+	envar = os.environ.get('HOST_%s' % name)
+	if envar is None:
+		return
+
+	value = Utils.to_list(envar) if envar != '' else [envar]
+
+	conf.env[wafname] = value
+	conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+	 " ".join(quote(x) for x in value))
+
+
+@Configure.conf
+def xcheck_host(conf):
+	conf.xcheck_var('CHOST', cross=True)
+	conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
+	conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
+	conf.xcheck_host_prog('CC', 'gcc')
+	conf.xcheck_host_prog('CXX', 'g++')
+	conf.xcheck_host_prog('LINK_CC', 'gcc')
+	conf.xcheck_host_prog('LINK_CXX', 'g++')
+	conf.xcheck_host_prog('AR', 'ar')
+	conf.xcheck_host_prog('AS', 'as')
+	conf.xcheck_host_prog('LD', 'ld')
+	conf.xcheck_host_envar('CFLAGS')
+	conf.xcheck_host_envar('CXXFLAGS')
+	conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
+	conf.xcheck_host_envar('LIB')
+	conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
+	conf.xcheck_host_envar('PKG_CONFIG_PATH')
+
+	if not conf.env.env:
+		conf.env.env = {}
+		conf.env.env.update(os.environ)
+	if conf.env.PKG_CONFIG_LIBDIR:
+		conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
+	if conf.env.PKG_CONFIG_PATH:
+		conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
+
+def configure(conf):
+	"""
+	Configuration example for gcc; it will not work for g++/clang/clang++
+	"""
+	conf.xcheck_host()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/cython.py b/third_party/waf/waflib/extras/cython.py
new file mode 100644
index 0000000..591c274
--- /dev/null
+++ b/third_party/waf/waflib/extras/cython.py
@@ -0,0 +1,147 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2015
+
+import re
+from waflib import Task, Logs
+from waflib.TaskGen import extension
+
+cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
+re_cyt = re.compile(r"""
+	^\s*                           # must begin with some whitespace characters
+	(?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo
+	c?import\s(\w+|[*])            # require "import bar" and capture bar
+	""", re.M | re.VERBOSE)
+
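+# For illustration: "from foo.bar cimport baz" yields groups ('foo', 'baz'),
+# while "import qux" yields (None, 'qux')
+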
+@extension('.pyx')
+def add_cython_file(self, node):
+	"""
+	Process a *.pyx* file given in the list of source files. No additional
+	feature is required::
+
+		def build(bld):
+			bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
+	"""
+	ext = '.c'
+	if 'cxx' in self.features:
+		self.env.append_unique('CYTHONFLAGS', '--cplus')
+		ext = '.cc'
+
+	for x in getattr(self, 'cython_includes', []):
+		# TODO re-use these nodes in "scan" below
+		d = self.path.find_dir(x)
+		if d:
+			self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())
+
+	tsk = self.create_task('cython', node, node.change_ext(ext))
+	self.source += tsk.outputs
+
+class cython(Task.Task):
+	run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
+	color   = 'GREEN'
+
+	vars    = ['INCLUDES']
+	"""
+	Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
+	by the metaclass.
+	"""
+
+	ext_out = ['.h']
+	"""
+	The creation of a .h file is known only after the build has begun, so it is not
+	possible to compute a build order just by looking at the task inputs/outputs.
+	"""
+
+	def runnable_status(self):
+		"""
+		Perform a double-check to add the headers created by cython
+		to the output nodes. The scanner is executed only when the cython task
+		must be executed (optimization).
+		"""
+		ret = super(cython, self).runnable_status()
+		if ret == Task.ASK_LATER:
+			return ret
+		for x in self.generator.bld.raw_deps[self.uid()]:
+			if x.startswith('header:'):
+				self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
+		return super(cython, self).runnable_status()
+
+	def post_run(self):
+		for x in self.outputs:
+			if x.name.endswith('.h'):
+				if not x.exists():
+					if Logs.verbose:
+						Logs.warn('Expected %r', x.abspath())
+					x.write('')
+		return Task.Task.post_run(self)
+
+	def scan(self):
+		"""
+		Return the dependent files (.pxd) by looking in the include folders.
+		Put the headers to generate in the custom list "bld.raw_deps".
+		To inspect the scanner results, use::
+
+			$ waf clean build --zones=deps
+		"""
+		node = self.inputs[0]
+		txt = node.read()
+
+		mods = set()
+		for m in re_cyt.finditer(txt):
+			if m.group(1):  # matches "from foo import bar"
+				mods.add(m.group(1))
+			else:
+				mods.add(m.group(2))
+
+		Logs.debug('cython: mods %r', mods)
+		incs = getattr(self.generator, 'cython_includes', [])
+		incs = [self.generator.path.find_dir(x) for x in incs]
+		incs.append(node.parent)
+
+		found = []
+		missing = []
+		for x in sorted(mods):
+			for y in incs:
+				k = y.find_resource(x + '.pxd')
+				if k:
+					found.append(k)
+					break
+			else:
+				missing.append(x)
+
+		# the cython file implicitly depends on a pxd file that might be present
+		implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
+		if implicit:
+			found.append(implicit)
+
+		Logs.debug('cython: found %r', found)
+
+		# Now the .h files to be created - store them in bld.raw_deps for later use
+		has_api = False
+		has_public = False
+		for l in txt.splitlines():
+			if cy_api_pat.match(l):
+				if ' api ' in l:
+					has_api = True
+				if ' public ' in l:
+					has_public = True
+		name = node.name.replace('.pyx', '')
+		if has_api:
+			missing.append('header:%s_api.h' % name)
+		if has_public:
+			missing.append('header:%s.h' % name)
+
+		return (found, missing)
+
+def options(ctx):
+	ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
+
+def configure(ctx):
+	if not ctx.env.CC and not ctx.env.CXX:
+		ctx.fatal('Load a C/C++ compiler first')
+	if not ctx.env.PYTHON:
+		ctx.fatal('Load the python tool first!')
+	ctx.find_program('cython', var='CYTHON')
+	if hasattr(ctx.options, 'cython_flags'):
+		ctx.env.CYTHONFLAGS = ctx.options.cython_flags
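+
+# For illustration (hypothetical flags):
+#   $ waf configure --cython-flags="-X boundscheck=False"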
+
diff --git a/third_party/waf/waflib/extras/dcc.py b/third_party/waf/waflib/extras/dcc.py
new file mode 100644
index 0000000..c1a57c0
--- /dev/null
+++ b/third_party/waf/waflib/extras/dcc.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2011 (zougloub)
+
+from waflib import Options
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+
+@conf
+def find_dcc(conf):
+	conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
+	conf.env.CC_NAME = 'dcc'
+
+@conf
+def find_dld(conf):
+	conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
+	conf.env.LINK_CC_NAME = 'dld'
+
+@conf
+def find_dar(conf):
+	conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', ""))
+	conf.env.AR_NAME = 'dar'
+	conf.env.ARFLAGS = 'rcs'
+
+@conf
+def find_ddump(conf):
+	conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))
+
+@conf
+def dcc_common_flags(conf):
+	v = conf.env
+	v['CC_SRC_F']            = []
+	v['CC_TGT_F']            = ['-c', '-o']
+
+	# linker
+	if not v['LINK_CC']:
+		v['LINK_CC'] = v['CC']
+	v['CCLNK_SRC_F']         = []
+	v['CCLNK_TGT_F']         = ['-o']
+	v['CPPPATH_ST']          = '-I%s'
+	v['DEFINES_ST']          = '-D%s'
+
+	v['LIB_ST']              = '-l:%s' # template for adding libs
+	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
+	v['STLIB_ST']            = '-l:%s'
+	v['STLIBPATH_ST']        = '-L%s'
+	v['RPATH_ST']            = '-Wl,-rpath,%s'
+	#v['STLIB_MARKER']        = '-Wl,-Bstatic'
+
+	# program
+	v['cprogram_PATTERN']    = '%s.elf'
+
+	# static lib
+	v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
+	v['cstlib_PATTERN']      = 'lib%s.a'
+
+def configure(conf):
+	conf.find_dcc()
+	conf.find_dar()
+	conf.find_dld()
+	conf.find_ddump()
+	conf.dcc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
+def options(opt):
+	"""
+	Add the ``--with-diab-bindir`` command-line options.
+	"""
+	opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
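+
+# Usage sketch (hypothetical paths): load the tool in a wscript and point it
+# at the Diab binaries:
+#   def configure(conf):
+#       conf.load('dcc')
+#   $ waf configure --with-diab-bindir=/opt/diab/bin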
+
diff --git a/third_party/waf/waflib/extras/distnet.py b/third_party/waf/waflib/extras/distnet.py
new file mode 100644
index 0000000..8084b15
--- /dev/null
+++ b/third_party/waf/waflib/extras/distnet.py
@@ -0,0 +1,432 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+waf-powered distributed network builds, with a network cache.
+
+Caching files from a server has advantages over an NFS/Samba shared folder:
+
+- builds are much faster because they use local files
+- builds just continue to work in case of a network glitch
+- permissions are much simpler to manage
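+
+For illustration (hypothetical values), the cache and server locations can be
+overridden through the environment before running waf::
+
+	export DISTNETCACHE=/var/cache/distnet
+	export DISTNETSERVER=http://distnet.example.com/cgi-bin/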
+"""
+
+import os, urllib, tarfile, re, shutil, tempfile, sys
+from collections import OrderedDict
+from waflib import Context, Utils, Logs
+
+try:
+	from urllib.parse import urlencode
+except ImportError:
+	urlencode = urllib.urlencode
+
+def safe_urlencode(data):
+	x = urlencode(data)
+	try:
+		x = x.encode('utf-8')
+	except Exception:
+		pass
+	return x
+
+try:
+	from urllib.error import URLError
+except ImportError:
+	from urllib2 import URLError
+
+try:
+	from urllib.request import Request, urlopen
+except ImportError:
+	from urllib2 import Request, urlopen
+
+DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
+DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
+TARFORMAT = 'w:bz2'
+TIMEOUT = 60
+REQUIRES = 'requires.txt'
+
+re_com = re.compile(r'\s*#.*', re.M)
+
+def total_version_order(num):
+	lst = num.split('.')
+	template = '%10s' * len(lst)
+	ret = template % tuple(lst)
+	return ret
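+# For illustration: total_version_order('1.2.10') yields
+# '         1         2        10', so a plain string sort orders
+# versions numerically component by component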
+
+def get_distnet_cache():
+	return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
+
+def get_server_url():
+	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
+
+def get_download_url():
+	return '%s/download.py' % get_server_url()
+
+def get_upload_url():
+	return '%s/upload.py' % get_server_url()
+
+def get_resolve_url():
+	return '%s/resolve.py' % get_server_url()
+
+def send_package_name():
+	out = getattr(Context.g_module, 'out', 'build')
+	pkgfile = '%s/package_to_upload.tarfile' % out
+	return pkgfile
+
+class package(Context.Context):
+	fun = 'package'
+	cmd = 'package'
+
+	def execute(self):
+		try:
+			files = self.files
+		except AttributeError:
+			files = self.files = []
+
+		Context.Context.execute(self)
+		pkgfile = send_package_name()
+		if pkgfile not in files:
+			if REQUIRES not in files:
+				files.append(REQUIRES)
+			self.make_tarfile(pkgfile, files, add_to_package=False)
+
+	def make_tarfile(self, filename, files, **kw):
+		if kw.get('add_to_package', True):
+			self.files.append(filename)
+
+		with tarfile.open(filename, TARFORMAT) as tar:
+			endname = os.path.split(filename)[-1]
+			endname = endname.split('.')[0] + '/'
+			for x in files:
+				tarinfo = tar.gettarinfo(x, x)
+				tarinfo.uid   = tarinfo.gid   = 0
+				tarinfo.uname = tarinfo.gname = 'root'
+				tarinfo.size = os.stat(x).st_size
+				if os.environ.get('SOURCE_DATE_EPOCH'):
+					tarinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH'))
+
+				# TODO - more archive creation options?
+				if kw.get('bare', True):
+					tarinfo.name = os.path.split(x)[1]
+				else:
+					tarinfo.name = endname + x # todo, if tuple, then..
+				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
+				with open(x, 'rb') as f:
+					tar.addfile(tarinfo, f)
+		Logs.info('Created %s', filename)
+
+class publish(Context.Context):
+	fun = 'publish'
+	cmd = 'publish'
+	def execute(self):
+		if hasattr(Context.g_module, 'publish'):
+			Context.Context.execute(self)
+		mod = Context.g_module
+
+		rfile = getattr(self, 'rfile', send_package_name())
+		if not os.path.isfile(rfile):
+			self.fatal('Create the release file with "waf release" first! %r' % rfile)
+
+		fdata = Utils.readf(rfile, m='rb')
+		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
+
+		req = Request(get_upload_url(), data)
+		response = urlopen(req, timeout=TIMEOUT)
+		data = response.read().strip()
+
+		if sys.hexversion > 0x300000f:
+			data = data.decode('utf-8')
+
+		if data != 'ok':
+			self.fatal('Could not publish the package %r' % data)
+
+class constraint(object):
+	def __init__(self, line=''):
+		self.required_line = line
+		self.info = []
+
+		line = line.strip()
+		if not line:
+			return
+
+		lst = line.split(',')
+		if lst:
+			self.pkgname = lst[0]
+			self.required_version = lst[1]
+			for k in lst:
+				a, b, c = k.partition('=')
+				if a and c:
+					self.info.append((a, c))
+	def __str__(self):
+		buf = []
+		buf.append(self.pkgname)
+		buf.append(self.required_version)
+		for k in self.info:
+			buf.append('%s=%s' % k)
+		return ','.join(buf)
+
+	def __repr__(self):
+		return "requires %s-%s" % (self.pkgname, self.required_version)
+
+	def human_display(self, pkgname, pkgver):
+		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
+
+	def why(self):
+		ret = []
+		for x in self.info:
+			if x[0] == 'reason':
+				ret.append(x[1])
+		return ret
+
+	def add_reason(self, reason):
+		self.info.append(('reason', reason))
+
+def parse_constraints(text):
+	assert(text is not None)
+	constraints = []
+	text = re.sub(re_com, '', text)
+	lines = text.splitlines()
+	for line in lines:
+		line = line.strip()
+		if not line:
+			continue
+		constraints.append(constraint(line))
+	return constraints
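+# A constraint line has the form "pkgname,version[,key=value...]", e.g.
+# (hypothetical): "libfoo,1.2.*,reason=required by app-1.0"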
+
+def list_package_versions(cachedir, pkgname):
+	pkgdir = os.path.join(cachedir, pkgname)
+	try:
+		versions = os.listdir(pkgdir)
+	except OSError:
+		return []
+	versions.sort(key=total_version_order)
+	versions.reverse()
+	return versions
+
+class package_reader(Context.Context):
+	cmd = 'solver'
+	fun = 'solver'
+
+	def __init__(self, **kw):
+		Context.Context.__init__(self, **kw)
+
+		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
+		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
+		self.cache_constraints = {}
+		self.constraints = []
+
+	def compute_dependencies(self, filename=REQUIRES):
+		text = Utils.readf(filename)
+		data = safe_urlencode([('text', text)])
+
+		if '--offline' in sys.argv:
+			self.constraints = self.local_resolve(text)
+		else:
+			req = Request(get_resolve_url(), data)
+			try:
+				response = urlopen(req, timeout=TIMEOUT)
+			except URLError as e:
+				Logs.warn('The package server is down! %r', e)
+				self.constraints = self.local_resolve(text)
+			else:
+				ret = response.read()
+				try:
+					ret = ret.decode('utf-8')
+				except Exception:
+					pass
+				self.trace(ret)
+				self.constraints = parse_constraints(ret)
+		self.check_errors()
+
+	def check_errors(self):
+		errors = False
+		for c in self.constraints:
+			if not c.required_version:
+				errors = True
+
+				reasons = c.why()
+				if len(reasons) == 1:
+					Logs.error('%s but no matching package could be found in this repository', reasons[0])
+				else:
+					Logs.error('Conflicts on package %r:', c.pkgname)
+					for r in reasons:
+						Logs.error('  %s', r)
+		if errors:
+			self.fatal('The package requirements cannot be satisfied!')
+
+	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
+		try:
+			return self.cache_constraints[(pkgname, pkgver)]
+		except KeyError:
+			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
+			ret = parse_constraints(text)
+			self.cache_constraints[(pkgname, pkgver)] = ret
+			return ret
+
+	def apply_constraint(self, domain, constraint):
+		vname = constraint.required_version.replace('*', '.*')
+		rev = re.compile(vname, re.M)
+		ret = [x for x in domain if rev.match(x)]
+		return ret
+
+	def trace(self, *k):
+		if getattr(self, 'debug', None):
+			Logs.error(*k)
+
+	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
+		# breadth first search
+		n_packages_to_versions = dict(packages_to_versions)
+		n_packages_to_constraints = dict(packages_to_constraints)
+
+		self.trace("calling solve with %r    %r %r" % (packages_to_versions, todo, done))
+		done = done + [pkgname]
+
+		constraints = self.load_constraints(pkgname, pkgver)
+		self.trace("constraints %r" % constraints)
+
+		for k in constraints:
+			try:
+				domain = n_packages_to_versions[k.pkgname]
+			except KeyError:
+				domain = list_package_versions(get_distnet_cache(), k.pkgname)
+
+			self.trace("constraints?")
+			if k.pkgname not in done:
+				todo = todo + [k.pkgname]
+
+			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
+
+			# apply the constraint
+			domain = self.apply_constraint(domain, k)
+
+			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
+
+			n_packages_to_versions[k.pkgname] = domain
+
+			# then store the constraint applied
+			constraints = list(packages_to_constraints.get(k.pkgname, []))
+			constraints.append((pkgname, pkgver, k))
+			n_packages_to_constraints[k.pkgname] = constraints
+
+			if not domain:
+				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
+				return (n_packages_to_versions, n_packages_to_constraints)
+
+		# next package on the todo list
+		if not todo:
+			return (n_packages_to_versions, n_packages_to_constraints)
+
+		n_pkgname = todo[0]
+		n_pkgver = n_packages_to_versions[n_pkgname][0]
+		tmp = dict(n_packages_to_versions)
+		tmp[n_pkgname] = [n_pkgver]
+
+		self.trace("fixed point %s" % n_pkgname)
+
+		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
+
+	def get_results(self):
+		return '\n'.join([str(c) for c in self.constraints])
+
+	def solution_to_constraints(self, versions, constraints):
+		solution = []
+		for p in versions:
+			c = constraint()
+			solution.append(c)
+
+			c.pkgname = p
+			if versions[p]:
+				c.required_version = versions[p][0]
+			else:
+				c.required_version = ''
+			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
+				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
+		return solution
+
+	def local_resolve(self, text):
+		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
+		p2v = OrderedDict({self.myproject: [self.myversion]})
+		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
+		return self.solution_to_constraints(versions, constraints)
+
+	def download_to_file(self, pkgname, pkgver, subdir, tmp):
+		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
+		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
+		with open(tmp, 'wb') as f:
+			while True:
+				buf = req.read(8192)
+				if not buf:
+					break
+				f.write(buf)
+
+	def extract_tar(self, subdir, pkgdir, tmpfile):
+		with tarfile.open(tmpfile) as f:
+			temp = tempfile.mkdtemp(dir=pkgdir)
+			try:
+				f.extractall(temp)
+				os.rename(temp, os.path.join(pkgdir, subdir))
+			finally:
+				try:
+					shutil.rmtree(temp)
+				except Exception:
+					pass
+
+	def get_pkg_dir(self, pkgname, pkgver, subdir):
+		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
+		if not os.path.isdir(pkgdir):
+			os.makedirs(pkgdir)
+
+		target = os.path.join(pkgdir, subdir)
+
+		if os.path.exists(target):
+			return target
+
+		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
+		try:
+			os.close(fd)
+			self.download_to_file(pkgname, pkgver, subdir, tmp)
+			if subdir == REQUIRES:
+				os.rename(tmp, target)
+			else:
+				self.extract_tar(subdir, pkgdir, tmp)
+		finally:
+			try:
+				os.remove(tmp)
+			except OSError:
+				pass
+
+		return target
+
+	def __iter__(self):
+		if not self.constraints:
+			self.compute_dependencies()
+		for x in self.constraints:
+			if x.pkgname == self.myproject:
+				continue
+			yield x
+
+	def execute(self):
+		self.compute_dependencies()
+
+packages = package_reader()
+
+def load_tools(ctx, extra):
+	global packages
+	for c in packages:
+		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
+		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
+		for x in os.listdir(noarchdir):
+			if x.startswith('waf_') and x.endswith('.py'):
+				# strip the '.py' suffix; str.rstrip('.py') would also eat trailing 'p'/'y' characters
+				ctx.load([x[:-3]], tooldir=[noarchdir])
+
+def options(opt):
+	opt.add_option('--offline', action='store_true')
+	packages.execute()
+	load_tools(opt, REQUIRES)
+
+def configure(conf):
+	load_tools(conf, conf.variant)
+
+def build(bld):
+	load_tools(bld, bld.variant)
+
diff --git a/third_party/waf/waflib/extras/doxygen.py b/third_party/waf/waflib/extras/doxygen.py
new file mode 100644
index 0000000..0fda703
--- /dev/null
+++ b/third_party/waf/waflib/extras/doxygen.py
@@ -0,0 +1,236 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+* doxy_tar -- destination archive for generated documentation (if desired)
+* install_path -- where to install the documentation
+* pars -- dictionary overriding doxygen configuration settings
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('doxygen')
+
+	def configure(conf):
+		conf.load('doxygen')
+		# check conf.env.DOXYGEN, if it is mandatory
+
+	def build(bld):
+		if bld.env.DOXYGEN:
+			bld(features="doxygen", doxyfile='Doxyfile', ...)
+"""
+
+import os, os.path, re
+from collections import OrderedDict
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rtf xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
+def parse_doxy(txt):
+	'''
+	Parses a doxygen file.
+	Returns an ordered dictionary. We cannot return a default dictionary, as the
+	order in which the entries are reported does matter, especially for the
+	'@INCLUDE' lines.
+	'''
+	tbl = OrderedDict()
+	txt   = re_rl.sub('', txt)
+	lines = re_nl.split(txt)
+	for x in lines:
+		x = x.strip()
+		if not x or x.startswith('#') or x.find('=') < 0:
+			continue
+		if x.find('+=') >= 0:
+			tmp = x.split('+=')
+			key = tmp[0].strip()
+			if key in tbl:
+				tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+			else:
+				tbl[key] = '+='.join(tmp[1:]).strip()
+		else:
+			tmp = x.split('=')
+			tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+	return tbl
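+# For illustration, parse_doxy('PROJECT_NAME = demo\nINPUT += src') returns
+# OrderedDict([('PROJECT_NAME', 'demo'), ('INPUT', 'src')])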
+
+class doxygen(Task.Task):
+	vars  = ['DOXYGEN', 'DOXYFLAGS']
+	color = 'BLUE'
+	ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ]
+
+	def runnable_status(self):
+		'''
+		self.pars are populated in runnable_status - because this function is being
+		run *before* both self.pars "consumers" - scan() and run()
+
+		set output_dir (node) for the output
+		'''
+
+		for x in self.run_after:
+			if not x.hasrun:
+				return Task.ASK_LATER
+
+		if not getattr(self, 'pars', None):
+			txt = self.inputs[0].read()
+			self.pars = parse_doxy(txt)
+
+			# Override with any parameters passed to the task generator
+			if getattr(self.generator, 'pars', None):
+				for k, v in self.generator.pars.items():
+					self.pars[k] = v
+
+			if self.pars.get('OUTPUT_DIRECTORY'):
+				# Use the path parsed from the Doxyfile as an absolute path
+				output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
+			else:
+				# If no OUTPUT_DIRECTORY was specified in the Doxyfile, build the path from the Doxyfile name + '.doxy'
+				output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
+			output_node.mkdir()
+			self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()
+
+			self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+			if not self.pars.get('INPUT'):
+				self.doxy_inputs.append(self.inputs[0].parent)
+			else:
+				for i in self.pars.get('INPUT').split():
+					if os.path.isabs(i):
+						node = self.generator.bld.root.find_node(i)
+					else:
+						node = self.inputs[0].parent.find_node(i)
+					if not node:
+						self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+					self.doxy_inputs.append(node)
+
+		if not getattr(self, 'output_dir', None):
+			bld = self.generator.bld
+			# Output path is always an absolute path as it was transformed above.
+			self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+
+		self.signature()
+		ret = Task.Task.runnable_status(self)
+		if ret == Task.SKIP_ME:
+			# in case the files were removed
+			self.add_install()
+		return ret
+
+	def scan(self):
+		exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+		exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
+		file_patterns = self.pars.get('FILE_PATTERNS','').split()
+		if not file_patterns:
+			file_patterns = DOXY_FILE_PATTERNS.split()
+		if self.pars.get('RECURSIVE') == 'YES':
+			file_patterns = ["**/%s" % pattern for pattern in file_patterns]
+		nodes = []
+		names = []
+		for node in self.doxy_inputs:
+			if os.path.isdir(node.abspath()):
+				for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
+					nodes.append(m)
+			else:
+				nodes.append(node)
+		return (nodes, names)
+
+	def run(self):
+		dct = self.pars.copy()
+		code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+		code = code.encode() # for python 3
+		#fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+		cmd = Utils.subst_vars(DOXY_STR, self.env)
+		env = self.env.env or None
+		proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
+		proc.communicate(code)
+		return proc.returncode
+
+	def post_run(self):
+		nodes = self.output_dir.ant_glob('**/*', quiet=True)
+		for x in nodes:
+			self.generator.bld.node_sigs[x] = self.uid()
+		self.add_install()
+		return Task.Task.post_run(self)
+
+	def add_install(self):
+		nodes = self.output_dir.ant_glob('**/*', quiet=True)
+		self.outputs += nodes
+		if getattr(self.generator, 'install_path', None):
+			if not getattr(self.generator, 'doxy_tar', None):
+				self.generator.add_install_files(install_to=self.generator.install_path,
+					install_from=self.outputs,
+					postpone=False,
+					cwd=self.output_dir,
+					relative_trick=True)
+
+class tar(Task.Task):
+	"quick tar creation"
+	run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+	color   = 'RED'
+	after   = ['doxygen']
+	def runnable_status(self):
+		for x in getattr(self, 'input_tasks', []):
+			if not x.hasrun:
+				return Task.ASK_LATER
+
+		if not getattr(self, 'tar_done_adding', None):
+			# execute this only once
+			self.tar_done_adding = True
+			for x in getattr(self, 'input_tasks', []):
+				self.set_inputs(x.outputs)
+			if not self.inputs:
+				return Task.SKIP_ME
+		return Task.Task.runnable_status(self)
+
+	def __str__(self):
+		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+		return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+	if not getattr(self, 'doxyfile', None):
+		self.bld.fatal('no doxyfile variable specified')
+
+	node = self.doxyfile
+	if not isinstance(node, Node.Node):
+		node = self.path.find_resource(node)
+	if not node:
+		self.bld.fatal('doxygen file %s not found' % self.doxyfile)
+
+	# the task instance
+	dsk = self.create_task('doxygen', node, always_run=getattr(self, 'always', False))
+
+	if getattr(self, 'doxy_tar', None):
+		tsk = self.create_task('tar', always_run=getattr(self, 'always', False))
+		tsk.input_tasks = [dsk]
+		tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+		if self.doxy_tar.endswith('bz2'):
+			tsk.env['TAROPTS'] = ['cjf']
+		elif self.doxy_tar.endswith('gz'):
+			tsk.env['TAROPTS'] = ['czf']
+		else:
+			tsk.env['TAROPTS'] = ['cf']
+		if getattr(self, 'install_path', None):
+			self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
+
+def configure(conf):
+	'''
+	Check if doxygen and tar commands are present in the system
+
+	If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+	variables will be set. Detection can be controlled by setting DOXYGEN and
+	TAR environmental variables.
+	'''
+
+	conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+	conf.find_program('tar', var='TAR', mandatory=False)
diff --git a/third_party/waf/waflib/extras/dpapi.py b/third_party/waf/waflib/extras/dpapi.py
new file mode 100644
index 0000000..b94d482
--- /dev/null
+++ b/third_party/waf/waflib/extras/dpapi.py
@@ -0,0 +1,87 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Matt Clarkson, 2012
+
+'''
+DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
+This file uses code originally created by Crusher Joe:
+http://article.gmane.org/gmane.comp.python.ctypes/420
+And modified by Wayne Koorts:
+http://stackoverflow.com/questions/463832/using-dpapi-with-python
+'''
+
+from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
+from ctypes.wintypes import DWORD
+from waflib.Configure import conf
+
+LocalFree = windll.kernel32.LocalFree
+memcpy = cdll.msvcrt.memcpy
+CryptProtectData = windll.crypt32.CryptProtectData
+CryptUnprotectData = windll.crypt32.CryptUnprotectData
+CRYPTPROTECT_UI_FORBIDDEN = 0x01
+try:
+	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
+except AttributeError:
+	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
+
+class DATA_BLOB(Structure):
+	_fields_ = [
+		('cbData', DWORD),
+		('pbData', POINTER(c_char))
+	]
+
+def get_data(blob_out):
+	cbData = int(blob_out.cbData)
+	pbData = blob_out.pbData
+	buffer = c_buffer(cbData)
+	memcpy(buffer, pbData, cbData)
+	LocalFree(pbData)
+	return buffer.raw
+
+@conf
+def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
+	'''
+	Encrypts data and returns byte string
+
+	:param input_bytes: The data to be encrypted
+	:type input_bytes: String or Bytes
+	:param entropy: Extra entropy to add to the encryption process (optional)
+	:type entropy: String or Bytes
+	'''
+	if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
+		self.fatal('The inputs to dpapi must be bytes')
+	buffer_in      = c_buffer(input_bytes, len(input_bytes))
+	buffer_entropy = c_buffer(entropy, len(entropy))
+	blob_in        = DATA_BLOB(len(input_bytes), buffer_in)
+	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
+	blob_out       = DATA_BLOB()
+
+	if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy), 
+		None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+		return get_data(blob_out)
+	else:
+		self.fatal('Failed to encrypt data')
+
+@conf
+def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
+	'''
+	Decrypts data and returns byte string
+
+	:param encrypted_bytes: The encrypted data
+	:type encrypted_bytes: Bytes
+	:param entropy: Extra entropy to add to the encryption process (optional)
+	:type entropy: String or Bytes
+	'''
+	if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
+		self.fatal('The inputs to dpapi must be bytes')
+	buffer_in      = c_buffer(encrypted_bytes, len(encrypted_bytes))
+	buffer_entropy = c_buffer(entropy, len(entropy))
+	blob_in        = DATA_BLOB(len(encrypted_bytes), buffer_in)
+	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
+	blob_out       = DATA_BLOB()
+	if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
+		None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+		return get_data(blob_out)
+	else:
+		self.fatal('Failed to decrypt data')
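+
+# Minimal round-trip sketch (Windows only, inside a configuration method):
+#   blob = conf.dpapi_encrypt_data(b'secret')
+#   assert conf.dpapi_decrypt_data(blob) == b'secret'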
+
diff --git a/third_party/waf/waflib/extras/eclipse.py b/third_party/waf/waflib/extras/eclipse.py
new file mode 100644
index 0000000..49ca968
--- /dev/null
+++ b/third_party/waf/waflib/extras/eclipse.py
@@ -0,0 +1,501 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Eclipse CDT 5.0 generator for Waf
+# Richard Quirk 2009-2011 (New BSD License)
+# Thomas Nagy 2011 (ported to Waf 1.6)
+
+"""
+Usage:
+
+def options(opt):
+	opt.load('eclipse')
+
+To add targets besides the standard ones (configure, dist, install, check),
+the ECLIPSE_EXTRA_TARGETS environment value can be set (e.g. to ['test', 'lint', 'docs'])
+
+$ waf configure eclipse
+"""
+
+import sys, os
+from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
+from xml.dom.minidom import Document
+
+STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]
+
+oe_cdt = 'org.eclipse.cdt'
+cdt_mk = oe_cdt + '.make.core'
+cdt_core = oe_cdt + '.core'
+cdt_bld = oe_cdt + '.build.core'
+extbuilder_dir = '.externalToolBuilders'
+extbuilder_name = 'Waf_Builder.launch'
+settings_dir = '.settings'
+settings_name = 'language.settings.xml'
+
+class eclipse(Build.BuildContext):
+	cmd = 'eclipse'
+	fun = Scripting.default_cmd
+
+	def execute(self):
+		"""
+		Entry point
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
+
+		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+		self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])
+
+	# Helper to write the XML document to a file with UTF-8 encoding
+	def write_conf_to_xml(self, filename, document):
+		self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb')
+
+	def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
+		"""
+		Create the Eclipse CDT .project and .cproject files
+		@param appname The name that will appear in the Project Explorer
+		@param workspace_includes Optional project includes to prevent
+			  "Unresolved Inclusion" errors in the Eclipse editor
+		@param pythonpath Optional project specific python paths
+		"""
+		hasc = hasjava = haspython = False
+		source_dirs = []
+		cpppath = self.env['CPPPATH']
+		javasrcpath = []
+		javalibpath = []
+		includes = STANDARD_INCLUDES
+		if sys.platform != 'win32':
+			cc = self.env.CC or self.env.CXX
+			if cc:
+				cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
+				try:
+					gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
+				except Errors.WafError:
+					pass
+				else:
+					includes = []
+					for ipath in gccout:
+						if ipath.startswith(' /'):
+							includes.append(ipath[1:])
+			cpppath += includes
+		Logs.warn('Generating Eclipse CDT project files')
+
+		for g in self.groups:
+			for tg in g:
+				if not isinstance(tg, TaskGen.task_gen):
+					continue
+
+				tg.post()
+
+				# Add local Python modules paths to configuration so object resolving will work in IDE
+				# This may also contain generated files (ie. pyqt5 or protoc) that get picked from build
+				if 'py' in tg.features:
+					pypath = tg.path.relpath()
+					py_installfrom = getattr(tg, 'install_from', None)
+					if isinstance(py_installfrom, Node.Node):
+						pypath = py_installfrom.path_from(self.root.make_node(self.top_dir))
+					if pypath not in pythonpath:
+						pythonpath.append(pypath)
+					haspython = True
+
+				# Add Java source directories so object resolving works in IDE
+				# This may also contain generated files (ie. protoc) that get picked from build
+				if 'javac' in tg.features:
+					java_src = tg.path.relpath()
+					java_srcdir = getattr(tg.javac_task, 'srcdir', None)
+					if java_srcdir:
+						if isinstance(java_srcdir, Node.Node):
+							java_srcdir = [java_srcdir]
+						for x in Utils.to_list(java_srcdir):
+							x = x.path_from(self.root.make_node(self.top_dir))
+							if x not in javasrcpath:
+								javasrcpath.append(x)
+					else:
+						if java_src not in javasrcpath:
+							javasrcpath.append(java_src)
+					hasjava = True
+
+					# Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse
+					usedlibs=getattr(tg, 'use', [])
+					for x in Utils.to_list(usedlibs):
+						for cl in Utils.to_list(tg.env['CLASSPATH_'+x]):
+							if cl not in javalibpath:
+								javalibpath.append(cl)
+
+				if not getattr(tg, 'link_task', None):
+					continue
+
+				features = Utils.to_list(getattr(tg, 'features', ''))
+
+				is_cc = 'c' in features or 'cxx' in features
+
+				incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
+				for p in incnodes:
+					path = p.path_from(self.srcnode)
+
+					if (path.startswith("/")):
+						if path not in cpppath:
+							cpppath.append(path)
+					else:
+						if path not in workspace_includes:
+							workspace_includes.append(path)
+
+					if is_cc and path not in source_dirs:
+						source_dirs.append(path)
+
+					hasc = True
+
+		waf_executable = os.path.abspath(sys.argv[0])
+		project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable)
+		self.write_conf_to_xml('.project', project)
+
+		if hasc:
+			project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs)
+			self.write_conf_to_xml('.cproject', project)
+
+		if haspython:
+			project = self.impl_create_pydevproject(sys.path, pythonpath)
+			self.write_conf_to_xml('.pydevproject', project)
+
+		if hasjava:
+			project = self.impl_create_javaproject(javasrcpath, javalibpath)
+			self.write_conf_to_xml('.classpath', project)
+
+		# Create editor language settings to have correct standards applied in IDE, as per project configuration
+		try:
+			os.mkdir(settings_dir)
+		except OSError:
+			pass	# Ignore if dir already exists
+
+		lang_settings = Document()
+		project = lang_settings.createElement('project')
+
+		# Language configurations for C and C++ via cdt
+		if hasc:
+			configuration = self.add(lang_settings, project, 'configuration',
+							{'id' : 'org.eclipse.cdt.core.default.config.1', 'name': 'Default'})
+
+			extension = self.add(lang_settings, configuration, 'extension', {'point': 'org.eclipse.cdt.core.LanguageSettingsProvider'})
+
+			provider = self.add(lang_settings, extension, 'provider',
+							{ 'copy-of': 'extension',
+							  'id': 'org.eclipse.cdt.ui.UserLanguageSettingsProvider'})
+
+			provider = self.add(lang_settings, extension, 'provider-reference',
+							{ 'id': 'org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider',
+							  'ref': 'shared-provider'})
+
+			provider = self.add(lang_settings, extension, 'provider-reference',
+							{ 'id': 'org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider',
+							  'ref': 'shared-provider'})
+
+			# C and C++ are kept as separated providers so appropriate flags are used also in mixed projects
+			if self.env.CC:
+				provider = self.add(lang_settings, extension, 'provider',
+							{ 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector',
+							  'console': 'false',
+							  'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.1',
+							  'keep-relative-paths' : 'false',
+							  'name': 'CDT GCC Built-in Compiler Settings',
+							  'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CC[0],' '.join(self.env['CFLAGS'])),
+							  'prefer-non-shared': 'true' })
+
+				self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.gcc'})
+
+			if self.env.CXX:
+				provider = self.add(lang_settings, extension, 'provider',
+							{ 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector',
+							  'console': 'false',
+							  'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.2',
+							  'keep-relative-paths' : 'false',
+							  'name': 'CDT GCC Built-in Compiler Settings',
+							  'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CXX[0],' '.join(self.env['CXXFLAGS'])),
+							  'prefer-non-shared': 'true' })
+				self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.g++'})
+
+		lang_settings.appendChild(project)
+		self.write_conf_to_xml('%s%s%s'%(settings_dir, os.path.sep, settings_name), lang_settings)
+
+	def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable):
+		doc = Document()
+		projectDescription = doc.createElement('projectDescription')
+		self.add(doc, projectDescription, 'name', appname)
+		self.add(doc, projectDescription, 'comment')
+		self.add(doc, projectDescription, 'projects')
+		buildSpec = self.add(doc, projectDescription, 'buildSpec')
+		buildCommand = self.add(doc, buildSpec, 'buildCommand')
+		self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
+		arguments = self.add(doc, buildCommand, 'arguments')
+		dictionaries = {}
+
+		# If CDT is present, instruct it to call waf, as that is more flexible (separate build/clean ...)
+		if hasc:
+			self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
+			# the default make-style targets are overwritten by the .cproject values
+			dictionaries = {
+					cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
+					cdt_mk + '.enableAutoBuild': 'false',
+					cdt_mk + '.enableCleanBuild': 'true',
+					cdt_mk + '.enableFullBuild': 'true',
+					}
+		else:
+			# Otherwise for Java/Python an external builder tool is created that will call waf build
+			self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder')
+			dictionaries = {
+					'LaunchConfigHandle': '<project>/%s/%s'%(extbuilder_dir, extbuilder_name),
+					}
+			# The definition is in a separate directory XML file
+			try:
+				os.mkdir(extbuilder_dir)
+			except OSError:
+				pass	# Ignore error if already exists
+
+			# Populate here the external builder XML calling waf
+			builder = Document()
+			launchConfiguration = doc.createElement('launchConfiguration')
+			launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType')
+			self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'})
+			self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
+			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable})
+			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'})
+			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'})
+			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'})
+			builder.appendChild(launchConfiguration)
+			# And write the XML to the file referenced before
+			self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder)
+
+
+		for k, v in dictionaries.items():
+			self.addDictionary(doc, arguments, k, v)
+
+		natures = self.add(doc, projectDescription, 'natures')
+
+		if hasc:
+			nature_list = """
+				core.ccnature
+				managedbuilder.core.ScannerConfigNature
+				managedbuilder.core.managedBuildNature
+				core.cnature
+			""".split()
+			for n in nature_list:
+				self.add(doc, natures, 'nature', oe_cdt + '.' + n)
+
+		if haspython:
+			self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
+		if hasjava:
+			self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
+
+		doc.appendChild(projectDescription)
+		return doc
+
+	def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]):
+		doc = Document()
+		doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
+		cconf_id = cdt_core + '.default.config.1'
+		cproject = doc.createElement('cproject')
+		storageModule = self.add(doc, cproject, 'storageModule',
+				{'moduleId': cdt_core + '.settings'})
+		cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
+
+		storageModule = self.add(doc, cconf, 'storageModule',
+				{'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
+				 'id': cconf_id,
+				 'moduleId': cdt_core + '.settings',
+				 'name': 'Default'})
+
+		self.add(doc, storageModule, 'externalSettings')
+
+		extensions = self.add(doc, storageModule, 'extensions')
+		extension_list = """
+			VCErrorParser
+			MakeErrorParser
+			GCCErrorParser
+			GASErrorParser
+			GLDErrorParser
+		""".split()
+		self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
+		for e in extension_list:
+			self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
+
+		storageModule = self.add(doc, cconf, 'storageModule',
+				{'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
+		config = self.add(doc, storageModule, 'configuration',
+					{'artifactName': appname,
+					 'id': cconf_id,
+					 'name': 'Default',
+					 'parent': cdt_bld + '.prefbase.cfg'})
+		folderInfo = self.add(doc, config, 'folderInfo',
+							{'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
+
+		toolChain = self.add(doc, folderInfo, 'toolChain',
+				{'id': cdt_bld + '.prefbase.toolchain.1',
+				 'name': 'No ToolChain',
+				 'resourceTypeBasedDiscovery': 'false',
+				 'superClass': cdt_bld + '.prefbase.toolchain'})
+
+		self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
+
+		waf_build = '"%s" %s'%(waf_executable, eclipse.fun)
+		waf_clean = '"%s" clean'%(waf_executable)
+		self.add(doc, toolChain, 'builder',
+					{'autoBuildTarget': waf_build,
+					 'command': executable,
+					 'enableAutoBuild': 'false',
+					 'cleanBuildTarget': waf_clean,
+					 'enableIncrementalBuild': 'true',
+					 'id': cdt_bld + '.settings.default.builder.1',
+					 'incrementalBuildTarget': waf_build,
+					 'managedBuildOn': 'false',
+					 'name': 'Gnu Make Builder',
+					 'superClass': cdt_bld + '.settings.default.builder'})
+
+		tool_index = 1
+		for tool_name in ("Assembly", "GNU C++", "GNU C"):
+			tool = self.add(doc, toolChain, 'tool',
+					{'id': cdt_bld + '.settings.holder.' + str(tool_index),
+					 'name': tool_name,
+					 'superClass': cdt_bld + '.settings.holder'})
+			if cpppath or workspace_includes:
+				incpaths = cdt_bld + '.settings.holder.incpaths'
+				option = self.add(doc, tool, 'option',
+						{'id': incpaths + '.' +  str(tool_index),
+						 'name': 'Include Paths',
+						 'superClass': incpaths,
+						 'valueType': 'includePath'})
+				for i in workspace_includes:
+					self.add(doc, option, 'listOptionValue',
+								{'builtIn': 'false',
+								'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
+				for i in cpppath:
+					self.add(doc, option, 'listOptionValue',
+								{'builtIn': 'false',
+								'value': '"%s"'%(i)})
+			if tool_name == "GNU C++" or tool_name == "GNU C":
+				self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
+					'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
+					'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
+					'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
+			tool_index += 1
+
+		if source_dirs:
+			sourceEntries = self.add(doc, config, 'sourceEntries')
+			for i in source_dirs:
+				 self.add(doc, sourceEntries, 'entry',
+							{'excluding': i,
+							'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+							'kind': 'sourcePath',
+							'name': ''})
+				 self.add(doc, sourceEntries, 'entry',
+							{
+							'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+							'kind': 'sourcePath',
+							'name': i})
+
+		storageModule = self.add(doc, cconf, 'storageModule',
+							{'moduleId': cdt_mk + '.buildtargets'})
+		buildTargets = self.add(doc, storageModule, 'buildTargets')
+		def addTargetWrap(name, runAll):
+			return self.addTarget(doc, buildTargets, executable, name,
+								'"%s" %s'%(waf_executable, name), runAll)
+		addTargetWrap('configure', True)
+		addTargetWrap('dist', False)
+		addTargetWrap('install', False)
+		addTargetWrap('check', False)
+		for addTgt in self.env.ECLIPSE_EXTRA_TARGETS or []:
+			addTargetWrap(addTgt, False)
+
+		storageModule = self.add(doc, cproject, 'storageModule',
+							{'moduleId': 'cdtBuildSystem',
+							 'version': '4.0.0'})
+
+		self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
+
+		storageModule = self.add(doc, cproject, 'storageModule',
+							{'moduleId': 'org.eclipse.cdt.core.LanguageSettingsProviders'})
+
+		storageModule = self.add(doc, cproject, 'storageModule',
+							{'moduleId': 'scannerConfiguration'})
+
+		doc.appendChild(cproject)
+		return doc
+
+	def impl_create_pydevproject(self, system_path, user_path):
+		# create a pydevproject file
+		doc = Document()
+		doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
+		pydevproject = doc.createElement('pydev_project')
+		prop = self.add(doc, pydevproject,
+					   'pydev_property',
+					   'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
+		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
+		prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
+		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
+		# add waf's paths
+		wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
+		if wafadmin:
+			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+					{'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
+			for i in wafadmin:
+				self.add(doc, prop, 'path', i)
+		if user_path:
+			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+					{'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
+			for i in user_path:
+				self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i)
+
+		doc.appendChild(pydevproject)
+		return doc
+
+	def impl_create_javaproject(self, javasrcpath, javalibpath):
+		# create a .classpath file for java usage
+		doc = Document()
+		javaproject = doc.createElement('classpath')
+		if javasrcpath:
+			for i in javasrcpath:
+				self.add(doc, javaproject, 'classpathentry',
+					{'kind': 'src', 'path': i})
+
+		if javalibpath:
+			for i in javalibpath:
+				self.add(doc, javaproject, 'classpathentry',
+					{'kind': 'lib', 'path': i})
+
+		self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
+		self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
+		doc.appendChild(javaproject)
+		return doc
+
+	def addDictionary(self, doc, parent, k, v):
+		dictionary = self.add(doc, parent, 'dictionary')
+		self.add(doc, dictionary, 'key', k)
+		self.add(doc, dictionary, 'value', v)
+		return dictionary
+
+	def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
+		target = self.add(doc, buildTargets, 'target',
+						{'name': name,
+						 'path': '',
+						 'targetID': oe_cdt + '.build.MakeTargetBuilder'})
+		self.add(doc, target, 'buildCommand', executable)
+		self.add(doc, target, 'buildArguments', None)
+		self.add(doc, target, 'buildTarget', buildTarget)
+		self.add(doc, target, 'stopOnError', 'true')
+		self.add(doc, target, 'useDefaultCommand', 'false')
+		self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
+
+	def add(self, doc, parent, tag, value = None):
+		el = doc.createElement(tag)
+		if value:
+			if isinstance(value, str):
+				el.appendChild(doc.createTextNode(value))
+			elif isinstance(value, dict):
+				self.setAttributes(el, value)
+		parent.appendChild(el)
+		return el
+
+	def setAttributes(self, node, attrs):
+		for k, v in attrs.items():
+			node.setAttribute(k, v)
+
diff --git a/third_party/waf/waflib/extras/erlang.py b/third_party/waf/waflib/extras/erlang.py
new file mode 100644
index 0000000..0b93d9a
--- /dev/null
+++ b/third_party/waf/waflib/extras/erlang.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+# Przemyslaw Rzepecki, 2016
+
+"""
+Erlang support
+"""
+
+import re
+from waflib import Task, TaskGen
+from waflib.TaskGen import feature, after_method, before_method
+# to load the method "to_incnodes" below
+from waflib.Tools import ccroot
+
+# Those flags are required by the Erlang VM to execute/evaluate code in
+# non-interactive mode. It is used in this tool to create Erlang modules
+# documentation and run unit tests. The user can pass additional arguments to the
+# 'erl' command with ERL_FLAGS environment variable.
+EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
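+# For illustration, a resulting invocation looks like:
+#   erl -noshell -noinput -eval 'halt(0).'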
+
+def configure(conf):
+	conf.find_program('erlc', var='ERLC')
+	conf.find_program('erl', var='ERL')
+	conf.add_os_flags('ERLC_FLAGS')
+	conf.add_os_flags('ERL_FLAGS')
+	conf.env.ERLC_DEF_PATTERN = '-D%s'
+	conf.env.ERLC_INC_PATTERN = '-I%s'
+
+@TaskGen.extension('.erl')
+def process_erl_node(self, node):
+	tsk = self.create_task('erl', node, node.change_ext('.beam'))
+	tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
+	tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
+	tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
+	tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
+	tsk.cwd = tsk.outputs[0].parent
+
+class erl(Task.Task):
+	color = 'GREEN'
+	run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
+
+	def scan(task):
+		node = task.inputs[0]
+
+		deps = []
+		scanned = set([])
+		nodes_to_scan = [node]
+
+		for n in nodes_to_scan:
+			if n.abspath() in scanned:
+				continue
+
+			for i in re.findall(r'-include\("(.*)"\)\.', n.read()):
+				for d in task.erlc_incnodes:
+					r = d.find_node(i)
+					if r:
+						deps.append(r)
+						nodes_to_scan.append(r)
+						break
+			scanned.add(n.abspath())
+
+		return (deps, [])
+
+@TaskGen.extension('.beam')
+def process(self, node):
+	pass
+
+
+class erl_test(Task.Task):
+	color = 'BLUE'
+	run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
+
+@feature('eunit')
+@after_method('process_source')
+def add_erl_test_run(self):
+	test_modules = [t.outputs[0] for t in self.tasks]
+	test_task = self.create_task('erl_test')
+	test_task.set_inputs(self.source + test_modules)
+	test_task.cwd = test_modules[0].parent
+
+	test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
+
+	test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
+	test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
+	test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
+	test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
+
+
+class edoc(Task.Task):
+	color = 'BLUE'
+	run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
+	def keyword(self):
+		return 'Generating edoc'
+
+@feature('edoc')
+@before_method('process_source')
+def add_edoc_task(self):
+	# do not process the source; doing so would create a duplicate erl->beam task
+	self.meths.remove('process_source')
+	e = self.path.find_resource(self.source)
+	t = e.change_ext('.html')
+	png = t.parent.make_node('erlang.png')
+	css = t.parent.make_node('stylesheet.css')
+	tsk = self.create_task('edoc', e, [t, png, css])
+	tsk.cwd = tsk.outputs[0].parent
+	tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
+	tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
+	# TODO the above can break if a file path contains '"'
+
diff --git a/third_party/waf/waflib/extras/fast_partial.py b/third_party/waf/waflib/extras/fast_partial.py
new file mode 100644
index 0000000..90a9472
--- /dev/null
+++ b/third_party/waf/waflib/extras/fast_partial.py
@@ -0,0 +1,531 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2017-2018 (ita)
+
+"""
+A system for fast partial rebuilds
+
+Creating a large number of task objects up front can take some time.
+By making a few assumptions, it is possible to avoid posting and creating
+task objects for targets that are already up-to-date.
+
+On a silly benchmark the gain observed for 1M tasks can be 5m->10s
+for a single file change.
+
+Usage::
+
+	def options(opt):
+		opt.load('fast_partial')
+
+Assumptions:
+* Start with a clean build (run "waf distclean" after enabling)
+* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
+  try it in the folder generated by utils/genbench.py
+* For full project builds: no --targets and no pruning from subfolders
+* The installation phase is ignored
+* `use=` dependencies are specified up front even across build groups
+* Task generator source files are not obtained from globs
+
+Implementation details:
+* The first layer obtains file timestamps to recalculate file hashes only
+  when necessary (similar to md5_tstamp); the timestamps are then stored
+  in a dedicated pickle file
+* A second layer associates each task generator to a file set to help
+  detecting changes. Task generators are to create their tasks only when
+  the related files have been modified. A specific db file is created
+  to store such data (5m -> 1m10)
+* A third layer binds build context proxies onto task generators, replacing
+  the default context. While loading data for the full build uses more memory
+  (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s)
+* A fourth layer enables a 2-level cache on file signatures to
+  reduce the size of the main pickle file (13s -> 10s)
+"""
+
+import os
+from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils
+from waflib.TaskGen import feature, after_method, taskgen_method
+import waflib.Node
+
+DONE = 0
+DIRTY = 1
+NEEDED = 2
+
+SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram']
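+# the features above denote task generators with a link task, which are the
+# only ones this tool can skip wholesale when none of their files changed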
+
+TSTAMP_DB = '.wafpickle_tstamp_db_file'
+
+SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+
+class bld_proxy(object):
+	def __init__(self, bld):
+		object.__setattr__(self, 'bld', bld)
+
+		object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {}))
+		self.node_class.__module__ = 'waflib.Node'
+		self.node_class.ctx = self
+
+		object.__setattr__(self, 'root', self.node_class('', None))
+		for x in SAVED_ATTRS:
+			if x != 'root':
+				object.__setattr__(self, x, {})
+
+		self.fix_nodes()
+
+	def __setattr__(self, name, value):
+		bld = object.__getattribute__(self, 'bld')
+		setattr(bld, name, value)
+
+	def __delattr__(self, name):
+		bld = object.__getattribute__(self, 'bld')
+		delattr(bld, name)
+
+	def __getattribute__(self, name):
+		try:
+			return object.__getattribute__(self, name)
+		except AttributeError:
+			bld = object.__getattribute__(self, 'bld')
+			return getattr(bld, name)
+
+	def __call__(self, *k, **kw):
+		return self.bld(*k, **kw)
+
+	def fix_nodes(self):
+		for x in ('srcnode', 'path', 'bldnode'):
+			node = self.root.find_dir(getattr(self.bld, x).abspath())
+			object.__setattr__(self, x, node)
+
+	def set_key(self, store_key):
+		object.__setattr__(self, 'store_key', store_key)
+
+	def fix_tg_path(self, *tgs):
+		# changing Node objects on task generators is possible
+		# yet, all Node objects must belong to the same parent
+		for tg in tgs:
+			tg.path = self.root.make_node(tg.path.abspath())
+
+	def restore(self):
+		dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+		Logs.debug('rev_use: reading %s', dbfn)
+		try:
+			data = Utils.readf(dbfn, 'rb')
+		except (EnvironmentError, EOFError):
+			# handle missing file/empty file
+			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+		else:
+			try:
+				waflib.Node.pickle_lock.acquire()
+				waflib.Node.Nod3 = self.node_class
+				try:
+					data = Build.cPickle.loads(data)
+				except Exception as e:
+					Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
+				else:
+					for x in SAVED_ATTRS:
+						object.__setattr__(self, x, data.get(x, {}))
+			finally:
+				waflib.Node.pickle_lock.release()
+		self.fix_nodes()
+
+	def store(self):
+		data = {}
+		for x in Build.SAVED_ATTRS:
+			data[x] = getattr(self, x)
+		db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+
+		with waflib.Node.pickle_lock:
+			waflib.Node.Nod3 = self.node_class
+			try:
+				x = Build.cPickle.dumps(data, Build.PROTOCOL)
+			except Build.cPickle.PicklingError:
+				root = data['root']
+				for node_deps in data['node_deps'].values():
+					for idx, node in enumerate(node_deps):
+						# there may be more cross-context Node objects to fix,
+						# but this should be the main source
+						node_deps[idx] = root.find_node(node.abspath())
+				x = Build.cPickle.dumps(data, Build.PROTOCOL)
+
+		Logs.debug('rev_use: storing %s', db)
+		Utils.writef(db + '.tmp', x, m='wb')
+		try:
+			st = os.stat(db)
+			os.remove(db)
+			if not Utils.is_win32:
+				os.chown(db + '.tmp', st.st_uid, st.st_gid)
+		except (AttributeError, OSError):
+			pass
+		os.rename(db + '.tmp', db)
+
+class bld(Build.BuildContext):
+	def __init__(self, **kw):
+		super(bld, self).__init__(**kw)
+		self.hashes_md5_tstamp = {}
+
+	def __call__(self, *k, **kw):
+		# this is one way of doing it, one could use a task generator method too
+		bld = kw['bld'] = bld_proxy(self)
+		ret = TaskGen.task_gen(*k, **kw)
+		self.task_gen_cache_names = {}
+		self.add_to_group(ret, group=kw.get('group'))
+		ret.bld = bld
+		bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx))
+		return ret
+
+	def is_dirty(self):
+		return True
+
+	def store_tstamps(self):
+		# Called after a build is finished
+		# For each task generator, record all files involved in task objects
+		# optimization: done only if there was something built
+		do_store = False
+		try:
+			f_deps = self.f_deps
+		except AttributeError:
+			f_deps = self.f_deps = {}
+			self.f_tstamps = {}
+
+		allfiles = set()
+		for g in self.groups:
+			for tg in g:
+				try:
+					staleness = tg.staleness
+				except AttributeError:
+					staleness = DIRTY
+
+				if staleness != DIRTY:
+					# DONE case: there was nothing built
+					# NEEDED case: the tg was brought in because of 'use' propagation
+					# but nothing really changed for them, there may be incomplete
+					# tasks (object files) and in this case it is best to let the next build
+					# figure out if an input/output file changed
+					continue
+
+				do_cache = False
+				for tsk in tg.tasks:
+					if tsk.hasrun == Task.SUCCESS:
+						do_cache = True
+					elif tsk.hasrun == Task.SKIPPED:
+						pass
+					else:
+						# one failed task, clear the cache for this tg
+						try:
+							del f_deps[(tg.path.abspath(), tg.idx)]
+						except KeyError:
+							pass
+						else:
+							# just store the new state because there is a change
+							do_store = True
+
+						# skip the rest because there is no valid cache possible
+						break
+				else:
+					if not do_cache:
+						# all skipped, but is there anything in cache?
+						try:
+							f_deps[(tg.path.abspath(), tg.idx)]
+						except KeyError:
+							# probably cleared because a wscript file changed
+							# store it
+							do_cache = True
+
+					if do_cache:
+
+						# there was a rebuild, store the data structure too
+						tg.bld.store()
+
+						# all tasks skipped but no cache
+						# or a successful task build
+						do_store = True
+						st = set()
+						for tsk in tg.tasks:
+							st.update(tsk.inputs)
+							st.update(self.node_deps.get(tsk.uid(), []))
+
+						# TODO do last/when loading the tgs?
+						lst = []
+						for k in ('wscript', 'wscript_build'):
+							n = tg.path.find_node(k)
+							if n:
+								n.get_bld_sig()
+								lst.append(n.abspath())
+
+						lst.extend(sorted(x.abspath() for x in st))
+						allfiles.update(lst)
+						f_deps[(tg.path.abspath(), tg.idx)] = lst
+
+		for x in allfiles:
+			# f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds
+			self.f_tstamps[x] = self.hashes_md5_tstamp[x][0]
+
+		if do_store:
+			dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+			Logs.debug('rev_use: storing %s', dbfn)
+			dbfn_tmp = dbfn + '.tmp'
+			x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL)
+			Utils.writef(dbfn_tmp, x, m='wb')
+			os.rename(dbfn_tmp, dbfn)
+			Logs.debug('rev_use: stored %s', dbfn)
+
+	def store(self):
+		self.store_tstamps()
+		if self.producer.dirty:
+			Build.BuildContext.store(self)
+
+	def compute_needed_tgs(self):
+		# assume the 'use' keys are not modified during the build phase
+
+		dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+		Logs.debug('rev_use: Loading %s', dbfn)
+		try:
+			data = Utils.readf(dbfn, 'rb')
+		except (EnvironmentError, EOFError):
+			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+			self.f_deps = {}
+			self.f_tstamps = {}
+		else:
+			try:
+				self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
+			except Exception as e:
+				Logs.debug('rev_use: Could not unpickle the build cache %s: %r', dbfn, e)
+				self.f_deps = {}
+				self.f_tstamps = {}
+			else:
+				Logs.debug('rev_use: Loaded %s', dbfn)
+
+
+		# 1. obtain task generators that contain rebuilds
+		# 2. obtain the 'use' graph and its dual
+		stales = set()
+		reverse_use_map = Utils.defaultdict(list)
+		use_map = Utils.defaultdict(list)
+
+		for g in self.groups:
+			for tg in g:
+				if tg.is_stale():
+					stales.add(tg)
+
+				try:
+					lst = tg.use = Utils.to_list(tg.use)
+				except AttributeError:
+					pass
+				else:
+					for x in lst:
+						try:
+							xtg = self.get_tgen_by_name(x)
+						except Errors.WafError:
+							pass
+						else:
+							use_map[tg].append(xtg)
+							reverse_use_map[xtg].append(tg)
+
+		Logs.debug('rev_use: found %r stale tgs', len(stales))
+
+		# 3. dfs to post downstream tg as stale
+		visited = set()
+		def mark_down(tg):
+			if tg in visited:
+				return
+			visited.add(tg)
+			Logs.debug('rev_use: marking down %r as stale', tg.name)
+			tg.staleness = DIRTY
+			for x in reverse_use_map[tg]:
+				mark_down(x)
+		for tg in stales:
+			mark_down(tg)
+
+		# 4. dfs to find ancestor tgs to mark as needed
+		self.needed_tgs = needed_tgs = set()
+		def mark_needed(tg):
+			if tg in needed_tgs:
+				return
+			needed_tgs.add(tg)
+			if tg.staleness == DONE:
+				Logs.debug('rev_use: marking up %r as needed', tg.name)
+				tg.staleness = NEEDED
+			for x in use_map[tg]:
+				mark_needed(x)
+		for xx in visited:
+			mark_needed(xx)
+
+		# so we have the whole tg trees to post in the set "needed"
+		# load their build trees
+		for tg in needed_tgs:
+			tg.bld.restore()
+			tg.bld.fix_tg_path(tg)
+
+		# the stale ones should be fully built, while the needed ones
+		# may skip a few tasks, see create_compiled_task and apply_link_after below
+		Logs.debug('rev_use: number of needed task gens: %r', len(needed_tgs))
+
+	def post_group(self):
+		# assumption: we can ignore the folder/subfolders cuts
+		def tgpost(tg):
+			try:
+				f = tg.post
+			except AttributeError:
+				pass
+			else:
+				f()
+
+		if not self.targets or self.targets == '*':
+			for tg in self.groups[self.current_group]:
+				# this can cut quite a lot of tg objects
+				if tg in self.needed_tgs:
+					tgpost(tg)
+		else:
+			# default implementation
+			return Build.BuildContext.post_group(self)
+
+	def get_build_iterator(self):
+		if not self.targets or self.targets == '*':
+			self.compute_needed_tgs()
+		return Build.BuildContext.get_build_iterator(self)
+
+@taskgen_method
+def is_stale(self):
+	# assume no globs
+	self.staleness = DIRTY
+
+	# 1. the case of always stale targets
+	if getattr(self, 'always_stale', False):
+		return True
+
+	# 2. check if the db file exists
+	db = os.path.join(self.bld.variant_dir, Context.DBFILE)
+	try:
+		dbstat = os.stat(db).st_mtime
+	except OSError:
+		Logs.debug('rev_use: must post %r because this is a clean build', self.name)
+		return True
+
+	# 3.a check if the configuration exists
+	cache_node = self.bld.bldnode.find_node('c4che/build.config.py')
+	if not cache_node:
+		return True
+
+	# 3.b check if the configuration changed
+	if os.stat(cache_node.abspath()).st_mtime > dbstat:
+		Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
+		return True
+
+	# 3.c any tstamp data?
+	try:
+		f_deps = self.bld.f_deps
+	except AttributeError:
+		Logs.debug('rev_use: must post %r because there is no f_deps', self.name)
+		return True
+
+	# 4. check if this is the first build (no cache)
+	try:
+		lst = f_deps[(self.path.abspath(), self.idx)]
+	except KeyError:
+		Logs.debug('rev_use: must post %r because it has no cached data', self.name)
+		return True
+
+	try:
+		cache = self.bld.cache_tstamp_rev_use
+	except AttributeError:
+		cache = self.bld.cache_tstamp_rev_use = {}
+
+	# 5. check the timestamp of each dependency files listed is unchanged
+	f_tstamps = self.bld.f_tstamps
+	for x in lst:
+		try:
+			old_ts = f_tstamps[x]
+		except KeyError:
+			Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x)
+			return True
+
+		try:
+			try:
+				ts = cache[x]
+			except KeyError:
+				ts = cache[x] = os.stat(x).st_mtime
+		except OSError:
+			del f_deps[(self.path.abspath(), self.idx)]
+			Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x)
+			return True
+		else:
+			if ts != old_ts:
+				Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts)
+				return True
+
+	self.staleness = DONE
+	return False
+
+@taskgen_method
+def create_compiled_task(self, name, node):
+	# skip the creation of object files
+	# assumption: object-only targets are not skippable
+	if self.staleness == NEEDED:
+		# only libraries/programs can skip object files
+		for x in SKIPPABLE:
+			if x in self.features:
+				return None
+
+	out = '%s.%d.o' % (node.name, self.idx)
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	return task
+
+@feature(*SKIPPABLE)
+@after_method('apply_link')
+def apply_link_after(self):
+	# cprogram/cxxprogram might be unnecessary
+	if self.staleness != NEEDED:
+		return
+	for tsk in self.tasks:
+		tsk.hasrun = Task.SKIPPED
+
+def path_from(self, node):
+	# handle nodes of distinct types
+	if node.ctx is not self.ctx:
+		node = self.ctx.root.make_node(node.abspath())
+	return self.default_path_from(node)
+waflib.Node.Node.default_path_from = waflib.Node.Node.path_from
+waflib.Node.Node.path_from = path_from
+
+def h_file(self):
+	# similar to md5_tstamp.py, but with 2-layer cache
+	# global_cache for the build context common for all task generators
+	# local_cache for the build context proxy (one by task generator)
+	#
+	# the global cache is not persistent
+	# the local cache is persistent and meant for partial builds
+	#
+	# assume all calls are made from a single thread
+	#
+	filename = self.abspath()
+	st = os.stat(filename)
+
+	global_cache = self.ctx.bld.hashes_md5_tstamp
+	local_cache = self.ctx.hashes_md5_tstamp
+
+	if filename in global_cache:
+		# value already calculated in this build
+		cval = global_cache[filename]
+
+		# the value in global cache is assumed to be calculated once
+		# reverifying it could cause task generators
+		# to get distinct tstamp values, thus missing rebuilds
+		local_cache[filename] = cval
+		return cval[1]
+
+	if filename in local_cache:
+		cval = local_cache[filename]
+		if cval[0] == st.st_mtime:
+			# correct value from a previous build
+			# put it in the global cache
+			global_cache[filename] = cval
+			return cval[1]
+
+	ret = Utils.h_file(filename)
+	local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
+	return ret
+waflib.Node.Node.h_file = h_file
+
diff --git a/third_party/waf/waflib/extras/fc_bgxlf.py b/third_party/waf/waflib/extras/fc_bgxlf.py
new file mode 100644
index 0000000..cca1810
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_bgxlf.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_bgxlf')
+
+@conf
+def find_bgxlf(conf):
+	fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
+	conf.get_xlf_version(fc)
+	conf.env.FC_NAME = 'BGXLF'
+
+@conf
+def bg_flags(self):
+	self.env.SONAME_ST		 = ''
+	self.env.FCSHLIB_MARKER	= ''
+	self.env.FCSTLIB_MARKER	= ''
+	self.env.FCFLAGS_fcshlib   = ['-fPIC']
+	self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
+
+def configure(conf):
+	conf.find_bgxlf()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.xlf_flags()
+	conf.bg_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_cray.py b/third_party/waf/waflib/extras/fc_cray.py
new file mode 100644
index 0000000..da733fa
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_cray.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_cray')
+
+@conf
+def find_crayftn(conf):
+	"""Find the Cray fortran compiler (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['crayftn'], var='FC')
+	conf.get_crayftn_version(fc)
+	conf.env.FC_NAME = 'CRAY'
+	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def crayftn_flags(conf):
+	v = conf.env
+	v['_FCMODOUTFLAGS']  = ['-em', '-J.'] # enable module files and put them in the current directory
+	v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
+	v['FCFLAGS_fcshlib']   = ['-h pic']
+	v['LINKFLAGS_fcshlib'] = ['-h shared']
+
+	v['FCSTLIB_MARKER'] = '-h static'
+	v['FCSHLIB_MARKER'] = '-h dynamic'
+
+@conf
+def get_crayftn_version(conf, fc):
+		version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+		cmd = fc + ['-V']
+		out,err = fc_config.getoutput(conf, cmd, stdin=False)
+		if out:
+			match = version_re(out)
+		else:
+			match = version_re(err)
+		if not match:
+				conf.fatal('Could not determine the Cray Fortran compiler version.')
+		k = match.groupdict()
+		conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_crayftn()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.crayftn_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_fujitsu.py b/third_party/waf/waflib/extras/fc_fujitsu.py
new file mode 100644
index 0000000..cae676c
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_fujitsu.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Detection of the Fujitsu Fortran compiler for ARM64FX
+
+import re
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_fujitsu')
+
+@conf
+def find_fujitsu(conf):
+	fc=conf.find_program(['frtpx'],var='FC')
+	conf.get_fujitsu_version(fc)
+	conf.env.FC_NAME='FUJITSU'
+	conf.env.FC_MOD_CAPITALIZATION='lower'
+
+@conf
+def fujitsu_flags(conf):
+	v=conf.env
+	v['_FCMODOUTFLAGS']=[]
+	v['FCFLAGS_DEBUG']=[]
+	v['FCFLAGS_fcshlib']=[]
+	v['LINKFLAGS_fcshlib']=[]
+	v['FCSTLIB_MARKER']=''
+	v['FCSHLIB_MARKER']=''
+
+@conf
+def get_fujitsu_version(conf,fc):
+	version_re=re.compile(r"frtpx\s*\(FRT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
+	cmd=fc+['--version']
+	out,err=fc_config.getoutput(conf,cmd,stdin=False)
+	if out:
+		match=version_re(out)
+	else:
+		match=version_re(err)
+	if not match:
+		conf.fatal('Could not determine the Fujitsu FRT Fortran compiler version.')
+	else:
+		k=match.groupdict()
+		conf.env['FC_VERSION']=(k['major'],k['minor'])
+
+def configure(conf):
+	conf.find_fujitsu()
+	conf.find_program('ar',var='AR')
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS=['rcs']
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.fujitsu_flags()
diff --git a/third_party/waf/waflib/extras/fc_nag.py b/third_party/waf/waflib/extras/fc_nag.py
new file mode 100644
index 0000000..edcb218
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_nag.py
@@ -0,0 +1,61 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_nag')
+
+@conf
+def find_nag(conf):
+	"""Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['nagfor'], var='FC')
+	conf.get_nag_version(fc)
+	conf.env.FC_NAME = 'NAG'
+	conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def nag_flags(conf):
+	v = conf.env
+	v.FCFLAGS_DEBUG = ['-C=all']
+	v.FCLNK_TGT_F = ['-o', '']
+	v.FC_TGT_F = ['-c', '-o', '']
+
+@conf
+def nag_modifier_platform(conf):
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
+	if nag_modifier_func:
+		nag_modifier_func()
+
+@conf
+def get_nag_version(conf, fc):
+	"""Get the NAG compiler version"""
+
+	version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
+	cmd = fc + ['-V']
+
+	out, err = fc_config.getoutput(conf,cmd,stdin=False)
+	if out:
+		match = version_re(out)
+		if not match:
+			match = version_re(err)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the NAG version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_nag()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.nag_flags()
+	conf.nag_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_nec.py b/third_party/waf/waflib/extras/fc_nec.py
new file mode 100644
index 0000000..67c8680
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_nec.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_nec')
+
+@conf
+def find_sxfc(conf):
+	"""Find the NEC fortran compiler (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['sxf90','sxf03'], var='FC')
+	conf.get_sxfc_version(fc)
+	conf.env.FC_NAME = 'NEC'
+	conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def sxfc_flags(conf):
+	v = conf.env
+	v['_FCMODOUTFLAGS']  = []
+	v['FCFLAGS_DEBUG'] = []
+	v['FCFLAGS_fcshlib']   = []
+	v['LINKFLAGS_fcshlib'] = []
+
+	v['FCSTLIB_MARKER'] = ''
+	v['FCSHLIB_MARKER'] = ''
+
+@conf
+def get_sxfc_version(conf, fc):
+	version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-V']
+	out,err = fc_config.getoutput(conf, cmd, stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
+		if out:
+			match = version_re(out)
+		else:
+			match = version_re(err)
+		if not match:
+			conf.fatal('Could not determine the NEC Fortran compiler version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_sxfc()
+	conf.find_program('sxar',var='AR')
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS=['rcs']
+
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.sxfc_flags()
diff --git a/third_party/waf/waflib/extras/fc_nfort.py b/third_party/waf/waflib/extras/fc_nfort.py
new file mode 100644
index 0000000..c25886b
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_nfort.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Detection of the NEC Fortran compiler for Aurora Tsubasa
+
+import re
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_nfort')
+
+@conf
+def find_nfort(conf):
+	fc=conf.find_program(['nfort'],var='FC')
+	conf.get_nfort_version(fc)
+	conf.env.FC_NAME='NFORT'
+	conf.env.FC_MOD_CAPITALIZATION='lower'
+
+@conf
+def nfort_flags(conf):
+	v=conf.env
+	v['_FCMODOUTFLAGS']=[]
+	v['FCFLAGS_DEBUG']=[]
+	v['FCFLAGS_fcshlib']=[]
+	v['LINKFLAGS_fcshlib']=[]
+	v['FCSTLIB_MARKER']=''
+	v['FCSHLIB_MARKER']=''
+
+@conf
+def get_nfort_version(conf,fc):
+	version_re=re.compile(r"nfort\s*\(NFORT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
+	cmd=fc+['--version']
+	out,err=fc_config.getoutput(conf,cmd,stdin=False)
+	if out:
+		match=version_re(out)
+	else:
+		match=version_re(err)
+	if not match:
+		conf.fatal('Could not determine the NEC NFORT Fortran compiler version.')
+	else:
+		k=match.groupdict()
+		conf.env['FC_VERSION']=(k['major'],k['minor'])
+
+def configure(conf):
+	conf.find_nfort()
+	conf.find_program('nar',var='AR')
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS=['rcs']
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.nfort_flags()
diff --git a/third_party/waf/waflib/extras/fc_open64.py b/third_party/waf/waflib/extras/fc_open64.py
new file mode 100644
index 0000000..413719f
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_open64.py
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_open64')
+
+@conf
+def find_openf95(conf):
+	"""Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['openf95', 'openf90'], var='FC')
+	conf.get_open64_version(fc)
+	conf.env.FC_NAME = 'OPEN64'
+	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def openf95_flags(conf):
+	v = conf.env
+	v['FCFLAGS_DEBUG'] = ['-fullwarn']
+
+@conf
+def openf95_modifier_platform(conf):
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
+	if openf95_modifier_func:
+		openf95_modifier_func()
+
+@conf
+def get_open64_version(conf, fc):
+	"""Get the Open64 compiler version"""
+
+	version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-version']
+
+	out, err = fc_config.getoutput(conf,cmd,stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the Open64 version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_openf95()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.openf95_flags()
+	conf.openf95_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_pgfortran.py b/third_party/waf/waflib/extras/fc_pgfortran.py
new file mode 100644
index 0000000..afb2817
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_pgfortran.py
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_pgfortran')
+
+@conf
+def find_pgfortran(conf):
+	"""Find the PGI fortran compiler (will look in the environment variable 'FC')"""
+	fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
+	conf.get_pgfortran_version(fc)
+	conf.env.FC_NAME = 'PGFC'
+
+@conf
+def pgfortran_flags(conf):
+	v = conf.env
+	v['FCFLAGS_fcshlib']   = ['-shared']
+	v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # verbose diagnostics and standard conformance checks
+	v['FCSTLIB_MARKER'] = '-Bstatic'
+	v['FCSHLIB_MARKER'] = '-Bdynamic'
+	v['SONAME_ST']	  = '-soname %s'
+
+@conf
+def get_pgfortran_version(conf,fc):
+		version_re = re.compile(r"The Portland Group", re.I).search
+		cmd = fc + ['-V']
+		out,err = fc_config.getoutput(conf, cmd, stdin=False)
+		if out:
+			match = version_re(out)
+		else:
+			match = version_re(err)
+		if not match:
+				conf.fatal('Could not verify PGI signature')
+		cmd = fc + ['-help=variable']
+		out,err = fc_config.getoutput(conf, cmd, stdin=False)
+		if out.find('COMPVER')<0:
+				conf.fatal('Could not determine the compiler version (COMPVER not reported)')
+		k = {}
+		prevk = ''
+		out = out.splitlines()
+		for line in out:
+				lst = line.partition('=')
+				if lst[1] == '=':
+						key = lst[0].rstrip()
+						if key == '':
+							key = prevk
+						val = lst[2].rstrip()
+						k[key] = val
+				else:
+					prevk = line.partition(' ')[0]
+		def isD(var):
+				return var in k
+		def isT(var):
+				return var in k and k[var]!='0'
+		conf.env['FC_VERSION'] = tuple(k['COMPVER'].split('.'))
+
+def configure(conf):
+	conf.find_pgfortran()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.pgfortran_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_solstudio.py b/third_party/waf/waflib/extras/fc_solstudio.py
new file mode 100644
index 0000000..53766df
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_solstudio.py
@@ -0,0 +1,62 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_solstudio')
+
+@conf
+def find_solstudio(conf):
+	"""Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
+	conf.get_solstudio_version(fc)
+	conf.env.FC_NAME = 'SOL'
+
+@conf
+def solstudio_flags(conf):
+	v = conf.env
+	v['FCFLAGS_fcshlib'] = ['-Kpic']
+	v['FCFLAGS_DEBUG'] = ['-w3']
+	v['LINKFLAGS_fcshlib'] = ['-G']
+	v['FCSTLIB_MARKER'] = '-Bstatic'
+	v['FCSHLIB_MARKER'] = '-Bdynamic'
+	v['SONAME_ST']      = '-h %s'
+
+@conf
+def solstudio_modifier_platform(conf):
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
+	if solstudio_modifier_func:
+		solstudio_modifier_func()
+
+@conf
+def get_solstudio_version(conf, fc):
+	"""Get the compiler version"""
+
+	version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+	cmd = fc + ['-V']
+
+	out, err = fc_config.getoutput(conf,cmd,stdin=False)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+	if not match:
+		conf.fatal('Could not determine the Sun Studio Fortran version.')
+	k = match.groupdict()
+	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+	conf.find_solstudio()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.solstudio_flags()
+	conf.solstudio_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_xlf.py b/third_party/waf/waflib/extras/fc_xlf.py
new file mode 100644
index 0000000..5a3da03
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_xlf.py
@@ -0,0 +1,63 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils,Errors
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['aix'].insert(0, 'fc_xlf')
+
+@conf
+def find_xlf(conf):
+	"""Find the xlf program (will look in the environment variable 'FC')"""
+
+	fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
+	conf.get_xlf_version(fc)
+	conf.env.FC_NAME='XLF'
+
+@conf
+def xlf_flags(conf):
+	v = conf.env
+	v['FCDEFINES_ST'] = '-WF,-D%s'
+	v['FCFLAGS_fcshlib'] = ['-qpic=small']
+	v['FCFLAGS_DEBUG'] = ['-qhalt=w']
+	v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']
+
+@conf
+def xlf_modifier_platform(conf):
+	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
+	if xlf_modifier_func:
+		xlf_modifier_func()
+
+@conf
+def get_xlf_version(conf, fc):
+	"""Get the compiler version"""
+
+	cmd = fc + ['-qversion']
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find xlf %r' % cmd)
+
+	for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+		version_re = re.compile(v, re.I).search
+		match = version_re(out or err)
+		if match:
+			k = match.groupdict()
+			conf.env['FC_VERSION'] = (k['major'], k['minor'])
+			break
+	else:
+		conf.fatal('Could not determine the XLF version.')
+
+def configure(conf):
+	conf.find_xlf()
+	conf.find_ar()
+	conf.fc_flags()
+	conf.fc_add_flags()
+	conf.xlf_flags()
+	conf.xlf_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/file_to_object.py b/third_party/waf/waflib/extras/file_to_object.py
new file mode 100644
index 0000000..13d2aef
--- /dev/null
+++ b/third_party/waf/waflib/extras/file_to_object.py
@@ -0,0 +1,142 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to embed file into objects
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows embedding file contents in object files (.o).
+It is not exactly portable, and the embedded contents are reachable
+in various non-portable ways.
+The goal here is to provide a functional interface to the embedding
+of file data in objects.
+See the ``playground/embedded_resources`` example for an example.
+
+Usage::
+
+   bld(
+    name='pipeline',
+     # ^ Reference this in use="..." for things using the generated code
+    features='file_to_object',
+    source='some.file',
+     # ^ Name of the file to embed in binary section.
+   )
+
+Known issues:
+
+- The destination is named after the source, with the extension replaced by .o,
+  e.g. some.file -> some.o
+
+"""
+
+import os, sys
+from waflib import Task, TaskGen, Errors
+
+def filename_c_escape(x):
+	return x.replace("\\", "\\\\")
+
+class file_to_object_s(Task.Task):
+	color = 'CYAN'
+	vars = ['DEST_CPU', 'DEST_BINFMT']
+
+	def run(self):
+		name = []
+		for i, x in enumerate(self.inputs[0].name):
+			if x.isalnum():
+				name.append(x)
+			else:
+				name.append('_')
+		file = self.inputs[0].abspath()
+		size = os.path.getsize(file)
+		if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
+			unit = 'quad'
+			align = 8
+		elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
+			unit = 'long'
+			align = 4
+		else:
+			raise Errors.WafError("Unsupported DEST_CPU, please report bug!")
+
+		file = filename_c_escape(file)
+		name = "_binary_" + "".join(name)
+		rodata = ".section .rodata"
+		if self.env.DEST_BINFMT == "mac-o":
+			name = "_" + name
+			rodata = ".section __TEXT,__const"
+
+		with open(self.outputs[0].abspath(), 'w') as f:
+			f.write(\
+"""
+	.global %(name)s_start
+	.global %(name)s_end
+	.global %(name)s_size
+	%(rodata)s
+%(name)s_start:
+	.incbin "%(file)s"
+%(name)s_end:
+	.align %(align)d
+%(name)s_size:
+	.%(unit)s 0x%(size)x
+""" % locals())
+
+class file_to_object_c(Task.Task):
+	color = 'CYAN'
+	def run(self):
+		name = []
+		for i, x in enumerate(self.inputs[0].name):
+			if x.isalnum():
+				name.append(x)
+			else:
+				name.append('_')
+		file = self.inputs[0].abspath()
+		size = os.path.getsize(file)
+
+		name = "_binary_" + "".join(name)
+
+		def char_to_num(ch):
+			if sys.version_info[0] < 3:
+				return ord(ch)
+			return ch
+
+		data = self.inputs[0].read('rb')
+		lines, line = [], []
+		for idx_byte, byte in enumerate(data):
+			line.append(byte)
+			if len(line) > 15 or idx_byte == size-1:
+				lines.append(", ".join(("0x%02x" % char_to_num(x)) for x in line))
+				line = []
+		data = ",\n ".join(lines)
+
+		self.outputs[0].write(\
+"""
+unsigned long %(name)s_size = %(size)dL;
+char const %(name)s_start[] = {
+ %(data)s
+};
+char const %(name)s_end[] = {};
+""" % locals())
+
+@TaskGen.feature('file_to_object')
+@TaskGen.before_method('process_source')
+def tg_file_to_object(self):
+	bld = self.bld
+	sources = self.to_nodes(self.source)
+	targets = []
+	for src in sources:
+		if bld.env.F2O_METHOD == ["asm"]:
+			tgt = src.parent.find_or_declare(src.name + '.f2o.s')
+			tsk = self.create_task('file_to_object_s', src, tgt)
+			tsk.cwd = src.parent.abspath() # verify
+		else:
+			tgt = src.parent.find_or_declare(src.name + '.f2o.c')
+			tsk = self.create_task('file_to_object_c', src, tgt)
+			tsk.cwd = src.parent.abspath() # verify
+		targets.append(tgt)
+	self.source = targets
+
+def configure(conf):
+	conf.load('gas')
+	conf.env.F2O_METHOD = ["c"]
+
diff --git a/third_party/waf/waflib/extras/fluid.py b/third_party/waf/waflib/extras/fluid.py
new file mode 100644
index 0000000..4814a35
--- /dev/null
+++ b/third_party/waf/waflib/extras/fluid.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+# encoding: utf-8
+# Grygoriy Fuchedzhy 2009
+
+"""
+Compile fluid files (FLTK graphics library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
+"""
+
+from waflib import Task
+from waflib.TaskGen import extension
+
+class fluid(Task.Task):
+	color   = 'BLUE'
+	ext_out = ['.h']
+	run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'
+
+@extension('.fl')
+def process_fluid(self, node):
+	"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
+	cpp = node.change_ext('.cpp')
+	hpp = node.change_ext('.hpp')
+	self.create_task('fluid', node, [cpp, hpp])
+
+	if 'cxx' in self.features:
+		self.source.append(cpp)
+
+def configure(conf):
+	conf.find_program('fluid', var='FLUID')
+	conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
+
diff --git a/third_party/waf/waflib/extras/freeimage.py b/third_party/waf/waflib/extras/freeimage.py
new file mode 100644
index 0000000..f27e525
--- /dev/null
+++ b/third_party/waf/waflib/extras/freeimage.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+To add the freeimage tool to the waf file:
+$ ./waf-light --tools=compat15,freeimage
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=freeimage
+
+The wscript will look like:
+
+def options(opt):
+	opt.load('compiler_cxx freeimage')
+
+def configure(conf):
+	conf.load('compiler_cxx freeimage')
+
+	# you can call check_freeimage with some parameters.
+	# It's optional on Linux, it's 'mandatory' on Windows if
+	# you didn't use --fi-path on the command-line
+
+	# conf.check_freeimage(path='FreeImage/Dist', fip=True)
+
+def build(bld):
+	bld(source='main.cpp', target='app', use='FREEIMAGE')
+'''
+
+from waflib import Utils
+from waflib.Configure import conf
+
+
+def options(opt):
+	opt.add_option('--fi-path', type='string', default='', dest='fi_path',
+				   help='''path to the FreeImage directory \
+						where the files are e.g. /FreeImage/Dist''')
+	opt.add_option('--fip', action='store_true', default=False, dest='fip',
+				   help='link with FreeImagePlus')
+	opt.add_option('--fi-static', action='store_true',
+				   default=False, dest='fi_static',
+				   help="link statically against FreeImage")
+
+
+@conf
+def check_freeimage(self, path=None, fip=False):
+	self.start_msg('Checking FreeImage')
+	if not self.env['CXX']:
+		self.fatal('you must load compiler_cxx before loading freeimage')
+	prefix = self.options.fi_static and 'ST' or ''
+	platform = Utils.unversioned_sys_platform()
+	if platform == 'win32':
+		if not path:
+			self.fatal('you must specify the path to FreeImage. \
+					   use --fi-path=/FreeImage/Dist')
+		else:
+			self.env['INCLUDES_FREEIMAGE'] = path
+			self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
+	libs = ['FreeImage']
+	if self.options.fip:
+		libs.append('FreeImagePlus')
+	if platform == 'win32':
+		self.env['%sLIB_FREEIMAGE' % prefix] = libs
+	else:
+		self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
+	self.end_msg('ok')
+
+
+def configure(conf):
+	platform = Utils.unversioned_sys_platform()
+	if platform == 'win32' and not conf.options.fi_path:
+		return
+	conf.check_freeimage(conf.options.fi_path, conf.options.fip)
+
diff --git a/third_party/waf/waflib/extras/fsb.py b/third_party/waf/waflib/extras/fsb.py
new file mode 100644
index 0000000..1b8f398
--- /dev/null
+++ b/third_party/waf/waflib/extras/fsb.py
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Fully sequential builds
+
+The previous tasks from task generators are re-processed, and this may lead to speed issues.
+Yet, if you are using this, speed is probably a minor concern.
+"""
+
+from waflib import Build
+
+def options(opt):
+	pass
+
+def configure(conf):
+	pass
+
+class FSBContext(Build.BuildContext):
+	def __call__(self, *k, **kw):
+		ret = Build.BuildContext.__call__(self, *k, **kw)
+
+		# evaluate the results immediately
+		Build.BuildContext.compile(self)
+
+		return ret
+
+	def compile(self):
+		pass
+
diff --git a/third_party/waf/waflib/extras/fsc.py b/third_party/waf/waflib/extras/fsc.py
new file mode 100644
index 0000000..c67e70b
--- /dev/null
+++ b/third_party/waf/waflib/extras/fsc.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Experimental F# stuff
+
+FSC="mono /path/to/fsc.exe" waf configure build
+"""
+
+from waflib import Utils, Task
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Tools import ccroot, cs
+
+ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
+
+@feature('fs')
+@before_method('process_source')
+def apply_fsc(self):
+	cs_nodes = []
+	no_nodes = []
+	for x in self.to_nodes(self.source):
+		if x.name.endswith('.fs'):
+			cs_nodes.append(x)
+		else:
+			no_nodes.append(x)
+	self.source = no_nodes
+
+	bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
+	self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
+	tsk.env.CSTYPE = '/target:%s' % bintype
+	tsk.env.OUT    = '/out:%s' % tsk.outputs[0].abspath()
+
+	inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
+	if inst_to:
+		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
+		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
+		self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
+
+feature('fs')(cs.use_cs)
+after_method('apply_fsc')(cs.use_cs)
+
+feature('fs')(cs.debug_cs)
+after_method('apply_fsc', 'use_cs')(cs.debug_cs)
+
+class fsc(Task.Task):
+	"""
+	Compile F# files
+	"""
+	color   = 'YELLOW'
+	run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+
+def configure(conf):
+	"""
+	Find an F# compiler, set the variable FSC to the compiler and FS_NAME to 'mono' or 'fsc'
+	"""
+	conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
+	conf.env.ASS_ST = '/r:%s'
+	conf.env.RES_ST = '/resource:%s'
+
+	conf.env.FS_NAME = 'fsc'
+	if str(conf.env.FSC).lower().find('fsharpc') > -1:
+		conf.env.FS_NAME = 'mono'
+
diff --git a/third_party/waf/waflib/extras/gccdeps.py b/third_party/waf/waflib/extras/gccdeps.py
new file mode 100644
index 0000000..5d2f0dd
--- /dev/null
+++ b/third_party/waf/waflib/extras/gccdeps.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run.
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(conf):
+		conf.load('compiler_cxx gccdeps')
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils, Errors
+from waflib.Tools import asm, c, c_preproc, cxx
+from waflib.TaskGen import before_method, feature
+
+lock = threading.Lock()
+
+gccdeps_flags = ['-MD']
+if not c_preproc.go_absolute:
+	gccdeps_flags = ['-MMD']
+
+# Third-party tools are allowed to add extra names in here with append()
+supported_compilers = ['gas', 'gcc', 'icc', 'clang']
+
+re_o = re.compile(r"\.o$")
+re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
+
+def remove_makefile_rule_lhs(line):
+	# Splitting on a plain colon would accidentally match inside a
+	# Windows absolute-path filename, so we must search for a colon
+	# followed by whitespace to find the divider between LHS and RHS
+	# of the Makefile rule.
+	rulesep = ': '
+
+	sep_idx = line.find(rulesep)
+	if sep_idx >= 0:
+		return line[sep_idx + 2:]
+	else:
+		return line
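+
+# For example (hypothetical paths), only the first colon-plus-space splits the
+# rule, so a Windows drive letter such as "c:" survives intact:
+#   remove_makefile_rule_lhs('c:\\obj\\file.o: c:\\src\\dep.h') == 'c:\\src\\dep.h'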
+
+def path_to_node(base_node, path, cached_nodes):
+	# Take the base node and the path and return a node
+	# Results are cached because searching the node tree is expensive
+	# The following code is executed by threads, it is not safe, so a lock is needed...
+	if getattr(path, '__hash__'):
+		node_lookup_key = (base_node, path)
+	else:
+		# Not hashable, assume it is a list and join into a string
+		node_lookup_key = (base_node, os.path.sep.join(path))
+
+	try:
+		node = cached_nodes[node_lookup_key]
+	except KeyError:
+		# retry with lock on cache miss
+		with lock:
+			try:
+				node = cached_nodes[node_lookup_key]
+			except KeyError:
+				node = cached_nodes[node_lookup_key] = base_node.find_resource(path)
+
+	return node
+
+def post_run(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).post_run()
+
+	deps_filename = self.outputs[0].abspath()
+	deps_filename = re_o.sub('.d', deps_filename)
+	try:
+		deps_txt = Utils.readf(deps_filename)
+	except EnvironmentError:
+		Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
+		raise
+
+	# Compilers have the choice to either output the file's dependencies
+	# as one large Makefile rule:
+	#
+	#   /path/to/file.o: /path/to/dep1.h \
+	#                    /path/to/dep2.h \
+	#                    /path/to/dep3.h \
+	#                    ...
+	#
+	# or as many individual rules:
+	#
+	#   /path/to/file.o: /path/to/dep1.h
+	#   /path/to/file.o: /path/to/dep2.h
+	#   /path/to/file.o: /path/to/dep3.h
+	#   ...
+	#
+	# So the first step is to sanitize the input by stripping out the left-
+	# hand side of all these lines. After that, whatever remains are the
+	# implicit dependencies of task.outputs[0]
+	deps_txt = '\n'.join([remove_makefile_rule_lhs(line) for line in deps_txt.splitlines()])
+
+	# Now join all the lines together
+	deps_txt = deps_txt.replace('\\\n', '')
+
+	dep_paths = deps_txt.strip()
+	dep_paths = [x.replace('\\ ', ' ') for x in re_splitter.split(dep_paths) if x]
+
+	resolved_nodes = []
+	unresolved_names = []
+	bld = self.generator.bld
+
+	# Dynamically bind to the cache
+	try:
+		cached_nodes = bld.cached_nodes
+	except AttributeError:
+		cached_nodes = bld.cached_nodes = {}
+
+	for path in dep_paths:
+
+		node = None
+		if os.path.isabs(path):
+			node = path_to_node(bld.root, path, cached_nodes)
+		else:
+			# TODO waf 1.9 - single cwd value
+			base_node = getattr(bld, 'cwdx', bld.bldnode)
+			# when calling find_resource, make sure the path does not contain '..'
+			path = [k for k in Utils.split_path(path) if k and k != '.']
+			while '..' in path:
+				idx = path.index('..')
+				if idx == 0:
+					path = path[1:]
+					base_node = base_node.parent
+				else:
+					del path[idx]
+					del path[idx-1]
+
+			node = path_to_node(base_node, path, cached_nodes)
+
+		if not node:
+			raise ValueError('could not find %r for %r' % (path, self))
+
+		if id(node) == id(self.inputs[0]):
+			# ignore the source file, it is already in the dependencies
+			# this way, successful config tests may be retrieved from the cache
+			continue
+
+		resolved_nodes.append(node)
+
+	Logs.debug('deps: gccdeps for %s returned %s', self, resolved_nodes)
+
+	bld.node_deps[self.uid()] = resolved_nodes
+	bld.raw_deps[self.uid()] = unresolved_names
+
+	try:
+		del self.cache_sig
+	except AttributeError:
+		pass
+
+	Task.Task.post_run(self)
+
+def scan(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).scan()
+
+	resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+	unresolved_names = []
+	return (resolved_nodes, unresolved_names)
+
+def sig_implicit_deps(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).sig_implicit_deps()
+	bld = self.generator.bld
+
+	try:
+		return self.compute_sig_implicit_deps()
+	except Errors.TaskNotReady:
+		raise ValueError("Please specify the build order precisely with gccdeps (asm/c/c++ tasks)")
+	except EnvironmentError:
+		# If a file is renamed, assume the dependencies are stale and must be recalculated
+		for x in bld.node_deps.get(self.uid(), []):
+			if not x.is_bld() and not x.exists():
+				try:
+					del x.parent.children[x.name]
+				except KeyError:
+					pass
+
+	key = self.uid()
+	bld.node_deps[key] = []
+	bld.raw_deps[key] = []
+	return Utils.SIG_NIL
+
+def wrap_compiled_task(classname):
+	derived_class = type(classname, (Task.classes[classname],), {})
+	derived_class.derived_gccdeps = derived_class
+	derived_class.post_run = post_run
+	derived_class.scan = scan
+	derived_class.sig_implicit_deps = sig_implicit_deps
+
+for k in ('asm', 'c', 'cxx'):
+	if k in Task.classes:
+		wrap_compiled_task(k)
+
+@before_method('process_source')
+@feature('force_gccdeps')
+def force_gccdeps(self):
+	self.env.ENABLE_GCCDEPS = ['asm', 'c', 'cxx']
+
+def configure(conf):
+	# in case someone provides a --enable-gccdeps command-line option
+	if not getattr(conf.options, 'enable_gccdeps', True):
+		return
+
+	global gccdeps_flags
+	flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
+	if conf.env.ASM_NAME in supported_compilers:
+		try:
+			conf.check(fragment='', features='asm force_gccdeps', asflags=flags, compile_filename='test.S', msg='Checking for asm flags %r' % ''.join(flags))
+		except Errors.ConfigurationError:
+			pass
+		else:
+			conf.env.append_value('ASFLAGS', flags)
+			conf.env.append_unique('ENABLE_GCCDEPS', 'asm')
+
+	if conf.env.CC_NAME in supported_compilers:
+		try:
+			conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
+		except Errors.ConfigurationError:
+			pass
+		else:
+			conf.env.append_value('CFLAGS', flags)
+			conf.env.append_unique('ENABLE_GCCDEPS', 'c')
+
+	if conf.env.CXX_NAME in supported_compilers:
+		try:
+			conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
+		except Errors.ConfigurationError:
+			pass
+		else:
+			conf.env.append_value('CXXFLAGS', flags)
+			conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
+
+def options(opt):
+	raise ValueError('Do not load gccdeps options')
+
diff --git a/third_party/waf/waflib/extras/gdbus.py b/third_party/waf/waflib/extras/gdbus.py
new file mode 100644
index 0000000..0e0476e
--- /dev/null
+++ b/third_party/waf/waflib/extras/gdbus.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright Garmin International or its subsidiaries, 2018
+#
+# Heavily based on dbus.py
+
+"""
+Compiles dbus files with **gdbus-codegen**
+Typical usage::
+	def options(opt):
+		opt.load('compiler_c gdbus')
+	def configure(conf):
+		conf.load('compiler_c gdbus')
+	def build(bld):
+		tg = bld.program(
+			includes = '.',
+			source = bld.path.ant_glob('*.c'),
+			target = 'gnome-hello')
+		tg.add_gdbus_file('test.xml', 'com.example.example.', 'Example')
+"""
+
+from waflib import Task, Errors, Utils
+from waflib.TaskGen import taskgen_method, before_method
+
+@taskgen_method
+def add_gdbus_file(self, filename, prefix, namespace, export=False):
+	"""
+	Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
+	:param filename: xml file to compile
+	:type filename: string
+	:param prefix: interface prefix (--interface-prefix=prefix)
+	:type prefix: string
+	:param namespace: C namespace (--c-namespace=namespace)
+	:type namespace: string
+	:param export: Export Headers?
+	:type export: boolean
+	"""
+	if not hasattr(self, 'gdbus_lst'):
+		self.gdbus_lst = []
+	if not 'process_gdbus' in self.meths:
+		self.meths.append('process_gdbus')
+	self.gdbus_lst.append([filename, prefix, namespace, export])
+
+@before_method('process_source')
+def process_gdbus(self):
+	"""
+	Processes the dbus files stored in the attribute *gdbus_lst* to create :py:class:`gdbus_binding_tool` instances.
+	"""
+	output_node = self.path.get_bld().make_node(['gdbus', self.get_name()])
+	sources = []
+
+	for filename, prefix, namespace, export in getattr(self, 'gdbus_lst', []):
+		node = self.path.find_resource(filename)
+		if not node:
+			raise Errors.WafError('file not found ' + filename)
+		c_file = output_node.find_or_declare(node.change_ext('.c').name)
+		h_file = output_node.find_or_declare(node.change_ext('.h').name)
+		tsk = self.create_task('gdbus_binding_tool', node, [c_file, h_file])
+		tsk.cwd = output_node.abspath()
+
+		tsk.env.GDBUS_CODEGEN_INTERFACE_PREFIX = prefix
+		tsk.env.GDBUS_CODEGEN_NAMESPACE = namespace
+		tsk.env.GDBUS_CODEGEN_OUTPUT = node.change_ext('').name
+		sources.append(c_file)
+
+	if sources:
+		output_node.mkdir()
+		self.source = Utils.to_list(self.source) + sources
+		self.includes = [output_node] + self.to_incnodes(getattr(self, 'includes', []))
+		if export:
+			self.export_includes = [output_node] + self.to_incnodes(getattr(self, 'export_includes', []))
+
+class gdbus_binding_tool(Task.Task):
+	"""
+	Compiles a dbus file
+	"""
+	color   = 'BLUE'
+	ext_out = ['.h', '.c']
+	run_str = '${GDBUS_CODEGEN} --interface-prefix ${GDBUS_CODEGEN_INTERFACE_PREFIX} --generate-c-code ${GDBUS_CODEGEN_OUTPUT} --c-namespace ${GDBUS_CODEGEN_NAMESPACE} --c-generate-object-manager ${SRC[0].abspath()}'
+	shell = True
+
+def configure(conf):
+	"""
+	Detects the program gdbus-codegen and sets ``conf.env.GDBUS_CODEGEN``
+	"""
+	conf.find_program('gdbus-codegen', var='GDBUS_CODEGEN')
+
diff --git a/third_party/waf/waflib/extras/genpybind.py b/third_party/waf/waflib/extras/genpybind.py
new file mode 100644
index 0000000..ac206ee
--- /dev/null
+++ b/third_party/waf/waflib/extras/genpybind.py
@@ -0,0 +1,194 @@
+import os
+import pipes
+import subprocess
+import sys
+
+from waflib import Logs, Task, Context
+from waflib.Tools.c_preproc import scan as scan_impl
+# ^-- Note: waflib.extras.gccdeps.scan does not work for us,
+# due to its current implementation:
+# The -MD flag is injected into the {C,CXX}FLAGS environment variable and
+# dependencies are read out in a separate step after compiling by reading
+# the .d file saved alongside the object file.
+# As the genpybind task refers to a header file that is never compiled itself,
+# gccdeps will not be able to extract the list of dependencies.
+
+from waflib.TaskGen import feature, before_method
+
+
+def join_args(args):
+    return " ".join(pipes.quote(arg) for arg in args)
+
+
+def configure(cfg):
+    cfg.load("compiler_cxx")
+    cfg.load("python")
+    cfg.check_python_version(minver=(2, 7))
+    if not cfg.env.LLVM_CONFIG:
+        cfg.find_program("llvm-config", var="LLVM_CONFIG")
+    if not cfg.env.GENPYBIND:
+        cfg.find_program("genpybind", var="GENPYBIND")
+
+    # find the clang resource dir for builtin headers
+    cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join(
+            cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(),
+            "clang",
+            cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip())
+    if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR):
+        cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR)
+    else:
+        cfg.fatal("Clang resource dir not found")
+
+
+@feature("genpybind")
+@before_method("process_source")
+def generate_genpybind_source(self):
+    """
+    Run genpybind on the headers provided in `source` and compile/link the
+    generated code instead.  This works by generating the code on the fly and
+    swapping the source node before `process_source` is run.
+    """
+    # name of module defaults to name of target
+    module = getattr(self, "module", self.target)
+
+    # create temporary source file in build directory to hold generated code
+    out = "genpybind-%s.%d.cpp" % (module, self.idx)
+    out = self.path.get_bld().find_or_declare(out)
+
+    task = self.create_task("genpybind", self.to_nodes(self.source), out)
+    # used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind
+    task.features = self.features
+    task.module = module
+    # can be used to select definitions to include in the current module
+    # (when header files are shared by more than one module)
+    task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", []))
+    # additional include directories
+    task.includes = self.to_list(getattr(self, "includes", []))
+    task.genpybind = self.env.GENPYBIND
+
+    # Tell waf to compile/link the generated code instead of the headers
+    # originally passed-in via the `source` parameter. (see `process_source`)
+    self.source = [out]
+
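+# A hypothetical wscript sketch (target/module names are assumptions; 'pyext'
+# comes from waf's python tool and is one common way to build an importable
+# extension module):
+#
+#     def build(bld):
+#         bld(features="genpybind cxx cxxshlib pyext",
+#             source="bindings.h",
+#             target="mymodule")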
+
+class genpybind(Task.Task): # pylint: disable=invalid-name
+    """
+    Runs genpybind on headers provided as input to this task.
+    Generated code will be written to the first (and only) output node.
+    """
+    quiet = True
+    color = "PINK"
+    scan = scan_impl
+
+    @staticmethod
+    def keyword():
+        return "Analyzing"
+
+    def run(self):
+        if not self.inputs:
+            return
+
+        args = self.find_genpybind() + self._arguments(
+                resource_dir=self.env.GENPYBIND_RESOURCE_DIR)
+
+        output = self.run_genpybind(args)
+
+        # For debugging / log output
+        pasteable_command = join_args(args)
+
+        # write generated code to file in build directory
+        # (will be compiled during process_source stage)
+        (output_node,) = self.outputs
+        output_node.write("// {}\n{}\n".format(
+            pasteable_command.replace("\n", "\n// "), output))
+
+    def find_genpybind(self):
+        return self.genpybind
+
+    def run_genpybind(self, args):
+        bld = self.generator.bld
+
+        kwargs = dict(cwd=bld.variant_dir)
+        if hasattr(bld, "log_command"):
+            bld.log_command(args, kwargs)
+        else:
+            Logs.debug("runner: {!r}".format(args))
+        proc = subprocess.Popen(
+            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
+        stdout, stderr = proc.communicate()
+
+        if not isinstance(stdout, str):
+            stdout = stdout.decode(sys.stdout.encoding, errors="replace")
+        if not isinstance(stderr, str):
+            stderr = stderr.decode(sys.stderr.encoding, errors="replace")
+
+        if proc.returncode != 0:
+            bld.fatal(
+                "genpybind returned {code} during the following call:"
+                "\n{command}\n\n{stdout}\n\n{stderr}".format(
+                    code=proc.returncode,
+                    command=join_args(args),
+                    stdout=stdout,
+                    stderr=stderr,
+                ))
+
+        if stderr.strip():
+            Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr))
+
+        return stdout
+
+    def _include_paths(self):
+        return self.generator.to_incnodes(self.includes + self.env.INCLUDES)
+
+    def _inputs_as_relative_includes(self):
+        include_paths = self._include_paths()
+        relative_includes = []
+        for node in self.inputs:
+            for inc in include_paths:
+                if node.is_child_of(inc):
+                    relative_includes.append(node.path_from(inc))
+                    break
+            else:
+                self.generator.bld.fatal("could not resolve {}".format(node))
+        return relative_includes
+
+    def _arguments(self, genpybind_parse=None, resource_dir=None):
+        args = []
+        relative_includes = self._inputs_as_relative_includes()
+        is_cxx = "cxx" in self.features
+
+        # options for genpybind
+        args.extend(["--genpybind-module", self.module])
+        if self.genpybind_tags:
+            args.extend(["--genpybind-tag"] + self.genpybind_tags)
+        if relative_includes:
+            args.extend(["--genpybind-include"] + relative_includes)
+        if genpybind_parse:
+            args.extend(["--genpybind-parse", genpybind_parse])
+
+        args.append("--")
+
+        # headers to be processed by genpybind
+        args.extend(node.abspath() for node in self.inputs)
+
+        args.append("--")
+
+        # options for clang/genpybind-parse
+        args.append("-D__GENPYBIND__")
+        args.append("-xc++" if is_cxx else "-xc")
+        has_std_argument = False
+        for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]:
+            flag = flag.replace("-std=gnu", "-std=c")
+            if flag.startswith("-std=c"):
+                has_std_argument = True
+            args.append(flag)
+        if not has_std_argument:
+            args.append("-std=c++14")
+        args.extend("-I{}".format(n.abspath()) for n in self._include_paths())
+        args.extend("-D{}".format(p) for p in self.env.DEFINES)
+
+        # point to clang resource dir, if specified
+        if resource_dir:
+            args.append("-resource-dir={}".format(resource_dir))
+
+        return args
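+
+# Note (illustrative): the resulting argument vector has three sections
+# separated by '--': genpybind options, the input headers, and the
+# clang/genpybind-parse flags, e.g.
+#   --genpybind-module pymod -- /abs/iface.h -- -D__GENPYBIND__ -xc++ -std=c++14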
diff --git a/third_party/waf/waflib/extras/gob2.py b/third_party/waf/waflib/extras/gob2.py
new file mode 100644
index 0000000..b4fa3b9
--- /dev/null
+++ b/third_party/waf/waflib/extras/gob2.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+from waflib import TaskGen
+
+TaskGen.declare_chain(
+	name = 'gob2',
+	rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
+	ext_in = '.gob',
+	ext_out = '.c'
+)
+
+def configure(conf):
+	conf.find_program('gob2', var='GOB2')
+	conf.env['GOB2FLAGS'] = ''
+
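+# Usage sketch (file names illustrative): .gob files listed in a C target's
+# sources are converted to .c by the chain declared above, e.g.
+#
+#   def build(bld):
+#       bld.program(source='main.gob util.c', target='app')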
diff --git a/third_party/waf/waflib/extras/halide.py b/third_party/waf/waflib/extras/halide.py
new file mode 100644
index 0000000..6078e38
--- /dev/null
+++ b/third_party/waf/waflib/extras/halide.py
@@ -0,0 +1,151 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Halide code generation tool
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+Tool to run `Halide <http://halide-lang.org>`_ code generators.
+
+Usage::
+
+   bld(
+    name='pipeline',
+     # ^ Reference this in use="..." for things using the generated code
+    #target=['pipeline.o', 'pipeline.h']
+     # ^ by default, name.{o,h} is added, but you can set the outputs here
+    features='halide',
+    halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
+     # ^ Environment passed to the generator,
+     # can be a dict, k/v list, or string.
+    args=[],
+     # ^ Command-line arguments to the generator (optional),
+     # eg. to give parameters to the scheduling
+    source='pipeline_gen',
+     # ^ Name of the source executable
+   )
+
+
+Known issues:
+
+
+- Currently only supports Linux (no ".exe")
+
+- Doesn't rerun when an input that is part of a build chain has been
+  modified externally.
+
+"""
+
+import os
+from waflib import Task, Utils, Options, TaskGen, Errors
+
+class run_halide_gen(Task.Task):
+	color = 'CYAN'
+	vars = ['HALIDE_ENV', 'HALIDE_ARGS']
+	run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
+	def __str__(self):
+		stuff = "halide"
+		stuff += ("[%s]" % (",".join(
+		 ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
+		return Task.Task.__str__(self).replace(self.__class__.__name__,
+		 stuff)
+
+@TaskGen.feature('halide')
+@TaskGen.before_method('process_source')
+def halide(self):
+	Utils.def_attrs(self,
+	 args=[],
+	 halide_env={},
+	)
+
+	bld = self.bld
+
+	env = self.halide_env
+	try:
+		if isinstance(env, str):
+			env = dict(x.split('=') for x in env.split())
+		elif isinstance(env, list):
+			env = dict(x.split('=') for x in env)
+		assert isinstance(env, dict)
+	except Exception as e:
+		if not isinstance(e, ValueError) \
+		 and not isinstance(e, AssertionError):
+			raise
+		raise Errors.WafError(
+		 "halide_env must be under the form" \
+		 " {'HL_x':'a', 'HL_y':'b'}" \
+		 " or ['HL_x=y', 'HL_y=b']" \
+		 " or 'HL_x=y HL_y=b'")
+
+	src = self.to_nodes(self.source)
+	assert len(src) == 1, "Only one source expected"
+	src = src[0]
+
+	args = Utils.to_list(self.args)
+
+	def change_ext(src, ext):
+		# Return a node with a new extension, in an appropriate folder
+		name = src.name
+		xpos = src.name.rfind('.')
+		if xpos == -1:
+			xpos = len(src.name)
+		newname = name[:xpos] + ext
+		if src.is_child_of(bld.bldnode):
+			node = src.get_src().parent.find_or_declare(newname)
+		else:
+			node = bld.bldnode.find_or_declare(newname)
+		return node
+
+	def to_nodes(self, lst, path=None):
+		tmp = []
+		path = path or self.path
+		find = path.find_or_declare
+
+		if isinstance(lst, self.path.__class__):
+			lst = [lst]
+
+		for x in Utils.to_list(lst):
+			if isinstance(x, str):
+				node = find(x)
+			else:
+				node = x
+			tmp.append(node)
+		return tmp
+
+	tgt = to_nodes(self, self.target)
+	if not tgt:
+		tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
+	cwd = tgt[0].parent.abspath()
+	task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
+	task.env.append_unique('HALIDE_ARGS', args)
+	if task.env.env == []:
+		task.env.env = {}
+	task.env.env.update(env)
+	task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
+	task.env.HALIDE_ARGS = args
+
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	self.source = []
+
+def configure(conf):
+	if Options.options.halide_root is None:
+		conf.check_cfg(package='Halide', args='--cflags --libs')
+	else:
+		halide_root = Options.options.halide_root
+		conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
+		conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
+		conf.env.LIB_HALIDE = ["Halide"]
+
+		# You might want to add this, while upstream doesn't fix it
+		#conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
+
+def options(opt):
+	opt.add_option('--halide-root',
+	 help="path to Halide include and lib files",
+	)
+
diff --git a/third_party/waf/waflib/extras/haxe.py b/third_party/waf/waflib/extras/haxe.py
new file mode 100644
index 0000000..4ff3745
--- /dev/null
+++ b/third_party/waf/waflib/extras/haxe.py
@@ -0,0 +1,154 @@
+import re
+
+from waflib import Utils, Task, Errors, Logs
+from waflib.Configure import conf
+from waflib.TaskGen import extension, taskgen_method
+
+HAXE_COMPILERS = {
+    'JS': {'tgt': '--js', 'ext_out': ['.js']},
+    'LUA': {'tgt': '--lua', 'ext_out': ['.lua']},
+    'SWF': {'tgt': '--swf', 'ext_out': ['.swf']},
+    'NEKO': {'tgt': '--neko', 'ext_out': ['.n']},
+    'PHP': {'tgt': '--php', 'ext_out': ['.php']},
+    'CPP': {'tgt': '--cpp', 'ext_out': ['.h', '.cpp']},
+    'CPPIA': {'tgt': '--cppia', 'ext_out': ['.cppia']},
+    'CS': {'tgt': '--cs', 'ext_out': ['.cs']},
+    'JAVA': {'tgt': '--java', 'ext_out': ['.java']},
+    'JVM': {'tgt': '--jvm', 'ext_out': ['.jar']},
+    'PYTHON': {'tgt': '--python', 'ext_out': ['.py']},
+    'HL': {'tgt': '--hl', 'ext_out': ['.hl']},
+    'HLC': {'tgt': '--hl', 'ext_out': ['.h', '.c']},
+}
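+
+# Usage sketch (names illustrative): select a backend by setting `compiler` on
+# a task generator whose source is a .hx file, e.g.
+#   bld(source='Main.hx', target='main.js', compiler='JS')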
+
+@conf
+def check_haxe_pkg(self, **kw):
+    self.find_program('haxelib')
+    libs = kw.get('libs')
+    if not libs or not (type(libs) == str or (type(libs) == list and all(isinstance(s, str) for s in libs))):
+        self.fatal('Specify correct libs value in ensure call')
+        return
+    fetch = kw.get('fetch')
+    if not fetch is None and not type(fetch) == bool:
+        self.fatal('Specify correct fetch value in ensure call')
+
+    libs = [libs] if type(libs) == str else libs
+    halt = False
+    for lib in libs:
+        try:
+            self.start_msg('Checking for library %s' % lib)
+            output = self.cmd_and_log(self.env.HAXELIB + ['list', lib])
+        except Errors.WafError:
+            self.end_msg(False)
+            self.fatal('Can\'t run haxelib list, ensuring halted')
+            return
+
+        if lib in output:
+            self.end_msg(lib in output)
+        else:
+            if not fetch:
+                self.end_msg(False)
+                halt = True
+                continue
+            try:
+                status = self.exec_command(self.env.HAXELIB + ['install', lib])
+                if status:
+                    self.end_msg(False)
+                    self.fatal('Can\'t get %s with haxelib, ensuring halted' % lib)
+                    return
+                else:
+                    self.end_msg('downloaded', color='YELLOW')
+            except Errors.WafError:
+                self.end_msg(False)
+                self.fatal('Can\'t run haxelib install, ensuring halted')
+                return
+        postfix = kw.get('uselib_store') or lib.upper()
+        self.env.append_unique('LIB_' + postfix, lib)
+
+    if halt:
+        self.fatal('Can\'t find libraries in haxelib list, ensuring halted')
+        return
+
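+# Usage sketch (library name illustrative):
+#   conf.check_haxe_pkg(libs=['hxnodejs'], fetch=True, uselib_store='NODEJS')
+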
+class haxe(Task.Task):
+    vars = ['HAXE_VERSION', 'HAXE_FLAGS']
+    ext_in = ['.hx']
+
+    def run(self):
+        cmd = self.env.HAXE + self.env.HAXE_FLAGS_DEFAULT + self.env.HAXE_FLAGS
+        return self.exec_command(cmd)
+
+for COMP in HAXE_COMPILERS:
+    # create runners for each compile target
+    type("haxe_" + COMP, (haxe,), {'ext_out': HAXE_COMPILERS[COMP]['ext_out']})
+
+@taskgen_method
+def init_haxe(self):
+    errmsg = '%s not found, specify correct value'
+    try:
+        compiler = HAXE_COMPILERS[self.compiler]
+        comp_tgt = compiler['tgt']
+        comp_mod = '/main.c' if self.compiler == 'HLC' else ''
+    except (AttributeError, KeyError):
+        self.bld.fatal(errmsg % 'COMPILER' + ': ' + ', '.join(HAXE_COMPILERS.keys()))
+        return
+
+    self.env.append_value(
+        'HAXE_FLAGS',
+        [comp_tgt, self.path.get_bld().make_node(self.target + comp_mod).abspath()])
+    if hasattr(self, 'use'):
+        if not (type(self.use) == str or type(self.use) == list):
+            self.bld.fatal(errmsg % 'USE')
+            return
+        self.use = [self.use] if type(self.use) == str else self.use
+
+        for dep in self.use:
+            if self.env['LIB_' + dep]:
+                for lib in self.env['LIB_' + dep]:
+                    self.env.append_value('HAXE_FLAGS', ['-lib', lib])
+
+    if hasattr(self, 'res'):
+        if not type(self.res) == str:
+            self.bld.fatal(errmsg % 'RES')
+            return
+        self.env.append_value('HAXE_FLAGS', ['-D', 'resourcesPath=%s' % self.res])
+
+@extension('.hx')
+def haxe_hook(self, node):
+    if len(self.source) > 1:
+        self.bld.fatal('Use separate task generators for multiple files')
+        return
+
+    src = node
+    tgt = self.path.get_bld().find_or_declare(self.target)
+
+    self.init_haxe()
+    self.create_task('haxe_' + self.compiler, src, tgt)
+
+@conf
+def check_haxe(self, mini=None, maxi=None):
+    self.start_msg('Checking for haxe version')
+    try:
+        curr = re.search(
+            r'(\d+\.?)+',
+            self.cmd_and_log(self.env.HAXE + ['-version'])).group()
+    except Errors.WafError:
+        self.end_msg(False)
+        self.fatal('Can\'t get haxe version')
+        return
+
+    if mini and Utils.num2ver(curr) < Utils.num2ver(mini):
+        self.end_msg('wrong', color='RED')
+        self.fatal('%s is too old, need >= %s' % (curr, mini))
+        return
+    if maxi and Utils.num2ver(curr) > Utils.num2ver(maxi):
+        self.end_msg('wrong', color='RED')
+        self.fatal('%s is too new, need <= %s' % (curr, maxi))
+        return
+    self.end_msg(curr, color='GREEN')
+    self.env.HAXE_VERSION = curr
+
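+# Usage sketch (version bounds illustrative):
+#   conf.check_haxe(mini='4.0.0', maxi='4.3.9')
+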
+def configure(self):
+    self.env.append_value(
+        'HAXE_FLAGS_DEFAULT',
+        ['-D', 'no-compilation', '-cp', self.path.abspath()])
+    Logs.warn('Default flags: %s' % ' '.join(self.env.HAXE_FLAGS_DEFAULT))
+    self.find_program('haxe')
diff --git a/third_party/waf/waflib/extras/javatest.py b/third_party/waf/waflib/extras/javatest.py
new file mode 100755
index 0000000..76d40ed
--- /dev/null
+++ b/third_party/waf/waflib/extras/javatest.py
@@ -0,0 +1,237 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2019 (fedepell)
+
+"""
+Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **javatest** feature.
+
+This makes it possible to run unit tests and have them integrated into the
+standard waf unit test environment. It has been tested with TestNG and JUnit,
+but should be easily extensible to other frameworks given the flexibility of
+ut_str provided by the standard waf unit test environment.
+
+The extra also takes care of managing non-Java dependencies (i.e. C/C++
+libraries using JNI, or Python modules via JEP) and of setting up the
+environment needed to run them.
+
+Example usage:
+
+def options(opt):
+	opt.load('java waf_unit_test javatest')
+
+def configure(conf):
+	conf.load('java javatest')
+
+def build(bld):
+
+	[ ... mainprog is built here ... ]
+
+	bld(features = 'javac javatest',
+		srcdir     = 'test/',
+		outdir     = 'test',
+		sourcepath = ['test'],
+		classpath  = [ 'src' ],
+		basedir    = 'test',
+		use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
+		ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
+		jtest_source = bld.path.ant_glob('test/*.xml'),
+	)
+
+
+At the command line, pass the CLASSPATH where the testing environment and the
+test runner (TestNG by default) can be found. These are then exposed in the
+environment as CLASSPATH_JAVATEST (consumed through ``use``) and JTRUNNER, and
+can be referenced for dependencies and for ut_str generation.
+
+Example configure for TestNG:
+	waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
+	or, since the default runner is TestNG, simply:
+	waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
+
+Example configure for JUnit:
+	waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
+
+The runner class presence on the system is checked for at configuration stage.
+
+"""
+
+import os
+from waflib import Task, TaskGen, Options, Errors, Utils, Logs
+from waflib.Tools import ccroot
+
+JAR_RE = '**/*'
+
+def _process_use_rec(self, name):
+	"""
+	Recursively process ``use`` for the task generator with name ``name``.
+	Used by javatest_process_use.
+	"""
+	if name in self.javatest_use_not or name in self.javatest_use_seen:
+		return
+	try:
+		tg = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.javatest_use_not.add(name)
+		return
+
+	self.javatest_use_seen.append(name)
+	tg.post()
+
+	for n in self.to_list(getattr(tg, 'use', [])):
+		_process_use_rec(self, n)
+
+@TaskGen.feature('javatest')
+@TaskGen.after_method('process_source', 'apply_link', 'use_javac_files')
+def javatest_process_use(self):
+	"""
+	Process the ``use`` attribute, which contains a list of task generator names, and
+	store the paths that are later used to populate the unit test runtime environment.
+	"""
+	self.javatest_use_not = set()
+	self.javatest_use_seen = []
+	self.javatest_libpaths = [] # strings or Nodes
+	self.javatest_pypaths = [] # strings or Nodes
+	self.javatest_dep_nodes = []
+
+	names = self.to_list(getattr(self, 'use', []))
+	for name in names:
+		_process_use_rec(self, name)
+
+	def extend_unique(lst, varlst):
+		ext = []
+		for x in varlst:
+			if x not in lst:
+				ext.append(x)
+		lst.extend(ext)
+
+	# Collect type specific info needed to construct a valid runtime environment
+	# for the test.
+	for name in self.javatest_use_seen:
+		tg = self.bld.get_tgen_by_name(name)
+
+		# Python-Java embedding crosstools such as JEP
+		if 'py' in tg.features:
+			# Python dependencies are added to PYTHONPATH
+			pypath = getattr(tg, 'install_from', tg.path)
+
+			if 'buildcopy' in tg.features:
+				# Since buildcopy is used we assume that PYTHONPATH in build should be used,
+				# not source
+				extend_unique(self.javatest_pypaths, [pypath.get_bld().abspath()])
+
+				# Add buildcopy output nodes to dependencies
+				extend_unique(self.javatest_dep_nodes, [o for task in getattr(tg, 'tasks', []) for o in getattr(task, 'outputs', [])])
+			else:
+				# If buildcopy is not used, depend on sources instead
+				extend_unique(self.javatest_dep_nodes, tg.source)
+				extend_unique(self.javatest_pypaths, [pypath.abspath()])
+
+
+		if getattr(tg, 'link_task', None):
+		# For tasks with a link_task (C, C++, D, etc.) include their library paths:
+			if not isinstance(tg.link_task, ccroot.stlink_task):
+				extend_unique(self.javatest_dep_nodes, tg.link_task.outputs)
+				extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH)
+
+				if 'pyext' in tg.features:
+					# If the taskgen is extending Python we also want to add the interpreter libpath.
+					extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
+				else:
+					# Only add to libpath if the link task is not a Python extension
+					extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+
+		if 'javac' in tg.features or 'jar' in tg.features:
+			if hasattr(tg, 'jar_task'):
+				# For Java JAR tasks depend on generated JAR
+				extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs)
+			else:
+				# For Java non-JAR ones we need to glob generated files (Java output files are not predictable)
+				if hasattr(tg, 'outdir'):
+					base_node = tg.outdir
+				else:
+					base_node = tg.path.get_bld()
+
+				self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+
+
+
+@TaskGen.feature('javatest')
+@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use')
+def make_javatest(self):
+	"""
+	Creates a ``utest`` task with a populated environment for Java Unit test execution
+
+	"""
+	tsk = self.create_task('utest')
+	tsk.set_run_after(self.javac_task)
+
+	# Dependencies from recursive use analysis
+	tsk.dep_nodes.extend(self.javatest_dep_nodes)
+
+	# Set the test input files, as waf_unit_test relies on them for some prints
+	# and for log generation. If jtest_source is present, use it as the inputs
+	# (especially useful for passing the XML files containing the TestNG test
+	# specification); otherwise fall back to the test sources.
+	if getattr(self, 'jtest_source', None):
+		tsk.inputs = self.to_nodes(self.jtest_source)
+	else:
+		if self.javac_task.srcdir[0].exists():
+			tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
+
+	if getattr(self, 'ut_str', None):
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = lst + tsk.vars
+
+	if getattr(self, 'ut_cwd', None):
+		if isinstance(self.ut_cwd, str):
+			# we want a Node instance
+			if os.path.isabs(self.ut_cwd):
+				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+			else:
+				self.ut_cwd = self.path.make_node(self.ut_cwd)
+	else:
+		self.ut_cwd = self.bld.bldnode
+
+	# Get parent CLASSPATH and add output dir of test, we run from wscript dir
+	# We have to change it from list to the standard java -cp format (: separated)
+	tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
+
+	if not self.ut_cwd.exists():
+		self.ut_cwd.mkdir()
+
+	if not hasattr(self, 'ut_env'):
+		self.ut_env = dict(os.environ)
+		def add_paths(var, lst):
+			# Add list of paths to a variable, lst can contain strings or nodes
+			lst = [ str(n) for n in lst ]
+			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
+			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
+
+		add_paths('PYTHONPATH', self.javatest_pypaths)
+
+		if Utils.is_win32:
+			add_paths('PATH', self.javatest_libpaths)
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			add_paths('DYLD_LIBRARY_PATH', self.javatest_libpaths)
+			add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
+		else:
+			add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
+
+def configure(ctx):
+	cp = ctx.env.CLASSPATH or '.'
+	if getattr(Options.options, 'jtpath', None):
+		ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
+		cp += ':' + getattr(Options.options, 'jtpath')
+
+	if getattr(Options.options, 'jtrunner', None):
+		ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
+
+	if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
+		ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
+
+def options(opt):
+	opt.add_option('--jtpath', action='store', default='', dest='jtpath',
+		help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
+	opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
+		help='Class to run javatest test [default: org.testng.TestNG]')
+
diff --git a/third_party/waf/waflib/extras/kde4.py b/third_party/waf/waflib/extras/kde4.py
new file mode 100644
index 0000000..aed9bfb
--- /dev/null
+++ b/third_party/waf/waflib/extras/kde4.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Support for the KDE4 libraries and msgfmt
+"""
+
+import os, re
+from waflib import Task, Utils
+from waflib.TaskGen import feature
+
+@feature('msgfmt')
+def apply_msgfmt(self):
+	"""
+	Process all languages to create .mo files and to install them::
+
+		def build(bld):
+			bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
+	"""
+	for lang in self.to_list(self.langs):
+		node = self.path.find_resource(lang+'.po')
+		task = self.create_task('msgfmt', node, node.change_ext('.mo'))
+
+		langname = lang.split('/')
+		langname = langname[-1]
+
+		inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
+
+		self.add_install_as(
+			inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
+			inst_from = task.outputs[0],
+			chmod = getattr(self, 'chmod', Utils.O644))
+
+class msgfmt(Task.Task):
+	"""
+	Transform .po files into .mo files
+	"""
+	color   = 'BLUE'
+	run_str = '${MSGFMT} ${SRC} -o ${TGT}'
+
+def configure(self):
+	"""
+	Detect kde4-config and set various variables for the *use* system::
+
+		def options(opt):
+			opt.load('compiler_cxx kde4')
+		def configure(conf):
+			conf.load('compiler_cxx kde4')
+		def build(bld):
+			bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
+	"""
+	kdeconfig = self.find_program('kde4-config')
+	prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
+	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
+	try:
+		os.stat(fname)
+	except OSError:
+		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
+		try:
+			os.stat(fname)
+		except OSError:
+			self.fatal('could not open %s' % fname)
+
+	try:
+		txt = Utils.readf(fname)
+	except EnvironmentError:
+		self.fatal('could not read %s' % fname)
+
+	txt = txt.replace('\\\n', '\n')
+	fu = re.compile('#(.*)\n')
+	txt = fu.sub('', txt)
+
+	setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
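+	# e.g. (illustrative) matches: set(KDE4_LIB_INSTALL_DIR "/usr/lib/kde4")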
+	found = setregexp.findall(txt)
+
+	for (_, key, val) in found:
+		#print key, val
+		self.env[key] = val
+
+	# well well, i could just write an interpreter for cmake files
+	self.env['LIB_KDECORE']= ['kdecore']
+	self.env['LIB_KDEUI']  = ['kdeui']
+	self.env['LIB_KIO']    = ['kio']
+	self.env['LIB_KHTML']  = ['khtml']
+	self.env['LIB_KPARTS'] = ['kparts']
+
+	self.env['LIBPATH_KDECORE']  = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
+	self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
+	self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
+
+	self.find_program('msgfmt', var='MSGFMT')
+
diff --git a/third_party/waf/waflib/extras/local_rpath.py b/third_party/waf/waflib/extras/local_rpath.py
new file mode 100644
index 0000000..e3923d9
--- /dev/null
+++ b/third_party/waf/waflib/extras/local_rpath.py
@@ -0,0 +1,21 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+import copy
+from waflib import Errors
+from waflib.TaskGen import after_method, feature
+
+@after_method('propagate_uselib_vars')
+@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
+def add_rpath_stuff(self):
+	all = copy.copy(self.to_list(getattr(self, 'use', [])))
+	while all:
+		name = all.pop()
+		try:
+			tg = self.bld.get_tgen_by_name(name)
+		except Errors.WafError:
+			continue
+		if hasattr(tg, 'link_task'):
+			self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
+			all.extend(self.to_list(getattr(tg, 'use', [])))
+
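+# Usage sketch (target names illustrative): with this tool loaded, 'app' gets
+# an RPATH entry pointing at the build directory of each local library in use:
+#
+#   def build(bld):
+#       bld.shlib(source='a.c', target='mylib')
+#       bld.program(source='main.c', target='app', use='mylib')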
diff --git a/third_party/waf/waflib/extras/make.py b/third_party/waf/waflib/extras/make.py
new file mode 100644
index 0000000..933d9ca
--- /dev/null
+++ b/third_party/waf/waflib/extras/make.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+A make-like way of executing the build, following the relationships between inputs/outputs
+
+This algorithm will lead to slower builds and is not as flexible as "waf build",
+but it may be useful for building data files.
+
+It is likely to break in the following cases:
+- files are created dynamically (no inputs or outputs)
+- headers
+- building two files from different groups
+"""
+
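+# Usage sketch (file names illustrative; assumes the core --files option also
+# used by "waf step"): build only what produces or consumes the given files:
+#   waf make --files="out:program.o,in:header.h"
+# 'in:' restricts a pattern to task inputs and 'out:' to outputs; a bare name
+# matches both, and patterns that do not resolve to a node are treated as
+# regular expressions (see get_matcher below).
+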
+import re
+from waflib import Options, Task
+from waflib.Build import BuildContext
+
+class MakeContext(BuildContext):
+	'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
+	cmd = 'make'
+	fun = 'build'
+
+	def __init__(self, **kw):
+		super(MakeContext, self).__init__(**kw)
+		self.files = Options.options.files
+
+	def get_build_iterator(self):
+		if not self.files:
+			# delegate to the default iterator (it already ends with empty yields)
+			for tasks in super(MakeContext, self).get_build_iterator():
+				yield tasks
+			return
+
+		for g in self.groups:
+			for tg in g:
+				try:
+					f = tg.post
+				except AttributeError:
+					pass
+				else:
+					f()
+
+			provides = {}
+			uses = {}
+			all_tasks = []
+			tasks = []
+			for pat in self.files.split(','):
+				matcher = self.get_matcher(pat)
+				for tg in g:
+					if isinstance(tg, Task.Task):
+						lst = [tg]
+					else:
+						lst = tg.tasks
+					for tsk in lst:
+						all_tasks.append(tsk)
+
+						do_exec = False
+						for node in tsk.inputs:
+							try:
+								uses[node].append(tsk)
+							except KeyError:
+								uses[node] = [tsk]
+
+							if matcher(node, output=False):
+								do_exec = True
+								break
+
+						for node in tsk.outputs:
+							try:
+								provides[node].append(tsk)
+							except KeyError:
+								provides[node] = [tsk]
+
+							if matcher(node, output=True):
+								do_exec = True
+								break
+						if do_exec:
+							tasks.append(tsk)
+
+			# so we have the tasks that we need to process, the list of all tasks,
+			# the map of the tasks providing nodes, and the map of tasks using nodes
+
+			if not tasks:
+				# if there are no tasks matching, return everything in the current group
+				result = all_tasks
+			else:
+				# this is like a big filter...
+				result = set()
+				seen = set()
+				cur = set(tasks)
+				while cur:
+					result |= cur
+					tosee = set()
+					for tsk in cur:
+						for node in tsk.inputs:
+							if node in seen:
+								continue
+							seen.add(node)
+							tosee |= set(provides.get(node, []))
+					cur = tosee
+				result = list(result)
+
+			Task.set_file_constraints(result)
+			Task.set_precedence_constraints(result)
+			yield result
+
+		while 1:
+			yield []
+
+	def get_matcher(self, pat):
+		# this returns a function
+		inn = True
+		out = True
+		if pat.startswith('in:'):
+			out = False
+			pat = pat.replace('in:', '')
+		elif pat.startswith('out:'):
+			inn = False
+			pat = pat.replace('out:', '')
+
+		anode = self.root.find_node(pat)
+		pattern = None
+		if not anode:
+			if not pat.startswith('^'):
+				pat = '^.+?%s' % pat
+			if not pat.endswith('$'):
+				pat = '%s$' % pat
+			pattern = re.compile(pat)
+
+		def match(node, output):
+			if output and not out:
+				return False
+			if not output and not inn:
+				return False
+
+			if anode:
+				return anode == node
+			else:
+				return pattern.match(node.abspath())
+		return match
+
diff --git a/third_party/waf/waflib/extras/midl.py b/third_party/waf/waflib/extras/midl.py
new file mode 100644
index 0000000..43e6cf9
--- /dev/null
+++ b/third_party/waf/waflib/extras/midl.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# Issue 1185 ultrix gmail com
+
+"""
+Microsoft Interface Definition Language support.  Given ComObject.idl, this tool
+will generate ComObject.tlb, ComObject_i.h, ComObject_i.c, ComObject_p.c and dlldata.c
+
+To declare targets using midl::
+
+	def configure(conf):
+		conf.load('msvc')
+		conf.load('midl')
+
+	def build(bld):
+		bld(
+			features='c cshlib',
+			# Note: ComObject_i.c is generated from ComObject.idl
+			source = 'main.c ComObject.idl ComObject_i.c',
+			target = 'ComObject.dll')
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import feature, before_method
+import os
+
+def configure(conf):
+	conf.find_program(['midl'], var='MIDL')
+
+	conf.env.MIDLFLAGS = [
+		'/nologo',
+		'/D',
+		'_DEBUG',
+		'/W1',
+		'/char',
+		'signed',
+		'/Oicf',
+	]
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def idl_file(self):
+	# Do this before process_source so that the generated header can be resolved
+	# when scanning source dependencies.
+	idl_nodes = []
+	src_nodes = []
+	for node in Utils.to_list(self.source):
+		if str(node).endswith('.idl'):
+			idl_nodes.append(node)
+		else:
+			src_nodes.append(node)
+
+	for node in self.to_nodes(idl_nodes):
+		t = node.change_ext('.tlb')
+		h = node.change_ext('_i.h')
+		c = node.change_ext('_i.c')
+		p = node.change_ext('_p.c')
+		d = node.parent.find_or_declare('dlldata.c')
+		self.create_task('midl', node, [t, h, c, p, d])
+
+	self.source = src_nodes
+
+class midl(Task.Task):
+	"""
+	Compile idl files
+	"""
+	color   = 'YELLOW'
+	run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
+	before  = ['winrc']
+
diff --git a/third_party/waf/waflib/extras/msvc_pdb.py b/third_party/waf/waflib/extras/msvc_pdb.py
new file mode 100644
index 0000000..077656b
--- /dev/null
+++ b/third_party/waf/waflib/extras/msvc_pdb.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Rafaël Kooi 2019
+
+from waflib import TaskGen
+
+@TaskGen.feature('c', 'cxx', 'fc')
+@TaskGen.after_method('propagate_uselib_vars')
+def add_pdb_per_object(self):
+	"""For msvc/fortran, specify a unique compile pdb per object, to work
+	around LNK4099. Flags are updated with a unique /Fd flag based on the
+	task output name. This is separate from the link pdb.
+	"""
+	if not hasattr(self, 'compiled_tasks'):
+		return
+
+	link_task = getattr(self, 'link_task', None)
+
+	for task in self.compiled_tasks:
+		if task.inputs and task.inputs[0].name.lower().endswith('.rc'):
+			continue
+
+		add_pdb = False
+		for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
+			# several languages may be used at once
+			for flag in task.env[flagname]:
+				if flag[1:].lower() == 'zi':
+					add_pdb = True
+					break
+
+		if add_pdb:
+			node = task.outputs[0].change_ext('.pdb')
+			pdb_flag = '/Fd:' + node.abspath()
+
+			for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
+				buf = [pdb_flag]
+				for flag in task.env[flagname]:
+					if flag[1:3] == 'Fd' or flag[1:].lower() == 'fs' or flag[1:].lower() == 'mp':
+						continue
+					buf.append(flag)
+				task.env[flagname] = buf
+
+			if link_task and not node in link_task.dep_nodes:
+				link_task.dep_nodes.append(node)
+			if not node in task.outputs:
+				task.outputs.append(node)
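+
+# Usage sketch: load the tool after the compiler so the per-object /Fd flag is
+# injected into the already-populated CFLAGS/CXXFLAGS/FCFLAGS:
+#
+#   def configure(conf):
+#       conf.load('msvc msvc_pdb')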
diff --git a/third_party/waf/waflib/extras/msvcdeps.py b/third_party/waf/waflib/extras/msvcdeps.py
new file mode 100644
index 0000000..e8985bd
--- /dev/null
+++ b/third_party/waf/waflib/extras/msvcdeps.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright Garmin International or its subsidiaries, 2012-2013
+
+'''
+Off-load dependency scanning from Python code to MSVC compiler
+
+This tool is safe to load in any environment; it only activates its
+MSVC-specific behaviour when it finds that a particular taskgen uses MSVC to
+compile.
+
+Empirical testing shows about a 10% execution time savings from using
+this tool as compared to c_preproc.
+
+The technique of gutting scan() and pushing the dependency calculation
+down to post_run() is cribbed from gccdeps.py.
+
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(conf):
+		conf.load('compiler_cxx msvcdeps')
+'''
+
+import os, sys, tempfile, threading
+
+from waflib import Context, Errors, Logs, Task, Utils
+from waflib.Tools import c_preproc, c, cxx, msvc
+from waflib.TaskGen import feature, before_method
+
+lock = threading.Lock()
+
+PREPROCESSOR_FLAG = '/showIncludes'
+INCLUDE_PATTERN = 'Note: including file:'
+
+# Extensible by outside tools
+supported_compilers = ['msvc']
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def apply_msvcdeps_flags(taskgen):
+	if taskgen.env.CC_NAME not in supported_compilers:
+		return
+
+	for flag in ('CFLAGS', 'CXXFLAGS'):
+		if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
+			taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
+
+
+def get_correct_path_case(base_path, path):
+	'''
+	Return a case-corrected version of ``path`` by searching the filesystem for
+	``path``, relative to ``base_path``, using the case returned by the filesystem.
+	'''
+	components = Utils.split_path(path)
+
+	corrected_path = ''
+	if os.path.isabs(path):
+		corrected_path = components.pop(0).upper() + os.sep
+
+	for part in components:
+		part = part.lower()
+		search_path = os.path.join(base_path, corrected_path)
+		if part == '..':
+			corrected_path = os.path.join(corrected_path, part)
+			search_path = os.path.normpath(search_path)
+			continue
+
+		for item in sorted(os.listdir(search_path)):
+			if item.lower() == part:
+				corrected_path = os.path.join(corrected_path, item)
+				break
+		else:
+			raise ValueError("Can't find %r in %r" % (part, search_path))
+
+	return corrected_path
+
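+# Example (illustrative): get_correct_path_case(r'C:\proj', r'INCLUDE\foo.h')
+# returns the spelling stored on disk, e.g. r'include\Foo.h'.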
+
+def path_to_node(base_node, path, cached_nodes):
+	'''
+	Take the base node and the path and return a node
+	Results are cached because searching the node tree is expensive
+	The following code is executed by threads; it is not thread-safe, so a lock is needed.
+	'''
+	# normalize the path to remove parent path components (..)
+	path = os.path.normpath(path)
+
+	# normalize the path case to increase likelihood of a cache hit
+	node_lookup_key = (base_node, os.path.normcase(path))
+
+	try:
+		node = cached_nodes[node_lookup_key]
+	except KeyError:
+		# retry with lock on cache miss
+		with lock:
+			try:
+				node = cached_nodes[node_lookup_key]
+			except KeyError:
+				path = get_correct_path_case(base_node.abspath(), path)
+				node = cached_nodes[node_lookup_key] = base_node.find_node(path)
+
+	return node
+
+def post_run(self):
+	if self.env.CC_NAME not in supported_compilers:
+		return super(self.derived_msvcdeps, self).post_run()
+
+	# TODO this is unlikely to work with netcache
+	if getattr(self, 'cached', None):
+		return Task.Task.post_run(self)
+
+	resolved_nodes = []
+	unresolved_names = []
+	bld = self.generator.bld
+
+	# Dynamically bind to the cache
+	try:
+		cached_nodes = bld.cached_nodes
+	except AttributeError:
+		cached_nodes = bld.cached_nodes = {}
+
+	for path in self.msvcdeps_paths:
+		node = None
+		if os.path.isabs(path):
+			node = path_to_node(bld.root, path, cached_nodes)
+		else:
+			# when calling find_resource, make sure the path does not begin with '..'
+			base_node = bld.bldnode
+			path = [k for k in Utils.split_path(path) if k and k != '.']
+			while path[0] == '..':
+				path.pop(0)
+				base_node = base_node.parent
+			path = os.sep.join(path)
+
+			node = path_to_node(base_node, path, cached_nodes)
+
+		if not node:
+			raise ValueError('could not find %r for %r' % (path, self))
+		else:
+			if not c_preproc.go_absolute:
+				if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
+					# System library
+					Logs.debug('msvcdeps: Ignoring system include %r', node)
+					continue
+
+			if id(node) == id(self.inputs[0]):
+				# ignore the source file, it is already in the dependencies
+				# this way, successful config tests may be retrieved from the cache
+				continue
+
+			resolved_nodes.append(node)
+
+	Logs.debug('deps: msvcdeps for %s returned %s', self, resolved_nodes)
+
+	bld.node_deps[self.uid()] = resolved_nodes
+	bld.raw_deps[self.uid()] = unresolved_names
+
+	try:
+		del self.cache_sig
+	except AttributeError:
+		pass
+
+	Task.Task.post_run(self)
+
+def scan(self):
+	if self.env.CC_NAME not in supported_compilers:
+		return super(self.derived_msvcdeps, self).scan()
+
+	resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+	unresolved_names = []
+	return (resolved_nodes, unresolved_names)
+
+def sig_implicit_deps(self):
+	if self.env.CC_NAME not in supported_compilers:
+		return super(self.derived_msvcdeps, self).sig_implicit_deps()
+	bld = self.generator.bld
+
+	try:
+		return self.compute_sig_implicit_deps()
+	except Errors.TaskNotReady:
+		raise ValueError("Please specify the build order precisely with msvcdeps (c/c++ tasks)")
+	except EnvironmentError:
+		# If a file is renamed, assume the dependencies are stale and must be recalculated
+		for x in bld.node_deps.get(self.uid(), []):
+			if not x.is_bld() and not x.exists():
+				try:
+					del x.parent.children[x.name]
+				except KeyError:
+					pass
+
+	key = self.uid()
+	bld.node_deps[key] = []
+	bld.raw_deps[key] = []
+	return Utils.SIG_NIL
+
+def exec_command(self, cmd, **kw):
+	if self.env.CC_NAME not in supported_compilers:
+		return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
+
+	if not 'cwd' in kw:
+		kw['cwd'] = self.get_cwd()
+
+	if self.env.PATH:
+		env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+		env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+
+	# The Visual Studio IDE adds an environment variable that causes
+	# the MS compiler to send its textual output directly to the
+	# debugging window rather than normal stdout/stderr.
+	#
+	# This is unrecoverably bad for this tool because it will cause
+	# all the dependency scanning to see an empty stdout stream and
+	# assume that the file being compiled uses no headers.
+	#
+	# See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
+	#
+	# Attempting to repair the situation by deleting the offending
+	# envvar at this point in tool execution will not be good enough--
+	# its presence poisons the 'waf configure' step earlier. We just
+	# want to put a sanity check here in order to help developers
+	# quickly diagnose the issue if an otherwise-good Waf tree
+	# is then executed inside the MSVS IDE.
+	assert 'VS_UNICODE_OUTPUT' not in kw['env']
+
+	cmd, args = self.split_argfile(cmd)
+	try:
+		(fd, tmp) = tempfile.mkstemp()
+		os.write(fd, '\r\n'.join(args).encode())
+		os.close(fd)
+
+		self.msvcdeps_paths = []
+		kw['env'] = kw.get('env', os.environ.copy())
+		kw['cwd'] = kw.get('cwd', os.getcwd())
+		kw['quiet'] = Context.STDOUT
+		kw['output'] = Context.STDOUT
+
+		out = []
+		if Logs.verbose:
+			Logs.debug('argfile: @%r -> %r', tmp, args)
+		try:
+			raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
+			ret = 0
+		except Errors.WafError as e:
+			# Use e.msg if e.stdout is not set
+			raw_out = getattr(e, 'stdout', e.msg)
+
+			# Return non-zero error code even if we didn't
+			# get one from the exception object
+			ret = getattr(e, 'returncode', 1)
+
+		Logs.debug('msvcdeps: Running for: %s' % self.inputs[0])
+		for line in raw_out.splitlines():
+			if line.startswith(INCLUDE_PATTERN):
+				# Only strip whitespace after log to preserve
+				# dependency structure in debug output
+				inc_path = line[len(INCLUDE_PATTERN):]
+				Logs.debug('msvcdeps: Regex matched %s', inc_path)
+				self.msvcdeps_paths.append(inc_path.strip())
+			else:
+				out.append(line)
+
+		# Pipe through the remaining stdout content (not related to /showIncludes)
+		if self.generator.bld.logger:
+			self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
+		else:
+			sys.stdout.write(os.linesep.join(out) + os.linesep)
+
+		return ret
+	finally:
+		try:
+			os.remove(tmp)
+		except OSError:
+			# anti-virus and indexers can keep files open -_-
+			pass
+
+
+def wrap_compiled_task(classname):
+	derived_class = type(classname, (Task.classes[classname],), {})
+	derived_class.derived_msvcdeps = derived_class
+	derived_class.post_run = post_run
+	derived_class.scan = scan
+	derived_class.sig_implicit_deps = sig_implicit_deps
+	derived_class.exec_command = exec_command
+
+for k in ('c', 'cxx'):
+	if k in Task.classes:
+		wrap_compiled_task(k)
+
+def options(opt):
+	raise ValueError('Do not load msvcdeps options')
+
diff --git a/third_party/waf/waflib/extras/msvs.py b/third_party/waf/waflib/extras/msvs.py
new file mode 100644
index 0000000..f987bb5
--- /dev/null
+++ b/third_party/waf/waflib/extras/msvs.py
@@ -0,0 +1,1052 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Avalanche Studios 2009-2011
+# Thomas Nagy 2011
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+To add this tool to your project:
+def options(opt):
+	opt.load('msvs')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure msvs
+
+To customize the outputs, provide subclasses in your wscript files::
+
+	from waflib.extras import msvs
+	class vsnode_target(msvs.vsnode_target):
+		def get_build_command(self, props):
+			# likely to be required
+			return "waf.bat build"
+		def collect_source(self):
+			# likely to be required
+			...
+	class msvs_bar(msvs.msvs_generator):
+		def init(self):
+			msvs.msvs_generator.init(self)
+			self.vsnode_target = vsnode_target
+
+The msvs class re-uses the same build() function for reading the targets (task
+generators); you may therefore specify msvs settings on the context object::
+
+	def build(bld):
+		bld.solution_name = 'foo.sln'
+		bld.waf_command = 'waf.bat'
+		bld.projects_dir = bld.srcnode.make_node('.depproj')
+		bld.projects_dir.mkdir()
+
+For visual studio 2008, the command is called 'msvs2008', and the classes
+such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
+provide special functionality.
+
+To customize platform toolsets, pass additional parameters, for example::
+
+	class msvs_2013(msvs.msvs_generator):
+		cmd = 'msvs2013'
+		numver = '13.00'
+		vsver = '2013'
+		platform_toolset_ver = 'v120'
+
+ASSUMPTIONS:
+* a project can be either a directory or a target; vcxproj files are written only for targets that have source files
+* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0"
+	xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+
+	<ItemGroup Label="ProjectConfigurations">
+		${for b in project.build_properties}
+		<ProjectConfiguration Include="${b.configuration}|${b.platform}">
+			<Configuration>${b.configuration}</Configuration>
+			<Platform>${b.platform}</Platform>
+		</ProjectConfiguration>
+		${endfor}
+	</ItemGroup>
+
+	<PropertyGroup Label="Globals">
+		<ProjectGuid>{${project.uuid}}</ProjectGuid>
+		<Keyword>MakeFileProj</Keyword>
+		<ProjectName>${project.name}</ProjectName>
+	</PropertyGroup>
+	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+
+	${for b in project.build_properties}
+	<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
+		<ConfigurationType>Makefile</ConfigurationType>
+		<OutDir>${b.outdir}</OutDir>
+		<PlatformToolset>${project.platform_toolset_ver}</PlatformToolset>
+	</PropertyGroup>
+	${endfor}
+
+	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+	<ImportGroup Label="ExtensionSettings">
+	</ImportGroup>
+
+	${for b in project.build_properties}
+	<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+		<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+	</ImportGroup>
+	${endfor}
+
+	${for b in project.build_properties}
+	<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+		<NMakeBuildCommandLine>${xml:project.get_build_command(b)}</NMakeBuildCommandLine>
+		<NMakeReBuildCommandLine>${xml:project.get_rebuild_command(b)}</NMakeReBuildCommandLine>
+		<NMakeCleanCommandLine>${xml:project.get_clean_command(b)}</NMakeCleanCommandLine>
+		<NMakeIncludeSearchPath>${xml:b.includes_search_path}</NMakeIncludeSearchPath>
+		<NMakePreprocessorDefinitions>${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
+		<IncludePath>${xml:b.includes_search_path}</IncludePath>
+		<ExecutablePath>$(ExecutablePath)</ExecutablePath>
+
+		${if getattr(b, 'output_file', None)}
+		<NMakeOutput>${xml:b.output_file}</NMakeOutput>
+		${endif}
+		${if getattr(b, 'deploy_dir', None)}
+		<RemoteRoot>${xml:b.deploy_dir}</RemoteRoot>
+		${endif}
+	</PropertyGroup>
+	${endfor}
+
+	${for b in project.build_properties}
+		${if getattr(b, 'deploy_dir', None)}
+	<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+		<Deploy>
+			<DeploymentType>CopyToHardDrive</DeploymentType>
+		</Deploy>
+	</ItemDefinitionGroup>
+		${endif}
+	${endfor}
+
+	<ItemGroup>
+		${for x in project.source}
+		<${project.get_key(x)} Include='${x.win32path()}' />
+		${endfor}
+	</ItemGroup>
+	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+	<ImportGroup Label="ExtensionTargets">
+	</ImportGroup>
+</Project>
+'''
+
+FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+	<ItemGroup>
+		${for x in project.source}
+			<${project.get_key(x)} Include="${x.win32path()}">
+				<Filter>${project.get_filter_name(x.parent)}</Filter>
+			</${project.get_key(x)}>
+		${endfor}
+	</ItemGroup>
+	<ItemGroup>
+		${for x in project.dirs()}
+			<Filter Include="${project.get_filter_name(x)}">
+				<UniqueIdentifier>{${project.make_uuid(x.win32path())}}</UniqueIdentifier>
+			</Filter>
+		${endfor}
+	</ItemGroup>
+</Project>
+'''
+
+PROJECT_2008_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<VisualStudioProject ProjectType="Visual C++" Version="9,00"
+	Name="${xml: project.name}" ProjectGUID="{${project.uuid}}"
+	Keyword="MakeFileProj"
+	TargetFrameworkVersion="196613">
+	<Platforms>
+		${if project.build_properties}
+		${for b in project.build_properties}
+		   <Platform Name="${xml: b.platform}" />
+		${endfor}
+		${else}
+		   <Platform Name="Win32" />
+		${endif}
+	</Platforms>
+	<ToolFiles>
+	</ToolFiles>
+	<Configurations>
+		${if project.build_properties}
+		${for b in project.build_properties}
+		<Configuration
+			Name="${xml: b.configuration}|${xml: b.platform}"
+			IntermediateDirectory="$ConfigurationName"
+			OutputDirectory="${xml: b.outdir}"
+			ConfigurationType="0">
+			<Tool
+				Name="VCNMakeTool"
+				BuildCommandLine="${xml: project.get_build_command(b)}"
+				ReBuildCommandLine="${xml: project.get_rebuild_command(b)}"
+				CleanCommandLine="${xml: project.get_clean_command(b)}"
+				${if getattr(b, 'output_file', None)}
+				Output="${xml: b.output_file}"
+				${endif}
+				PreprocessorDefinitions="${xml: b.preprocessor_definitions}"
+				IncludeSearchPath="${xml: b.includes_search_path}"
+				ForcedIncludes=""
+				ForcedUsingAssemblies=""
+				AssemblySearchPath=""
+				CompileAsManaged=""
+			/>
+		</Configuration>
+		${endfor}
+		${else}
+			<Configuration Name="Release|Win32" >
+		</Configuration>
+		${endif}
+	</Configurations>
+	<References>
+	</References>
+	<Files>
+${project.display_filter()}
+	</Files>
+</VisualStudioProject>
+'''
+
+SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver}
+# Visual Studio ${project.vsver}
+${for p in project.all_projects}
+Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
+EndProject${endfor}
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		${if project.all_projects}
+		${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
+		${configuration}|${platform} = ${configuration}|${platform}
+		${endfor}
+		${endif}
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		${for p in project.all_projects}
+			${if hasattr(p, 'source')}
+			${for b in p.build_properties}
+		{${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
+			${if getattr(p, 'is_active', None)}
+		{${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
+			${endif}
+			${if getattr(p, 'is_deploy', None)}
+		{${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
+			${endif}
+			${endfor}
+			${endif}
+		${endfor}
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+	GlobalSection(NestedProjects) = preSolution
+	${for p in project.all_projects}
+		${if p.parent}
+		{${p.uuid}} = {${p.parent.uuid}}
+		${endif}
+	${endfor}
+	EndGlobalSection
+EndGlobal
+'''
+
+COMPILE_TEMPLATE = '''def f(project):
+	lst = []
+	def xml_escape(value):
+		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+	%s
+
+	#f = open('cmd.txt', 'w')
+	#f.write(str(lst))
+	#f.close()
+	return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+	"""
+	Compile a template expression into a python function (like jsps, but way shorter)
+	"""
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'):
+			return "$"
+		elif g('backslash'):
+			return "\\"
+		elif g('subst'):
+			extr.append(g('code'))
+			return "<<|@|>>"
+		return None
+
+	line2 = reg_act.sub(repl, line)
+	params = line2.split('<<|@|>>')
+	assert(extr)
+
+	indent = 0
+	buf = []
+
+	def app(txt):
+		buf.append(indent * '\t' + txt)
+
+	for x in range(len(extr)):
+		if params[x]:
+			app("lst.append(%r)" % params[x])
+
+		f = extr[x]
+		if f.startswith(('if', 'for')):
+			app(f + ':')
+			indent += 1
+		elif f.startswith('py:'):
+			app(f[3:])
+		elif f.startswith(('endif', 'endfor')):
+			indent -= 1
+		elif f.startswith(('else', 'elif')):
+			indent -= 1
+			app(f + ':')
+			indent += 1
+		elif f.startswith('xml:'):
+			app('lst.append(xml_escape(%s))' % f[4:])
+		else:
+			#app('lst.append((%s) or "cannot find %s")' % (f, f))
+			app('lst.append(%s)' % f)
+
+	if extr:
+		if params[-1]:
+			app("lst.append(%r)" % params[-1])
+
+	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+	#print(fun)
+	return Task.funex(fun)
+
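+# Example (illustrative): the template mini-language supports ${if}/${endif},
+# ${for x in xs}/${endfor}, ${xml:expr} (XML-escaped) and plain ${expr};
+# compile_template returns a function taking a single 'project' argument:
+#   tmpl = compile_template('<Name>${xml:project.name}</Name>')
+#   text = tmpl(project)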
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
+def rm_blank_lines(txt):
+	txt = re_blank.sub('\r\n', txt)
+	return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+	BOM = bytes(BOM, 'latin-1') # python 3
+except TypeError:
+	pass
+
+def stealth_write(self, data, flags='wb'):
+	try:
+		unicode
+	except NameError:
+		data = data.encode('utf-8') # python 3
+	else:
+		data = data.decode(sys.getfilesystemencoding(), 'replace')
+		data = data.encode('utf-8')
+
+	if self.name.endswith(('.vcproj', '.vcxproj')):
+		data = BOM + data
+
+	try:
+		txt = self.read(flags='rb')
+		if txt != data:
+			raise ValueError('must write')
+	except (IOError, ValueError):
+		self.write(data, flags=flags)
+	else:
+		Logs.debug('msvs: skipping %s', self.win32path())
+Node.Node.stealth_write = stealth_write
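+# stealth_write leaves a file untouched when its content would be unchanged,
+# so Visual Studio does not prompt to reload projects that were not modified.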
+
+re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
+def win32path(self):
+	p = self.abspath()
+	m = re_win32.match(p)
+	if m:
+		return "%s:%s" % (m.group(2).upper(), m.group(3))
+	return p
+Node.Node.win32path = win32path
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+	return re_quote.sub("_", s)
+
+def xml_escape(value):
+	return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+	"""
+	simple utility function
+	"""
+	if isinstance(v, dict):
+		keys = list(v.keys())
+		keys.sort()
+		tmp = str([(k, v[k]) for k in keys])
+	else:
+		tmp = str(v)
+	d = Utils.md5(tmp.encode()).hexdigest().upper()
+	if prefix:
+		d = '%s%s' % (prefix, d[8:])
+	gid = uuid.UUID(d, version = 4)
+	return str(gid).upper()
+
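+# make_uuid is deterministic: the same input (e.g. a project's absolute path)
+# always maps to the same GUID, keeping generated solutions stable across runs.
+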
+def diff(node, fromnode):
+	# difference between two nodes, but with "(..)" instead of ".."
+	c1 = node
+	c2 = fromnode
+
+	c1h = c1.height()
+	c2h = c2.height()
+
+	lst = []
+	up = 0
+
+	while c1h > c2h:
+		lst.append(c1.name)
+		c1 = c1.parent
+		c1h -= 1
+
+	while c2h > c1h:
+		up += 1
+		c2 = c2.parent
+		c2h -= 1
+
+	while id(c1) != id(c2):
+		lst.append(c1.name)
+		up += 1
+
+		c1 = c1.parent
+		c2 = c2.parent
+
+	for i in range(up):
+		lst.append('(..)')
+	lst.reverse()
+	return tuple(lst)
+
+class build_property(object):
+	pass
+
+class vsnode(object):
+	"""
+	Abstract class representing visual studio elements
+	We assume that all visual studio nodes have a uuid and a parent
+	"""
+	def __init__(self, ctx):
+		self.ctx = ctx # msvs context
+		self.name = '' # string, mandatory
+		self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+		self.uuid = '' # string, mandatory
+		self.parent = None # parent node for visual studio nesting
+
+	def get_waf(self):
+		"""
+		Override in subclasses...
+		"""
+		return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
+
+	def ptype(self):
+		"""
+		Return a special uuid for projects written in the solution file
+		"""
+		pass
+
+	def write(self):
+		"""
+		Write the project file, by default, do nothing
+		"""
+		pass
+
+	def make_uuid(self, val):
+		"""
+		Alias for creating uuid values easily (the templates cannot access global variables)
+		"""
+		return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+	"""
+	Nodes representing visual studio folders (which do not match the filesystem tree!)
+	"""
+	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+	def __init__(self, ctx, uuid, name, vspath=''):
+		vsnode.__init__(self, ctx)
+		self.title = self.name = name
+		self.uuid = uuid
+		self.vspath = vspath or name
+
+	def ptype(self):
+		return self.VS_GUID_SOLUTIONFOLDER
+
+class vsnode_project(vsnode):
+	"""
+	Abstract class representing visual studio project elements
+	A project is assumed to be writable, and has a node representing the file to write to
+	"""
+	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+	def ptype(self):
+		return self.VS_GUID_VCPROJ
+
+	def __init__(self, ctx, node):
+		vsnode.__init__(self, ctx)
+		self.path = node
+		self.uuid = make_uuid(node.win32path())
+		self.name = node.name
+		self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None)
+		self.title = self.path.win32path()
+		self.source = [] # list of node objects
+		self.build_properties = [] # list of properties (nmake commands, output dir, etc)
+
+	def dirs(self):
+		"""
+		Get the list of parent folders of the source files (header files included)
+		for writing the filters
+		"""
+		lst = []
+		def add(x):
+			if x.height() > self.tg.path.height() and x not in lst:
+				lst.append(x)
+				add(x.parent)
+		for x in self.source:
+			add(x.parent)
+		return lst
+
+	def write(self):
+		Logs.debug('msvs: creating %r', self.path)
+
+		# first write the project file
+		template1 = compile_template(PROJECT_TEMPLATE)
+		proj_str = template1(self)
+		proj_str = rm_blank_lines(proj_str)
+		self.path.stealth_write(proj_str)
+
+		# then write the filter
+		template2 = compile_template(FILTER_TEMPLATE)
+		filter_str = template2(self)
+		filter_str = rm_blank_lines(filter_str)
+		tmp = self.path.parent.make_node(self.path.name + '.filters')
+		tmp.stealth_write(filter_str)
+
+	def get_key(self, node):
+		"""
+		required for writing the source files
+		"""
+		name = node.name
+		if name.endswith(('.cpp', '.c')):
+			return 'ClCompile'
+		return 'ClInclude'
+
+	def collect_properties(self):
+		"""
+		Build the list of triplets (configuration, platform, output_directory)
+		and store it in self.build_properties
+		"""
+		ret = []
+		for c in self.ctx.configurations:
+			for p in self.ctx.platforms:
+				x = build_property()
+				x.outdir = ''
+
+				x.configuration = c
+				x.platform = p
+
+				x.preprocessor_definitions = ''
+				x.includes_search_path = ''
+
+				# can specify "deploy_dir" too
+				ret.append(x)
+		self.build_properties = ret
+
+	def get_build_params(self, props):
+		opt = '--execsolution="%s"' % self.ctx.get_solution_node().win32path()
+		return (self.get_waf(), opt)
+
+	def get_build_command(self, props):
+		return "%s build %s" % self.get_build_params(props)
+
+	def get_clean_command(self, props):
+		return "%s clean %s" % self.get_build_params(props)
+
+	def get_rebuild_command(self, props):
+		return "%s clean build %s" % self.get_build_params(props)
+
+	def get_filter_name(self, node):
+		lst = diff(node, self.tg.path)
+		return '\\'.join(lst) or '.'
+
+class vsnode_alias(vsnode_project):
+	def __init__(self, ctx, node, name):
+		vsnode_project.__init__(self, ctx, node)
+		self.name = name
+		self.output_file = ''
+
+class vsnode_build_all(vsnode_alias):
+	"""
+	Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
+	This is the only alias enabled by default
+	"""
+	def __init__(self, ctx, node, name='build_all_projects'):
+		vsnode_alias.__init__(self, ctx, node, name)
+		self.is_active = True
+
+class vsnode_install_all(vsnode_alias):
+	"""
+	Fake target used to emulate the behaviour of "make install"
+	"""
+	def __init__(self, ctx, node, name='install_all_projects'):
+		vsnode_alias.__init__(self, ctx, node, name)
+
+	def get_build_command(self, props):
+		return "%s build install %s" % self.get_build_params(props)
+
+	def get_clean_command(self, props):
+		return "%s clean %s" % self.get_build_params(props)
+
+	def get_rebuild_command(self, props):
+		return "%s clean build install %s" % self.get_build_params(props)
+
+class vsnode_project_view(vsnode_alias):
+	"""
+	Fake target used to emulate a file system view
+	"""
+	def __init__(self, ctx, node, name='project_view'):
+		vsnode_alias.__init__(self, ctx, node, name)
+		self.tg = self.ctx() # fake one, cannot remove
+		self.exclude_files = Node.exclude_regs + '''
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
+**/*.sdf
+**/*.suo
+**/*.ncb
+**/%s
+		''' % Options.lockfile
+
+	def collect_source(self):
+		# this is likely to be slow
+		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
+
+	def get_build_command(self, props):
+		params = self.get_build_params(props) + (self.ctx.cmd,)
+		return "%s %s %s" % params
+
+	def get_clean_command(self, props):
+		return ""
+
+	def get_rebuild_command(self, props):
+		return self.get_build_command(props)
+
+class vsnode_target(vsnode_project):
+	"""
+	Visual studio project representing a target (program, library, etc.), bound
+	to a task generator
+	"""
+	def __init__(self, ctx, tg):
+		"""
+		A project is more or less equivalent to a file/folder
+		"""
+		base = getattr(ctx, 'projects_dir', None) or tg.path
+		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
+		vsnode_project.__init__(self, ctx, node)
+		self.name = quote(tg.name)
+		self.tg     = tg  # task generator
+
+	def get_build_params(self, props):
+		"""
+		Override the default to add the target name
+		"""
+		opt = '--execsolution="%s"' % self.ctx.get_solution_node().win32path()
+		if getattr(self, 'tg', None):
+			opt += " --targets=%s" % self.tg.name
+		return (self.get_waf(), opt)
+
+	def collect_source(self):
+		tg = self.tg
+		source_files = tg.to_nodes(getattr(tg, 'source', []))
+		include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', []))
+		include_files = []
+		for x in include_dirs:
+			if isinstance(x, str):
+				x = tg.path.find_node(x)
+			if x:
+				lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
+				include_files.extend(lst)
+
+		# remove duplicates
+		self.source.extend(list(set(source_files + include_files)))
+		self.source.sort(key=lambda x: x.win32path())
+
+	def collect_properties(self):
+		"""
+		Visual studio projects are associated with platforms and configurations (for building especially)
+		"""
+		super(vsnode_target, self).collect_properties()
+		for x in self.build_properties:
+			x.outdir = self.path.parent.win32path()
+			x.preprocessor_definitions = ''
+			x.includes_search_path = ''
+
+			try:
+				tsk = self.tg.link_task
+			except AttributeError:
+				pass
+			else:
+				x.output_file = tsk.outputs[0].win32path()
+				x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
+				x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
+class msvs_generator(BuildContext):
+	'''generates a visual studio 2010 solution'''
+	cmd = 'msvs'
+	fun = 'build'
+	numver = '11.00' # Visual Studio Version Number
+	vsver  = '2010'  # Visual Studio Version Year
+	platform_toolset_ver = 'v110' # Platform Toolset Version Number
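+
+	# Usage sketch: run 'waf configure' and then 'waf msvs', and open the
+	# generated .sln. Other Visual Studio releases can be targeted by
+	# subclassing (hypothetical values):
+	#   class vs2012_generator(msvs_generator):
+	#       cmd = 'msvs2012'
+	#       numver = '12.00'
+	#       vsver = '2012'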
+
+	def init(self):
+		"""
+		Some data that needs to be present
+		"""
+		if not getattr(self, 'configurations', None):
+			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
+		if not getattr(self, 'platforms', None):
+			self.platforms = ['Win32']
+		if not getattr(self, 'all_projects', None):
+			self.all_projects = []
+		if not getattr(self, 'project_extension', None):
+			self.project_extension = '.vcxproj'
+		if not getattr(self, 'projects_dir', None):
+			self.projects_dir = self.srcnode.make_node('.depproj')
+			self.projects_dir.mkdir()
+
+		# bind the classes to the object, so that subclasses can provide custom generators
+		if not getattr(self, 'vsnode_vsdir', None):
+			self.vsnode_vsdir = vsnode_vsdir
+		if not getattr(self, 'vsnode_target', None):
+			self.vsnode_target = vsnode_target
+		if not getattr(self, 'vsnode_build_all', None):
+			self.vsnode_build_all = vsnode_build_all
+		if not getattr(self, 'vsnode_install_all', None):
+			self.vsnode_install_all = vsnode_install_all
+		if not getattr(self, 'vsnode_project_view', None):
+			self.vsnode_project_view = vsnode_project_view
+
+		self.numver = self.__class__.numver
+		self.vsver  = self.__class__.vsver
+		self.platform_toolset_ver = self.__class__.platform_toolset_ver
+
+	def execute(self):
+		"""
+		Entry point
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
+
+		# user initialization
+		self.init()
+
+		# two phases for creating the solution
+		self.collect_projects() # add project objects into "self.all_projects"
+		self.write_files() # write the corresponding project and solution files
+
+	def collect_projects(self):
+		"""
+		Fill the list self.all_projects with project objects
+		Fill the list of build targets
+		"""
+		self.collect_targets()
+		self.add_aliases()
+		self.collect_dirs()
+		default_project = getattr(self, 'default_project', None)
+		def sortfun(x):
+			# folders should sort to the top
+			if getattr(x, 'VS_GUID_SOLUTIONFOLDER', None):
+				return ''
+			# followed by the default project
+			elif x.name == default_project:
+				return ' '
+			return getattr(x, 'path', None) and x.path.win32path() or x.name
+		self.all_projects.sort(key=sortfun)
+
+	def write_files(self):
+		"""
+		Write the project and solution files from the data collected
+		so far. It is unlikely that you will want to change this
+		"""
+		for p in self.all_projects:
+			p.write()
+
+		# and finally write the solution file
+		node = self.get_solution_node()
+		node.parent.mkdir()
+		Logs.warn('Creating %r', node)
+		template1 = compile_template(SOLUTION_TEMPLATE)
+		sln_str = template1(self)
+		sln_str = rm_blank_lines(sln_str)
+		node.stealth_write(sln_str)
+
+	def get_solution_node(self):
+		"""
+		The solution filename is required when writing the .vcproj files;
+		return self.solution_node, creating it first if it does not exist
+		"""
+		try:
+			return self.solution_node
+		except AttributeError:
+			pass
+
+		solution_name = getattr(self, 'solution_name', None)
+		if not solution_name:
+			solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln'
+		if os.path.isabs(solution_name):
+			self.solution_node = self.root.make_node(solution_name)
+		else:
+			self.solution_node = self.srcnode.make_node(solution_name)
+		return self.solution_node
+
+	def project_configurations(self):
+		"""
+		Helper that returns all the pairs (config,platform)
+		"""
+		ret = []
+		for c in self.configurations:
+			for p in self.platforms:
+				ret.append((c, p))
+		return ret
+
+	def collect_targets(self):
+		"""
+		Process the list of task generators
+		"""
+		for g in self.groups:
+			for tg in g:
+				if not isinstance(tg, TaskGen.task_gen):
+					continue
+
+				if not hasattr(tg, 'msvs_includes'):
+					tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
+				tg.post()
+				if not getattr(tg, 'link_task', None):
+					continue
+
+				p = self.vsnode_target(self, tg)
+				p.collect_source() # delegate this processing
+				p.collect_properties()
+				self.all_projects.append(p)
+
+	def add_aliases(self):
+		"""
+		Add a specific target that emulates "make all", which Visual studio needs when pressing F7
+		We also add an alias for "make install" (disabled by default)
+		"""
+		base = getattr(self, 'projects_dir', None) or self.tg.path
+
+		node_project = base.make_node('build_all_projects' + self.project_extension) # Node
+		p_build = self.vsnode_build_all(self, node_project)
+		p_build.collect_properties()
+		self.all_projects.append(p_build)
+
+		node_project = base.make_node('install_all_projects' + self.project_extension) # Node
+		p_install = self.vsnode_install_all(self, node_project)
+		p_install.collect_properties()
+		self.all_projects.append(p_install)
+
+		node_project = base.make_node('project_view' + self.project_extension) # Node
+		p_view = self.vsnode_project_view(self, node_project)
+		p_view.collect_source()
+		p_view.collect_properties()
+		self.all_projects.append(p_view)
+
+		n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases")
+		p_build.parent = p_install.parent = p_view.parent = n
+		self.all_projects.append(n)
+
+	def collect_dirs(self):
+		"""
+		Create the folder structure in the Visual studio project view
+		"""
+		seen = {}
+		def make_parents(proj):
+			# look at a project, try to make a parent
+			if getattr(proj, 'parent', None):
+				# aliases already have parents
+				return
+			x = proj.iter_path
+			if x in seen:
+				proj.parent = seen[x]
+				return
+
+			# There is no vsnode_vsdir for x yet,
+			# so create a project representing the folder "x"
+			n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name)
+			n.iter_path = x.parent
+			self.all_projects.append(n)
+
+			# recurse up to the project directory
+			if x.height() > self.srcnode.height() + 1:
+				make_parents(n)
+
+		for p in self.all_projects[:]: # iterate over a copy of all projects
+			if not getattr(p, 'tg', None):
+				# but only projects that have a task generator
+				continue
+
+			# make a folder for each task generator
+			p.iter_path = p.tg.path
+			make_parents(p)
+
+def wrap_2008(cls):
+	class dec(cls):
+		def __init__(self, *k, **kw):
+			cls.__init__(self, *k, **kw)
+			self.project_template = PROJECT_2008_TEMPLATE
+
+		def display_filter(self):
+
+			root = build_property()
+			root.subfilters = []
+			root.sourcefiles = []
+			root.source = []
+			root.name = ''
+
+			@Utils.run_once
+			def add_path(lst):
+				if not lst:
+					return root
+				child = build_property()
+				child.subfilters = []
+				child.sourcefiles = []
+				child.source = []
+				child.name = lst[-1]
+
+				par = add_path(lst[:-1])
+				par.subfilters.append(child)
+				return child
+
+			for x in self.source:
+				# written this way so that subclasses can override get_filter_name
+				tmp = self.get_filter_name(x.parent)
+				tmp = tmp != '.' and tuple(tmp.split('\\')) or ()
+				par = add_path(tmp)
+				par.source.append(x)
+
+			def display(n):
+				buf = []
+				for x in n.source:
+					buf.append('<File RelativePath="%s" FileType="%s"/>\n' % (xml_escape(x.win32path()), self.get_key(x)))
+				for x in n.subfilters:
+					buf.append('<Filter Name="%s">' % xml_escape(x.name))
+					buf.append(display(x))
+					buf.append('</Filter>')
+				return '\n'.join(buf)
+
+			return display(root)
+
+		def get_key(self, node):
+			"""
+			If you do not want to let visual studio use the default file extensions,
+			override this method to return a value:
+				0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form,
+				4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File,
+				8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File,
+				13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon,
+				18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service,
+				22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File,
+				26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document,
+				29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC
+			"""
+			return ''
+
+		def write(self):
+			Logs.debug('msvs: creating %r', self.path)
+			template1 = compile_template(self.project_template)
+			proj_str = template1(self)
+			proj_str = rm_blank_lines(proj_str)
+			self.path.stealth_write(proj_str)
+
+	return dec
+
+class msvs_2008_generator(msvs_generator):
+	'''generates a visual studio 2008 solution'''
+	cmd = 'msvs2008'
+	fun = msvs_generator.fun
+	numver = '10.00'
+	vsver = '2008'
+
+	def init(self):
+		if not getattr(self, 'project_extension', None):
+			self.project_extension = '_2008.vcproj'
+		if not getattr(self, 'solution_name', None):
+			self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln'
+
+		if not getattr(self, 'vsnode_target', None):
+			self.vsnode_target = wrap_2008(vsnode_target)
+		if not getattr(self, 'vsnode_build_all', None):
+			self.vsnode_build_all = wrap_2008(vsnode_build_all)
+		if not getattr(self, 'vsnode_install_all', None):
+			self.vsnode_install_all = wrap_2008(vsnode_install_all)
+		if not getattr(self, 'vsnode_project_view', None):
+			self.vsnode_project_view = wrap_2008(vsnode_project_view)
+
+		msvs_generator.init(self)
+
+def options(ctx):
+	"""
+	If the msvs option is used, try to detect if the build is made from visual studio
+	"""
+	ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file')
+
+	old = BuildContext.execute
+	def override_build_state(ctx):
+		def lock(rm, add):
+			uns = ctx.options.execsolution.replace('.sln', rm)
+			uns = ctx.root.make_node(uns)
+			try:
+				uns.delete()
+			except OSError:
+				pass
+
+			uns = ctx.options.execsolution.replace('.sln', add)
+			uns = ctx.root.make_node(uns)
+			try:
+				uns.write('')
+			except EnvironmentError:
+				pass
+
+		if ctx.options.execsolution:
+			ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio)
+			lock('.lastbuildstate', '.unsuccessfulbuild')
+			old(ctx)
+			lock('.unsuccessfulbuild', '.lastbuildstate')
+		else:
+			old(ctx)
+	BuildContext.execute = override_build_state
+
diff --git a/third_party/waf/waflib/extras/netcache_client.py b/third_party/waf/waflib/extras/netcache_client.py
new file mode 100644
index 0000000..dc49048
--- /dev/null
+++ b/third_party/waf/waflib/extras/netcache_client.py
@@ -0,0 +1,390 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011-2015 (ita)
+
+"""
+A client for the network cache (playground/netcache/). Launch the server with
+./netcache_server, then use it for builds by adding the following:
+
+	def build(bld):
+		bld.load('netcache_client')
+
+The parameters should be present in the environment in the form:
+	NETCACHE=host:port waf configure build
+
+Or in a more detailed way:
+	NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
+
+where:
+	host: host where the server resides, by default localhost
+	port: by default push on 11001 and pull on 12001
+
+Use the server provided in playground/netcache/Netcache.java
+"""
+
+import os, socket, time, atexit, sys
+from waflib import Task, Logs, Utils, Build, Runner
+from waflib.Configure import conf
+
+BUF = 8192 * 16
+HEADER_SIZE = 128
+MODES = ['PUSH', 'PULL', 'PUSH_PULL']
+STALE_TIME = 30 # seconds
+
+GET = 'GET'
+PUT = 'PUT'
+LST = 'LST'
+BYE = 'BYE'
+
+all_sigs_in_cache = (0.0, [])
+
+def put_data(conn, data):
+	if sys.hexversion > 0x3000000:
+		data = data.encode('latin-1')
+	cnt = 0
+	while cnt < len(data):
+		sent = conn.send(data[cnt:])
+		if sent == 0:
+			raise RuntimeError('connection ended')
+		cnt += sent
+
+push_connections = Runner.Queue(0)
+pull_connections = Runner.Queue(0)
+def get_connection(push=False):
+	# return a new connection... do not forget to release it!
+	try:
+		if push:
+			ret = push_connections.get(block=False)
+		else:
+			ret = pull_connections.get(block=False)
+	except Exception:
+		ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+		if push:
+			ret.connect(Task.push_addr)
+		else:
+			ret.connect(Task.pull_addr)
+	return ret
+
+def release_connection(conn, msg='', push=False):
+	if conn:
+		if push:
+			push_connections.put(conn)
+		else:
+			pull_connections.put(conn)
+
+def close_connection(conn, msg=''):
+	if conn:
+		data = '%s,%s' % (BYE, msg)
+		try:
+			put_data(conn, data.ljust(HEADER_SIZE))
+		except:
+			pass
+		try:
+			conn.close()
+		except:
+			pass
+
+def close_all():
+	for q in (push_connections, pull_connections):
+		while q.qsize():
+			conn = q.get()
+			try:
+				close_connection(conn)
+			except:
+				# ignore errors when cleaning up
+				pass
+atexit.register(close_all)
+
+def read_header(conn):
+	cnt = 0
+	buf = []
+	while cnt < HEADER_SIZE:
+		data = conn.recv(HEADER_SIZE - cnt)
+		if not data:
+			#import traceback
+			#traceback.print_stack()
+			raise ValueError('connection ended when reading a header %r' % buf)
+		buf.append(data)
+		cnt += len(data)
+	if sys.hexversion > 0x3000000:
+		ret = ''.encode('latin-1').join(buf)
+		ret = ret.decode('latin-1')
+	else:
+		ret = ''.join(buf)
+	return ret
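+
+# Wire format sketch, as inferred from this client: every request starts with
+# a HEADER_SIZE (128) byte header of comma-separated fields padded with
+# ljust(), e.g. 'GET,<ssig>,<count>'; replies start with a 128-byte header
+# whose first field is the payload size in bytes.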
+
+def check_cache(conn, ssig):
+	"""
+	List the files on the server; this is an optimization that assumes
+	concurrent builds are rare
+	"""
+	global all_sigs_in_cache
+	if not STALE_TIME:
+		return
+	if time.time() - all_sigs_in_cache[0] > STALE_TIME:
+
+		params = (LST,'')
+		put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+
+		# read what is coming back
+		ret = read_header(conn)
+		size = int(ret.split(',')[0])
+
+		buf = []
+		cnt = 0
+		while cnt < size:
+			data = conn.recv(min(BUF, size-cnt))
+			if not data:
+				raise ValueError('connection ended %r %r' % (cnt, size))
+			buf.append(data)
+			cnt += len(data)
+
+		if sys.hexversion > 0x3000000:
+			ret = ''.encode('latin-1').join(buf)
+			ret = ret.decode('latin-1')
+		else:
+			ret = ''.join(buf)
+
+		all_sigs_in_cache = (time.time(), ret.splitlines())
+		Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
+
+	if not ssig in all_sigs_in_cache[1]:
+		raise ValueError('no file %s in cache' % ssig)
+
+class MissingFile(Exception):
+	pass
+
+def recv_file(conn, ssig, count, p):
+	check_cache(conn, ssig)
+
+	params = (GET, ssig, str(count))
+	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+	data = read_header(conn)
+
+	size = int(data.split(',')[0])
+
+	if size == -1:
+		raise MissingFile('no file %s - %s in cache' % (ssig, count))
+
+	# get the file, writing immediately
+	# TODO a tmp file would be better
+	f = open(p, 'wb')
+	cnt = 0
+	while cnt < size:
+		data = conn.recv(min(BUF, size-cnt))
+		if not data:
+			raise ValueError('connection ended %r %r' % (cnt, size))
+		f.write(data)
+		cnt += len(data)
+	f.close()
+
+def sock_send(conn, ssig, cnt, p):
+	#print "pushing %r %r %r" % (ssig, cnt, p)
+	size = os.stat(p).st_size
+	params = (PUT, ssig, str(cnt), str(size))
+	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+	f = open(p, 'rb')
+	cnt = 0
+	while cnt < size:
+		r = f.read(min(BUF, size-cnt))
+		while r:
+			k = conn.send(r)
+			if not k:
+				raise ValueError('connection ended')
+			cnt += k
+			r = r[k:]
+
+def can_retrieve_cache(self):
+	if not Task.pull_addr:
+		return False
+	if not self.outputs:
+		return False
+	self.cached = False
+
+	cnt = 0
+	sig = self.signature()
+	ssig = Utils.to_hex(self.uid() + sig)
+
+	conn = None
+	err = False
+	try:
+		try:
+			conn = get_connection()
+			for node in self.outputs:
+				p = node.abspath()
+				recv_file(conn, ssig, cnt, p)
+				cnt += 1
+		except MissingFile as e:
+			Logs.debug('netcache: file is not in the cache %r', e)
+			err = True
+		except Exception as e:
+			Logs.debug('netcache: could not get the files %r', self.outputs)
+			if Logs.verbose > 1:
+				Logs.debug('netcache: exception %r', e)
+			err = True
+
+			# broken connection? remove this one
+			close_connection(conn)
+			conn = None
+		else:
+			Logs.debug('netcache: obtained %r from cache', self.outputs)
+
+	finally:
+		release_connection(conn)
+	if err:
+		return False
+
+	self.cached = True
+	return True
+
+@Utils.run_once
+def put_files_cache(self):
+	if not Task.push_addr:
+		return
+	if not self.outputs:
+		return
+	if getattr(self, 'cached', None):
+		return
+
+	#print "called put_files_cache", id(self)
+	bld = self.generator.bld
+	sig = self.signature()
+	ssig = Utils.to_hex(self.uid() + sig)
+
+	conn = None
+	cnt = 0
+	try:
+		for node in self.outputs:
+			# We could re-create the signature of the task with the signature of the outputs
+			# in practice, this means hashing the output files
+			# this is unnecessary
+			try:
+				if not conn:
+					conn = get_connection(push=True)
+				sock_send(conn, ssig, cnt, node.abspath())
+				Logs.debug('netcache: sent %r', node)
+			except Exception as e:
+				Logs.debug('netcache: could not push the files %r', e)
+
+				# broken connection? remove this one
+				close_connection(conn)
+				conn = None
+			cnt += 1
+	finally:
+		release_connection(conn, push=True)
+
+	bld.task_sigs[self.uid()] = self.cache_sig
+
+def hash_env_vars(self, env, vars_lst):
+	# reimplement so that the resulting hash does not depend on local paths
+	if not env.table:
+		env = env.parent
+		if not env:
+			return Utils.SIG_NIL
+
+	idx = str(id(env)) + str(vars_lst)
+	try:
+		cache = self.cache_env
+	except AttributeError:
+		cache = self.cache_env = {}
+	else:
+		try:
+			return self.cache_env[idx]
+		except KeyError:
+			pass
+
+	v = str([env[a] for a in vars_lst])
+	v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
+	m = Utils.md5()
+	m.update(v.encode())
+	ret = m.digest()
+
+	Logs.debug('envhash: %r %r', ret, v)
+
+	cache[idx] = ret
+
+	return ret
+
+def uid(self):
+	# reimplement so that the signature does not depend on local paths
+	try:
+		return self.uid_
+	except AttributeError:
+		m = Utils.md5()
+		src = self.generator.bld.srcnode
+		up = m.update
+		up(self.__class__.__name__.encode())
+		for x in self.inputs + self.outputs:
+			up(x.path_from(src).encode())
+		self.uid_ = m.digest()
+		return self.uid_
+
+
+def make_cached(cls):
+	if getattr(cls, 'nocache', None):
+		return
+
+	m1 = cls.run
+	def run(self):
+		if getattr(self, 'nocache', False):
+			return m1(self)
+		if self.can_retrieve_cache():
+			return 0
+		return m1(self)
+	cls.run = run
+
+	m2 = cls.post_run
+	def post_run(self):
+		if getattr(self, 'nocache', False):
+			return m2(self)
+		bld = self.generator.bld
+		ret = m2(self)
+		if bld.cache_global:
+			self.put_files_cache()
+		if hasattr(self, 'chmod'):
+			for node in self.outputs:
+				os.chmod(node.abspath(), self.chmod)
+		return ret
+	cls.post_run = post_run
+
+@conf
+def setup_netcache(ctx, push_addr, pull_addr):
+	Task.Task.can_retrieve_cache = can_retrieve_cache
+	Task.Task.put_files_cache = put_files_cache
+	Task.Task.uid = uid
+	Task.push_addr = push_addr
+	Task.pull_addr = pull_addr
+	Build.BuildContext.hash_env_vars = hash_env_vars
+	ctx.cache_global = True
+
+	for x in Task.classes.values():
+		make_cached(x)
+
+def build(bld):
+	if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
+		Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
+		os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
+		os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
+
+	if 'NETCACHE' in os.environ:
+		if not 'NETCACHE_PUSH' in os.environ:
+			os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
+		if not 'NETCACHE_PULL' in os.environ:
+			os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
+
+	v = os.environ['NETCACHE_PULL']
+	if v:
+		h, p = v.split(':')
+		pull_addr = (h, int(p))
+	else:
+		pull_addr = None
+
+	v = os.environ['NETCACHE_PUSH']
+	if v:
+		h, p = v.split(':')
+		push_addr = (h, int(p))
+	else:
+		push_addr = None
+
+	setup_netcache(bld, push_addr, pull_addr)
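+
+# Example invocations (hypothetical addresses):
+#   NETCACHE=192.168.1.5:11001 waf configure build
+# uses the same server for pushing and pulling, while
+#   NETCACHE_PUSH=srv:11001 NETCACHE_PULL=srv:12001 waf build
+# selects distinct servers for each direction.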
+
diff --git a/third_party/waf/waflib/extras/objcopy.py b/third_party/waf/waflib/extras/objcopy.py
new file mode 100644
index 0000000..bb7ca6e
--- /dev/null
+++ b/third_party/waf/waflib/extras/objcopy.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+# Grygoriy Fuchedzhy 2010
+
+"""
+Support for converting linked targets to ihex, srec or binary files using
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
+feature. The 'objcopy' feature uses the following attributes:
+
+objcopy_bfdname        Target object format name (e.g. ihex, srec, binary).
+                       Defaults to ihex.
+objcopy_target         File name used for objcopy output. This defaults to the
+                       target name with objcopy_bfdname as extension.
+objcopy_install_path   Install path for the objcopy_target file.
+                       Defaults to ${PREFIX}/firmware.
+objcopy_flags          Additional flags passed to objcopy.
+"""
+
+from waflib.Utils import def_attrs
+from waflib import Task, Options
+from waflib.TaskGen import feature, after_method
+
+class objcopy(Task.Task):
+	run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
+	color   = 'CYAN'
+
+@feature('objcopy')
+@after_method('apply_link')
+def map_objcopy(self):
+	def_attrs(self,
+	   objcopy_bfdname = 'ihex',
+	   objcopy_target = None,
+	   objcopy_install_path = "${PREFIX}/firmware",
+	   objcopy_flags = '')
+
+	link_output = self.link_task.outputs[0]
+	if not self.objcopy_target:
+		self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
+	task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))
+
+	task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
+	try:
+		task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
+	except AttributeError:
+		pass
+
+	if self.objcopy_install_path:
+		self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
+
+def configure(ctx):
+	program_name = 'objcopy'
+	prefix = getattr(Options.options, 'cross_prefix', None)
+	if prefix:
+		program_name = '{}-{}'.format(prefix, program_name)
+	ctx.find_program(program_name, var='OBJCOPY', mandatory=True)
diff --git a/third_party/waf/waflib/extras/ocaml.py b/third_party/waf/waflib/extras/ocaml.py
new file mode 100644
index 0000000..7d785c6
--- /dev/null
+++ b/third_party/waf/waflib/extras/ocaml.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"ocaml support"
+
+import os, re
+from waflib import Utils, Task
+from waflib.Logs import error
+from waflib.TaskGen import feature, before_method, after_method, extension
+
+EXT_MLL = ['.mll']
+EXT_MLY = ['.mly']
+EXT_MLI = ['.mli']
+EXT_MLC = ['.c']
+EXT_ML  = ['.ml']
+
+open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
+def filter_comments(txt):
+	meh = [0]
+	def repl(m):
+		if m.group(1):
+			meh[0] += 1
+		elif m.group(2):
+			meh[0] -= 1
+		elif not meh[0]:
+			return m.group()
+		return ''
+	return foo.sub(repl, txt)
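+
+# For illustration: filter_comments('let x = 1 (* note *)') returns
+# 'let x = 1 '; the counter tracks nested (* ... *) pairs so that only
+# top-level code survives.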
+
+def scan(self):
+	node = self.inputs[0]
+	code = filter_comments(node.read())
+
+	global open_re
+	names = []
+	import_iterator = open_re.finditer(code)
+	if import_iterator:
+		for import_match in import_iterator:
+			names.append(import_match.group(1))
+	found_lst = []
+	raw_lst = []
+	for name in names:
+		nd = None
+		for x in self.incpaths:
+			nd = x.find_resource(name.lower()+'.ml')
+			if not nd:
+				nd = x.find_resource(name+'.ml')
+			if nd:
+				found_lst.append(nd)
+				break
+		else:
+			raw_lst.append(name)
+
+	return (found_lst, raw_lst)
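+
+# For illustration: a source line such as 'open Printf;;' makes scan() look
+# for printf.ml (then Printf.ml) on the include paths; module names that
+# cannot be resolved end up in the raw list.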
+
+native_lst=['native', 'all', 'c_object']
+bytecode_lst=['bytecode', 'all']
+
+@feature('ocaml')
+def init_ml(self):
+	Utils.def_attrs(self,
+		type = 'all',
+		incpaths_lst = [],
+		bld_incpaths_lst = [],
+		mlltasks = [],
+		mlytasks = [],
+		mlitasks = [],
+		native_tasks = [],
+		bytecode_tasks = [],
+		linktasks = [],
+		bytecode_env = None,
+		native_env = None,
+		compiled_tasks = [],
+		includes = '',
+		uselib = '',
+		are_deps_set = 0)
+
+@feature('ocaml')
+@after_method('init_ml')
+def init_envs_ml(self):
+
+	self.islibrary = getattr(self, 'islibrary', False)
+
+	global native_lst, bytecode_lst
+	self.native_env = None
+	if self.type in native_lst:
+		self.native_env = self.env.derive()
+		if self.islibrary:
+			self.native_env['OCALINKFLAGS']   = '-a'
+
+	self.bytecode_env = None
+	if self.type in bytecode_lst:
+		self.bytecode_env = self.env.derive()
+		if self.islibrary:
+			self.bytecode_env['OCALINKFLAGS'] = '-a'
+
+	if self.type == 'c_object':
+		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
+
+@feature('ocaml')
+@before_method('apply_vars_ml')
+@after_method('init_envs_ml')
+def apply_incpaths_ml(self):
+	inc_lst = self.includes.split()
+	lst = self.incpaths_lst
+	for dir in inc_lst:
+		node = self.path.find_dir(dir)
+		if not node:
+			error("node not found: " + str(dir))
+			continue
+		if not node in lst:
+			lst.append(node)
+		self.bld_incpaths_lst.append(node)
+	# now the nodes are added to self.incpaths_lst
+
+@feature('ocaml')
+@before_method('process_source')
+def apply_vars_ml(self):
+	for i in self.incpaths_lst:
+		if self.bytecode_env:
+			app = self.bytecode_env.append_value
+			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+		if self.native_env:
+			app = self.native_env.append_value
+			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+	varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
+	for name in self.uselib.split():
+		for vname in varnames:
+			cnt = self.env[vname+'_'+name]
+			if cnt:
+				if self.bytecode_env:
+					self.bytecode_env.append_value(vname, cnt)
+				if self.native_env:
+					self.native_env.append_value(vname, cnt)
+
+@feature('ocaml')
+@after_method('process_source')
+def apply_link_ml(self):
+
+	if self.bytecode_env:
+		ext = self.islibrary and '.cma' or '.run'
+
+		linktask = self.create_task('ocalink')
+		linktask.bytecode = 1
+		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+		linktask.env = self.bytecode_env
+		self.linktasks.append(linktask)
+
+	if self.native_env:
+		if self.type == 'c_object':
+			ext = '.o'
+		elif self.islibrary:
+			ext = '.cmxa'
+		else:
+			ext = ''
+
+		linktask = self.create_task('ocalinkx')
+		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+		linktask.env = self.native_env
+		self.linktasks.append(linktask)
+
+		# we produce a .o file to be used by gcc
+		self.compiled_tasks.append(linktask)
+
+@extension(*EXT_MLL)
+def mll_hook(self, node):
+	mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
+	mll_task.env = self.native_env.derive()
+	self.mlltasks.append(mll_task)
+
+	self.source.append(mll_task.outputs[0])
+
+@extension(*EXT_MLY)
+def mly_hook(self, node):
+	mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
+	mly_task.env = self.native_env.derive()
+	self.mlytasks.append(mly_task)
+	self.source.append(mly_task.outputs[0])
+
+	task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
+	task.env = self.native_env.derive()
+
+@extension(*EXT_MLI)
+def mli_hook(self, node):
+	task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
+	task.env = self.native_env.derive()
+	self.mlitasks.append(task)
+
+@extension(*EXT_MLC)
+def mlc_hook(self, node):
+	task = self.create_task('ocamlcc', node, node.change_ext('.o'))
+	task.env = self.native_env.derive()
+	self.compiled_tasks.append(task)
+
+@extension(*EXT_ML)
+def ml_hook(self, node):
+	if self.native_env:
+		task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
+		task.env = self.native_env.derive()
+		task.incpaths = self.bld_incpaths_lst
+		self.native_tasks.append(task)
+
+	if self.bytecode_env:
+		task = self.create_task('ocaml', node, node.change_ext('.cmo'))
+		task.env = self.bytecode_env.derive()
+		task.bytecode = 1
+		task.incpaths = self.bld_incpaths_lst
+		self.bytecode_tasks.append(task)
+
+def compile_may_start(self):
+
+	if not getattr(self, 'flag_deps', ''):
+		self.flag_deps = 1
+
+		# the evil part is that we can only compute the dependencies after the
+		# source files can be read (this means actually producing the source files)
+		if getattr(self, 'bytecode', ''):
+			alltasks = self.generator.bytecode_tasks
+		else:
+			alltasks = self.generator.native_tasks
+
+		self.signature() # ensure that files are scanned - unfortunately
+		tree = self.generator.bld
+		for node in self.inputs:
+			lst = tree.node_deps[self.uid()]
+			for depnode in lst:
+				for t in alltasks:
+					if t == self:
+						continue
+					if depnode in t.inputs:
+						self.set_run_after(t)
+
+		# TODO necessary to get the signature right - for now
+		delattr(self, 'cache_sig')
+		self.signature()
+
+	return Task.Task.runnable_status(self)
+
+class ocamlx(Task.Task):
+	"""native caml compilation"""
+	color   = 'GREEN'
+	run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+	scan    = scan
+	runnable_status = compile_may_start
+
+class ocaml(Task.Task):
+	"""bytecode caml compilation"""
+	color   = 'GREEN'
+	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+	scan    = scan
+	runnable_status = compile_may_start
+
+class ocamlcmi(Task.Task):
+	"""interface generator (the .i files?)"""
+	color   = 'BLUE'
+	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
+	before  = ['ocamlcc', 'ocaml']
+
+class ocamlcc(Task.Task):
+	"""ocaml to c interfaces"""
+	color   = 'GREEN'
+	run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
+
+class ocamllex(Task.Task):
+	"""lexical generator"""
+	color   = 'BLUE'
+	run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
+	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+class ocamlyacc(Task.Task):
+	"""parser generator"""
+	color   = 'BLUE'
+	run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
+	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+	def base(self):
+		node = self.outputs[0]
+		s = os.path.splitext(node.name)[0]
+		return node.bld_dir() + os.sep + s
+
+def link_may_start(self):
+
+	if getattr(self, 'bytecode', 0):
+		alltasks = self.generator.bytecode_tasks
+	else:
+		alltasks = self.generator.native_tasks
+
+	for x in alltasks:
+		if not x.hasrun:
+			return Task.ASK_LATER
+
+	if not getattr(self, 'order', ''):
+
+		# now reorder the inputs given the task dependencies
+		# this part is difficult; we do not have a total order on the tasks,
+		# so if the dependencies are wrong, this loop may not terminate
+		seen = []
+		pendant = []+alltasks
+		while pendant:
+			task = pendant.pop(0)
+			if task in seen:
+				continue
+			for x in task.run_after:
+				if not x in seen:
+					pendant.append(task)
+					break
+			else:
+				seen.append(task)
+		self.inputs = [x.outputs[0] for x in seen]
+		self.order = 1
+	return Task.Task.runnable_status(self)
+
+class ocalink(Task.Task):
+	"""bytecode caml link"""
+	color   = 'YELLOW'
+	run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
+	runnable_status = link_may_start
+	after = ['ocaml', 'ocamlcc']
+
+class ocalinkx(Task.Task):
+	"""native caml link"""
+	color   = 'YELLOW'
+	run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
+	runnable_status = link_may_start
+	after = ['ocamlx', 'ocamlcc']
+
+def configure(conf):
+	opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
+	occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
+	if (not opt) or (not occ):
+		conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
+
+	v = conf.env
+	v['OCAMLC']       = occ
+	v['OCAMLOPT']     = opt
+	v['OCAMLLEX']     = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
+	v['OCAMLYACC']    = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
+	v['OCAMLFLAGS']   = ''
+	where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
+	v['OCAMLLIB']     = where
+	v['LIBPATH_OCAML'] = where
+	v['INCLUDES_OCAML'] = where
+	v['LIB_OCAML'] = 'camlrun'
+
diff --git a/third_party/waf/waflib/extras/package.py b/third_party/waf/waflib/extras/package.py
new file mode 100644
index 0000000..c06498e
--- /dev/null
+++ b/third_party/waf/waflib/extras/package.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011
+
+"""
+Obtain packages, unpack them in a location, and add associated uselib variables
+(CFLAGS_pkgname, LIBPATH_pkgname, etc).
+
+The default is to use a Dependencies.txt file in the source directory.
+
+This is a work in progress.
+
+Usage:
+
+def options(opt):
+	opt.load('package')
+
+def configure(conf):
+	conf.load_packages()
+"""
+
+from waflib import Logs
+from waflib.Configure import conf
+
+try:
+	from urllib import request
+except ImportError:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+
+CACHEVAR = 'WAFCACHE_PACKAGE'
+
+@conf
+def get_package_cache_dir(self):
+	cache = None
+	if CACHEVAR in self.environ:
+		cache = self.environ[CACHEVAR]
+		cache = self.root.make_node(cache)
+	elif self.env[CACHEVAR]:
+		cache = self.env[CACHEVAR]
+		cache = self.root.make_node(cache)
+	else:
+		cache = self.srcnode.make_node('.wafcache_package')
+	cache.mkdir()
+	return cache
+
+@conf
+def download_archive(self, src, dst):
+	for x in self.env.PACKAGE_REPO:
+		url = '/'.join((x, src))
+		try:
+			web = urlopen(url)
+			try:
+				if web.getcode() != 200:
+					continue
+			except AttributeError:
+				pass
+		except Exception:
+			# on python3 urlopen throws an exception
+			# python 2.3 does not have getcode and throws an exception to fail
+			continue
+		else:
+			tmp = self.root.make_node(dst)
+			tmp.write(web.read())
+			Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
+			break
+	else:
+		self.fatal('Could not get the package %s' % src)
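+
+# Usage sketch (hypothetical repository URL): with
+#   conf.env.PACKAGE_REPO = ['https://example.org/packages']
+# conf.download_archive('foo-1.0.tar.gz', '/tmp/foo-1.0.tar.gz') tries each
+# repository in turn and calls fatal() when none serves the file.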
+
+@conf
+def load_packages(self):
+	self.get_package_cache_dir()
+	# read the dependencies, get the archives, ..
+
diff --git a/third_party/waf/waflib/extras/parallel_debug.py b/third_party/waf/waflib/extras/parallel_debug.py
new file mode 100644
index 0000000..4ffec5e
--- /dev/null
+++ b/third_party/waf/waflib/extras/parallel_debug.py
@@ -0,0 +1,462 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+Debugging helper for parallel compilation.
+
+Copy it to your project and load it with::
+
+	def options(opt):
+		opt.load('parallel_debug', tooldir='.')
+	def build(bld):
+		...
+
+The build will then output a file named pdebug.svg in the source directory.
+"""
+
+import re, sys, threading, time, traceback
+try:
+	from Queue import Queue
+except:
+	from queue import Queue
+from waflib import Runner, Options, Task, Logs, Errors
+
+SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
+   x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">
+
+<style type='text/css' media='screen'>
+	g.over rect { stroke:#FF0000; fill-opacity:0.4 }
+</style>
+
+<script type='text/javascript'><![CDATA[
+var svg  = document.getElementsByTagName('svg')[0];
+
+svg.addEventListener('mouseover', function(e) {
+	var g = e.target.parentNode;
+	var x = document.getElementById('r_' + g.id);
+	if (x) {
+		g.setAttribute('class', g.getAttribute('class') + ' over');
+		x.setAttribute('class', x.getAttribute('class') + ' over');
+		showInfo(e, g.id, e.target.attributes.tooltip.value);
+	}
+}, false);
+
+svg.addEventListener('mouseout', function(e) {
+		var g = e.target.parentNode;
+		var x = document.getElementById('r_' + g.id);
+		if (x) {
+			g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
+			x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
+			hideInfo(e);
+		}
+}, false);
+
+function showInfo(evt, txt, details) {
+${if project.tooltip}
+	tooltip = document.getElementById('tooltip');
+
+	var t = document.getElementById('tooltiptext');
+	t.firstChild.data = txt + " " + details;
+
+	var x = evt.clientX + 9;
+	if (x > 250) { x -= t.getComputedTextLength() + 16; }
+	var y = evt.clientY + 20;
+	tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
+	tooltip.setAttributeNS(null, "visibility", "visible");
+
+	var r = document.getElementById('tooltiprect');
+	r.setAttribute('width', t.getComputedTextLength() + 6);
+${endif}
+}
+
+function hideInfo(evt) {
+	var tooltip = document.getElementById('tooltip');
+	tooltip.setAttributeNS(null,"visibility","hidden");
+}
+]]></script>
+
+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
+<rect
+   x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
+   style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
+
+${if project.title}
+  <text x="${project.title_x}" y="${project.title_y}"
+    style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
+${endif}
+
+
+${for cls in project.groups}
+  <g id='${cls.classname}'>
+    ${for rect in cls.rects}
+    <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+    ${endfor}
+  </g>
+${endfor}
+
+${for info in project.infos}
+  <g id='r_${info.classname}'>
+   <rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+   <text x="${info.text_x}" y="${info.text_y}"
+       style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
+   >${info.text}</text>
+  </g>
+${endfor}
+
+${if project.tooltip}
+  <g transform="translate(0,0)" visibility="hidden" id="tooltip">
+       <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
+       <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
+  </g>
+${endif}
+
+</svg>
+"""
+
+COMPILE_TEMPLATE = '''def f(project):
+	lst = []
+	def xml_escape(value):
+		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+	%s
+	return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'):
+			return "$"
+		elif g('backslash'):
+			return "\\"
+		elif g('subst'):
+			extr.append(g('code'))
+			return "<<|@|>>"
+		return None
+
+	line2 = reg_act.sub(repl, line)
+	params = line2.split('<<|@|>>')
+	assert(extr)
+
+
+	indent = 0
+	buf = []
+
+	def app(txt):
+		buf.append(indent * '\t' + txt)
+
+	for x in range(len(extr)):
+		if params[x]:
+			app("lst.append(%r)" % params[x])
+
+		f = extr[x]
+		if f.startswith(('if', 'for')):
+			app(f + ':')
+			indent += 1
+		elif f.startswith('py:'):
+			app(f[3:])
+		elif f.startswith(('endif', 'endfor')):
+			indent -= 1
+		elif f.startswith(('else', 'elif')):
+			indent -= 1
+			app(f + ':')
+			indent += 1
+		elif f.startswith('xml:'):
+			app('lst.append(xml_escape(%s))' % f[4:])
+		else:
+			#app('lst.append((%s) or "cannot find %s")' % (f, f))
+			app('lst.append(str(%s))' % f)
+
+	if extr:
+		if params[-1]:
+			app("lst.append(%r)" % params[-1])
+
+	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+	# uncomment the following to debug the template
+	#for i, x in enumerate(fun.splitlines()):
+	#	print i, x
+	return Task.funex(fun)
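+
+# Sketch of the template mini-language accepted above (names are
+# illustrative): plain ${expr} is str()-ed into the output, ${xml:expr} is
+# XML-escaped, ${py:stmt} runs a statement, and ${if}/${for} blocks control
+# the flow, e.g.
+#   tmpl = compile_template('${for x in project.items}<i>${xml:x}</i>${endfor}')
+#   # tmpl(obj) returns the rendered string for any obj whose 'items'
+#   # attribute is a sequence of strings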
+
+# red   #ff4d4d
+# green #4da74d
+# lila  #a751ff
+
+color2code = {
+	'GREEN'  : '#4da74d',
+	'YELLOW' : '#fefe44',
+	'PINK'   : '#a751ff',
+	'RED'    : '#cc1d1d',
+	'BLUE'   : '#6687bb',
+	'CYAN'   : '#34e2e2',
+}
+
+mp = {}
+info = [] # list of (text,color)
+
+def map_to_color(name):
+	if name in mp:
+		return mp[name]
+	try:
+		cls = Task.classes[name]
+	except KeyError:
+		return color2code['RED']
+	if cls.color in mp:
+		return mp[cls.color]
+	if cls.color in color2code:
+		return color2code[cls.color]
+	return color2code['RED']
+
+def process(self):
+	m = self.generator.bld.producer
+	try:
+		# TODO another place for this?
+		del self.generator.bld.task_sigs[self.uid()]
+	except KeyError:
+		pass
+
+	self.generator.bld.producer.set_running(1, self)
+
+	try:
+		ret = self.run()
+	except Exception:
+		self.err_msg = traceback.format_exc()
+		self.hasrun = Task.EXCEPTION
+
+		# TODO cleanup
+		m.error_handler(self)
+		return
+
+	if ret:
+		self.err_code = ret
+		self.hasrun = Task.CRASHED
+	else:
+		try:
+			self.post_run()
+		except Errors.WafError:
+			pass
+		except Exception:
+			self.err_msg = traceback.format_exc()
+			self.hasrun = Task.EXCEPTION
+		else:
+			self.hasrun = Task.SUCCESS
+	if self.hasrun != Task.SUCCESS:
+		m.error_handler(self)
+
+	self.generator.bld.producer.set_running(-1, self)
+
+Task.Task.process_back = Task.Task.process
+Task.Task.process = process
+
+old_start = Runner.Parallel.start
+def do_start(self):
+	try:
+		Options.options.dband
+	except AttributeError:
+		self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
+
+	self.taskinfo = Queue()
+	old_start(self)
+	if self.dirty:
+		make_picture(self)
+Runner.Parallel.start = do_start
+
+lock_running = threading.Lock()
+def set_running(self, by, tsk):
+	with lock_running:
+		try:
+			cache = self.lock_cache
+		except AttributeError:
+			cache = self.lock_cache = {}
+
+		i = 0
+		if by > 0:
+			vals = cache.values()
+			for i in range(self.numjobs):
+				if i not in vals:
+					cache[tsk] = i
+					break
+		else:
+			i = cache[tsk]
+			del cache[tsk]
+
+		self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs)))  )
+Runner.Parallel.set_running = set_running
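+
+# Each queue entry is (slot, task id, timestamp, class name, processed count,
+# total count, +1/-1, output names); make_picture() below pairs the +1/-1
+# events per task id to compute the start/stop rectangles of the diagram.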
+
+def name2class(name):
+	return name.replace(' ', '_').replace('.', '_')
+
+def make_picture(producer):
+	# first, cast the parameters
+	if not hasattr(producer.bld, 'path'):
+		return
+
+	tmp = []
+	try:
+		while True:
+			tup = producer.taskinfo.get(False)
+			tmp.append(list(tup))
+	except:
+		pass
+
+	try:
+		ini = float(tmp[0][2])
+	except:
+		return
+
+	if not info:
+		seen = []
+		for x in tmp:
+			name = x[3]
+			if not name in seen:
+				seen.append(name)
+			else:
+				continue
+
+			info.append((name, map_to_color(name)))
+		info.sort(key=lambda x: x[0])
+
+	thread_count = 0
+	acc = []
+	for x in tmp:
+		thread_count += x[6]
+		acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
+
+	data_node = producer.bld.path.make_node('pdebug.dat')
+	data_node.write('\n'.join(acc))
+
+	tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
+
+	st = {}
+	for l in tmp:
+		if not l[0] in st:
+			st[l[0]] = len(st.keys())
+	tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
+	THREAD_AMOUNT = len(st.keys())
+
+	st = {}
+	for l in tmp:
+		if not l[1] in st:
+			st[l[1]] = len(st.keys())
+	tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
+
+
+	BAND = Options.options.dband
+
+	seen = {}
+	acc = []
+	for x in range(len(tmp)):
+		line = tmp[x]
+		id = line[1]
+
+		if id in seen:
+			continue
+		seen[id] = True
+
+		begin = line[2]
+		thread_id = line[0]
+		for y in range(x + 1, len(tmp)):
+			line = tmp[y]
+			if line[1] == id:
+				end = line[2]
+				#print id, thread_id, begin, end
+				#acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
+				acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
+				break
+
+	if Options.options.dmaxtime < 0.1:
+		gwidth = 1
+		for x in tmp:
+			m = BAND * x[2]
+			if m > gwidth:
+				gwidth = m
+	else:
+		gwidth = BAND * Options.options.dmaxtime
+
+	ratio = float(Options.options.dwidth) / gwidth
+	gwidth = Options.options.dwidth
+	gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
+
+
+	# simple data model for our template
+	class tobject(object):
+		pass
+
+	model = tobject()
+	model.x = 0
+	model.y = 0
+	model.width = gwidth + 4
+	model.height = gheight + 4
+
+	model.tooltip = not Options.options.dnotooltip
+
+	model.title = Options.options.dtitle
+	model.title_x = gwidth / 2
+	model.title_y = gheight - 5
+
+	groups = {}
+	for (x, y, w, h, clsname, name) in acc:
+		try:
+			groups[clsname].append((x, y, w, h, name))
+		except:
+			groups[clsname] = [(x, y, w, h, name)]
+
+	# groups of rectangles (else js highlighting is slow)
+	model.groups = []
+	for cls in groups:
+		g = tobject()
+		model.groups.append(g)
+		g.classname = name2class(cls)
+		g.rects = []
+		for (x, y, w, h, name) in groups[cls]:
+			r = tobject()
+			g.rects.append(r)
+			r.x = 2 + x * ratio
+			r.y = 2 + y
+			r.width = w * ratio
+			r.height = h
+			r.name = name
+			r.color = map_to_color(cls)
+
+	cnt = THREAD_AMOUNT
+
+	# caption
+	model.infos = []
+	for (text, color) in info:
+		inf = tobject()
+		model.infos.append(inf)
+		inf.classname = name2class(text)
+		inf.x = 2 + BAND
+		inf.y = 5 + (cnt + 0.5) * BAND
+		inf.width = BAND/2
+		inf.height = BAND/2
+		inf.color = color
+
+		inf.text = text
+		inf.text_x = 2 + 2 * BAND
+		inf.text_y = 5 + (cnt + 0.5) * BAND + 10
+
+		cnt += 1
+
+	# write the file...
+	template1 = compile_template(SVG_TEMPLATE)
+	txt = template1(model)
+
+	node = producer.bld.path.make_node('pdebug.svg')
+	node.write(txt)
+	Logs.warn('Created the diagram %r', node)
+
+def options(opt):
+	opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
+		help='title for the svg diagram', dest='dtitle')
+	opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
+	opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
+	opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
+	opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+	opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
+
diff --git a/third_party/waf/waflib/extras/pch.py b/third_party/waf/waflib/extras/pch.py
new file mode 100644
index 0000000..b44c7a2
--- /dev/null
+++ b/third_party/waf/waflib/extras/pch.py
@@ -0,0 +1,148 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Alexander Afanasyev (UCLA), 2014
+
+"""
+Enable precompiled C++ header support (currently only clang++ and g++ are supported)
+
+To use this tool, wscript should look like:
+
+	def options(opt):
+		opt.load('pch')
+		# This will add `--with-pch` configure option.
+		# Unless --with-pch during configure stage specified, the precompiled header support is disabled
+
+	def configure(conf):
+		conf.load('pch')
+		# this will set conf.env.WITH_PCH if --with-pch is specified and the supported compiler is used
+		# Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
+
+	def build(bld):
+		bld(features='cxx pch',
+			target='precompiled-headers',
+			name='precompiled-headers',
+			headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
+
+			# Other parameters to compile precompiled headers
+			# includes=...,
+			# export_includes=...,
+			# use=...,
+			# ...
+
+			# Exported parameters will be propagated even if precompiled headers are disabled
+		)
+
+		bld(
+			target='test',
+			features='cxx cxxprogram',
+			source='a.cpp b.cpp d.cpp main.cpp',
+			use='precompiled-headers',
+		)
+
+		# or
+
+		bld(
+			target='test',
+			features='pch cxx cxxprogram',
+			source='a.cpp b.cpp d.cpp main.cpp',
+			headers='a.h b.h c.h',
+		)
+
+Note that precompiled headers must have include guards. If the guards are missing, any benefit of the precompiled header will be voided and compilation may fail in some cases.
+"""
+
+import os
+from waflib import Task, TaskGen, Utils
+from waflib.Tools import c_preproc, cxx
+
+
+PCH_COMPILER_OPTIONS = {
+	'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
+	'g++':     [['-include'], '.gch', ['-x', 'c++-header']],
+}
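+
+# The table maps the compiler name to [include flag, pch file extension,
+# flags forcing header compilation]: e.g. with g++ the header is compiled via
+# '-x c++-header' into a .gch file that dependent objects later pull in with
+# '-include'.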
+
+
+def options(opt):
+	opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Disable the use of precompiled headers (enabled by default with g++ and clang++)''')
+
+def configure(conf):
+	if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()):
+		conf.env.WITH_PCH = True
+		flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
+		conf.env.CXXPCH_F = flags[0]
+		conf.env.CXXPCH_EXT = flags[1]
+		conf.env.CXXPCH_FLAGS = flags[2]
+
+
+@TaskGen.feature('pch')
+@TaskGen.before('process_source')
+def apply_pch(self):
+	if not self.env.WITH_PCH:
+		return
+
+	if getattr(self.bld, 'pch_tasks', None) is None:
+		self.bld.pch_tasks = {}
+
+	if getattr(self, 'headers', None) is None:
+		return
+
+	self.headers = self.to_nodes(self.headers)
+
+	if getattr(self, 'name', None):
+		if self.name in self.bld.pch_tasks:
+			self.bld.fatal("Duplicated 'pch' task with name %r" % ("%s.%s" % (self.name, self.idx)))
+
+	out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
+	out = self.path.find_or_declare(out)
+	task = self.create_task('gchx', self.headers, out)
+
+	# target should be an absolute path of `out`, but without precompiled header extension
+	task.target = out.abspath()[:-len(out.suffix())]
+
+	self.pch_task = task
+	if getattr(self, 'name', None):
+		self.bld.pch_tasks[self.name] = task
+
+@TaskGen.feature('cxx')
+@TaskGen.after_method('process_source', 'propagate_uselib_vars')
+def add_pch(self):
+	if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
+		return
+
+	pch = None
+	# find pch task, if any
+
+	if getattr(self, 'pch_task', None):
+		pch = self.pch_task
+	else:
+		for use in Utils.to_list(self.use):
+			try:
+				pch = self.bld.pch_tasks[use]
+			except KeyError:
+				pass
+
+	if pch:
+		for x in self.compiled_tasks:
+			x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
+
+class gchx(Task.Task):
+	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+	scan    = c_preproc.scan
+	color   = 'BLUE'
+	ext_out=['.h']
+
+	def runnable_status(self):
+		try:
+			node_deps = self.generator.bld.node_deps[self.uid()]
+		except KeyError:
+			node_deps = []
+		ret = Task.Task.runnable_status(self)
+		if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
+			t = os.stat(self.outputs[0].abspath()).st_mtime
+			for n in self.inputs + node_deps:
+				if os.stat(n.abspath()).st_mtime > t:
+					return Task.RUN_ME
+		return ret
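
A minimal wscript wiring for the pch tool above might look like the following sketch; the file and target names (common.h, app) are illustrative, while the feature, option and attribute names come from the code itself::

	def options(opt):
		opt.load('compiler_cxx')
		opt.load('pch')            # registers --without-pch

	def configure(conf):
		conf.load('compiler_cxx')
		conf.load('pch')           # sets env.WITH_PCH for g++/clang++

	def build(bld):
		bld(name='pch', target='pch', features='pch', headers='common.h')
		bld(features='cxx cxxprogram', source='main.cpp', target='app', use='pch')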
diff --git a/third_party/waf/waflib/extras/pep8.py b/third_party/waf/waflib/extras/pep8.py
new file mode 100644
index 0000000..676beed
--- /dev/null
+++ b/third_party/waf/waflib/extras/pep8.py
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+Install pep8 module:
+$ easy_install pep8
+	or
+$ pip install pep8
+
+To add the pep8 tool to the waf file:
+$ ./waf-light --tools=compat15,pep8
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=pep8
+
+
+Then add this to your wscript:
+
+[at]extension('.py', 'wscript')
+def run_pep8(self, node):
+	self.create_task('Pep8', node)
+
+'''
+
+import threading
+from waflib import Task, Options
+
+pep8 = __import__('pep8')
+
+
+class Pep8(Task.Task):
+	color = 'PINK'
+	lock = threading.Lock()
+
+	def check_options(self):
+		if pep8.options:
+			return
+		pep8.options = Options.options
+		pep8.options.prog = 'pep8'
+		excl = pep8.options.exclude.split(',')
+		pep8.options.exclude = [s.rstrip('/') for s in excl]
+		if pep8.options.filename:
+			pep8.options.filename = pep8.options.filename.split(',')
+		if pep8.options.select:
+			pep8.options.select = pep8.options.select.split(',')
+		else:
+			pep8.options.select = []
+		if pep8.options.ignore:
+			pep8.options.ignore = pep8.options.ignore.split(',')
+		elif pep8.options.select:
+			# Ignore all checks which are not explicitly selected
+			pep8.options.ignore = ['']
+		elif pep8.options.testsuite or pep8.options.doctest:
+			# For doctest and testsuite, all checks are required
+			pep8.options.ignore = []
+		else:
+			# The default choice: ignore controversial checks
+			pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
+		pep8.options.physical_checks = pep8.find_checks('physical_line')
+		pep8.options.logical_checks = pep8.find_checks('logical_line')
+		pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
+		pep8.options.messages = {}
+
+	def run(self):
+		with Pep8.lock:
+			self.check_options()
+		pep8.input_file(self.inputs[0].abspath())
+		return 0 if not pep8.get_count() else -1
+
+
+def options(opt):
+	opt.add_option('-q', '--quiet', default=0, action='count',
+				   help="report only file names, or nothing with -qq")
+	opt.add_option('-r', '--repeat', action='store_true',
+				   help="show all occurrences of the same error")
+	opt.add_option('--exclude', metavar='patterns',
+				   default=pep8.DEFAULT_EXCLUDE,
+				   help="exclude files or directories which match these "
+				   "comma separated patterns (default: %s)" %
+				   pep8.DEFAULT_EXCLUDE,
+				   dest='exclude')
+	opt.add_option('--filename', metavar='patterns', default='*.py',
+				   help="when parsing directories, only check filenames "
+				   "matching these comma separated patterns (default: "
+				   "*.py)")
+	opt.add_option('--select', metavar='errors', default='',
+				   help="select errors and warnings (e.g. E,W6)")
+	opt.add_option('--ignore', metavar='errors', default='',
+				   help="skip errors and warnings (e.g. E4,W)")
+	opt.add_option('--show-source', action='store_true',
+				   help="show source code for each error")
+	opt.add_option('--show-pep8', action='store_true',
+				   help="show text of PEP 8 for each error")
+	opt.add_option('--statistics', action='store_true',
+				   help="count errors and warnings")
+	opt.add_option('--count', action='store_true',
+				   help="print total number of errors and warnings "
+				   "to standard error and set exit code to 1 if "
+				   "total is not null")
+	opt.add_option('--benchmark', action='store_true',
+				   help="measure processing speed")
+	opt.add_option('--testsuite', metavar='dir',
+				   help="run regression tests from dir")
+	opt.add_option('--doctest', action='store_true',
+				   help="run doctest on myself")
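
A hypothetical wscript using the Pep8 task above could look like this; as the docstring notes, the extension hook is declared by the project itself, and the tool's options() must be loaded so that check_options() finds --exclude and friends::

	from waflib.TaskGen import extension

	@extension('.py')
	def run_pep8(self, node):
		self.create_task('Pep8', node)

	def options(opt):
		opt.load('pep8')           # registers --exclude, --select, --ignore, ...

	def build(bld):
		bld(source=bld.path.ant_glob('src/**/*.py'))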
diff --git a/third_party/waf/waflib/extras/pgicc.py b/third_party/waf/waflib/extras/pgicc.py
new file mode 100644
index 0000000..f8068d5
--- /dev/null
+++ b/third_party/waf/waflib/extras/pgicc.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C compiler
+"""
+
+import sys, re
+from waflib import Errors
+from waflib.Configure import conf
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('pgicc')
+
+@conf
+def find_pgi_compiler(conf, var, name):
+	"""
+	Find the program, and execute it to ensure it really is a PGI compiler.
+	"""
+	if sys.platform == 'cygwin':
+		conf.fatal('The PGI compiler does not work on Cygwin')
+
+	v = conf.env
+	cc = None
+	if v[var]:
+		cc = v[var]
+	elif var in conf.environ:
+		cc = conf.environ[var]
+	if not cc:
+		cc = conf.find_program(name, var=var)
+	if not cc:
+		conf.fatal('PGI Compiler (%s) was not found' % name)
+
+	v[var + '_VERSION'] = conf.get_pgi_version(cc)
+	v[var] = cc
+	v[var + '_NAME'] = 'pgi'
+
+@conf
+def get_pgi_version(conf, cc):
+	"""Find the version of a pgi compiler."""
+	version_re = re.compile(r"The Portland Group", re.I).search
+	cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
+
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find pgi compiler %r' % cmd)
+
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
+
+	if not match:
+		conf.fatal('Could not verify PGI signature')
+
+	cmd = cc + ['-help=variable']
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0)
+	except Errors.WafError:
+		conf.fatal('Could not find pgi compiler %r' % cmd)
+
+	version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
+	if len(version) != 1:
+		conf.fatal('Could not determine the compiler version')
+	return version[0]
+
+def configure(conf):
+	conf.find_pgi_compiler('CC', 'pgcc')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+
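Since pgicc is appended to c_compiler['linux'], it can be selected either directly or through the generic compiler_c detection; a sketch, assuming pgcc is on PATH::

	def configure(conf):
		conf.load('pgicc')        # forces pgcc; sets CC, CC_VERSION and CC_NAME
		# alternatively, conf.load('compiler_c') will try gcc, icc, pgicc, ...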
diff --git a/third_party/waf/waflib/extras/pgicxx.py b/third_party/waf/waflib/extras/pgicxx.py
new file mode 100644
index 0000000..eae121c
--- /dev/null
+++ b/third_party/waf/waflib/extras/pgicxx.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C++ compiler
+"""
+
+from waflib.Tools.compiler_cxx import cxx_compiler
+cxx_compiler['linux'].append('pgicxx')
+
+from waflib.extras import pgicc
+
+def configure(conf):
+	conf.find_pgi_compiler('CXX', 'pgCC')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/proc.py b/third_party/waf/waflib/extras/proc.py
new file mode 100644
index 0000000..764abec
--- /dev/null
+++ b/third_party/waf/waflib/extras/proc.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import environ, path
+from waflib import TaskGen, Utils
+
+def options(opt):
+	grp = opt.add_option_group('Oracle ProC Options')
+	grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
+	grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing the server list (tnsnames.ora)')
+	grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
+
+def configure(cnf):
+	env = cnf.env
+	if not env.PROC_ORACLE:
+		env.PROC_ORACLE = cnf.options.oracle_home
+	if not env.PROC_TNS_ADMIN:
+		env.PROC_TNS_ADMIN = cnf.options.tns_admin
+	if not env.PROC_CONNECTION:
+		env.PROC_CONNECTION = cnf.options.connection
+	cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
+
+def proc(tsk):
+	env = tsk.env
+	gen = tsk.generator
+	inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
+
+	cmd = (
+		[env.PROC] +
+		['SQLCHECK=SEMANTICS'] +
+		(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
+			if env.PROC_INCLUDES else []) +
+		['INCLUDE=(' + ','.join(
+			[i.bldpath() for i in inc_nodes]
+		) + ')'] +
+		['userid=' + env.PROC_CONNECTION] +
+		['INAME=' + tsk.inputs[0].bldpath()] +
+		['ONAME=' + tsk.outputs[0].bldpath()]
+	)
+	exec_env = {
+		'ORACLE_HOME': env.PROC_ORACLE,
+		'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
+	}
+	if env.PROC_TNS_ADMIN:
+		exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
+	return tsk.exec_command(cmd, env=exec_env)
+
+TaskGen.declare_chain(
+	name = 'proc',
+	rule = proc,
+	ext_in = '.pc',
+	ext_out = '.c',
+)
+
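The declare_chain above means every .pc source is preprocessed into a .c file that the regular C tooling then compiles; a hypothetical wscript fragment (db_access.pc and the target name are illustrative)::

	def options(opt):
		opt.load('compiler_c proc')

	def configure(cnf):
		cnf.load('compiler_c proc')   # needs --oracle_home or PROC_ORACLE set

	def build(bld):
		bld.program(source='db_access.pc main.c', target='dbapp')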
diff --git a/third_party/waf/waflib/extras/protoc.py b/third_party/waf/waflib/extras/protoc.py
new file mode 100644
index 0000000..4a519cc
--- /dev/null
+++ b/third_party/waf/waflib/extras/protoc.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Philipp Bender, 2012
+# Matt Clarkson, 2012
+
+import re, os
+from waflib.Task import Task
+from waflib.TaskGen import extension
+from waflib import Errors, Context, Logs
+
+"""
+A simple tool to integrate protocol buffers into your build system.
+
+Example for C++:
+
+    def configure(conf):
+        conf.load('compiler_cxx cxx protoc')
+
+    def build(bld):
+        bld(
+                features = 'cxx cxxprogram',
+                source   = 'main.cpp file1.proto proto/file2.proto',
+                includes = '. proto',
+                target   = 'executable')
+
+Example for Python:
+
+    def configure(conf):
+        conf.load('python protoc')
+
+    def build(bld):
+        bld(
+                features = 'py',
+                source   = 'main.py file1.proto proto/file2.proto',
+                protoc_includes  = 'proto')
+
+Example for both Python and C++ at same time:
+
+    def configure(conf):
+        conf.load('cxx python protoc')
+
+    def build(bld):
+        bld(
+                features = 'cxx py',
+                source   = 'file1.proto proto/file2.proto',
+                protoc_includes  = 'proto')	# or includes
+
+
+Example for Java:
+
+    def options(opt):
+        opt.load('java')
+
+    def configure(conf):
+        conf.load('python java protoc')
+        # Here you have to point to your protobuf-java JAR and have it in classpath
+        conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
+
+    def build(bld):
+        bld(
+                features = 'javac protoc',
+                name = 'pbjava',
+                srcdir = 'inc/ src',	# directories used by javac
+                source   = ['inc/message_inc.proto', 'inc/message.proto'],
+					# source is used by protoc for .proto files
+                use = 'PROTOBUF',
+                protoc_includes = ['inc']) # for protoc to search dependencies
+
+
+Protoc includes passed via protoc_includes are either relative to the taskgen
+or to the project and are searched in this order.
+
+Include directories external to the waf project can also be passed to the
+tool by using protoc_extincludes
+
+                protoc_extincludes = ['/usr/include/pblib']
+
+
+Notes when using this tool:
+
+- protoc command line parsing is tricky.
+
+  The generated files can be put in subfolders which depend on
+  the order of the include paths.
+
+  Try to be simple when creating task generators
+  containing protoc stuff.
+
+"""
+
+class protoc(Task):
+	run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${PROTOC_ST:PROTOC_EXTINCPATHS} ${SRC[0].bldpath()}'
+	color   = 'BLUE'
+	ext_out = ['.h', 'pb.cc', '.py', '.java']
+	def scan(self):
+		"""
+		Scan .proto dependencies
+		"""
+		node = self.inputs[0]
+
+		nodes = []
+		names = []
+		seen = []
+		search_nodes = []
+
+		if not node:
+			return (nodes, names)
+
+		if 'cxx' in self.generator.features:
+			search_nodes = self.generator.includes_nodes
+
+		if 'py' in self.generator.features or 'javac' in self.generator.features:
+			for incpath in getattr(self.generator, 'protoc_includes', []):
+				incpath_node = self.generator.path.find_node(incpath)
+				if incpath_node:
+					search_nodes.append(incpath_node)
+				else:
+					# Check if relative to top-level for extra tg dependencies
+					incpath_node = self.generator.bld.path.find_node(incpath)
+					if incpath_node:
+						search_nodes.append(incpath_node)
+					else:
+						raise Errors.WafError('protoc: include path %r does not exist' % incpath)
+
+
+		def parse_node(node):
+			if node in seen:
+				return
+			seen.append(node)
+			code = node.read().splitlines()
+			for line in code:
+				m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
+				if m:
+					dep = m.groups()[0]
+					for incnode in search_nodes:
+						found = incnode.find_resource(dep)
+						if found:
+							nodes.append(found)
+							parse_node(found)
+						else:
+							names.append(dep)
+
+		parse_node(node)
+		# Add also dependencies path to INCPATHS so protoc will find the included file
+		for deppath in nodes:
+			self.env.append_unique('INCPATHS', deppath.parent.bldpath())
+		return (nodes, names)
+
+@extension('.proto')
+def process_protoc(self, node):
+	incdirs = []
+	out_nodes = []
+	protoc_flags = []
+
+	# ensure PROTOC_FLAGS is a list; a copy is used below anyway
+	self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
+
+	if 'cxx' in self.features:
+		cpp_node = node.change_ext('.pb.cc')
+		hpp_node = node.change_ext('.pb.h')
+		self.source.append(cpp_node)
+		out_nodes.append(cpp_node)
+		out_nodes.append(hpp_node)
+		protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
+
+	if 'py' in self.features:
+		py_node = node.change_ext('_pb2.py')
+		self.source.append(py_node)
+		out_nodes.append(py_node)
+		protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
+
+	if 'javac' in self.features:
+		# Make javac also pick up the java code generated into the build directory
+		if node.parent.get_bld() not in self.javac_task.srcdir:
+			self.javac_task.srcdir.append(node.parent.get_bld())
+
+		protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
+		node.parent.get_bld().mkdir()
+
+	tsk = self.create_task('protoc', node, out_nodes)
+	tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
+
+	if 'javac' in self.features:
+		self.javac_task.set_run_after(tsk)
+
+	# Instruct protoc where to search for .proto included files.
+	# For C++ standard include files dirs are used,
+	# but this doesn't apply to Python for example
+	for incpath in getattr(self, 'protoc_includes', []):
+		incpath_node = self.path.find_node(incpath)
+		if incpath_node:
+			incdirs.append(incpath_node.bldpath())
+		else:
+			# Check if relative to top-level for extra tg dependencies
+			incpath_node = self.bld.path.find_node(incpath)
+			if incpath_node:
+				incdirs.append(incpath_node.bldpath())
+			else:
+				raise Errors.WafError('protoc: include path %r does not exist' % incpath)
+
+	tsk.env.PROTOC_INCPATHS = incdirs
+
+	# Include paths external to the waf project (ie. shared pb repositories)
+	tsk.env.PROTOC_EXTINCPATHS = getattr(self, 'protoc_extincludes', [])
+
+	# PR2115: protoc generates output of .proto files in nested
+	# directories by canonicalizing paths. To avoid this we have to pass
+	# the directory of the .proto file as the first include path
+	tsk.env.prepend_value('INCPATHS', node.parent.bldpath())
+
+	use = getattr(self, 'use', '')
+	if 'PROTOBUF' not in use:
+		self.use = self.to_list(use) + ['PROTOBUF']
+
+def configure(conf):
+	conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
+	conf.find_program('protoc', var='PROTOC')
+	conf.start_msg('Checking for protoc version')
+	protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
+	protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:]
+	conf.end_msg(protocver)
+	conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
+	conf.env.PROTOC_ST = '-I%s'
+	conf.env.PROTOC_FL = '%s'
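
Putting the pieces together, a hypothetical C++ consumer of the tool above (file names are illustrative; protoc_includes is resolved relative to the task generator first, then the project root, as implemented in process_protoc)::

	def configure(conf):
		conf.load('compiler_cxx cxx protoc')

	def build(bld):
		bld(features='cxx cxxprogram',
		    source='main.cpp api.proto',
		    includes='. proto',
		    protoc_includes='proto',
		    target='server')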
diff --git a/third_party/waf/waflib/extras/pyqt5.py b/third_party/waf/waflib/extras/pyqt5.py
new file mode 100644
index 0000000..0c083a1
--- /dev/null
+++ b/third_party/waf/waflib/extras/pyqt5.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python
+
+"""
+This tool helps with finding Python Qt5 tools and libraries,
+and provides translation from QT5 files to Python code.
+
+The following snippet illustrates the tool usage::
+
+	def options(opt):
+		opt.load('py pyqt5')
+
+	def configure(conf):
+		conf.load('py pyqt5')
+
+	def build(bld):
+		bld(
+			features = 'py pyqt5',
+			source   = 'main.py textures.qrc aboutDialog.ui',
+		)
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "pyqt5" tool.
+
+Add the qrc resource files and the ui definition files to the
+sources list; they will be translated into Python code with the
+system tools (PyQt5, PySide2 and PyQt4 are searched in this
+order) and then compiled.
+"""
+
+try:
+	from xml.sax import make_parser
+	from xml.sax.handler import ContentHandler
+except ImportError:
+	has_xml = False
+	ContentHandler = object
+else:
+	has_xml = True
+
+import os
+from waflib.Tools import python
+from waflib import Task, Options
+from waflib.TaskGen import feature, extension
+from waflib.Configure import conf
+from waflib import Logs
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI  = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+
+class XMLHandler(ContentHandler):
+	"""
+	Parses ``.qrc`` files
+	"""
+	def __init__(self):
+		self.buf = []
+		self.files = []
+	def startElement(self, name, attrs):
+		if name == 'file':
+			self.buf = []
+	def endElement(self, name):
+		if name == 'file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self, cars):
+		self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_pyrcc_task(self, node):
+	"Creates rcc and py task for ``.qrc`` files"
+	rcnode = node.change_ext('.py')
+	self.create_task('pyrcc', node, rcnode)
+	if getattr(self, 'install_from', None):
+		self.install_from = self.install_from.get_bld()
+	else:
+		self.install_from = self.path.get_bld()
+	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+	self.process_py(rcnode)
+
+@extension(*EXT_UI)
+def create_pyuic_task(self, node):
+	"Create uic tasks and py for user interface ``.ui`` definition files"
+	uinode = node.change_ext('.py')
+	self.create_task('ui5py', node, uinode)
+	if getattr(self, 'install_from', None):
+		self.install_from = self.install_from.get_bld()
+	else:
+		self.install_from = self.path.get_bld()
+	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+	self.process_py(uinode)
+
+@extension('.ts')
+def add_pylang(self, node):
+	"""Adds all the .ts file into ``self.lang``"""
+	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('pyqt5')
+def apply_pyqt5(self):
+	"""
+	The additional parameters are:
+
+	:param lang: list of translation files (\\*.ts) to process
+	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param langname: if given, transform the \\*.ts files into a .qrc file to include in the binary file
+	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+	"""
+	if getattr(self, 'lang', None):
+		qmtasks = []
+		for x in self.to_list(self.lang):
+			if isinstance(x, str):
+				x = self.path.find_resource(x + '.ts')
+			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+
+		if getattr(self, 'langname', None):
+			qmnodes = [k.outputs[0] for k in qmtasks]
+			rcnode = self.langname
+			if isinstance(rcnode, str):
+				rcnode = self.path.find_or_declare(rcnode + '.qrc')
+			t = self.create_task('qm2rcc', qmnodes, rcnode)
+			create_pyrcc_task(self, t.outputs[0])
+
+class pyrcc(Task.Task):
+	"""
+	Processes ``.qrc`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_PYRCC} ${QT_PYRCC_FLAGS} ${SRC} -o ${TGT}'
+	ext_out = ['.py']
+
+	def rcname(self):
+		return os.path.splitext(self.inputs[0].name)[0]
+
+	def scan(self):
+		"""Parse the *.qrc* files"""
+		if not has_xml:
+			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
+		parser.setContentHandler(curHandler)
+		fi = open(self.inputs[0].abspath(), 'r')
+		try:
+			parser.parse(fi)
+		finally:
+			fi.close()
+
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
+		for x in curHandler.files:
+			nd = root.find_resource(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
+		return (nodes, names)
+
+
+class ui5py(Task.Task):
+	"""
+	Processes ``.ui`` files for python
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_PYUIC} ${QT_PYUIC_FLAGS} ${SRC} -o ${TGT}'
+	ext_out = ['.py']
+
+class ts2qm(Task.Task):
+	"""
+	Generates ``.qm`` files from ``.ts`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Generates ``.qrc`` files from ``.qm`` files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	self.find_pyqt5_binaries()
+
+	# warn about this during the configuration too
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+@conf
+def find_pyqt5_binaries(self):
+	"""
+	Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
+	"""
+	env = self.env
+
+	if getattr(Options.options, 'want_pyqt5', True):
+		self.find_program(['pyuic5'], var='QT_PYUIC')
+		self.find_program(['pyrcc5'], var='QT_PYRCC')
+		self.find_program(['pylupdate5'], var='QT_PYLUPDATE')
+	elif getattr(Options.options, 'want_pyside2', True):
+		self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC')
+		self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC')
+		self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE')
+	elif getattr(Options.options, 'want_pyqt4', True):
+		self.find_program(['pyuic4'], var='QT_PYUIC')
+		self.find_program(['pyrcc4'], var='QT_PYRCC')
+		self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
+	else:
+		self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
+		self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
+		self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
+
+	if not env.QT_PYUIC:
+		self.fatal('cannot find the uic compiler for python for qt5')
+
+	if not env.QT_PYRCC:
+		self.fatal('cannot find the rcc compiler for python for qt5')
+
+	self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	pyqt5opt=opt.add_option_group("Python QT5 Options")
+	pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
+	pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
+	pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
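
A hypothetical build combining the translation handling of apply_pyqt5 with the resource and ui hooks (file names are illustrative; lang entries are given without the .ts extension and langname without .qrc, as the code above expects)::

	def build(bld):
		bld(features='py pyqt5',
		    source='main.py dialogs/about.ui icons.qrc',
		    lang='linguist/app_fr linguist/app_de',
		    langname='translations')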
diff --git a/third_party/waf/waflib/extras/pytest.py b/third_party/waf/waflib/extras/pytest.py
new file mode 100644
index 0000000..fc9ad1c
--- /dev/null
+++ b/third_party/waf/waflib/extras/pytest.py
@@ -0,0 +1,240 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2016-2018 (xbreak)
+
+"""
+Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **pytest** feature.
+
+To use pytest the following is needed:
+
+1. Load `pytest` and the dependency `waf_unit_test` tools.
+2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with
+   the following attributes:
+
+   - `pytest_source`: Test input files.
+   - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or
+               if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``.
+   - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False.
+   - `ut_cwd`: Working directory for test runner. Defaults to directory of
+               first ``pytest_source`` file.
+
+   Additionally the following `pytest` specific attributes are used in dependent taskgens:
+
+   - `pytest_path`: Node or string list of additional Python paths.
+   - `pytest_libpath`: Node or string list of additional library paths.
+
+The `use` dependencies are used for both update calculation and to populate
+the following environment variables for the `pytest` test runner:
+
+1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`:
+
+   - `install_from` attribute is used to determine where the root of the Python sources
+      are located. If `install_from` is not specified the default is to use the taskgen path
+      as the root.
+
+   - `pytest_path` attribute is used to manually specify additional Python paths.
+
+2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with
+   non-static link_task.
+
+   - `pytest_libpath` attribute is used to manually specify additional linker paths.
+
+3. Java class search path (CLASSPATH) of any Java/Javalike dependency
+
+Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
+      because the extension might be part of a Python package or used standalone:
+
+      - When used as part of another `py` package, the `PYTHONPATH` is provided by
+      that taskgen so no additional action is required.
+
+      - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly
+      via the `pytest_path` attribute on the `pyext` taskgen.
+
+      For details c.f. the pytest playground examples.
+
+
+For example::
+
+    # A standalone Python C extension that demonstrates unit test environment population
+    # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH.
+    #
+    # Note: `pytest_path` is provided here because pytest cannot automatically determine
+    # if the extension is part of another Python package or is used standalone.
+    bld(name         = 'foo_ext',
+        features     = 'c cshlib pyext',
+        source       = 'src/foo_ext.c',
+        target       = 'foo_ext',
+        pytest_path  = [ bld.path.get_bld() ])
+
+    # Python package under test that also depend on the Python module `foo_ext`
+    #
+    # Note: `install_from` is added automatically to `PYTHONPATH`.
+    bld(name         = 'foo',
+        features     = 'py',
+        use          = 'foo_ext',
+        source       = bld.path.ant_glob('src/foo/*.py'),
+        install_from = 'src')
+
+    # Unit test example using the built in module unittest and let that discover
+    # any test cases.
+    bld(name          = 'foo_test',
+        features      = 'pytest',
+        use           = 'foo',
+        pytest_source = bld.path.ant_glob('test/*.py'),
+        ut_str        = '${PYTHON} -B -m unittest discover')
+
+"""
+
+import os
+from waflib import Task, TaskGen, Errors, Utils, Logs
+from waflib.Tools import ccroot
+
+def _process_use_rec(self, name):
+	"""
+	Recursively process ``use`` for the task generator with name ``name``.
+	Used by pytest_process_use.
+	"""
+	if name in self.pytest_use_not or name in self.pytest_use_seen:
+		return
+	try:
+		tg = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.pytest_use_not.add(name)
+		return
+
+	self.pytest_use_seen.append(name)
+	tg.post()
+
+	for n in self.to_list(getattr(tg, 'use', [])):
+		_process_use_rec(self, n)
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('process_source', 'apply_link')
+def pytest_process_use(self):
+	"""
+	Process the ``use`` attribute, which contains a list of task generator names, and store
+	the paths that are later used to populate the unit test runtime environment.
+	"""
+	self.pytest_use_not = set()
+	self.pytest_use_seen = []
+	self.pytest_paths = [] # strings or Nodes
+	self.pytest_libpaths = [] # strings or Nodes
+	self.pytest_javapaths = [] # strings or Nodes
+	self.pytest_dep_nodes = []
+
+	names = self.to_list(getattr(self, 'use', []))
+	for name in names:
+		_process_use_rec(self, name)
+
+	def extend_unique(lst, varlst):
+		ext = []
+		for x in varlst:
+			if x not in lst:
+				ext.append(x)
+		lst.extend(ext)
+
+	# Collect type specific info needed to construct a valid runtime environment
+	# for the test.
+	for name in self.pytest_use_seen:
+		tg = self.bld.get_tgen_by_name(name)
+
+		extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', [])))
+		extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', [])))
+
+		if 'py' in tg.features:
+			# Python dependencies are added to PYTHONPATH
+			pypath = getattr(tg, 'install_from', tg.path)
+
+			if 'buildcopy' in tg.features:
+				# Since buildcopy is used we assume that PYTHONPATH in build should be used,
+				# not source
+				extend_unique(self.pytest_paths, [pypath.get_bld().abspath()])
+
+				# Add buildcopy output nodes to dependencies
+				extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \
+														for o in getattr(task, 'outputs', [])])
+			else:
+				# If buildcopy is not used, depend on sources instead
+				extend_unique(self.pytest_dep_nodes, tg.source)
+				extend_unique(self.pytest_paths, [pypath.abspath()])
+
+		if 'javac' in tg.features:
+			# If a JAR is generated point to that, otherwise to directory
+			if getattr(tg, 'jar_task', None):
+				extend_unique(self.pytest_javapaths, [tg.jar_task.outputs[0].abspath()])
+			else:
+				extend_unique(self.pytest_javapaths, [tg.path.get_bld()])
+
+			# And add respective dependencies if present
+			if tg.use_lst:
+				extend_unique(self.pytest_javapaths, tg.use_lst)
+
+		if getattr(tg, 'link_task', None):
+			# For tasks with a link_task (C, C++, D, etc.) include their library paths:
+			if not isinstance(tg.link_task, ccroot.stlink_task):
+				extend_unique(self.pytest_dep_nodes, tg.link_task.outputs)
+				extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH)
+
+				if 'pyext' in tg.features:
+					# If the taskgen is extending Python we also want to add the interpreter libpath.
+					extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
+				else:
+					# Only add to libpath if the link task is not a Python extension
+					extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('pytest_process_use')
+def make_pytest(self):
+	"""
+	Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:
+
+	- Paths in `pytest_paths` attribute are used to populate PYTHONPATH
+	- Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
+	"""
+	nodes = self.to_nodes(self.pytest_source)
+	tsk = self.create_task('utest', nodes)
+
+	tsk.dep_nodes.extend(self.pytest_dep_nodes)
+	if getattr(self, 'ut_str', None):
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = lst + tsk.vars
+
+	if getattr(self, 'ut_cwd', None):
+		if isinstance(self.ut_cwd, str):
+			# we want a Node instance
+			if os.path.isabs(self.ut_cwd):
+				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+			else:
+				self.ut_cwd = self.path.make_node(self.ut_cwd)
+	else:
+		if tsk.inputs:
+			self.ut_cwd = tsk.inputs[0].parent
+		else:
+			raise Errors.WafError("no valid input files for pytest task, check pytest_source value")
+
+	if not self.ut_cwd.exists():
+		self.ut_cwd.mkdir()
+
+	if not hasattr(self, 'ut_env'):
+		self.ut_env = dict(os.environ)
+		def add_paths(var, lst):
+			# Add list of paths to a variable, lst can contain strings or nodes
+			lst = [ str(n) for n in lst ]
+			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
+			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
+
+		# Prepend dependency paths to PYTHONPATH, CLASSPATH and LD_LIBRARY_PATH
+		add_paths('PYTHONPATH', self.pytest_paths)
+		add_paths('CLASSPATH', self.pytest_javapaths)
+
+		if Utils.is_win32:
+			add_paths('PATH', self.pytest_libpaths)
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
+			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+		else:
+			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+
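Building on the docstring example, the runner command and working directory can also be customized; an illustrative sketch (assumes the 'foo' task generator from the example above and a test/ directory)::

	bld(name='foo_test',
	    features='pytest',
	    use='foo',
	    pytest_source=bld.path.ant_glob('test/*.py'),
	    ut_str='${PYTHON} -B -m unittest discover',
	    ut_cwd='test')

Note that waf_unit_test must be loaded as well, since the pytest feature only creates utest tasks.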
diff --git a/third_party/waf/waflib/extras/qnxnto.py b/third_party/waf/waflib/extras/qnxnto.py
new file mode 100644
index 0000000..1158124
--- /dev/null
+++ b/third_party/waf/waflib/extras/qnxnto.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero 2011 (zougloub)
+# QNX neutrino compatibility functions
+
+import sys, os
+from waflib import Utils
+
+class Popen(object):
+	"""
+	Popen cannot work on QNX from a threaded program:
+	Forking in threads is not implemented in neutrino.
+
+	Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread)
+
+	In waf, this happens mostly in build.
+	And the use cases can be replaced by os.system() calls.
+	"""
+	__slots__ = ["prog", "kw", "popen", "verbose"]
+	verbose = 0
+	def __init__(self, prog, **kw):
+		try:
+			self.prog = prog
+			self.kw = kw
+			self.popen = None
+			if Popen.verbose:
+				sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
+
+			do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
+			if do_delegate:
+				if Popen.verbose:
+					print("Delegating to real Popen")
+				self.popen = self.real_Popen(prog, **kw)
+			else:
+				if Popen.verbose:
+					print("Emulating")
+		except Exception as e:
+			if Popen.verbose:
+				print("Exception: %s" % e)
+			raise
+
+	def __getattr__(self, name):
+		if Popen.verbose:
+			sys.stdout.write("Getattr: %s..." % name)
+		if name in Popen.__slots__:
+			return object.__getattribute__(self, name)
+		else:
+			if self.popen is not None:
+				if Popen.verbose:
+					print("from Popen")
+				return getattr(self.popen, name)
+			else:
+				if name == "wait":
+					return self.emu_wait
+				else:
+					raise Exception("subprocess emulation: not implemented: %s" % name)
+
+	def emu_wait(self):
+		if Popen.verbose:
+			print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
+		if isinstance(self.prog, str):
+			cmd = self.prog
+		else:
+			cmd = " ".join(self.prog)
+		if 'cwd' in self.kw:
+			cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
+		return os.system(cmd)
+
+if sys.platform == "qnx6":
+	Popen.real_Popen = Utils.subprocess.Popen
+	Utils.subprocess.Popen = Popen
+
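The effect of the monkey-patch above, sketched for illustration: on qnx6 any Popen that does not capture both stdout and stderr degrades to an os.system() call::

	from waflib import Utils

	p = Utils.subprocess.Popen(['gcc', '--version'])   # emulated on qnx6
	ret = p.wait()                                     # -> os.system('gcc --version')
	# with stdout=-1 and stderr=-1 the call is delegated to the real Popen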
diff --git a/third_party/waf/waflib/extras/qt4.py b/third_party/waf/waflib/extras/qt4.py
new file mode 100644
index 0000000..d19a4dd
--- /dev/null
+++ b/third_party/waf/waflib/extras/qt4.py
@@ -0,0 +1,695 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+
+Tool Description
+================
+
+This tool helps with finding Qt4 tools and libraries,
+and also provides syntactic sugar for using Qt4 tools.
+
+The following snippet illustrates the tool usage::
+
+	def options(opt):
+		opt.load('compiler_cxx qt4')
+
+	def configure(conf):
+		conf.load('compiler_cxx qt4')
+
+	def build(bld):
+		bld(
+			features = 'qt4 cxx cxxprogram',
+			uselib   = 'QTCORE QTGUI QTOPENGL QTSVG',
+			source   = 'main.cpp textures.qrc aboutDialog.ui',
+			target   = 'window',
+		)
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt4" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+  include the .moc file.
+  This is regarded as the best practice (and provides much faster
+  compilations).
+  It also implies that the include paths have been set properly.
+
+- to have the include paths added automatically, use the following::
+
+     from waflib.TaskGen import feature, before_method, after_method
+     @feature('cxx')
+     @after_method('process_source')
+     @before_method('apply_incpaths')
+     def add_includes_paths(self):
+        incs = set(self.to_list(getattr(self, 'includes', '')))
+        for x in self.compiled_tasks:
+            incs.add(x.inputs[0].parent.path_from(self.path))
+        self.includes = sorted(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+
+"""
+
+try:
+	from xml.sax import make_parser
+	from xml.sax.handler import ContentHandler
+except ImportError:
+	has_xml = False
+	ContentHandler = object
+else:
+	has_xml = True
+
+import os, sys
+from waflib.Tools import cxx
+from waflib import Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension
+from waflib.Configure import conf
+from waflib import Logs
+
+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
+"""
+File extensions associated to the .moc files
+"""
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI  = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
+"""
+File extensions of C++ files that may require a .moc processing
+"""
+
+QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
+
+class qxx(Task.classes['cxx']):
+	"""
+	Each C++ file can have zero or several .moc files to create.
+	They are known only when the files are scanned (preprocessor)
+	To avoid scanning the c++ files each time (parsing C/C++), the results
+	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
+	The moc tasks are also created *dynamically* during the build.
+	"""
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.moc_done = 0
+
+	def runnable_status(self):
+		"""
+		Compute the task signature to make sure the scanner was executed. Create the
+		moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
+		then postpone the task execution (there is no need to recompute the task signature).
+		"""
+		if self.moc_done:
+			return Task.Task.runnable_status(self)
+		else:
+			for t in self.run_after:
+				if not t.hasrun:
+					return Task.ASK_LATER
+			self.add_moc_tasks()
+			return Task.Task.runnable_status(self)
+
+	def create_moc_task(self, h_node, m_node):
+		"""
+		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
+		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
+		and the moc tasks can be shared in a global cache.
+
+		The defines passed to moc will then depend on task generator order. If this is not acceptable, then
+		use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
+		"""
+		try:
+			moc_cache = self.generator.bld.moc_cache
+		except AttributeError:
+			moc_cache = self.generator.bld.moc_cache = {}
+
+		try:
+			return moc_cache[h_node]
+		except KeyError:
+			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
+			tsk.set_inputs(h_node)
+			tsk.set_outputs(m_node)
+
+			if self.generator:
+				self.generator.tasks.append(tsk)
+
+			# direct injection in the build phase (safe because called from the main thread)
+			gen = self.generator.bld.producer
+			gen.outstanding.append(tsk)
+			gen.total += 1
+
+			return tsk
+
+	def moc_h_ext(self):
+		ext = []
+		try:
+			ext = Options.options.qt_header_ext.split()
+		except AttributeError:
+			pass
+		if not ext:
+			ext = MOC_H
+		return ext
+
+	def add_moc_tasks(self):
+		"""
+		Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
+		"""
+		node = self.inputs[0]
+		bld = self.generator.bld
+
+		try:
+			# compute the signature once to know if there is a moc file to create
+			self.signature()
+		except KeyError:
+			# the moc file may be referenced somewhere else
+			pass
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+		include_nodes = [node.parent] + self.generator.includes_nodes
+
+		moctasks = []
+		mocfiles = set()
+		for d in bld.raw_deps.get(self.uid(), []):
+			if not d.endswith('.moc'):
+				continue
+
+			# process that base.moc only once
+			if d in mocfiles:
+				continue
+			mocfiles.add(d)
+
+			# find the source associated with the moc file
+			h_node = None
+
+			base2 = d[:-4]
+			for x in include_nodes:
+				for e in self.moc_h_ext():
+					h_node = x.find_node(base2 + e)
+					if h_node:
+						break
+				if h_node:
+					m_node = h_node.change_ext('.moc')
+					break
+			else:
+				# foo.cpp -> foo.cpp.moc
+				for k in EXT_QT4:
+					if base2.endswith(k):
+						for x in include_nodes:
+							h_node = x.find_node(base2)
+							if h_node:
+								break
+						if h_node:
+							m_node = h_node.change_ext(k + '.moc')
+							break
+
+			if not h_node:
+				raise Errors.WafError('No source found for %r which is a moc file' % d)
+
+			# create the moc task
+			task = self.create_moc_task(h_node, m_node)
+			moctasks.append(task)
+
+		# simple scheduler dependency: run the moc task before others
+		self.run_after.update(set(moctasks))
+		self.moc_done = 1
+
+class trans_update(Task.Task):
+	"""Update .ts files from a list of C++ files"""
+	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
+	color   = 'BLUE'
+
+class XMLHandler(ContentHandler):
+	"""
+	Parser for *.qrc* files
+	"""
+	def __init__(self):
+		self.buf = []
+		self.files = []
+	def startElement(self, name, attrs):
+		if name == 'file':
+			self.buf = []
+	def endElement(self, name):
+		if name == 'file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self, cars):
+		self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_rcc_task(self, node):
+	"Create rcc and cxx tasks for *.qrc* files"
+	rcnode = node.change_ext('_rc.cpp')
+	self.create_task('rcc', node, rcnode)
+	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
+	try:
+		self.compiled_tasks.append(cpptask)
+	except AttributeError:
+		self.compiled_tasks = [cpptask]
+	return cpptask
+
+@extension(*EXT_UI)
+def create_uic_task(self, node):
+	"hook for uic tasks"
+	uictask = self.create_task('ui4', node)
+	uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
+
+@extension('.ts')
+def add_lang(self, node):
+	"""add all the .ts files into self.lang"""
+	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('qt4')
+@after_method('apply_link')
+def apply_qt4(self):
+	"""
+	Add MOC_FLAGS which may be necessary for moc::
+
+		def build(bld):
+			bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')
+
+	The additional parameters are:
+
+	:param lang: list of translation files (\\*.ts) to process
+	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
+	:type update: bool
+	:param langname: if given, transform the \\*.ts files into a .qrc file to include in the binary file
+	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+	"""
+	if getattr(self, 'lang', None):
+		qmtasks = []
+		for x in self.to_list(self.lang):
+			if isinstance(x, str):
+				x = self.path.find_resource(x + '.ts')
+			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+		if getattr(self, 'update', None) and Options.options.trans_qt4:
+			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
+				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
+			for x in qmtasks:
+				self.create_task('trans_update', cxxnodes, x.inputs)
+
+		if getattr(self, 'langname', None):
+			qmnodes = [x.outputs[0] for x in qmtasks]
+			rcnode = self.langname
+			if isinstance(rcnode, str):
+				rcnode = self.path.find_or_declare(rcnode + '.qrc')
+			t = self.create_task('qm2rcc', qmnodes, rcnode)
+			k = create_rcc_task(self, t.outputs[0])
+			self.link_task.inputs.append(k.outputs[0])
+
+	lst = []
+	for flag in self.to_list(self.env['CXXFLAGS']):
+		if len(flag) < 2:
+			continue
+		f = flag[0:2]
+		if f in ('-D', '-I', '/D', '/I'):
+			if (f[0] == '/'):
+				lst.append('-' + flag[1:])
+			else:
+				lst.append(flag)
+	self.env.append_value('MOC_FLAGS', lst)
+
+@extension(*EXT_QT4)
+def cxx_hook(self, node):
+	"""
+	Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
+	"""
+	return self.create_compiled_task('qxx', node)
+
+class rcc(Task.Task):
+	"""
+	Process *.qrc* files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out = ['.h']
+
+	def rcname(self):
+		return os.path.splitext(self.inputs[0].name)[0]
+
+	def scan(self):
+		"""Parse the *.qrc* files"""
+		if not has_xml:
+			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
+		parser.setContentHandler(curHandler)
+		fi = open(self.inputs[0].abspath(), 'r')
+		try:
+			parser.parse(fi)
+		finally:
+			fi.close()
+
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
+		for x in curHandler.files:
+			nd = root.find_resource(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
+		return (nodes, names)
+
+class moc(Task.Task):
+	"""
+	Create *.moc* files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+	def keyword(self):
+		return "Creating"
+	def __str__(self):
+		return self.outputs[0].path_from(self.generator.bld.launch_node())
+
+class ui4(Task.Task):
+	"""
+	Process *.ui* files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out = ['.h']
+
+class ts2qm(Task.Task):
+	"""
+	Create *.qm* files from *.ts* files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Transform *.qm* files into *.rc* files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT4_ROOT may be used
+	to give the location of the qt4 libraries (absolute path).
+
+	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
+	"""
+	self.find_qt4_binaries()
+	self.set_qt4_libs_to_check()
+	self.set_qt4_defines()
+	self.find_qt4_libraries()
+	self.add_qt4_rpath()
+	self.simplify_qt4_libs()
+
+@conf
+def find_qt4_binaries(self):
+	env = self.env
+	opt = Options.options
+
+	qtdir = getattr(opt, 'qtdir', '')
+	qtbin = getattr(opt, 'qtbin', '')
+
+	paths = []
+
+	if qtdir:
+		qtbin = os.path.join(qtdir, 'bin')
+
+	# the qt directory has been given from QT4_ROOT - deduce the qt binary path
+	if not qtdir:
+		qtdir = os.environ.get('QT4_ROOT', '')
+		qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
+
+	if qtbin:
+		paths = [qtbin]
+
+	# no qtdir, look in the path and in /usr/local/Trolltech
+	if not qtdir:
+		paths = os.environ.get('PATH', '').split(os.pathsep)
+		paths.append('/usr/share/qt4/bin/')
+		try:
+			lst = Utils.listdir('/usr/local/Trolltech/')
+		except OSError:
+			pass
+		else:
+			if lst:
+				lst.sort()
+				lst.reverse()
+
+				# keep the highest version
+				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+				qtbin = os.path.join(qtdir, 'bin')
+				paths.append(qtbin)
+
+	# at the end, try to find qmake in the paths given
+	# keep the one with the highest version
+	cand = None
+	prev_ver = ['4', '0', '0']
+	for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
+		try:
+			qmake = self.find_program(qmk, path_list=paths)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			try:
+				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+			except self.errors.WafError:
+				pass
+			else:
+				if version:
+					new_ver = version.split('.')
+					if new_ver > prev_ver:
+						cand = qmake
+						prev_ver = new_ver
+	if cand:
+		self.env.QMAKE = cand
+	else:
+		self.fatal('Could not find qmake for qt4')
+
+	qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
+
+	def find_bin(lst, var):
+		if var in env:
+			return
+		for f in lst:
+			try:
+				ret = self.find_program(f, path_list=paths)
+			except self.errors.ConfigurationError:
+				pass
+			else:
+				env[var]=ret
+				break
+
+	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
+	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
+	if not env.QT_UIC:
+		self.fatal('cannot find the uic compiler for qt4')
+
+	self.start_msg('Checking for uic version')
+	uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
+	uicver = ''.join(uicver).strip()
+	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+	self.end_msg(uicver)
+	if uicver.find(' 3.') != -1:
+		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
+
+	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
+	find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
+	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
+	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
+
+	env['UIC3_ST']= '%s -o %s'
+	env['UIC_ST'] = '%s -o %s'
+	env['MOC_ST'] = '-o'
+	env['ui_PATTERN'] = 'ui_%s.h'
+	env['QT_LRELEASE_FLAGS'] = ['-silent']
+	env.MOCCPPPATH_ST = '-I%s'
+	env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def find_qt4_libraries(self):
+	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
+	if not qtlibs:
+		try:
+			qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
+			qtlibs = os.path.join(qtdir, 'lib')
+	self.msg('Found the Qt4 libraries in', qtlibs)
+
+	qtincludes =  os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+	env = self.env
+	if 'PKG_CONFIG_PATH' not in os.environ:
+		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
+
+	try:
+		if os.environ.get("QT4_XCOMPILE"):
+			raise self.errors.ConfigurationError()
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt4_vars:
+			uselib = i.upper()
+			if Utils.unversioned_sys_platform() == "darwin":
+				# Since at least qt 4.7.3, each library is located in a separate directory
+				frameworkName = i + ".framework"
+				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_' + uselib, i)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
+			elif env.DEST_OS != "win32":
+				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
+				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('LIB_' + uselib, i)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				elif os.path.exists(qtStaticLib):
+					env.append_unique('LIB_' + uselib, i)
+					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+			else:
+				# Release library names are like QtCore4
+				for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
+					lib = os.path.join(qtlibs, k % i)
+					if os.path.exists(lib):
+						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+						self.msg('Checking for %s' % i, lib, 'GREEN')
+						break
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+
+				# Debug library names are like QtCore4d
+				uselib = i.upper() + "_debug"
+				for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
+					lib = os.path.join(qtlibs, k % i)
+					if os.path.exists(lib):
+						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+						self.msg('Checking for %s' % i, lib, 'GREEN')
+						break
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+
+				env.append_unique('LIBPATH_' + uselib, qtlibs)
+				env.append_unique('INCLUDES_' + uselib, qtincludes)
+				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+	else:
+		for i in self.qt4_vars_debug + self.qt4_vars:
+			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
+
+@conf
+def simplify_qt4_libs(self):
+	# the libpaths make really long command-lines
+	# remove the qtcore ones from qtgui, etc
+	env = self.env
+	def process_lib(vars_, coreval):
+		for d in vars_:
+			var = d.upper()
+			if var == 'QTCORE':
+				continue
+
+			value = env['LIBPATH_'+var]
+			if value:
+				core = env[coreval]
+				accu = []
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var] = accu
+
+	process_lib(self.qt4_vars,       'LIBPATH_QTCORE')
+	process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def add_qt4_rpath(self):
+	# rpath if wanted
+	env = self.env
+	if getattr(Options.options, 'want_rpath', False):
+		def process_rpath(vars_, coreval):
+			for d in vars_:
+				var = d.upper()
+				value = env['LIBPATH_'+var]
+				if value:
+					core = env[coreval]
+					accu = []
+					for lib in value:
+						if var != 'QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_'+var] = accu
+		process_rpath(self.qt4_vars,       'LIBPATH_QTCORE')
+		process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def set_qt4_libs_to_check(self):
+	if not hasattr(self, 'qt4_vars'):
+		self.qt4_vars = QT4_LIBS
+	self.qt4_vars = Utils.to_list(self.qt4_vars)
+	if not hasattr(self, 'qt4_vars_debug'):
+		self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
+	self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
+
+@conf
+def set_qt4_defines(self):
+	if sys.platform != 'win32':
+		return
+	for x in self.qt4_vars:
+		y = x[2:].upper()
+		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+		self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+
+	opt.add_option('--header-ext',
+		type='string',
+		default='',
+		help='header extension for moc files',
+		dest='qt_header_ext')
+
+	for i in 'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i, type='string', default='', dest=i)
+
+	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
+
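A hypothetical task generator exercising the translation parameters documented in apply_qt4 (file names are illustrative; 'update' only has an effect together with waf --translate)::

	def build(bld):
		bld(features='qt4 cxx cxxprogram',
		    use='QTCORE QTGUI',
		    source='main.cpp res.qrc ui/main.ui',
		    lang='po/app_fr',
		    langname='app_i18n',
		    update=True,
		    target='app')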
diff --git a/third_party/waf/waflib/extras/relocation.py b/third_party/waf/waflib/extras/relocation.py
new file mode 100644
index 0000000..7e821f4
--- /dev/null
+++ b/third_party/waf/waflib/extras/relocation.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Waf 1.6
+
+Try to detect if the project directory was relocated, and if it was,
+change the node representing the project directory. Just call:
+
+ waf configure build
+
+Note that if the project directory name changes, the signatures for the tasks using
+files in that directory will change, causing a partial rebuild.
+"""
+
+import os
+from waflib import Build, ConfigSet, Task, Utils, Errors
+from waflib.TaskGen import feature, after_method
+
+EXTRA_LOCK = '.old_srcdir'
+
+old1 = Build.BuildContext.store
+def store(self):
+	old1(self)
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	env.SRCDIR = self.srcnode.abspath()
+	env.store(db)
+Build.BuildContext.store = store
+
+old2 = Build.BuildContext.init_dirs
+def init_dirs(self):
+
+	if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+		raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+	srcdir = None
+	db = os.path.join(self.variant_dir, EXTRA_LOCK)
+	env = ConfigSet.ConfigSet()
+	try:
+		env.load(db)
+		srcdir = env.SRCDIR
+	except:
+		pass
+
+	if srcdir:
+		d = self.root.find_node(srcdir)
+		if d and srcdir != self.top_dir and getattr(d, 'children', ''):
+			srcnode = self.root.make_node(self.top_dir)
+			print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
+			srcnode.children = {}
+
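+			# transplant the cached child nodes onto the node for the new location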
+			for (k, v) in d.children.items():
+				srcnode.children[k] = v
+				v.parent = srcnode
+			d.children = {}
+
+	old2(self)
+
+Build.BuildContext.init_dirs = init_dirs
+
+
+def uid(self):
+	try:
+		return self.uid_
+	except AttributeError:
+		# this is not a real hot zone, but we want to avoid surprises here
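+		# hash paths relative to the source tree rather than absolute paths,
+		# so task uids remain stable when the project directory moves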
+		m = Utils.md5()
+		up = m.update
+		up(self.__class__.__name__.encode())
+		for x in self.inputs + self.outputs:
+			up(x.path_from(x.ctx.srcnode).encode())
+		self.uid_ = m.digest()
+		return self.uid_
+Task.Task.uid = uid
+
+@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
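+	# override of the stock apply_incpaths: include paths under the source
+	# tree are emitted relative to the build directory, which keeps them
+	# valid when the project directory moves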
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+	self.includes_nodes = lst
+	bld = self.bld
+	self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
+
+
diff --git a/third_party/waf/waflib/extras/remote.py b/third_party/waf/waflib/extras/remote.py
new file mode 100644
index 0000000..f43b600
--- /dev/null
+++ b/third_party/waf/waflib/extras/remote.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Remote Builds tool using rsync+ssh
+
+__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2013"
+
+"""
+Simple Remote Builds
+********************
+
+This tool is an *experimental* tool (meaning, do not even try to pollute
+the waf bug tracker with bugs in here, contact me directly) providing simple
+remote builds.
+
+It uses rsync and ssh to perform the remote builds.
+It is intended for performing cross-compilation on platforms where
+a cross-compiler is either unavailable (e.g. macOS, QNX), a specific product
+does not exist (e.g. Windows builds using Visual Studio), or is simply not installed.
+This tool sends the sources and the waf script to the remote host,
+and commands the usual waf execution.
+
+There are alternatives to using this tool, such as setting up shared folders,
+logging on to remote machines, and building on the shared folders.
+Electing one method or another depends on the size of the program.
+
+
+Usage
+=====
+
+1. Set your wscript file so it includes a list of variants,
+   e.g.::
+
+     from waflib import Utils
+     top = '.'
+     out = 'build'
+
+     variants = [
+      'linux_64_debug',
+      'linux_64_release',
+      'linux_32_debug',
+      'linux_32_release',
+      ]
+
+     from waflib.extras import remote
+
+     def options(opt):
+         # normal stuff from here on
+         opt.load('compiler_c')
+
+     def configure(conf):
+         if not conf.variant:
+             return
+         # normal stuff from here on
+         conf.load('compiler_c')
+
+     def build(bld):
+         if not bld.variant:
+             return
+         # normal stuff from here on
+         bld(features='c cprogram', target='app', source='main.c')
+
+
+2. Build the waf file, so it includes this tool, and put it in the current
+   directory
+
+   .. code:: bash
+
+      ./waf-light --tools=remote
+
+3. Set the host names to access the hosts:
+
+   .. code:: bash
+
+      export REMOTE_QNX=user@kiunix
+
+4. Setup the ssh server and ssh keys
+
+   The ssh key should not be protected by a password, or it will prompt for it every time.
+   Create the key on the client:
+
+   .. code:: bash
+
+      ssh-keygen -t rsa -f foo.rsa
+
+   Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
+   and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
+
+   A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
+   The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so
+   use this feature on trusted internal networks only (it is open to MITM attacks).
+
+   .. code:: bash
+
+      export WAF_SSH_KEY=~/foo.rsa
+
+5. Perform the build:
+
+   .. code:: bash
+
+      waf configure_all build_all --remote
+
+"""
+
+
+import getpass, os, re, sys
+from collections import OrderedDict
+from waflib import Context, Options, Utils, ConfigSet
+
+from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
+from waflib.Configure import ConfigurationContext
+
+
+is_remote = False
+if '--remote' in sys.argv:
+	is_remote = True
+	sys.argv.remove('--remote')
+
+class init(Context.Context):
+	"""
+	Generates the *_all commands
+	"""
+	cmd = 'init'
+	fun = 'init'
+	def execute(self):
+		for x in list(Context.g_module.variants):
+			self.make_variant(x)
+		lst = ['remote']
+		for k in Options.commands:
+			if k.endswith('_all'):
+				name = k.replace('_all', '')
+				for x in Context.g_module.variants:
+					lst.append('%s_%s' % (name, x))
+			else:
+				lst.append(k)
+		del Options.commands[:]
+		Options.commands += lst
+
+	def make_variant(self, x):
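+		# dynamically create context subclasses so that commands such as
+		# 'build_linux_64_debug' exist and are bound to the right variant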
+		for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
+			name = y.__name__.replace('Context','').lower()
+			class tmp(y):
+				cmd = name + '_' + x
+				fun = 'build'
+				variant = x
+		class tmp(ConfigurationContext):
+			cmd = 'configure_' + x
+			fun = 'configure'
+			variant = x
+			def __init__(self, **kw):
+				ConfigurationContext.__init__(self, **kw)
+				self.setenv(x)
+
+class remote(BuildContext):
+	cmd = 'remote'
+	fun = 'build'
+
+	def get_ssh_hosts(self):
+		lst = []
+		for v in Context.g_module.variants:
+			self.env.HOST = self.login_to_host(self.variant_to_login(v))
+			cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
+			out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
+			lst.append(out.strip())
+		return lst
+
+	def setup_private_ssh_key(self):
+		"""
+		When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory.
+		Make sure that the ssh key does not prompt for a password.
+		"""
+		key = os.environ.get('WAF_SSH_KEY', '')
+		if not key:
+			return
+		if not os.path.isfile(key):
+			self.fatal('Key in WAF_SSH_KEY must point to a valid file')
+		self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
+		self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
+		self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
+		self.ssh_config = os.path.join(self.ssh_dir, 'config')
+		for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
+			if not os.path.isfile(x):
+				if not os.path.isdir(self.ssh_dir):
+					os.makedirs(self.ssh_dir)
+				Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
+				os.chmod(self.ssh_key, 448)
+
+				Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
+				os.chmod(self.ssh_hosts, 448)
+
+				Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
+				os.chmod(self.ssh_config, 448)
+		self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
+		self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
+
+	def skip_unbuildable_variant(self):
+		# skip variants that cannot be built on this OS
+		for k in list(Options.commands): # iterate over a copy, elements are removed below
+			a, _, b = k.partition('_')
+			if b in Context.g_module.variants:
+				c, _, _ = b.partition('_')
+				if c != Utils.unversioned_sys_platform():
+					Options.commands.remove(k)
+
+	def login_to_host(self, login):
+		return re.sub(r'(\w+@)', '', login)
+
+	def variant_to_login(self, variant):
+		"""linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
+		x = variant[:variant.rfind('_')]
+		ret = os.environ.get('REMOTE_' + x.upper(), '')
+		if not ret:
+			x = x[:x.find('_')]
+			ret = os.environ.get('REMOTE_' + x.upper(), '')
+		if not ret:
+			ret = '%s@localhost' % getpass.getuser()
+		return ret
+
+	def execute(self):
+		global is_remote
+		if not is_remote:
+			self.skip_unbuildable_variant()
+		else:
+			BuildContext.execute(self)
+
+	def restore(self):
+		self.top_dir = os.path.abspath(Context.g_module.top)
+		self.srcnode = self.root.find_node(self.top_dir)
+		self.path = self.srcnode
+
+		self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
+		self.bldnode = self.root.make_node(self.out_dir)
+		self.bldnode.mkdir()
+
+		self.env = ConfigSet.ConfigSet()
+
+	def extract_groups_of_builds(self):
+		"""Return a dict mapping each variant to the commands to build"""
+		self.vgroups = {}
+		for x in reversed(Options.commands):
+			_, _, variant = x.partition('_')
+			if variant in Context.g_module.variants:
+				try:
+					dct = self.vgroups[variant]
+				except KeyError:
+					dct = self.vgroups[variant] = OrderedDict()
+				try:
+					dct[variant].append(x)
+				except KeyError:
+					dct[variant] = [x]
+				Options.commands.remove(x)
+
+	def custom_options(self, login):
+		try:
+			return Context.g_module.host_options[login]
+		except (AttributeError, KeyError):
+			return {}
+
+	def recurse(self, *k, **kw):
+		self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
+		self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
+		self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
+		try:
+			self.env.WAF = getattr(Context.g_module, 'waf')
+		except AttributeError:
+			try:
+				os.stat('waf')
+			except OSError:
+				self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
+			else:
+				self.env.WAF = './waf'
+
+		self.extract_groups_of_builds()
+		self.setup_private_ssh_key()
+		for k, v in self.vgroups.items():
+			task = self(rule=rsync_and_ssh, always=True)
+			task.env.login = self.variant_to_login(k)
+
+			task.env.commands = []
+			for opt, value in v.items():
+				task.env.commands += value
+			task.env.variant = task.env.commands[0].partition('_')[2]
+			for opt, value in self.custom_options(k).items():
+				task.env[opt] = value
+		self.jobs = len(self.vgroups)
+
+	def make_mkdir_command(self, task):
+		return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
+
+	def make_send_command(self, task):
+		return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
+
+	def make_exec_command(self, task):
+		txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
+		return Utils.subst_vars(txt, task.env)
+
+	def make_save_command(self, task):
+		return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
+
+def rsync_and_ssh(task):
+
+	# remove a warning
+	task.uid_ = id(task)
+
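+	# the remote build is four sequential commands: create the remote
+	# directory, rsync the sources over, run waf there, and rsync the
+	# build results back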
+	bld = task.generator.bld
+
+	task.env.user, _, _ = task.env.login.partition('@')
+	task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
+	task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
+	task.env.local_dir = bld.srcnode.abspath() + '/'
+
+	task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
+	task.env.build_dir = bld.bldnode.abspath()
+
+	ret = task.exec_command(bld.make_mkdir_command(task))
+	if ret:
+		return ret
+	ret = task.exec_command(bld.make_send_command(task))
+	if ret:
+		return ret
+	ret = task.exec_command(bld.make_exec_command(task))
+	if ret:
+		return ret
+	ret = task.exec_command(bld.make_save_command(task))
+	if ret:
+		return ret
+
diff --git a/third_party/waf/waflib/extras/resx.py b/third_party/waf/waflib/extras/resx.py
new file mode 100644
index 0000000..caf4d31
--- /dev/null
+++ b/third_party/waf/waflib/extras/resx.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os
+from waflib import Task
+from waflib.TaskGen import extension
+
+def configure(conf):
+	conf.find_program(['resgen'], var='RESGEN')
+	conf.env.RESGENFLAGS = '/useSourcePath'
+
+@extension('.resx')
+def resx_file(self, node):
+	"""
+	Bind the .resx extension to a resgen task
+	"""
+	if not getattr(self, 'cs_task', None):
+		self.bld.fatal('resx_file has no link task for use %r' % self)
+
+	# Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
+	assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
+	res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
+	out = self.path.find_or_declare(assembly + '.' + res + '.resources')
+
+	tsk = self.create_task('resgen', node, out)
+
+	self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
+	self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
+
+class resgen(Task.Task):
+	"""
+	Compile C# resource files
+	"""
+	color   = 'YELLOW'
+	run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
diff --git a/third_party/waf/waflib/extras/review.py b/third_party/waf/waflib/extras/review.py
new file mode 100644
index 0000000..561e062
--- /dev/null
+++ b/third_party/waf/waflib/extras/review.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Laurent Birtz, 2011
+# moved the code into a separate tool (ita)
+
+"""
+There are several things here:
+- a different command-line option management making options persistent
+- the review command to display the options set
+
+Assumptions:
+- configuration options are not always added to the right group (and do not count on the users to do it...)
+- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
+- when the options change, the build is invalidated (forcing a reconfiguration)
+"""
+
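+# A minimal sketch of the intended workflow (assuming the project defines
+# a configuration option --foo):
+#
+#   waf configure --foo=bar    # --foo is remembered across runs
+#   waf review                 # display the persistent option values
+#   waf configure --foo=baz    # a changed value invalidates the cache
+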
+import os, textwrap, shutil
+from waflib import Logs, Context, ConfigSet, Options, Build, Configure
+
+class Odict(dict):
+	"""Ordered dictionary"""
+	def __init__(self, data=None):
+		self._keys = []
+		dict.__init__(self)
+		if data:
+			# we were provided a regular dict
+			if isinstance(data, dict):
+				self.append_from_dict(data)
+
+			# we were provided a tuple list
+			elif type(data) == list:
+				self.append_from_plist(data)
+
+			# we were provided invalid input
+			else:
+				raise Exception("expected a dict or a tuple list")
+
+	def append_from_dict(self, data):
+		# explicit loop: in Python 3, map() is lazy and would never run
+		for k, v in data.items():
+			self.__setitem__(k, v)
+
+	def append_from_plist(self, plist):
+		for pair in plist:
+			if len(pair) != 2:
+				raise Exception("invalid pairs list")
+		for (k, v) in plist:
+			self.__setitem__(k, v)
+
+	def __delitem__(self, key):
+		if not key in self._keys:
+			raise KeyError(key)
+		dict.__delitem__(self, key)
+		self._keys.remove(key)
+
+	def __setitem__(self, key, item):
+		dict.__setitem__(self, key, item)
+		if key not in self._keys:
+			self._keys.append(key)
+
+	def clear(self):
+		dict.clear(self)
+		self._keys = []
+
+	def copy(self):
+		return Odict(self.plist())
+
+	def items(self):
+		return zip(self._keys, self.values())
+
+	def keys(self):
+		return list(self._keys) # return a copy of the list
+
+	def values(self):
+		return map(self.get, self._keys)
+
+	def plist(self):
+		p = []
+		for k, v in self.items():
+			p.append( (k, v) )
+		return p
+
+	def __str__(self):
+		buf = []
+		buf.append("{ ")
+		for k, v in self.items():
+			buf.append('%r : %r, ' % (k, v))
+		buf.append("}")
+		return ''.join(buf)
+
+review_options = Odict()
+"""
+Ordered dictionary mapping configuration option names to their optparse option.
+"""
+
+review_defaults = {}
+"""
+Dictionary mapping configuration option names to their default value.
+"""
+
+old_review_set = None
+"""
+Review set containing the configuration values before parsing the command line.
+"""
+
+new_review_set = None
+"""
+Review set containing the configuration values after parsing the command line.
+"""
+
+class OptionsReview(Options.OptionsContext):
+	def __init__(self, **kw):
+		super(self.__class__, self).__init__(**kw)
+
+	def prepare_config_review(self):
+		"""
+		Find the configuration options that are reviewable, detach
+		their default value from their optparse object and store them
+		into the review dictionaries.
+		"""
+		gr = self.get_option_group('configure options')
+		for opt in gr.option_list:
+			if opt.action != 'store' or opt.dest in ("out", "top"):
+				continue
+			review_options[opt.dest] = opt
+			review_defaults[opt.dest] = opt.default
+			if opt.dest in gr.defaults:
+				del gr.defaults[opt.dest]
+			opt.default = None
+
+	def parse_args(self):
+		self.prepare_config_review()
+		self.parser.get_option('--prefix').help = 'installation prefix'
+		super(OptionsReview, self).parse_args()
+		Context.create_context('review').refresh_review_set()
+
+class ReviewContext(Context.Context):
+	'''reviews the configuration values'''
+
+	cmd = 'review'
+
+	def __init__(self, **kw):
+		super(self.__class__, self).__init__(**kw)
+
+		out = Options.options.out
+		if not out:
+			out = getattr(Context.g_module, Context.OUT, None)
+		if not out:
+			out = Options.lockfile.replace('.lock-waf', '')
+		self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
+		"""Path to the build directory"""
+
+		self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
+		"""Path to the cache directory"""
+
+		self.review_path = os.path.join(self.cache_path, 'review.cache')
+		"""Path to the review cache file"""
+
+	def execute(self):
+		"""
+		Display and store the review set. Invalidate the cache as required.
+		"""
+		if not self.compare_review_set(old_review_set, new_review_set):
+			self.invalidate_cache()
+		self.store_review_set(new_review_set)
+		print(self.display_review_set(new_review_set))
+
+	def invalidate_cache(self):
+		"""Invalidate the cache to prevent bad builds."""
+		try:
+			Logs.warn("Removing the cached configuration since the options have changed")
+			shutil.rmtree(self.cache_path)
+		except:
+			pass
+
+	def refresh_review_set(self):
+		"""
+		Obtain the old review set and the new review set, and import the new set.
+		"""
+		global old_review_set, new_review_set
+		old_review_set = self.load_review_set()
+		new_review_set = self.update_review_set(old_review_set)
+		self.import_review_set(new_review_set)
+
+	def load_review_set(self):
+		"""
+		Load and return the review set from the cache if it exists.
+		Otherwise, return an empty set.
+		"""
+		if os.path.isfile(self.review_path):
+			return ConfigSet.ConfigSet(self.review_path)
+		return ConfigSet.ConfigSet()
+
+	def store_review_set(self, review_set):
+		"""
+		Store the review set specified in the cache.
+		"""
+		if not os.path.isdir(self.cache_path):
+			os.makedirs(self.cache_path)
+		review_set.store(self.review_path)
+
+	def update_review_set(self, old_set):
+		"""
+		Merge the options passed on the command line with those imported
+		from the previous review set and return the corresponding
+		preview set.
+		"""
+
+		# Convert value to string. It's important that 'None' maps to
+		# the empty string.
+		def val_to_str(val):
+			if val is None or val == '':
+				return ''
+			return str(val)
+
+		new_set = ConfigSet.ConfigSet()
+		opt_dict = Options.options.__dict__
+
+		for name in review_options.keys():
+			# the option is specified explicitly on the command line
+			if name in opt_dict:
+				# if the option is the default, pretend it was never specified
+				if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
+					new_set[name] = opt_dict[name]
+			# the option was explicitly specified in a previous command
+			elif name in old_set:
+				new_set[name] = old_set[name]
+
+		return new_set
+
+	def import_review_set(self, review_set):
+		"""
+		Import the actual value of the reviewable options in the option
+		dictionary, given the current review set.
+		"""
+		for name in review_options.keys():
+			if name in review_set:
+				value = review_set[name]
+			else:
+				value = review_defaults[name]
+			setattr(Options.options, name, value)
+
+	def compare_review_set(self, set1, set2):
+		"""
+		Return true if the review sets specified are equal.
+		"""
+		if len(set1.keys()) != len(set2.keys()):
+			return False
+		for key in set1.keys():
+			if not key in set2 or set1[key] != set2[key]:
+				return False
+		return True
+
+	def display_review_set(self, review_set):
+		"""
+		Return the string representing the review set specified.
+		"""
+		term_width = Logs.get_term_cols()
+		lines = []
+		for dest in review_options.keys():
+			opt = review_options[dest]
+			name = ", ".join(opt._short_opts + opt._long_opts)
+			help = opt.help
+			actual = None
+			if dest in review_set:
+				actual = review_set[dest]
+			default = review_defaults[dest]
+			lines.append(self.format_option(name, help, actual, default, term_width))
+		return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
+
+	def format_option(self, name, help, actual, default, term_width):
+		"""
+		Return the string representing the option specified.
+		"""
+		def val_to_str(val):
+			if val is None or val == '':
+				return "(void)"
+			return str(val)
+
+		max_name_len = 20
+		sep_len = 2
+
+		w = textwrap.TextWrapper()
+		w.width = term_width - 1
+		if w.width < 60:
+			w.width = 60
+
+		out = ""
+
+		# format the help
+		out += w.fill(help) + "\n"
+
+		# format the name
+		name_len = len(name)
+		out += Logs.colors.CYAN + name + Logs.colors.NORMAL
+
+		# set the indentation used when the value wraps to the next line
+		w.subsequent_indent = " ".rjust(max_name_len + sep_len)
+		w.width -= (max_name_len + sep_len)
+
+		# the name string is too long, switch to the next line
+		if name_len > max_name_len:
+			out += "\n" + w.subsequent_indent
+
+		# fill the remaining of the line with spaces
+		else:
+			out += " ".rjust(max_name_len + sep_len - name_len)
+
+		# format the actual value, if there is one
+		if actual is not None:
+			out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
+
+		# format the default value
+		default_fmt = val_to_str(default)
+		if actual is not None:
+			default_fmt = "default: " + default_fmt
+		out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
+
+		return out
+
+# Monkey-patch ConfigurationContext.execute() to have it store the review set.
+old_configure_execute = Configure.ConfigurationContext.execute
+def new_configure_execute(self):
+	old_configure_execute(self)
+	Context.create_context('review').store_review_set(new_review_set)
+Configure.ConfigurationContext.execute = new_configure_execute
+
diff --git a/third_party/waf/waflib/extras/rst.py b/third_party/waf/waflib/extras/rst.py
new file mode 100644
index 0000000..f3c3a5e
--- /dev/null
+++ b/third_party/waf/waflib/extras/rst.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2013 (zougloub)
+
+"""
+reStructuredText support (experimental)
+
+Example::
+
+	def configure(conf):
+		conf.load('rst')
+		if not conf.env.RST2HTML:
+			conf.fatal('The program rst2html is required')
+
+	def build(bld):
+		bld(
+		 features = 'rst',
+		 type     = 'rst2html', # rst2html, rst2pdf, ...
+		 source   = 'index.rst', # mandatory, the source
+		 deps     = 'image.png', # to give additional non-trivial dependencies
+		)
+
+By default the tool looks for a set of programs in PATH.
+The tools are defined in `rst_progs`.
+To configure with a special program use::
+
+	$ RST2HTML=/path/to/rst2html waf configure
+
+This tool is experimental; don't hesitate to contribute to it.
+
+"""
+
+import re
+from waflib import Node, Utils, Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
+rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
+
+def parse_rst_node(task, node, nodes, names, seen, dirs=None):
+	# TODO add extensibility, to handle custom rst include tags...
+	if dirs is None:
+		dirs = (node.parent,node.get_bld().parent)
+
+	if node in seen:
+		return
+	seen.append(node)
+	code = node.read()
+	re_rst = re.compile(r'^\s*\.\. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
+	for match in re_rst.finditer(code):
+		ipath = match.group('file')
+		itype = match.group('type')
+		Logs.debug('rst: visiting %s: %s', itype, ipath)
+		found = False
+		for d in dirs:
+			Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
+			found = d.find_node(ipath)
+			if found:
+				Logs.debug('rst: found %s as %s', ipath, found.abspath())
+				nodes.append((itype, found))
+				if itype == 'include':
+					parse_rst_node(task, found, nodes, names, seen)
+				break
+		if not found:
+			names.append((itype, ipath))
+
+class docutils(Task.Task):
+	"""
+	Compile a rst file.
+	"""
+
+	def scan(self):
+		"""
+		A recursive regex-based scanner that finds rst dependencies.
+		"""
+
+		nodes = []
+		names = []
+		seen = []
+
+		node = self.inputs[0]
+
+		if not node:
+			return (nodes, names)
+
+		parse_rst_node(self, node, nodes, names, seen)
+
+		Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
+		if names:
+			Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
+
+		return ([v for (t,v) in nodes], [v for (t,v) in names])
+
+	def check_status(self, msg, retcode):
+		"""
+		Check an exit status and raise an error with a particular message
+
+		:param msg: message to display if the code is non-zero
+		:type msg: string
+		:param retcode: condition
+		:type retcode: boolean
+		"""
+		if retcode != 0:
+			raise Errors.WafError('%r command exit status %r' % (msg, retcode))
+
+	def run(self):
+		"""
+		Runs the rst compilation using docutils
+		"""
+		raise NotImplementedError()
+
+class rst2html(docutils):
+	color = 'BLUE'
+
+	def __init__(self, *args, **kw):
+		docutils.__init__(self, *args, **kw)
+		self.command = self.generator.env.RST2HTML
+		self.attributes = ['stylesheet']
+
+	def scan(self):
+		nodes, names = docutils.scan(self)
+
+		for attribute in self.attributes:
+			stylesheet = getattr(self.generator, attribute, None)
+			if stylesheet is not None:
+				ssnode = self.generator.to_nodes(stylesheet)[0]
+				nodes.append(ssnode)
+				Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
+
+		return nodes, names
+
+	def run(self):
+		cwdn = self.outputs[0].parent
+		src = self.inputs[0].path_from(cwdn)
+		dst = self.outputs[0].path_from(cwdn)
+
+		cmd = self.command + [src, dst]
+		cmd += Utils.to_list(getattr(self.generator, 'options', []))
+		for attribute in self.attributes:
+			stylesheet = getattr(self.generator, attribute, None)
+			if stylesheet is not None:
+				stylesheet = self.generator.to_nodes(stylesheet)[0]
+				cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)]
+
+		return self.exec_command(cmd, cwd=cwdn.abspath())
+
+class rst2s5(rst2html):
+	def __init__(self, *args, **kw):
+		rst2html.__init__(self, *args, **kw)
+		self.command = self.generator.env.RST2S5
+		self.attributes = ['stylesheet']
+
+class rst2latex(rst2html):
+	def __init__(self, *args, **kw):
+		rst2html.__init__(self, *args, **kw)
+		self.command = self.generator.env.RST2LATEX
+		self.attributes = ['stylesheet']
+
+class rst2xetex(rst2html):
+	def __init__(self, *args, **kw):
+		rst2html.__init__(self, *args, **kw)
+		self.command = self.generator.env.RST2XETEX
+		self.attributes = ['stylesheet']
+
+class rst2pdf(docutils):
+	color = 'BLUE'
+	def run(self):
+		cwdn = self.outputs[0].parent
+		src = self.inputs[0].path_from(cwdn)
+		dst = self.outputs[0].path_from(cwdn)
+
+		cmd = self.generator.env.RST2PDF + [src, '-o', dst]
+		cmd += Utils.to_list(getattr(self.generator, 'options', []))
+
+		return self.exec_command(cmd, cwd=cwdn.abspath())
+
+
+@feature('rst')
+@before_method('process_source')
+def apply_rst(self):
+	"""
+	Create :py:class:`rst` or other rst-related task objects
+	"""
+
+	if self.target:
+		if isinstance(self.target, Node.Node):
+			tgt = self.target
+		elif isinstance(self.target, str):
+			tgt = self.path.get_bld().make_node(self.target)
+		else:
+			self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self))
+	else:
+		tgt = None
+
+	tsk_type = getattr(self, 'type', None)
+
+	src = self.to_nodes(self.source)
+	assert len(src) == 1
+	src = src[0]
+
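+	# infer whatever is missing: e.g. type='rst2html' with source 'index.rst'
+	# implies target 'index.html'; conversely, target 'index.html' implies
+	# type='rst2html'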
+	if tsk_type is not None and tgt is None:
+		if tsk_type.startswith('rst2'):
+			ext = tsk_type[4:]
+		else:
+			self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
+		tgt = src.change_ext('.%s' % ext)
+	elif tsk_type is None and tgt is not None:
+		out = tgt.name
+		ext = out[out.rfind('.')+1:]
+		self.type = 'rst2' + ext
+	elif tsk_type is not None and tgt is not None:
+		# the user knows what he wants
+		pass
+	else:
+		self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)
+
+	deps_lst = []
+
+	if getattr(self, 'deps', None):
+		deps = self.to_list(self.deps)
+		for filename in deps:
+			n = self.path.find_resource(filename)
+			if not n:
+				self.bld.fatal('Could not find %r for %r' % (filename, self))
+			if not n in deps_lst:
+				deps_lst.append(n)
+
+	try:
+		task = self.create_task(self.type, src, tgt)
+	except KeyError:
+		self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))
+
+	task.env = self.env
+
+	# add the manual dependencies
+	if deps_lst:
+		try:
+			lst = self.bld.node_deps[task.uid()]
+			for n in deps_lst:
+				if not n in lst:
+					lst.append(n)
+		except KeyError:
+			self.bld.node_deps[task.uid()] = deps_lst
+
+	inst_to = getattr(self, 'install_path', None)
+	if inst_to:
+		self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
+
+	self.source = []
+
+def configure(self):
+	"""
+	Try to find the rst programs.
+
+	Do not raise any error if they are not found.
+	You'll have to use additional code in configure() to die
+	if programs were not found.
+	"""
+	for p in rst_progs:
+		self.find_program(p, mandatory=False)
+
diff --git a/third_party/waf/waflib/extras/run_do_script.py b/third_party/waf/waflib/extras/run_do_script.py
new file mode 100644
index 0000000..07e3aa2
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_do_script.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Stata do-script in the directory specified by **ctx.bldnode**. The
+first and only argument will be the name of the do-script (no extension),
+which can be accessed inside the do-script by the local macro `1'. Useful
+for keeping a log file.
+
+The tool uses the log file that is automatically kept by Stata only
+for error-catching purposes; it will be destroyed if the task finishes
+without error. In case of an error in **some_script.do**, you can inspect
+it as **some_script.log** in the **ctx.bldnode** directory.
+
+Note that Stata will not return an error code if it exits abnormally --
+catching errors relies on parsing the log file mentioned before. Should
+the parser behave incorrectly, please send an email to hmgaudecker [at] gmail.
+
+**WARNING**
+
+    The tool will not work if multiple do-scripts of the same name---but in
+    different directories---are run at the same time! Avoid this situation.
+
+Usage::
+
+    ctx(features='run_do_script', 
+        source='some_script.do',
+        target=['some_table.tex', 'some_figure.eps'],
+        deps='some_data.csv')
+"""
+
+
+import os, re, sys
+from waflib import Task, TaskGen, Logs
+
+if sys.platform == 'darwin':
+	STATA_COMMANDS = ['Stata64MP', 'StataMP',
+								'Stata64SE', 'StataSE', 
+								'Stata64', 'Stata']
+	STATAFLAGS = '-e -q do'
+	STATAENCODING = 'MacRoman'
+elif sys.platform.startswith('linux'):
+	STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
+	STATAFLAGS = '-b -q do'
+	# Not sure whether this is correct...
+	STATAENCODING = 'Latin-1'
+elif sys.platform.lower().startswith('win'):
+	STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
+								'StataMP', 'StataSE-64',
+								'StataSE-ia', 'StataSE',
+								'Stata-64', 'Stata-ia',
+								'Stata.e', 'WMPSTATA',
+								'WSESTATA', 'WSTATA']
+	STATAFLAGS = '/e do'
+	STATAENCODING = 'Latin-1'
+else:
+	raise Exception("Unknown sys.platform: %s " % sys.platform)
+
+def configure(ctx):
+	ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
+No Stata executable found!\n\n
+If Stata is needed:\n
+	1) Check the settings of your system path.
+	2) Note we are looking for Stata executables called: %s
+	   If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+	Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
+	ctx.env.STATAFLAGS = STATAFLAGS
+	ctx.env.STATAENCODING = STATAENCODING
+
+class run_do_script_base(Task.Task):
+	"""Run a Stata do-script from the bldnode directory."""
+	run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
+	shell = True
+
+class run_do_script(run_do_script_base):
+	"""Use the log file automatically kept by Stata for error-catching.
+	Erase it if the task finishes without error. If not, it will show
+	up as do_script.log in the bldnode directory.
+	"""
+	def run(self):
+		run_do_script_base.run(self)
+		ret, log_tail  = self.check_erase_log_file()
+		if ret:
+			Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
+		return ret
+
+	def check_erase_log_file(self):
+		"""Parse Stata's default log file and erase it if everything okay.
+
+		Parser is based on Brendan Halpin's shell script found here:
+			http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
+		"""
+
+		if sys.version_info.major >= 3:
+			kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
+		else:
+			kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
+		with open(**kwargs) as log:
+			log_tail = log.readlines()[-10:]
+			for line in log_tail:
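+				# Stata reports errors in the log as return codes like 'r(601);'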
+				error_found = re.match(r"r\(([0-9]+)\)", line)
+				if error_found:
+					return error_found.group(1), ''.join(log_tail)
+				else:
+					pass
+		# Only end up here if the parser did not identify an error.
+		os.remove(self.env.LOGFILEPATH)
+		return None, None
+
+
+@TaskGen.feature('run_do_script')
+@TaskGen.before_method('process_source')
+def apply_run_do_script(tg):
+	"""Task generator customising the options etc. to call Stata in batch
+	mode for running a do-script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
+	tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/run_m_script.py b/third_party/waf/waflib/extras/run_m_script.py
new file mode 100644
index 0000000..b5f27eb
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_m_script.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Matlab script.
+
+Note that the script is run in the directory where it lives -- Matlab won't
+allow it any other way.
+
+For error-catching purposes, the tool keeps its own log file, which is
+destroyed if the task finishes without error. If not, it will show up as
+mscript_[index].log in the bldnode directory.
+
+Usage::
+
+    ctx(features='run_m_script', 
+        source='some_script.m',
+        target=['some_table.tex', 'some_figure.eps'],
+        deps='some_data.mat')
+"""
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+MATLAB_COMMANDS = ['matlab']
+
+def configure(ctx):
+	ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
+No Matlab executable found!\n\n
+If Matlab is needed:\n
+    1) Check the settings of your system path.
+    2) Note we are looking for Matlab executables called: %s
+       If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+    Do not load the 'run_m_script' tool in the main wscript.\n\n"""  % MATLAB_COMMANDS)
+	ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
+
+class run_m_script_base(Task.Task):
+	"""Run a Matlab script."""
+	run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
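+	# the try/catch wrapper makes Matlab exit with a meaningful status
+	# code instead of always returning success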
+	shell = True
+
+class run_m_script(run_m_script_base):
+	"""Erase the Matlab overall log file if everything went okay, else raise an
+	error and print its 10 last lines.
+	"""
+	def run(self):
+		ret = run_m_script_base.run(self)
+		logfile = self.env.LOGFILEPATH
+		if ret:
+			mode = 'r'
+			if sys.version_info.major >= 3:
+				mode = 'rb'
+			with open(logfile, mode=mode) as f:
+				tail = f.readlines()[-10:]
+			Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, logfile, '\n'.join(tail))
+		else:
+			os.remove(logfile)
+		return ret
+
+@TaskGen.feature('run_m_script')
+@TaskGen.before_method('process_source')
+def apply_run_m_script(tg):
+	"""Task generator customising the options etc. to call Matlab in batch
+	mode for running an m-script.
+	"""
+
+	# Convert sources and targets to nodes 
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
+	tsk.cwd = src_node.parent.abspath()
+	tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
diff --git a/third_party/waf/waflib/extras/run_py_script.py b/third_party/waf/waflib/extras/run_py_script.py
new file mode 100644
index 0000000..3670381
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_py_script.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Python script in the directory specified by **ctx.bldnode**.
+
+Select a Python version by specifying the **version** keyword for
+the task generator instance as integer 2 or 3. Default is 3.
+
+If the build environment has an attribute "PROJECT_PATHS" with
+a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+The same applies to a string passed to the optional **add_to_pythonpath**
+keyword (appended after the PROJECT_ROOT).
+
+Usage::
+
+    ctx(features='run_py_script', version=3,
+        source='some_script.py',
+        target=['some_table.tex', 'some_figure.eps'],
+        deps='some_data.csv',
+        add_to_pythonpath='src/some/library')
+"""
+
+import os, re
+from waflib import Task, TaskGen, Logs
+
+
+def configure(conf):
+	"""TODO: Might need to be updated for Windows once
+	"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
+	"""
+	conf.find_program('python', var='PY2CMD', mandatory=False)
+	conf.find_program('python3', var='PY3CMD', mandatory=False)
+	if not conf.env.PY2CMD and not conf.env.PY3CMD:
+		conf.fatal("No Python interpreter found!")
+
+class run_py_2_script(Task.Task):
+	"""Run a Python 2 script."""
+	run_str = '${PY2CMD} ${SRC[0].abspath()}'
+	shell=True
+
+class run_py_3_script(Task.Task):
+	"""Run a Python 3 script."""
+	run_str = '${PY3CMD} ${SRC[0].abspath()}'
+	shell=True
+
+@TaskGen.feature('run_py_script')
+@TaskGen.before_method('process_source')
+def apply_run_py_script(tg):
+	"""Task generator for running either Python 2 or Python 3 on a single
+	script.
+
+	Attributes:
+
+		* source -- A **single** source node or string. (required)
+		* target -- A single target or list of targets (nodes or strings)
+		* deps -- A single dependency or list of dependencies (nodes or strings)
+		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
+
+	If the build environment has an attribute "PROJECT_PATHS" with
+	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+	"""
+
+	# Set the Python version to use, default to 3.
+	v = getattr(tg, 'version', 3)
+	if v not in (2, 3):
+		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	# Create the task.
+	tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
+
+	# custom execution environment
+	# TODO use a list and  os.sep.join(lst) at the end instead of concatenating strings
+	tsk.env.env = dict(os.environ)
+	tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
+	project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
+	if project_paths and 'PROJECT_ROOT' in project_paths:
+		tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
+	if getattr(tg, 'add_to_pythonpath', None):
+		tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
+
+	# Clean up the PYTHONPATH -- replace double occurrences of path separator
+	tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
+
+	# Clean up the PYTHONPATH -- doesn't like starting with path separator
+	if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
+		tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/run_r_script.py b/third_party/waf/waflib/extras/run_r_script.py
new file mode 100644
index 0000000..b0d8f2b
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_r_script.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run an R script in the directory specified by **ctx.bldnode**.
+
+For error-catching purposes, the tool keeps its own log file, which is
+destroyed if the task finishes without error. If not, it will show up as
+rscript_[index].log in the bldnode directory.
+
+Usage::
+
+    ctx(features='run_r_script',
+        source='some_script.r',
+        target=['some_table.tex', 'some_figure.eps'],
+        deps='some_data.csv')
+"""
+
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+R_COMMANDS = ['RTerm', 'R', 'r']
+
+def configure(ctx):
+	ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
+No R executable found!\n\n
+If R is needed:\n
+	1) Check the settings of your system path.
+	2) Note we are looking for R executables called: %s
+	   If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+	Do not load the 'run_r_script' tool in the main wscript.\n\n"""  % R_COMMANDS)
+	ctx.env.RFLAGS = 'CMD BATCH --slave'
+
+class run_r_script_base(Task.Task):
+	"""Run an R script."""
+	run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
+	shell = True
+
+class run_r_script(run_r_script_base):
+	"""Erase the overall R log file if everything went okay, else raise an
+	error and print its last 10 lines.
+	"""
+	def run(self):
+		ret = run_r_script_base.run(self)
+		logfile = self.env.LOGFILEPATH
+		if ret:
+			mode = 'r'
+			if sys.version_info.major >= 3:
+				mode = 'rb'
+			with open(logfile, mode=mode) as f:
+				tail = f.readlines()[-10:]
+			Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, logfile, '\n'.join(tail))
+		else:
+			os.remove(logfile)
+		return ret
+
+
+@TaskGen.feature('run_r_script')
+@TaskGen.before_method('process_source')
+def apply_run_r_script(tg):
+	"""Task generator customising the options etc. to call R in batch
+	mode for running an R script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes)
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/sas.py b/third_party/waf/waflib/extras/sas.py
new file mode 100644
index 0000000..754c614
--- /dev/null
+++ b/third_party/waf/waflib/extras/sas.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Mark Coggeshall, 2010
+
+"SAS support"
+
+import os
+from waflib import Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
+sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
+
+class sas(Task.Task):
+	vars = ['SAS', 'SASFLAGS']
+	def run(task):
+		command = 'SAS'
+		fun = sas_fun
+
+		node = task.inputs[0]
+		logfilenode = node.change_ext('.log')
+		lstfilenode = node.change_ext('.lst')
+
+		# set the cwd
+		task.cwd = task.inputs[0].parent.get_src().abspath()
+		Logs.debug('runner: %r on %r', command, node)
+
+		SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
+		task.env.env = {'SASINPUTS': SASINPUTS}
+
+		task.env.SRCFILE = node.abspath()
+		task.env.LOGFILE = logfilenode.abspath()
+		task.env.LSTFILE = lstfilenode.abspath()
+		ret = fun(task)
+		if ret:
+			Logs.error('Running %s on %r returned a non-zero exit', command, node)
+			Logs.error('SRCFILE = %r', node)
+			Logs.error('LOGFILE = %r', logfilenode)
+			Logs.error('LSTFILE = %r', lstfilenode)
+		return ret
+
+@feature('sas')
+@before_method('process_source')
+def apply_sas(self):
+	if not getattr(self, 'type', None) in ('sas',):
+		self.type = 'sas'
+
+	self.env['logdir'] = getattr(self, 'logdir', 'log')
+	self.env['lstdir'] = getattr(self, 'lstdir', 'lst')
+
+	deps_lst = []
+
+	if getattr(self, 'deps', None):
+		deps = self.to_list(self.deps)
+		for filename in deps:
+			n = self.path.find_resource(filename)
+			if not n:
+				n = self.bld.root.find_resource(filename)
+			if not n:
+				raise Errors.WafError('cannot find input file %s for processing' % filename)
+			if not n in deps_lst:
+				deps_lst.append(n)
+
+	for node in self.to_nodes(self.source):
+		if self.type == 'sas':
+			task = self.create_task('sas', src=node)
+		task.dep_nodes = deps_lst
+	self.source = []
+
+def configure(self):
+	self.find_program('sas', var='SAS', mandatory=False)
+
diff --git a/third_party/waf/waflib/extras/satellite_assembly.py b/third_party/waf/waflib/extras/satellite_assembly.py
new file mode 100644
index 0000000..005eb07
--- /dev/null
+++ b/third_party/waf/waflib/extras/satellite_assembly.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
+
+The project's Resources subfolder contains resources.??.txt string files for several languages.
+The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
+
+#gen becomes the template assembly (it is called gen because the tool also uses resx.py).
+bld(source='Resources/resources.de.txt',gen=ExeName)
+"""
+
+import os, re
+from waflib import Task
+from waflib.TaskGen import feature,before_method
+
+class al(Task.Task):
+	run_str = '${AL} ${ALFLAGS}'
+
+@feature('satellite_assembly')
+@before_method('process_source')
+def satellite_assembly(self):
+	if not getattr(self, 'gen', None):
+		self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
+	res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)
+
+	# self.source can contain node objects, so this will break in one way or another
+	self.source = self.to_list(self.source)
+	for i, x in enumerate(self.source):
+		#x = 'resources/resources.de.resx'
+		#x = 'resources/resources.de.txt'
+		mo = res_lang.match(x)
+		if mo:
+			template = os.path.splitext(self.gen)[0]
+			templatedir, templatename = os.path.split(template)
+			res = mo.group(1)
+			lang = mo.group(2)
+			#./Resources/resources.de.resources
+			resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
+			self.create_task('resgen', self.to_nodes(x), [resources])
+			#./de/Exename.resources.dll
+			satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
+			tsk = self.create_task('al',[resources],[satellite])
+			tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
+			tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
+			tsk.env.append_value('ALFLAGS','/culture:'+lang)
+			tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
+			self.source[i] = None
+	# remove the None elements that we just substituted
+	self.source = list(filter(lambda x:x, self.source))
+
+def configure(ctx):
+	ctx.find_program('al', var='AL', mandatory=True)
+	ctx.load('resx')
+
diff --git a/third_party/waf/waflib/extras/scala.py b/third_party/waf/waflib/extras/scala.py
new file mode 100644
index 0000000..a9880f0
--- /dev/null
+++ b/third_party/waf/waflib/extras/scala.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Scala support
+
+scalac outputs its files more or less where it wants to
+"""
+
+import os
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature, before_method, after_method
+
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
+
+from waflib.Tools import javaw
+
+@feature('scalac')
+@before_method('process_source')
+def apply_scalac(self):
+
+	Utils.def_attrs(self, jarname='', classpath='',
+		sourcepath='.', srcdir='.',
+		jar_mf_attributes={}, jar_mf_classpath=[])
+
+	outdir = getattr(self, 'outdir', None)
+	if outdir:
+		if not isinstance(outdir, Node.Node):
+			outdir = self.path.get_bld().make_node(self.outdir)
+	else:
+		outdir = self.path.get_bld()
+	outdir.mkdir()
+	self.env['OUTDIR'] = outdir.abspath()
+
+	self.scalac_task = tsk = self.create_task('scalac')
+	tmp = []
+
+	srcdir = getattr(self, 'srcdir', '')
+	if isinstance(srcdir, Node.Node):
+		srcdir = [srcdir]
+	for x in Utils.to_list(srcdir):
+		if isinstance(x, Node.Node):
+			y = x
+		else:
+			y = self.path.find_dir(x)
+			if not y:
+				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
+		tmp.append(y)
+	tsk.srcdir = tmp
+
+# reuse some code
+feature('scalac')(javaw.use_javac_files)
+after_method('apply_scalac')(javaw.use_javac_files)
+
+feature('scalac')(javaw.set_classpath)
+after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath)
+
+
+SOURCE_RE = '**/*.scala'
+class scalac(javaw.javac):
+	color = 'GREEN'
+	vars    = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']
+
+	def runnable_status(self):
+		"""
+		Wait for dependent tasks to be complete, then read the file system to find the input nodes.
+		"""
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		if not self.inputs:
+			global SOURCE_RE
+			self.inputs  = []
+			for x in self.srcdir:
+				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
+		return super(javaw.javac, self).runnable_status()
+
+	def run(self):
+		"""
+		Execute the scalac compiler
+		"""
+		env = self.env
+		gen = self.generator
+		bld = gen.bld
+		wd = bld.bldnode.abspath()
+		def to_list(xx):
+			if isinstance(xx, str):
+				return [xx]
+			return xx
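+		# assemble a command along the lines of:
+		# ['scalac', '-classpath', CP, '-d', OUTDIR, '-verbose', 'Foo.scala', ...]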
+		self.last_cmd = lst = []
+		lst.extend(to_list(env['SCALAC']))
+		lst.extend(['-classpath'])
+		lst.extend(to_list(env['CLASSPATH']))
+		lst.extend(['-d'])
+		lst.extend(to_list(env['OUTDIR']))
+		lst.extend(to_list(env['SCALACFLAGS']))
+		lst.extend([a.abspath() for a in self.inputs])
+		lst = [x for x in lst if x]
+		try:
+			self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
+		except:
+			self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
+
+def configure(self):
+	"""
+	Detect the scalac program
+	"""
+	# If SCALA_HOME is set, we prepend it to the path list
+	java_path = self.environ['PATH'].split(os.pathsep)
+	v = self.env
+
+	if 'SCALA_HOME' in self.environ:
+		java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
+		self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
+
+	for x in 'scalac scala'.split():
+		self.find_program(x, var=x.upper(), path_list=java_path)
+
+	if 'CLASSPATH' in self.environ:
+		v['CLASSPATH'] = self.environ['CLASSPATH']
+
+	v.SCALACFLAGS = ['-verbose']
+	if not v['SCALAC']:
+		self.fatal('scalac is required for compiling scala classes')
+
diff --git a/third_party/waf/waflib/extras/slow_qt4.py b/third_party/waf/waflib/extras/slow_qt4.py
new file mode 100644
index 0000000..ec7880b
--- /dev/null
+++ b/third_party/waf/waflib/extras/slow_qt4.py
@@ -0,0 +1,96 @@
+#! /usr/bin/env python
+# Thomas Nagy, 2011 (ita)
+
+"""
+Create _moc.cpp files
+
+The builds are 30-40% faster when .moc files are included,
+so you should NOT use this tool. If you really,
+really want it:
+
+def configure(conf):
+	conf.load('compiler_cxx qt4')
+	conf.load('slow_qt4')
+
+See playground/slow_qt/wscript for a complete example.
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+import waflib.Tools.qt4
+import waflib.Tools.cxx
+
+@extension(*waflib.Tools.qt4.EXT_QT4)
+def cxx_hook(self, node):
+	return self.create_compiled_task('cxx_qt', node)
+
+class cxx_qt(Task.classes['cxx']):
+	def runnable_status(self):
+		ret = Task.classes['cxx'].runnable_status(self)
+		if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
+
+			try:
+				cache = self.generator.moc_cache
+			except AttributeError:
+				cache = self.generator.moc_cache = {}
+
+			deps = self.generator.bld.node_deps[self.uid()]
+			for x in [self.inputs[0]] + deps:
+				if x.read().find('Q_OBJECT') > 0:
+
+					# process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
+					# this code will work because it is in the main thread (runnable_status)
+					if x.name.rfind('.') > -1: # a .h file...
+						name = x.name[:x.name.rfind('.')]
+						for tsk in self.generator.compiled_tasks:
+							if tsk.inputs and tsk.inputs[0].name.startswith(name):
+								break
+						else:
+							# no corresponding file, continue
+							continue
+
+					# the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
+					cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
+					if cxx_node in cache:
+						continue
+					cache[cxx_node] = self
+
+					tsk = Task.classes['moc'](env=self.env, generator=self.generator)
+					tsk.set_inputs(x)
+					tsk.set_outputs(cxx_node)
+
+					if x.name.endswith('.cpp'):
+						# moc tries to be clever here, but there is no point in
+						# forcing the #include when Q_OBJECT is in the cpp file
+						gen = self.generator.bld.producer
+						gen.outstanding.append(tsk)
+						gen.total += 1
+						self.set_run_after(tsk)
+					else:
+						cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator)
+						cxxtsk.set_inputs(tsk.outputs)
+						cxxtsk.set_outputs(cxx_node.change_ext('.o'))
+						cxxtsk.set_run_after(tsk)
+
+						try:
+							self.more_tasks.extend([tsk, cxxtsk])
+						except AttributeError:
+							self.more_tasks = [tsk, cxxtsk]
+
+						try:
+							link = self.generator.link_task
+						except AttributeError:
+							pass
+						else:
+							link.set_run_after(cxxtsk)
+							link.inputs.extend(cxxtsk.outputs)
+							link.inputs.sort(key=lambda x: x.abspath())
+
+			self.moc_done = True
+
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		return ret
+
diff --git a/third_party/waf/waflib/extras/softlink_libs.py b/third_party/waf/waflib/extras/softlink_libs.py
new file mode 100644
index 0000000..50c777f
--- /dev/null
+++ b/third_party/waf/waflib/extras/softlink_libs.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+from os.path import basename, isabs
+from os import linesep
+from tempfile import TemporaryFile
+
+def options(opt):
+	grp = opt.add_option_group('Softlink Libraries Options')
+	grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
+
+def configure(cnf):
+	cnf.find_program('ldd')
+	if not cnf.env.SOFTLINK_EXCLUDE:
+		cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
+
+@feature('softlink_libs')
+@after_method('process_rule')
+def add_finder(self):
+	tgt = self.path.find_or_declare(self.target)
+	self.create_task('sll_finder', tgt=tgt)
+	self.create_task('sll_installer', tgt=tgt)
+	always_run(sll_installer)
+
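+# A usage sketch (the target name 'needed_libs.txt' is hypothetical; the file
+# receives the list of shared libraries resolved by ldd):
+#
+#   def build(bld):
+#       bld(features='softlink_libs', target='needed_libs.txt')
+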
+class sll_finder(Task):
+	ext_out = 'softlink_libs'
+	def run(self):
+		bld = self.generator.bld
+		linked=[]
+		target_paths = []
+		for g in bld.groups:
+			for tgen in g:
+				# FIXME it might be better to check if there is a link_task (getattr?)
+				target_paths += [tgen.path.get_bld().bldpath()]
+				linked += [t.outputs[0].bldpath()
+					for t in getattr(tgen, 'tasks', [])
+					if t.__class__.__name__ in
+					['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
+		lib_list = []
+		if len(linked):
+			cmd = [self.env.LDD] + linked
+			# FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
+			ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
+			with TemporaryFile(mode='w+') as result:
+				self.exec_command(cmd, env=ldd_env, stdout=result)
+				result.seek(0)
+				for line in result.readlines():
+					words = line.split()
+					if len(words) < 3 or words[1] != '=>':
+						continue
+					lib = words[2]
+					if lib == 'not':
+						continue
+					if any([lib.startswith(p) for p in
+							[bld.bldnode.abspath(), '('] +
+							self.env.SOFTLINK_EXCLUDE]):
+						continue
+					if not isabs(lib):
+						continue
+					lib_list.append(lib)
+			lib_list = sorted(set(lib_list))
+		self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
+		return 0
+
+class sll_installer(Task):
+	ext_in = 'softlink_libs'
+	def run(self):
+		tgt = self.outputs[0]
+		self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
+		lib_list=tgt.read().split()
+		for lib in lib_list:
+			self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
+		return 0
+
diff --git a/third_party/waf/waflib/extras/sphinx.py b/third_party/waf/waflib/extras/sphinx.py
new file mode 100644
index 0000000..08f3cfd
--- /dev/null
+++ b/third_party/waf/waflib/extras/sphinx.py
@@ -0,0 +1,120 @@
+"""Support for Sphinx documentation
+
+This is a wrapper for the sphinx-build program. Please note that sphinx-build supports only
+one output format at a time, but the tool can create multiple tasks to handle more.
+The output formats can be passed via the sphinx_output_format attribute, which is a list
+of strings. For backwards compatibility, if only one output format is needed, it can be
+passed as a single string.
+The default output format is html.
+
+Specific formats can be installed in different directories by specifying the
+install_path_<FORMAT> attribute. If not defined, the standard install_path
+will be used instead.
+
+Example wscript:
+
+def configure(cnf):
+    cnf.load('sphinx')
+
+def build(bld):
+    bld(
+        features='sphinx',
+        sphinx_source='sources',  # path to source directory
+        sphinx_options='-a -v',  # sphinx-build program additional options
+        sphinx_output_format=['html', 'man'],  # output format of sphinx documentation
+        install_path_man='${DOCDIR}/man'       # put man pages in a specific directory
+        )
+
+"""
+
+from waflib.Node import Node
+from waflib import Utils
+from waflib import Task
+from waflib.TaskGen import feature, after_method
+
+
+def configure(cnf):
+    """Check if sphinx-build program is available and loads gnu_dirs tool."""
+    cnf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+    cnf.load('gnu_dirs')
+
+
+@feature('sphinx')
+def build_sphinx(self):
+    """Builds sphinx sources.
+    """
+    if not self.env.SPHINX_BUILD:
+        self.bld.fatal('Program SPHINX_BUILD not defined.')
+    if not getattr(self, 'sphinx_source', None):
+        self.bld.fatal('Attribute sphinx_source not defined.')
+    if not isinstance(self.sphinx_source, Node):
+        sphinx_source_path = self.sphinx_source
+        self.sphinx_source = self.path.find_node(sphinx_source_path)
+        if not self.sphinx_source:
+            self.bld.fatal('Can\'t find sphinx_source: %r' % sphinx_source_path)
+
+    # In the taskgen we have the complete list of formats
+    Utils.def_attrs(self, sphinx_output_format='html')
+    self.sphinx_output_format = Utils.to_list(self.sphinx_output_format)
+
+    self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', [])
+
+    for source_file in self.sphinx_source.ant_glob('**/*'):
+        self.bld.add_manual_dependency(self.sphinx_source, source_file)
+
+    for cfmt in self.sphinx_output_format:
+        sphinx_build_task = self.create_task('SphinxBuildingTask')
+        sphinx_build_task.set_inputs(self.sphinx_source)
+        # In task we keep the specific format this task is generating
+        sphinx_build_task.env.SPHINX_OUTPUT_FORMAT = cfmt
+
+        # the sphinx-build results are in <build + output_format> directory
+        sphinx_build_task.sphinx_output_directory = self.path.get_bld().make_node(cfmt)
+        sphinx_build_task.set_outputs(sphinx_build_task.sphinx_output_directory)
+        sphinx_build_task.sphinx_output_directory.mkdir()
+
+        Utils.def_attrs(sphinx_build_task, install_path=getattr(self, 'install_path_' + cfmt, getattr(self, 'install_path', get_install_path(sphinx_build_task))))
+
+
+def get_install_path(task):
+    if task.env.SPHINX_OUTPUT_FORMAT == 'man':
+        return task.env.MANDIR
+    elif task.env.SPHINX_OUTPUT_FORMAT == 'info':
+        return task.env.INFODIR
+    else:
+        return task.env.DOCDIR
+
+
+class SphinxBuildingTask(Task.Task):
+    color = 'BOLD'
+    run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} -d ${TGT[0].bld_dir()}/doctrees-${SPHINX_OUTPUT_FORMAT} ${SPHINX_OPTIONS}'
+
+    def keyword(self):
+        return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT
+
+    def runnable_status(self):
+
+        for x in self.run_after:
+            if not x.hasrun:
+                return Task.ASK_LATER
+
+        self.signature()
+        ret = Task.Task.runnable_status(self)
+        if ret == Task.SKIP_ME:
+            # in case the files were removed
+            self.add_install()
+        return ret
+
+
+    def post_run(self):
+        self.add_install()
+        return Task.Task.post_run(self)
+
+
+    def add_install(self):
+        nodes = self.sphinx_output_directory.ant_glob('**/*', quiet=True)
+        self.outputs += nodes
+        self.generator.add_install_files(install_to=self.install_path,
+                                         install_from=nodes,
+                                         postpone=False,
+                                         cwd=self.sphinx_output_directory.make_node(self.env.SPHINX_OUTPUT_FORMAT),
+                                         relative_trick=True)
diff --git a/third_party/waf/waflib/extras/stale.py b/third_party/waf/waflib/extras/stale.py
new file mode 100644
index 0000000..cac3f46
--- /dev/null
+++ b/third_party/waf/waflib/extras/stale.py
@@ -0,0 +1,98 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Add a pre-build hook to remove build files (declared in the system)
+that do not have a corresponding target
+
+This can be used for example to remove the targets
+that have changed name without performing
+a full 'waf clean'
+
+Of course, it will only work if there are no dynamically generated
+nodes/tasks, in which case the method will have to be modified
+to exclude some folders for example.
+
+Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
+"""
+
+from waflib import Logs, Build
+from waflib.Runner import Parallel
+
+DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
+MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()
+
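+# A usage sketch following the docstring above (loading the module is enough,
+# since it wraps Parallel.refill_task_list at import time):
+#
+#   from waflib import Build
+#   def build(bld):
+#       bld.post_mode = Build.POST_AT_ONCE
+#       bld.load('stale')
+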
+def can_delete(node):
+	"""Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
+	if not node.name.endswith('.moc'):
+		return True
+	base = node.name[:-4]
+	p1 = node.parent.get_src()
+	p2 = node.parent.get_bld()
+	for k in MOC_H_EXTS:
+		h_name = base + k
+		n = p1.search_node(h_name)
+		if n:
+			return False
+		n = p2.search_node(h_name)
+		if n:
+			return False
+
+		# foo.cpp.moc, foo.h.moc, etc.
+		if base.endswith(k):
+			return False
+
+	return True
+
+# recursion over the nodes to find the stale files
+def stale_rec(node, nodes):
+	if node.abspath() in node.ctx.env[Build.CFG_FILES]:
+		return
+
+	if getattr(node, 'children', []):
+		for x in node.children.values():
+			if x.name != "c4che":
+				stale_rec(x, nodes)
+	else:
+		for ext in DYNAMIC_EXT:
+			if node.name.endswith(ext):
+				break
+		else:
+			if not node in nodes:
+				if can_delete(node):
+					Logs.warn('Removing stale file -> %r', node)
+					node.delete()
+
+old = Parallel.refill_task_list
+def refill_task_list(self):
+	iit = old(self)
+	bld = self.bld
+
+	# execute this operation only once
+	if getattr(self, 'stale_done', False):
+		return iit
+	self.stale_done = True
+
+	# this does not work in partial builds
+	if bld.targets != '*':
+		return iit
+
+	# this does not work in dynamic builds, where task generators are posted lazily
+	if bld.post_mode == Build.POST_LAZY:
+		return iit
+
+	# obtain the nodes to use during the build
+	nodes = []
+	for tasks in bld.groups:
+		for x in tasks:
+			try:
+				nodes.extend(x.outputs)
+			except AttributeError:
+				pass
+
+	stale_rec(bld.bldnode, nodes)
+	return iit
+
+Parallel.refill_task_list = refill_task_list
+
diff --git a/third_party/waf/waflib/extras/stracedeps.py b/third_party/waf/waflib/extras/stracedeps.py
new file mode 100644
index 0000000..37d82cb
--- /dev/null
+++ b/third_party/waf/waflib/extras/stracedeps.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Execute tasks through strace to obtain dependencies after the process is run. This
+scheme is similar to that of the Fabricate script.
+
+To use::
+
+  def configure(conf):
+     conf.load('stracedeps')
+
+WARNING:
+* This will not work when advanced scanners are needed (qt4/qt5)
+* The overhead of running 'strace' is significant (56s -> 1m29s)
+* It will not work on Windows :-)
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils
+
+#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
+TRACECALLS = 'trace=process,file'
+
+BANNED = ('/tmp', '/proc', '/sys', '/dev')
+
+s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
+s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
+re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
+strace_lock = threading.Lock()
+
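+# Illustrative 'strace -f' output lines that re_lines is meant to match
+# (pids and paths are made up):
+#
+#   1234  open("/usr/include/stdio.h", O_RDONLY) = 3
+#   1234  chdir("/home/user/project") = 0
+#   1234  clone(child_stack=NULL, flags=CLONE_VM) = 1235
+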
+def configure(conf):
+	conf.find_program('strace')
+
+def task_method(func):
+	# Decorator function to bind/replace methods on the base Task class
+	#
+	# The methods Task.exec_command and Task.sig_implicit_deps already exist and are rarely overridden;
+	# we thus expect that we are the only ones doing this
+	try:
+		setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
+	except AttributeError:
+		pass
+	setattr(Task.Task, func.__name__, func)
+	return func
+
+@task_method
+def get_strace_file(self):
+	try:
+		return self.strace_file
+	except AttributeError:
+		pass
+
+	if self.outputs:
+		ret = self.outputs[0].abspath() + '.strace'
+	else:
+		ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
+	self.strace_file = ret
+	return ret
+
+@task_method
+def get_strace_args(self):
+	return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
+
+@task_method
+def exec_command(self, cmd, **kw):
+	bld = self.generator.bld
+	if not 'cwd' in kw:
+		kw['cwd'] = self.get_cwd()
+
+	args = self.get_strace_args()
+	fname = self.get_strace_file()
+	if isinstance(cmd, list):
+		cmd = args + cmd
+	else:
+		cmd = '%s %s' % (' '.join(args), cmd)
+
+	ret = -1
+	try:
+		ret = bld.exec_command(cmd, **kw)
+	finally:
+		# parse the dependencies only on success; ret stays -1 if exec_command
+		# raised, which avoids an UnboundLocalError in this finally block
+		if not ret:
+			self.parse_strace_deps(fname, kw['cwd'])
+	return ret
+
+@task_method
+def sig_implicit_deps(self):
+	# bypass the scanner functions
+	return
+
+@task_method
+def parse_strace_deps(self, path, cwd):
+	# uncomment the following line to disable the dependencies and force a file scan
+	# return
+	try:
+		cnt = Utils.readf(path)
+	finally:
+		try:
+			os.remove(path)
+		except OSError:
+			pass
+
+	if not isinstance(cwd, str):
+		cwd = cwd.abspath()
+
+	nodes = []
+	bld = self.generator.bld
+	try:
+		cache = bld.strace_cache
+	except AttributeError:
+		cache = bld.strace_cache = {}
+
+	# chdir and relative paths
+	pid_to_cwd = {}
+
+	global BANNED
+	done = set()
+	for m in re.finditer(re_lines, cnt):
+		# scraping the output of strace
+		pid = m.group('pid')
+		if m.group('npid'):
+			npid = m.group('npid')
+			pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
+			continue
+
+		p = m.group('path').replace('\\"', '"')
+
+		if p == '.' or m.group().find('= -1 ENOENT') > -1:
+			# just to speed it up a bit
+			continue
+
+		if not os.path.isabs(p):
+			p = os.path.join(pid_to_cwd.get(pid, cwd), p)
+
+		call = m.group('call')
+		if call == 'chdir':
+			pid_to_cwd[pid] = p
+			continue
+
+		if p in done:
+			continue
+		done.add(p)
+
+		for x in BANNED:
+			if p.startswith(x):
+				break
+		else:
+			if p.endswith('/') or os.path.isdir(p):
+				continue
+
+			try:
+				node = cache[p]
+			except KeyError:
+				strace_lock.acquire()
+				try:
+					cache[p] = node = bld.root.find_node(p)
+					if not node:
+						continue
+				finally:
+					strace_lock.release()
+			nodes.append(node)
+
+	# record the dependencies then force the task signature recalculation for next time
+	if Logs.verbose:
+		Logs.debug('deps: real scanner for %r returned %r', self, nodes)
+	bld = self.generator.bld
+	bld.node_deps[self.uid()] = nodes
+	bld.raw_deps[self.uid()] = []
+	try:
+		del self.cache_sig
+	except AttributeError:
+		pass
+	self.signature()
+
diff --git a/third_party/waf/waflib/extras/swig.py b/third_party/waf/waflib/extras/swig.py
new file mode 100644
index 0000000..967caeb
--- /dev/null
+++ b/third_party/waf/waflib/extras/swig.py
@@ -0,0 +1,237 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Petar Forai
+# Thomas Nagy 2008-2010 (ita)
+
+import re
+from waflib import Task, Logs
+from waflib.TaskGen import extension, feature, after_method
+from waflib.Configure import conf
+from waflib.Tools import c_preproc
+
+"""
+tasks have to be added dynamically:
+- swig interface files may be created at runtime
+- the module name may be unknown in advance
+"""
+
+SWIG_EXTS = ['.swig', '.i']
+
+re_module = re.compile(r'%module(?:\s*\(.*\))?\s+([^\r\n]+)', re.M)
+
+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
+re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M)
+
+class swig(Task.Task):
+	color   = 'BLUE'
+	run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
+	ext_out = ['.h'] # might produce .h files although it is not mandatory
+	vars = ['SWIG_VERSION', 'SWIGDEPS']
+
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		if not getattr(self, 'init_outputs', None):
+			self.init_outputs = True
+			if not getattr(self, 'module', None):
+				# search the module name
+				txt = self.inputs[0].read()
+				m = re_module.search(txt)
+				if not m:
+					raise ValueError("could not find the swig module name")
+				self.module = m.group(1)
+
+			swig_c(self)
+
+			# add the language-specific output files as nodes
+			# call funs in the dict swig_langs
+			for x in self.env['SWIGFLAGS']:
+				# obtain the language
+				x = x[1:]
+				try:
+					fun = swig_langs[x]
+				except KeyError:
+					pass
+				else:
+					fun(self)
+
+		return super(swig, self).runnable_status()
+
+	def scan(self):
+		"scan for swig dependencies, climb the .i files"
+		lst_src = []
+
+		seen = []
+		missing = []
+		to_see = [self.inputs[0]]
+
+		while to_see:
+			node = to_see.pop(0)
+			if node in seen:
+				continue
+			seen.append(node)
+			lst_src.append(node)
+
+			# read the file
+			code = node.read()
+			code = c_preproc.re_nl.sub('', code)
+			code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+
+			# find .i files and project headers
+			names = re_2.findall(code)
+			for n in names:
+				for d in self.generator.includes_nodes + [node.parent]:
+					u = d.find_resource(n)
+					if u:
+						to_see.append(u)
+						break
+				else:
+					missing.append(n)
+		return (lst_src, missing)
+
+# provide additional language processing
+swig_langs = {}
+def swigf(fun):
+	swig_langs[fun.__name__.replace('swig_', '')] = fun
+	return fun
+swig.swigf = swigf
+
+def swig_c(self):
+	ext = '.swigwrap_%d.c' % self.generator.idx
+	flags = self.env['SWIGFLAGS']
+	if '-c++' in flags:
+		ext += 'xx'
+	out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
+
+	if '-c++' in flags:
+		c_tsk = self.generator.cxx_hook(out_node)
+	else:
+		c_tsk = self.generator.c_hook(out_node)
+
+	c_tsk.set_run_after(self)
+
+	# transfer weights from swig task to c task
+	if getattr(self, 'weight', None):
+		c_tsk.weight = self.weight
+	if getattr(self, 'tree_weight', None):
+		c_tsk.tree_weight = self.tree_weight
+
+	try:
+		self.more_tasks.append(c_tsk)
+	except AttributeError:
+		self.more_tasks = [c_tsk]
+
+	try:
+		ltask = self.generator.link_task
+	except AttributeError:
+		pass
+	else:
+		ltask.set_run_after(c_tsk)
+		# setting input nodes does not declare the build order
+		# because the build already started, but it sets
+		# the dependency to enable rebuilds
+		ltask.inputs.append(c_tsk.outputs[0])
+
+	self.outputs.append(out_node)
+
+	if not '-o' in self.env['SWIGFLAGS']:
+		self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
+
+@swigf
+def swig_python(tsk):
+	node = tsk.inputs[0].parent
+	if tsk.outdir:
+		node = tsk.outdir
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
+
+@swigf
+def swig_ocaml(tsk):
+	node = tsk.inputs[0].parent
+	if tsk.outdir:
+		node = tsk.outdir
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
+
+@extension(*SWIG_EXTS)
+def i_file(self, node):
+	# the task instance
+	tsk = self.create_task('swig')
+	tsk.set_inputs(node)
+	tsk.module = getattr(self, 'swig_module', None)
+
+	flags = self.to_list(getattr(self, 'swig_flags', []))
+	tsk.env.append_value('SWIGFLAGS', flags)
+
+	tsk.outdir = None
+	if '-outdir' in flags:
+		outdir = flags[flags.index('-outdir')+1]
+		outdir = tsk.generator.bld.bldnode.make_node(outdir)
+		outdir.mkdir()
+		tsk.outdir = outdir
+
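+# A build sketch (names are hypothetical) combining a swig interface with C++
+# sources into a Python extension:
+#
+#   def build(bld):
+#       bld(features='cxx cxxshlib pyext',
+#           source='foo.i foo.cpp',
+#           swig_flags='-c++ -python',
+#           target='_foo')
+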
+@feature('c', 'cxx', 'd', 'fc', 'asm')
+@after_method('apply_link', 'process_source')
+def enforce_swig_before_link(self):
+	try:
+		link_task = self.link_task
+	except AttributeError:
+		pass
+	else:
+		for x in self.tasks:
+			if x.__class__.__name__ == 'swig':
+				link_task.run_after.add(x)
+
+@conf
+def check_swig_version(conf, minver=None):
+	"""
+	Check if the swig tool is found matching a given minimum version.
+	minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver.
+
+	If successful, SWIG_VERSION is defined as 'MAJOR.MINOR'
+	(eg. '1.3') of the actual swig version found.
+
+	:param minver: minimum version
+	:type minver: tuple of int
+	:return: swig version
+	:rtype: tuple of int
+	"""
+	assert minver is None or isinstance(minver, tuple)
+	swigbin = conf.env['SWIG']
+	if not swigbin:
+		conf.fatal('could not find the swig executable')
+
+	# Get swig version string
+	cmd = swigbin + ['-version']
+	Logs.debug('swig: Running swig command %r', cmd)
+	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
+	swig_out = conf.cmd_and_log(cmd)
+	swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
+
+	# Compare swig version with the minimum required
+	result = (minver is None) or (swigver_tuple >= minver)
+
+	if result:
+		# Define useful environment variables
+		swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
+		conf.env['SWIG_VERSION'] = swigver
+
+	# Feedback
+	swigver_full = '.'.join(map(str, swigver_tuple[:3]))
+	if minver is None:
+		conf.msg('Checking for swig version', swigver_full)
+	else:
+		minver_str = '.'.join(map(str, minver))
+		conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
+
+	if not result:
+		conf.fatal('The swig version is too old, expecting %r' % (minver,))
+
+	return swigver_tuple
+
+def configure(conf):
+	conf.find_program('swig', var='SWIG')
+	conf.env.SWIGPATH_ST = '-I%s'
+	conf.env.SWIGDEF_ST = '-D%s'
+
diff --git a/third_party/waf/waflib/extras/syms.py b/third_party/waf/waflib/extras/syms.py
new file mode 100644
index 0000000..562f708
--- /dev/null
+++ b/third_party/waf/waflib/extras/syms.py
@@ -0,0 +1,84 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This tool supports the export_symbols_regex attribute to export the symbols of a shared library.
+By default, gcc exports all symbols and msvc exports none.
+To use the tool, do something like:
+
+def build(ctx):
+	ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
+
+only the symbols starting with 'mylib_' will be exported.
+"""
+
+import re
+from waflib.Context import STDOUT
+from waflib.Task import Task
+from waflib.Errors import WafError
+from waflib.TaskGen import feature, after_method
+
+class gen_sym(Task):
+	def run(self):
+		obj = self.inputs[0]
+		kw = {}
+
+		reg = getattr(self.generator, 'export_symbols_regex', '.+?')
+		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+			re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
+			cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
+		else:
+			if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
+				re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
+			elif self.env.DEST_BINFMT=='mac-o':
+				re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?(%s))\b' % reg)
+			else:
+				re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
+			cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
+		syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
+		self.outputs[0].write('%r' % syms)
+
+class compile_sym(Task):
+	def run(self):
+		syms = {}
+		for x in self.inputs:
+			slist = eval(x.read())
+			for s in slist:
+				syms[s] = 1
+		lsyms = list(syms.keys())
+		lsyms.sort()
+		if self.env.DEST_BINFMT == 'pe':
+			self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
+		elif self.env.DEST_BINFMT == 'elf':
+			self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
+		elif self.env.DEST_BINFMT=='mac-o':
+			self.outputs[0].write('\n'.join(lsyms) + '\n')
+		else:
+			raise WafError('NotImplemented')
+
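+# With DEST_BINFMT == 'elf' and export_symbols_regex='mylib_.*', the generated
+# version script would look like:
+#
+#   { global:
+#   mylib_bar;
+#   mylib_foo;
+#   local: *; };
+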
+@feature('syms')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
+def do_the_symbol_stuff(self):
+	def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
+	compiled_tasks = getattr(self, 'compiled_tasks', None)
+	if compiled_tasks:
+		ins = [x.outputs[0] for x in compiled_tasks]
+		self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+		self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
+
+	link_task = getattr(self, 'link_task', None)
+	if link_task:
+		self.link_task.dep_nodes.append(def_node)
+
+		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+			self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
+		elif self.env.DEST_BINFMT == 'pe':
+			# gcc on windows takes *.def as an additional input
+			self.link_task.inputs.append(def_node)
+		elif self.env.DEST_BINFMT == 'elf':
+			self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
+		elif self.env.DEST_BINFMT=='mac-o':
+			self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
+		else:
+			raise WafError('NotImplemented')
+
diff --git a/third_party/waf/waflib/extras/ticgt.py b/third_party/waf/waflib/extras/ticgt.py
new file mode 100644
index 0000000..f43a7ea
--- /dev/null
+++ b/third_party/waf/waflib/extras/ticgt.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Texas Instruments code generator support (experimental)
+# When reporting issues, please directly assign the bug to the maintainer.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+"""
+TI cgt6x is a compiler suite for TI DSPs.
+
+The toolchain does pretty weird things, and I'm sure I'm missing some of them.
+But still, the tool saves time.
+
+What this tool does is:
+
+- create a TI compiler environment
+- create TI compiler features, to handle some specifics about this compiler
+  It has a few idiosyncrasies, such as not giving any freedom over the .o file names
+- automatically activate them when using the TI compiler
+- handle the tconf tool, which generates the DSP/BIOS configuration sources
+  (see apply_tconf below)
+
+TODO:
+
+- the set_platform_flags() function is not nice
+- more tests
+- broaden tool scope, if needed
+
+"""
+
+import os, re
+
+from waflib import Options, Utils, Task, TaskGen
+from waflib.Tools import c, ccroot, c_preproc
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method
+from waflib.Tools.c import cprogram
+
+opj = os.path.join
+
+@conf
+def find_ticc(conf):
+	conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+	conf.env.CC_NAME = 'ticc'
+
+@conf
+def find_tild(conf):
+	conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+	conf.env.LINK_CC_NAME = 'tild'
+
+@conf
+def find_tiar(conf):
+	conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+	conf.env.AR_NAME = 'tiar'
+	conf.env.ARFLAGS = 'qru'
+
+@conf
+def ticc_common_flags(conf):
+	v = conf.env
+
+	if not v['LINK_CC']:
+		v['LINK_CC'] = v['CC']
+	v['CCLNK_SRC_F']	 = []
+	v['CCLNK_TGT_F']	 = ['-o']
+	v['CPPPATH_ST']	  = '-I%s'
+	v['DEFINES_ST']	  = '-d%s'
+
+	v['LIB_ST']	      = '-l%s' # template for adding libs
+	v['LIBPATH_ST']	  = '-i%s' # template for adding libpaths
+	v['STLIB_ST']	    = '-l=%s.lib'
+	v['STLIBPATH_ST']	= '-i%s'
+
+	# program
+	v['cprogram_PATTERN']    = '%s.out'
+
+	# static lib
+	#v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
+	v['cstlib_PATTERN']      = '%s.lib'
+
+def configure(conf):
+	v = conf.env
+	v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
+	v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
+	v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
+	v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
+	v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
+	conf.find_ticc()
+	conf.find_tiar()
+	conf.find_tild()
+	conf.ticc_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+	conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
+
+	conf.env.TCONF_INCLUDES += [
+	 opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
+	]
+
+	conf.env.INCLUDES += [
+	 opj(conf.env.TI_CGT_DIR, 'include'),
+	]
+
+	conf.env.LIBPATH += [
+	 opj(conf.env.TI_CGT_DIR, "lib"),
+	]
+
+	conf.env.INCLUDES_DSPBIOS += [
+	 opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
+	]
+
+	conf.env.LIBPATH_DSPBIOS += [
+	 opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
+	]
+
+	conf.env.INCLUDES_DSPLINK += [
+	 opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
+	]
+
+@conf
+def ti_set_debug(cfg, debug=1):
+	"""
+	Sets debug flags for the compiler.
+
+	TODO:
+	- for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
+	- -g --no_compress
+	"""
+	if debug:
+		cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
+
+@conf
+def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
+	"""
+	Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES
+	For the specific hardware.
+
+	Assumes that DSPLINK was built in its own folder.
+
+	:param splat: short platform name (eg. OMAPL138)
+	:param dsp: DSP name (eg. 674X)
+	:param dspbios_ver: string identifying DspBios version (eg. 5.XX)
+	:param board: board name (eg. OMAPL138GEM)
+
+	"""
+	d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
+	d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
+	cfg.env.TCONF_INCLUDES += [d1, d]
+	cfg.env.INCLUDES_DSPLINK += [
+	 opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
+	 d,
+	]
+
+	cfg.env.LINKFLAGS_DSPLINK += [
+	 opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
+	 for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
+	]
+
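+# Example call using the sample values from the docstring above:
+#
+#   cfg.ti_dsplink_set_platform_flags('OMAPL138', '674X', '5.XX', 'OMAPL138GEM')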
+
+def options(opt):
+	opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
+	opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
+	opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
+	opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
+	opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
+
+class ti_cprogram(cprogram):
+	"""
+	Link object files into a c program
+	
+	Changes:
+
+	- the linked executable gets a relative path (because we can)
+	- put the LIBPATH first
+	"""
+	run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
+
+@feature("c")
+@before_method('apply_link')
+def use_ti_cprogram(self):
+	"""
+	Automatically uses ti_cprogram link process
+	"""
+	if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
+		self.features.insert(0, "ti_cprogram")
+
+class ti_c(Task.Task):
+	"""
+	Compile task for the TI codegen compiler
+
+	Compiles C files into object files. This compiler does not allow specifying
+	the output file name, only the output path.
+	"""
+	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
+	vars    = ['CCDEPS'] # unused variable to depend on, just in case
+	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
+	scan    = c_preproc.scan
+
+def create_compiled_task(self, name, node):
+	"""
+	Overrides ccroot.create_compiled_task to support ti_c
+	"""
+	out = '%s' % (node.change_ext('.obj').name)
+	if self.env.CC_NAME == 'ticc':
+		name = 'ti_c'
+	task = self.create_task(name, node, node.parent.find_or_declare(out))
+	self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
+	try:
+		self.compiled_tasks.append(task)
+	except AttributeError:
+		self.compiled_tasks = [task]
+	return task
+
+@TaskGen.extension('.c')
+def c_hook(self, node):
+	"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
+	if self.env.CC_NAME == 'ticc':
+		return create_compiled_task(self, 'ti_c', node)
+	else:
+		return self.create_compiled_task('c', node)
+
+
+@feature("ti-tconf")
+@before_method('process_source')
+def apply_tconf(self):
+	sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
+	node = sources[0]
+	assert(sources[0].name.endswith(".tcf"))
+	if len(sources) > 1:
+		assert(sources[1].name.endswith(".cmd"))
+
+	target = getattr(self, 'target', self.source)
+	target_node = node.get_bld().parent.find_or_declare(node.name)
+	
+	procid = "%d" % int(getattr(self, 'procid', 0))
+
+	importpaths = []
+	includes = Utils.to_list(getattr(self, 'includes', []))
+	for x in includes + self.env.TCONF_INCLUDES:
+		if x == os.path.abspath(x):
+			importpaths.append(x)
+		else:
+			relpath = self.path.find_node(x).path_from(target_node.parent)
+			importpaths.append(relpath)
+
+	task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
+	task.path = self.path
+	task.includes = includes
+	task.cwd = target_node.parent.abspath()
+	task.env = self.env.derive()
+	task.env["TCONFSRC"] = node.path_from(target_node.parent)
+	task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
+	task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
+	task.env['PROCID'] = procid
+	task.outputs = [
+	 target_node.change_ext("cfg_c.c"),
+	 target_node.change_ext("cfg.s62"),
+	 target_node.change_ext("cfg.cmd"),
+	]
+
+	create_compiled_task(self, 'ti_c', task.outputs[1])
+	ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
+	ctask.env = self.env.derive()
+
+	self.add_those_o_files(target_node.change_ext("cfg.cmd"))
+	if len(sources) > 1:
+		self.add_those_o_files(sources[1])
+	self.source = []
+
+re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
+class ti_tconf(Task.Task):
+	run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
+	color   = 'PINK'
+
+	def scan(self):
+		includes = Utils.to_list(getattr(self, 'includes', []))
+
+		def deps(node):
+			nodes, names = [], []
+			if node:
+				code = Utils.readf(node.abspath())
+				for match in re_tconf_include.finditer(code):
+					path = match.group('file')
+					if path:
+						for x in includes:
+							filename = opj(x, path)
+							fi = self.path.find_resource(filename)
+							if fi:
+								subnodes, subnames = deps(fi)
+								nodes += subnodes
+								names += subnames
+								nodes.append(fi)
+								names.append(path)
+								break
+			return nodes, names
+		return deps(self.inputs[0])
+
diff --git a/third_party/waf/waflib/extras/unity.py b/third_party/waf/waflib/extras/unity.py
new file mode 100644
index 0000000..78128ed
--- /dev/null
+++ b/third_party/waf/waflib/extras/unity.py
@@ -0,0 +1,108 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Compile whole groups of C/C++ files at once
+(C and C++ files are processed independently though).
+
+To enable globally::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def build(bld):
+		bld.load('compiler_cxx unity')
+
+To enable for specific task generators only::
+
+	def build(bld):
+		bld(features='c cprogram unity', source='main.c', ...)
+
+The file order is often significant in such builds, so it can be
+necessary to adjust the order of source files and the batch sizes.
+To control the number of files processed in a batch per target
+(the default is 50)::
+
+	def build(bld):
+		bld(features='c cprogram', unity_size=20)
+
+"""
+
+from waflib import Task, Options
+from waflib.Tools import c_preproc
+from waflib import TaskGen
+
+MAX_BATCH = 50
+
+EXTS_C = ('.c',)
+EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
+
+def options(opt):
+	global MAX_BATCH
+	opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
+		help='default unity batch size (0 disables unity builds)')
+
+@TaskGen.taskgen_method
+def batch_size(self):
+	default = getattr(Options.options, 'batchsize', MAX_BATCH)
+	if default < 1:
+		return 0
+	return getattr(self, 'unity_size', default)
+
+
+class unity(Task.Task):
+	color = 'BLUE'
+	scan = c_preproc.scan
+	def to_include(self, node):
+		ret = node.path_from(self.outputs[0].parent)
+		ret = ret.replace('\\', '\\\\').replace('"', '\\"')
+		return ret
+	def run(self):
+		lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
+		txt = ''.join(lst)
+		self.outputs[0].write(txt)
+	def __str__(self):
+		node = self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+
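+# The unity_<idx>_<counter>_<batchsize>.<ext> file written by run() simply
+# includes the batched sources, for example:
+#
+#   #include "../../src/a.c"
+#   #include "../../src/b.c"
+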
+def bind_unity(obj, cls_name, exts):
+	if not 'mappings' in obj.__dict__:
+		obj.mappings = dict(obj.mappings)
+
+	for j in exts:
+		fun = obj.mappings[j]
+		if fun.__name__ == 'unity_fun':
+			raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
+
+		def unity_fun(self, node):
+			cnt = self.batch_size()
+			if cnt <= 1:
+				return fun(self, node)
+			x = getattr(self, 'master_%s' % cls_name, None)
+			if not x or len(x.inputs) >= cnt:
+				x = self.create_task('unity')
+				setattr(self, 'master_%s' % cls_name, x)
+
+				cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
+				c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
+				x.outputs = [c_node]
+				setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
+				fun(self, c_node)
+			x.inputs.append(node)
+
+		obj.mappings[j] = unity_fun
+
+@TaskGen.feature('unity')
+@TaskGen.before('process_source')
+def single_unity(self):
+	lst = self.to_list(self.features)
+	if 'c' in lst:
+		bind_unity(self, 'c', EXTS_C)
+	if 'cxx' in lst:
+		bind_unity(self, 'cxx', EXTS_CXX)
+
+def build(bld):
+	if bld.env.CC_NAME:
+		bind_unity(TaskGen.task_gen, 'c', EXTS_C)
+	if bld.env.CXX_NAME:
+		bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
+
diff --git a/third_party/waf/waflib/extras/use_config.py b/third_party/waf/waflib/extras/use_config.py
new file mode 100644
index 0000000..ef5129f
--- /dev/null
+++ b/third_party/waf/waflib/extras/use_config.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
+
+"""
+When a project has a lot of options, the 'waf configure' command line can get
+very long and error-prone.
+This tool provides a convenient way to load a set of configuration parameters
+from a local file or from a remote url.
+
+The configuration parameters are stored in a Python file that is imported
+in the same way as an extra waf tool.
+
+Example:
+$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
+
+The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
+(or 'http://www.anywhere.org/wafcfg').
+If the files are available locally, it could be:
+$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
+
+The configuration of 'myconf1.py' is automatically loaded by calling
+its 'configure' function. In this example, it defines environment variables and
+sets options:
+
+def configure(self):
+	self.env['CC'] = 'gcc-4.8'
+	self.env.append_value('LIBPATH', [...])
+	self.options.perlbinary = '/usr/local/bin/perl'
+	self.options.pyc = False
+
+The corresponding command line should have been:
+$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
+
+
+This is an extra tool, not bundled with the default waf binary.
+To add the use_config tool to the waf file:
+$ ./waf-light --tools=use_config
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('use_config')
+
+	def configure(conf):
+		conf.load('use_config')
+"""
+
+import sys
+import os.path as osp
+import os
+
+local_repo = ''
+"""Local repository containing additional Waf tools (plugins)"""
+remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/'
+"""
+Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
+
+	$ waf configure --download
+"""
+
+remote_locs = ['waflib/extras', 'waflib/Tools']
+"""
+Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
+"""
+
+
+try:
+	from urllib import request
+except ImportError:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+
+from waflib import Errors, Context, Logs, Utils, Options, Configure
+
+try:
+	from urllib.parse import urlparse
+except ImportError:
+	from urlparse import urlparse
+
+
+
+
+DEFAULT_DIR = 'wafcfg'
+# add first the current wafcfg subdirectory
+sys.path.append(osp.abspath(DEFAULT_DIR))
+
+def options(self):
+	group = self.add_option_group('configure options')
+	group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
+
+	group.add_option('--use-config', action='store', default=None,
+					 metavar='CFG', dest='use_config',
+					 help='force the configuration parameters by importing '
+						  'CFG.py. Several modules may be provided (comma '
+						  'separated).')
+	group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
+					 metavar='CFG_DIR', dest='use_config_dir',
+					 help='path or url where to find the configuration file')
+
+def download_check(node):
+	"""
+	Hook to check for the tools which are downloaded. Replace with your function if necessary.
+	"""
+	pass
+
+
+def download_tool(tool, force=False, ctx=None):
+	"""
+	Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
+
+		$ waf configure --download
+	"""
+	for x in Utils.to_list(remote_repo):
+		for sub in Utils.to_list(remote_locs):
+			url = '/'.join((x, sub, tool + '.py'))
+			try:
+				web = urlopen(url)
+				try:
+					if web.getcode() != 200:
+						continue
+				except AttributeError:
+					pass
+			except Exception:
+				# on python3 urlopen throws an exception
+				# python 2.3 does not have getcode and throws an exception to fail
+				continue
+			else:
+				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
+				tmp.write(web.read(), 'wb')
+				Logs.warn('Downloaded %s from %s', tool, url)
+				download_check(tmp)
+				try:
+					module = Context.load_tool(tool)
+				except Exception:
+					Logs.warn('The tool %s from %s is unusable', tool, url)
+					try:
+						tmp.delete()
+					except Exception:
+						pass
+					continue
+				return module
+
+	raise Errors.WafError('Could not load the Waf tool')
+
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
+	try:
+		module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
+	except ImportError as e:
+		if not ctx or not hasattr(Options.options, 'download'):
+			Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
+			raise
+		if Options.options.download:
+			module = download_tool(tool, ctx=ctx)
+			if not module:
+				ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
+		else:
+			ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
+	return module
+
+Context.load_tool_default = Context.load_tool
+Context.load_tool = load_tool
+Configure.download_tool = download_tool
+
+def configure(self):
+	opts = self.options
+	use_cfg = opts.use_config
+	if use_cfg is None:
+		return
+	url = urlparse(opts.use_config_dir)
+	kwargs = {}
+	if url.scheme:
+		kwargs['download'] = True
+		kwargs['remote_url'] = url.geturl()
+		# search first with the exact url, else try with +'/wafcfg'
+		kwargs['remote_locs'] = ['', DEFAULT_DIR]
+	tooldir = url.geturl() + ' ' + DEFAULT_DIR
+	for cfg in use_cfg.split(','):
+		Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
+		self.load(cfg, tooldir=tooldir, **kwargs)
+	self.start_msg('Checking for configuration')
+	self.end_msg(use_cfg)
+
diff --git a/third_party/waf/waflib/extras/valadoc.py b/third_party/waf/waflib/extras/valadoc.py
new file mode 100644
index 0000000..c50f69e
--- /dev/null
+++ b/third_party/waf/waflib/extras/valadoc.py
@@ -0,0 +1,140 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Nicolas Joseph 2009
+
+"""
+ported from waf 1.5:
+TODO: tabs vs spaces
+"""
+
+from waflib import Task, Utils, Errors, Logs
+from waflib.TaskGen import feature
+
+VALADOC_STR = '${VALADOC}'
+
+class valadoc(Task.Task):
+	vars  = ['VALADOC', 'VALADOCFLAGS']
+	color = 'BLUE'
+	after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
+	quiet = True # no outputs .. this is weird
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.output_dir = ''
+		self.doclet = ''
+		self.package_name = ''
+		self.package_version = ''
+		self.files = []
+		self.vapi_dirs = []
+		self.protected = True
+		self.private = False
+		self.inherit = False
+		self.deps = False
+		self.vala_defines = []
+		self.vala_target_glib = None
+		self.enable_non_null_experimental = False
+		self.force = False
+
+	def run(self):
+		if not self.env['VALADOCFLAGS']:
+			self.env['VALADOCFLAGS'] = ''
+		cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
+		cmd.append ('-o %s' % self.output_dir)
+		if getattr(self, 'doclet', None):
+			cmd.append ('--doclet %s' % self.doclet)
+		cmd.append ('--package-name %s' % self.package_name)
+		if getattr(self, 'package_version', None):
+			cmd.append ('--package-version %s' % self.package_version)
+		if getattr(self, 'packages', None):
+			for package in self.packages:
+				cmd.append ('--pkg %s' % package)
+		if getattr(self, 'vapi_dirs', None):
+			for vapi_dir in self.vapi_dirs:
+				cmd.append ('--vapidir %s' % vapi_dir)
+		if not getattr(self, 'protected', None):
+			cmd.append ('--no-protected')
+		if getattr(self, 'private', None):
+			cmd.append ('--private')
+		if getattr(self, 'inherit', None):
+			cmd.append ('--inherit')
+		if getattr(self, 'deps', None):
+			cmd.append ('--deps')
+		if getattr(self, 'vala_defines', None):
+			for define in self.vala_defines:
+				cmd.append ('--define %s' % define)
+		if getattr(self, 'vala_target_glib', None):
+			cmd.append ('--target-glib=%s' % self.vala_target_glib)
+		if getattr(self, 'enable_non_null_experimental', None):
+			cmd.append ('--enable-non-null-experimental')
+		if getattr(self, 'force', None):
+			cmd.append ('--force')
+		cmd.append (' '.join ([x.abspath() for x in self.files]))
+		return self.generator.bld.exec_command(' '.join(cmd))
+
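+# For the wscript example shown in process_valadoc() below, the assembled
+# command resembles (illustrative):
+#
+#   valadoc -o ../doc/html --package-name vala-gtk-example --package-version 1.0.0
+#           --pkg gtk+-2.0 --vapidir ../vapi --force src/main.vala
+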
+@feature('valadoc')
+def process_valadoc(self):
+	"""
+	Generate API documentation from Vala source code with valadoc
+
+	doc = bld(
+		features = 'valadoc',
+		output_dir = '../doc/html',
+		package_name = 'vala-gtk-example',
+		package_version = '1.0.0',
+		packages = 'gtk+-2.0',
+		vapi_dirs = '../vapi',
+		force = True
+	)
+
+	path = bld.path.find_dir ('../src')
+	doc.files = path.ant_glob (incl='**/*.vala')
+	"""
+
+	task = self.create_task('valadoc')
+	if getattr(self, 'output_dir', None):
+		task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
+	else:
+		raise Errors.WafError('no output directory')
+	if getattr(self, 'doclet', None):
+		task.doclet = self.doclet
+	# the doclet is optional: run() only passes --doclet when one is set
+	if getattr(self, 'package_name', None):
+		task.package_name = self.package_name
+	else:
+		raise Errors.WafError('no package name')
+	if getattr(self, 'package_version', None):
+		task.package_version = self.package_version
+	if getattr(self, 'packages', None):
+		task.packages = Utils.to_list(self.packages)
+	if getattr(self, 'vapi_dirs', None):
+		vapi_dirs = Utils.to_list(self.vapi_dirs)
+		for vapi_dir in vapi_dirs:
+			try:
+				task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
+			except AttributeError:
+				Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
+	if getattr(self, 'files', None):
+		task.files = self.files
+	else:
+		raise Errors.WafError('no input file')
+	if getattr(self, 'protected', None):
+		task.protected = self.protected
+	if getattr(self, 'private', None):
+		task.private = self.private
+	if getattr(self, 'inherit', None):
+		task.inherit = self.inherit
+	if getattr(self, 'deps', None):
+		task.deps = self.deps
+	if getattr(self, 'vala_defines', None):
+		task.vala_defines = Utils.to_list(self.vala_defines)
+	if getattr(self, 'vala_target_glib', None):
+		task.vala_target_glib = self.vala_target_glib
+	if getattr(self, 'enable_non_null_experimental', None):
+		task.enable_non_null_experimental = self.enable_non_null_experimental
+	if getattr(self, 'force', None):
+		task.force = self.force
+
+def configure(conf):
+	conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
+
diff --git a/third_party/waf/waflib/extras/waf_xattr.py b/third_party/waf/waflib/extras/waf_xattr.py
new file mode 100644
index 0000000..351dd63
--- /dev/null
+++ b/third_party/waf/waflib/extras/waf_xattr.py
@@ -0,0 +1,150 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Use extended attributes instead of database files
+
+1. Input files will be made writable
+2. This is only for systems providing extended filesystem attributes
+3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below)
+4. The module enables "deep_inputs" on all tasks by propagating task signatures
+5. This module also skips task signature comparisons for task code changes due to point 4.
+6. This module is for Python3/Linux only, but it could be extended to Python2/other systems
+   using the xattr library
+7. For projects in which tasks always declare output files, it should be possible to
+   store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps)
+   but this is not done here
+
+On a simple C++ project benchmark, the following variations were observed before and after adding waf_xattr.py:
+total build time: 20s -> 22s
+no-op build time: 2.4s -> 1.8s
+pickle file size: 2.9MB -> 2.6MB
+"""
+
+import os
+from waflib import Logs, Node, Task, Utils, Errors
+from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING
+
+HASH_CACHE = True
+SIG_VAR = 'user.waf.sig'
+SEP = ','.encode()
+TEMPLATE = '%b%d,%d'.encode()
+
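+# The stored signature can be inspected from a shell with the standard attr
+# tools, assuming they are installed:
+#
+#   getfattr -n user.waf.sig -e hex build/foo.o
+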
+try:
+	PermissionError
+except NameError:
+	PermissionError = IOError
+
+def getxattr(self):
+	return os.getxattr(self.abspath(), SIG_VAR)
+
+def setxattr(self, val):
+	os.setxattr(self.abspath(), SIG_VAR, val)
+
+def h_file(self):
+	try:
+		ret = getxattr(self)
+	except OSError:
+		if HASH_CACHE:
+			st = os.stat(self.abspath())
+			mtime = st.st_mtime
+			size = st.st_size
+	else:
+		if len(ret) == 16:
+			# for build directory files
+			return ret
+
+		if HASH_CACHE:
+			# check if timestamp and mtime match to avoid re-hashing
+			st = os.stat(self.abspath())
+			mtime, size = ret[16:].split(SEP)
+			if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
+				return ret[:16]
+
+	ret = Utils.h_file(self.abspath())
+	if HASH_CACHE:
+		val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
+		try:
+			setxattr(self, val)
+		except PermissionError:
+			os.chmod(self.abspath(), st.st_mode | 128)
+			setxattr(self, val)
+	return ret
+
+def runnable_status(self):
+	bld = self.generator.bld
+	if bld.is_install < 0:
+		return SKIP_ME
+
+	for t in self.run_after:
+		if not t.hasrun:
+			return ASK_LATER
+		elif t.hasrun < SKIPPED:
+			# a dependency has an error
+			return CANCEL_ME
+
+	# first compute the signature
+	try:
+		new_sig = self.signature()
+	except Errors.TaskNotReady:
+		return ASK_LATER
+
+	if not self.outputs:
+		# compare the signature to a signature computed previously
+		# this part is only for tasks with no output files
+		key = self.uid()
+		try:
+			prev_sig = bld.task_sigs[key]
+		except KeyError:
+			Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
+			return RUN_ME
+		if new_sig != prev_sig:
+			Logs.debug('task: task %r must run: the task signature changed', self)
+			return RUN_ME
+
+	# compare the signatures of the outputs to make a decision
+	for node in self.outputs:
+		try:
+			sig = node.h_file()
+		except EnvironmentError:
+			Logs.debug('task: task %r must run: an output node does not exist', self)
+			return RUN_ME
+		if sig != new_sig:
+			Logs.debug('task: task %r must run: an output node is stale', self)
+			return RUN_ME
+
+	return (self.always_run and RUN_ME) or SKIP_ME
+
+def post_run(self):
+	bld = self.generator.bld
+	sig = self.signature()
+	for node in self.outputs:
+		if not node.exists():
+			self.hasrun = MISSING
+			self.err_msg = '-> missing file: %r' % node.abspath()
+			raise Errors.WafError(self.err_msg)
+		os.setxattr(node.abspath(), 'user.waf.sig', sig)
+	if not self.outputs:
+		# only for task with no outputs
+		bld.task_sigs[self.uid()] = sig
+	if not self.keep_last_cmd:
+		try:
+			del self.last_cmd
+		except AttributeError:
+			pass
+
+try:
+	os.getxattr
+except AttributeError:
+	pass
+else:
+	h_file.__doc__ = Node.Node.h_file.__doc__
+
+	# keep file hashes as file attributes
+	Node.Node.h_file = h_file
+
+	# enable "deep_inputs" on all tasks
+	Task.Task.runnable_status = runnable_status
+	Task.Task.post_run = post_run
+	Task.Task.sig_deep_inputs = Utils.nada
+
diff --git a/third_party/waf/waflib/extras/wafcache.py b/third_party/waf/waflib/extras/wafcache.py
new file mode 100644
index 0000000..30ac3ef
--- /dev/null
+++ b/third_party/waf/waflib/extras/wafcache.py
@@ -0,0 +1,648 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2019 (ita)
+
+"""
+Filesystem-based cache system to share and re-use build artifacts
+
+Cache access operations (copy to and from) are delegated to
+independent pre-forked worker subprocesses.
+
+The following environment variables may be set:
+* WAFCACHE: several possibilities:
+  - File cache:
+    absolute path of the waf cache (~/.cache/wafcache_user,
+    where `user` represents the currently logged-in user)
+  - URL to a cache server, for example:
+    export WAFCACHE=http://localhost:8080/files/
+    in that case, GET/POST requests are made to urls of the form
+    http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
+  - GCS, S3 or MINIO bucket
+    gs://my-bucket/    (uses gsutil command line tool or WAFCACHE_CMD)
+    s3://my-bucket/    (uses aws command line tool or WAFCACHE_CMD)
+    minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
+* WAFCACHE_CMD: bucket upload/download command, for example:
+    WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
+  Note that the WAFCACHE bucket value is used for the source or destination
+  depending on the operation (upload or download). For example, with:
+    WAFCACHE="gs://mybucket/"
+  the following commands may be run:
+    gsutil cp build/myprogram  gs://mybucket/aa/aaaaa/1
+    gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
+* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
+* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
+* WAFCACHE_STATS: if set, displays cache usage statistics on exit
+
+File cache specific options:
+  Files are copied using hard links by default; if the cache is located
+  on another partition, the system switches to file copies instead.
+* WAFCACHE_TRIM_MAX_FOLDER: maximum number of tasks to cache (default: 1M)
+* WAFCACHE_EVICT_MAX_BYTES: maximum cache size in bytes (default: 10GB)
+* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
+                                   and trim the cache (3 minutes)
+
+Upload specific options:
+* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
+                          this may improve build performance with many/long file uploads
+                          the default is unset (synchronous uploads)
+* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
+                         this requires asynchronous uploads to have an effect
+
+Usage::
+
+	def build(bld):
+		bld.load('wafcache')
+		...
+
+To troubleshoot::
+
+	waf clean build --zones=wafcache
+"""
+
+import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex
+try:
+	import subprocess32 as subprocess
+except ImportError:
+	import subprocess
+
+base_cache = os.path.expanduser('~/.cache/')
+if not os.path.isdir(base_cache):
+	base_cache = '/tmp/'
+default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())
+
+CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)
+WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD')
+TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))
+EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))
+EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
+WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
+WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
+WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0
+WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS')
+WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT')
+OK = "ok"
+
+re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
+
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+
+if __name__ != '__main__':
+	from waflib import Task, Logs, Utils, Build
+
+def can_retrieve_cache(self):
+	"""
+	New method for waf Task classes
+	"""
+	if not self.outputs:
+		return False
+
+	self.cached = False
+
+	sig = self.signature()
+	ssig = Utils.to_hex(self.uid() + sig)
+
+	if WAFCACHE_STATS:
+		self.generator.bld.cache_reqs += 1
+
+	files_to = [node.abspath() for node in self.outputs]
+	proc = get_process()
+	err = cache_command(proc, ssig, [], files_to)
+	process_pool.append(proc)
+	if err.startswith(OK):
+		if WAFCACHE_VERBOSITY:
+			Logs.pprint('CYAN', '  Fetched %r from cache' % files_to)
+		else:
+			Logs.debug('wafcache: fetched %r from cache', files_to)
+		if WAFCACHE_STATS:
+			self.generator.bld.cache_hits += 1
+	else:
+		if WAFCACHE_VERBOSITY:
+			Logs.pprint('YELLOW', '  No cache entry %s' % files_to)
+		else:
+			Logs.debug('wafcache: No cache entry %s: %s', files_to, err)
+		return False
+
+	self.cached = True
+	return True
+
+def put_files_cache(self):
+	"""
+	New method for waf Task classes
+	"""
+	if WAFCACHE_NO_PUSH or getattr(self, 'cached', None) or not self.outputs:
+		return
+
+	files_from = []
+	for node in self.outputs:
+		path = node.abspath()
+		if not os.path.isfile(path):
+			return
+		files_from.append(path)
+
+	bld = self.generator.bld
+	old_sig = self.signature()
+
+	for node in self.inputs:
+		try:
+			del node.ctx.cache_sig[node]
+		except KeyError:
+			pass
+
+	delattr(self, 'cache_sig')
+	sig = self.signature()
+
+	def _async_put_files_cache(bld, ssig, files_from):
+		proc = get_process()
+		if WAFCACHE_ASYNC_WORKERS:
+			with bld.wafcache_lock:
+				if bld.wafcache_stop:
+					process_pool.append(proc)
+					return
+				bld.wafcache_procs.add(proc)
+
+		err = cache_command(proc, ssig, files_from, [])
+		process_pool.append(proc)
+		if err.startswith(OK):
+			if WAFCACHE_VERBOSITY:
+				Logs.pprint('CYAN', '  Successfully uploaded %s to cache' % files_from)
+			else:
+				Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
+			if WAFCACHE_STATS:
+				bld.cache_puts += 1
+		else:
+			if WAFCACHE_VERBOSITY:
+				Logs.pprint('RED', '  Error caching step results %s: %s' % (files_from, err))
+			else:
+				Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
+
+	if old_sig == sig:
+		ssig = Utils.to_hex(self.uid() + sig)
+		if WAFCACHE_ASYNC_WORKERS:
+			fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from)
+			bld.wafcache_uploads.append(fut)
+		else:
+			_async_put_files_cache(bld, ssig, files_from)
+	else:
+		Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs)
+
+	bld.task_sigs[self.uid()] = self.cache_sig
+
+def hash_env_vars(self, env, vars_lst):
+	"""
+	Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths
+	"""
+	if not env.table:
+		env = env.parent
+		if not env:
+			return Utils.SIG_NIL
+
+	idx = str(id(env)) + str(vars_lst)
+	try:
+		cache = self.cache_env
+	except AttributeError:
+		cache = self.cache_env = {}
+	else:
+		try:
+			return self.cache_env[idx]
+		except KeyError:
+			pass
+
+	v = str([env[a] for a in vars_lst])
+	v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
+	m = Utils.md5()
+	m.update(v.encode())
+	ret = m.digest()
+
+	Logs.debug('envhash: %r %r', ret, v)
+
+	cache[idx] = ret
+
+	return ret
+
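+# Hedged illustration, not upstream code: the replace() above makes the hash
+# relocatable. With a source tree at '/home/me/proj' (path hypothetical), a
+# variable value under that tree loses the prefix before hashing:
+#
+#	str(['/home/me/proj/include']).replace(repr('/home/me/proj')[:-1], '')
+#	# -> "[/include']"
+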
+def uid(self):
+	"""
+	Reimplement Task.uid() so that the signature does not depend on local paths
+	"""
+	try:
+		return self.uid_
+	except AttributeError:
+		m = Utils.md5()
+		src = self.generator.bld.srcnode
+		up = m.update
+		up(self.__class__.__name__.encode())
+		for x in self.inputs + self.outputs:
+			up(x.path_from(src).encode())
+		self.uid_ = m.digest()
+		return self.uid_
+
+
+def make_cached(cls):
+	"""
+	Enable the waf cache for a given task class
+	"""
+	if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False):
+		return
+
+	full_name = "%s.%s" % (cls.__module__, cls.__name__)
+	if full_name in ('waflib.Tools.ccroot.vnum', 'waflib.Build.inst'):
+		return
+
+	m1 = getattr(cls, 'run', None)
+	def run(self):
+		if getattr(self, 'nocache', False):
+			return m1(self)
+		if self.can_retrieve_cache():
+			return 0
+		return m1(self)
+	cls.run = run
+
+	m2 = getattr(cls, 'post_run', None)
+	def post_run(self):
+		if getattr(self, 'nocache', False):
+			return m2(self)
+		ret = m2(self)
+		self.put_files_cache()
+		return ret
+	cls.post_run = post_run
+	cls.has_cache = True
+
+process_pool = []
+def get_process():
+	"""
+	Returns a worker process that can process waf cache commands
+	The worker process is assumed to be returned to the process pool when unused
+	"""
+	try:
+		return process_pool.pop()
+	except IndexError:
+		filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py'
+		cmd = [sys.executable, '-c', Utils.readf(filepath)]
+		return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
+
+def atexit_pool():
+	for proc in process_pool:
+		proc.kill()
+atexit.register(atexit_pool)
+
+def build(bld):
+	"""
+	Called during the build process to enable file caching
+	"""
+
+	if WAFCACHE_ASYNC_WORKERS:
+		try:
+			num_workers = int(WAFCACHE_ASYNC_WORKERS)
+		except ValueError:
+			Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS)
+		else:
+			from concurrent.futures import ThreadPoolExecutor
+			bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers)
+			bld.wafcache_uploads = []
+			bld.wafcache_procs = set([])
+			bld.wafcache_stop = False
+			bld.wafcache_lock = threading.Lock()
+
+		def finalize_upload_async(bld):
+			if WAFCACHE_ASYNC_NOWAIT:
+				with bld.wafcache_lock:
+					bld.wafcache_stop = True
+
+				for fut in reversed(bld.wafcache_uploads):
+					fut.cancel()
+
+				for proc in bld.wafcache_procs:
+					proc.kill()
+
+				bld.wafcache_procs.clear()
+			else:
+				Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads))
+			bld.wafcache_executor.shutdown(wait=True)
+		bld.add_post_fun(finalize_upload_async)
+
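+	# Hedged shell example (values illustrative): asynchronous uploads are
+	# driven by the environment variables documented above, e.g.
+	#   WAFCACHE_ASYNC_WORKERS=4 WAFCACHE_ASYNC_NOWAIT=1 waf build
+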
+	if WAFCACHE_STATS:
+		# Init counter for statistics and hook to print results at the end
+		bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
+
+		def printstats(bld):
+			hit_ratio = 0
+			if bld.cache_reqs > 0:
+				hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
+			Logs.pprint('CYAN', '  wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' %
+					 (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
+		bld.add_post_fun(printstats)
+
+	if process_pool:
+		# already called once
+		return
+
+	# pre-allocation
+	processes = [get_process() for x in range(bld.jobs)]
+	process_pool.extend(processes)
+
+	Task.Task.can_retrieve_cache = can_retrieve_cache
+	Task.Task.put_files_cache = put_files_cache
+	Task.Task.uid = uid
+	Build.BuildContext.hash_env_vars = hash_env_vars
+	for x in reversed(list(Task.classes.values())):
+		make_cached(x)
+
+def cache_command(proc, sig, files_from, files_to):
+	"""
+	Send a command to a cache worker process as a base64-encoded pickled
+	tuple containing the task signature, a list of files to cache and a
+	list of files to get from the cache (one of the lists is assumed to
+	be empty); returns the worker's decoded reply
+	"""
+	obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
+	proc.stdin.write(obj)
+	proc.stdin.write('\n'.encode())
+	proc.stdin.flush()
+	obj = proc.stdout.readline()
+	if not obj:
+		raise OSError('Preforked sub-process %r died' % proc.pid)
+	return cPickle.loads(base64.b64decode(obj))
+
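+# Illustrative sketch, not part of upstream waf: building the request line for
+# a worker, here asking it to fetch one file from the cache for a hypothetical
+# signature (the reply line uses the same base64+pickle encoding).
+def _example_request_line():
+	return base64.b64encode(cPickle.dumps(['aabbccdd', [], ['build/foo.o']])) + b'\n'
+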
+try:
+	copyfun = os.link
+except AttributeError:
+	copyfun = shutil.copy2
+
+def atomic_copy(orig, dest):
+	"""
+	Copy files to the cache; the operation is atomic for a given file
+	"""
+	global copyfun
+	tmp = dest + '.tmp'
+	up = os.path.dirname(dest)
+	try:
+		os.makedirs(up)
+	except OSError:
+		pass
+
+	try:
+		copyfun(orig, tmp)
+	except OSError as e:
+		if e.errno == errno.EXDEV:
+			copyfun = shutil.copy2
+			copyfun(orig, tmp)
+		else:
+			raise
+	os.rename(tmp, dest)
+
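+# Usage sketch (paths hypothetical, not upstream code): the copy lands in
+# 'dest + .tmp' first, so readers only ever observe a complete file once
+# os.rename() publishes it atomically on the same filesystem:
+#
+#	atomic_copy('build/foo.o', os.path.join(CACHE_DIR, 'aa', 'aabbcc', '0'))
+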
+def lru_trim():
+	"""
+	the cache folders take the form:
+	`CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9`
+	they are listed in order of last access, and then removed
+	until the number of folders is within TRIM_MAX_FOLDERS and the total space
+	taken by files is less than EVICT_MAX_BYTES
+	"""
+	lst = []
+	for up in os.listdir(CACHE_DIR):
+		if len(up) == 2:
+			sub = os.path.join(CACHE_DIR, up)
+			for hval in os.listdir(sub):
+				path = os.path.join(sub, hval)
+
+				size = 0
+				for fname in os.listdir(path):
+					try:
+						size += os.lstat(os.path.join(path, fname)).st_size
+					except OSError:
+						pass
+				lst.append((os.stat(path).st_mtime, size, path))
+
+	lst.sort(key=lambda x: x[0])
+	lst.reverse()
+
+	tot = sum(x[1] for x in lst)
+	while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS:
+		_, tmp_size, path = lst.pop()
+		tot -= tmp_size
+
+		tmp = path + '.remove'
+		try:
+			shutil.rmtree(tmp)
+		except OSError:
+			pass
+		try:
+			os.rename(path, tmp)
+		except OSError:
+			sys.stderr.write('Could not rename %r to %r\n' % (path, tmp))
+		else:
+			try:
+				shutil.rmtree(tmp)
+			except OSError:
+				sys.stderr.write('Could not remove %r\n' % tmp)
+	sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst)))
+
+
+def lru_evict():
+	"""
+	Reduce the cache size
+	"""
+	lockfile = os.path.join(CACHE_DIR, 'all.lock')
+	try:
+		st = os.stat(lockfile)
+	except EnvironmentError as e:
+		if e.errno == errno.ENOENT:
+			with open(lockfile, 'w') as f:
+				f.write('')
+			return
+		else:
+			raise
+
+	if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60:
+		# check every EVICT_INTERVAL_MINUTES minutes if the cache is too big
+		# O_CLOEXEC is unnecessary because no processes are spawned
+		fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755)
+		try:
+			try:
+				fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+			except EnvironmentError:
+				if WAFCACHE_VERBOSITY:
+					sys.stderr.write('wafcache: another cleaning process is running\n')
+			else:
+				# now do the actual cleanup
+				lru_trim()
+				os.utime(lockfile, None)
+		finally:
+			os.close(fd)
+
+class netcache(object):
+	def __init__(self):
+		self.http = urllib3.PoolManager()
+
+	def url_of(self, sig, i):
+		return "%s/%s/%s" % (CACHE_DIR, sig, i)
+
+	def upload(self, file_path, sig, i):
+		url = self.url_of(sig, i)
+		with open(file_path, 'rb') as f:
+			file_data = f.read()
+		r = self.http.request('POST', url, timeout=60,
+			fields={ 'file': ('%s/%s' % (sig, i), file_data), })
+		if r.status >= 400:
+			raise OSError("Invalid status %r %r" % (url, r.status))
+
+	def download(self, file_path, sig, i):
+		url = self.url_of(sig, i)
+		with self.http.request('GET', url, preload_content=False, timeout=60) as inf:
+			if inf.status >= 400:
+				raise OSError("Invalid status %r %r" % (url, inf.status))
+			with open(file_path, 'wb') as out:
+				shutil.copyfileobj(inf, out)
+
+	def copy_to_cache(self, sig, files_from, files_to):
+		try:
+			for i, x in enumerate(files_from):
+				if not os.path.islink(x):
+					self.upload(x, sig, i)
+		except Exception:
+			return traceback.format_exc()
+		return OK
+
+	def copy_from_cache(self, sig, files_from, files_to):
+		try:
+			for i, x in enumerate(files_to):
+				self.download(x, sig, i)
+		except Exception:
+			return traceback.format_exc()
+		return OK
+
+class fcache(object):
+	def __init__(self):
+		if not os.path.exists(CACHE_DIR):
+			try:
+				os.makedirs(CACHE_DIR)
+			except OSError:
+				pass
+		if not os.path.exists(CACHE_DIR):
+			raise ValueError('Could not initialize the cache directory')
+
+	def copy_to_cache(self, sig, files_from, files_to):
+		"""
+		Copy files to the cache, existing files are overwritten,
+		and the copy is atomic only for a given file, not for all files
+		that belong to a given task object
+		"""
+		try:
+			for i, x in enumerate(files_from):
+				dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
+				atomic_copy(x, dest)
+		except Exception:
+			return traceback.format_exc()
+		else:
+			# attempt trimming if caching was successful:
+			# we may have things to trim!
+			try:
+				lru_evict()
+			except Exception:
+				return traceback.format_exc()
+		return OK
+
+	def copy_from_cache(self, sig, files_from, files_to):
+		"""
+		Copy files from the cache
+		"""
+		try:
+			for i, x in enumerate(files_to):
+				orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
+				atomic_copy(orig, x)
+
+			# success! update the cache time
+			os.utime(os.path.join(CACHE_DIR, sig[:2], sig), None)
+		except Exception:
+			return traceback.format_exc()
+		return OK
+
+class bucket_cache(object):
+	def bucket_copy(self, source, target):
+		if WAFCACHE_CMD:
+			def replacer(match):
+				if match.group('src'):
+					return source
+				elif match.group('tgt'):
+					return target
+			cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)]
+		elif CACHE_DIR.startswith('s3://'):
+			cmd = ['aws', 's3', 'cp', source, target]
+		elif CACHE_DIR.startswith('gs://'):
+			cmd = ['gsutil', 'cp', source, target]
+		else:
+			cmd = ['mc', 'cp', source, target]
+
+		proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+		out, err = proc.communicate()
+		if proc.returncode:
+			raise OSError('Error copying %r to %r using: %r (exit %r):\n  out:%s\n  err:%s' % (
+				source, target, cmd, proc.returncode, out.decode(errors='replace'), err.decode(errors='replace')))
+
+	def copy_to_cache(self, sig, files_from, files_to):
+		try:
+			for i, x in enumerate(files_from):
+				dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
+				self.bucket_copy(x, dest)
+		except Exception:
+			return traceback.format_exc()
+		return OK
+
+	def copy_from_cache(self, sig, files_from, files_to):
+		try:
+			for i, x in enumerate(files_to):
+				orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
+				self.bucket_copy(orig, x)
+		except EnvironmentError:
+			return traceback.format_exc()
+		return OK
+
+def loop(service):
+	"""
+	This function is run when this file is run as a standalone python script;
+	it assumes a parent process that communicates commands to it
+	as base64-encoded pickled tuples (one line per command)
+
+	The commands are to copy files to the cache or copy files from the
+	cache to a target destination
+	"""
+	# one operation is performed at a time by a single process
+	# therefore stdin never has more than one line
+	txt = sys.stdin.readline().strip()
+	if not txt:
+		# parent process probably ended
+		sys.exit(1)
+	ret = OK
+
+	[sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt))
+	if files_from:
+		# TODO return early when pushing files upstream
+		ret = service.copy_to_cache(sig, files_from, files_to)
+	elif files_to:
+		# the build process waits for workers to (possibly) obtain files from the cache
+		ret = service.copy_from_cache(sig, files_from, files_to)
+	else:
+		ret = "Invalid command"
+
+	obj = base64.b64encode(cPickle.dumps(ret))
+	sys.stdout.write(obj.decode())
+	sys.stdout.write('\n')
+	sys.stdout.flush()
+
+if __name__ == '__main__':
+	if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'):
+		if CACHE_DIR.startswith('minio://'):
+			CACHE_DIR = CACHE_DIR[8:]   # minio doesn't need the protocol part, uses config aliases
+		service = bucket_cache()
+	elif CACHE_DIR.startswith('http'):
+		service = netcache()
+	else:
+		service = fcache()
+	while 1:
+		try:
+			loop(service)
+		except KeyboardInterrupt:
+			break
+
diff --git a/third_party/waf/waflib/extras/why.py b/third_party/waf/waflib/extras/why.py
new file mode 100644
index 0000000..1bb941f
--- /dev/null
+++ b/third_party/waf/waflib/extras/why.py
@@ -0,0 +1,78 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This tool modifies the task signature scheme to store and obtain
+information about the task execution (why it must run, etc)::
+
+	def configure(conf):
+		conf.load('why')
+
+After adding the tool, a full rebuild is necessary:
+waf clean build --zones=task
+"""
+
+from waflib import Task, Utils, Logs, Errors
+
+def signature(self):
+	# compute the result once, assuming the scanner signature will give the correct result
+	try:
+		return self.cache_sig
+	except AttributeError:
+		pass
+
+	self.m = Utils.md5()
+	self.m.update(self.hcode)
+	id_sig = self.m.digest()
+
+	# explicit deps
+	self.m = Utils.md5()
+	self.sig_explicit_deps()
+	exp_sig = self.m.digest()
+
+	# env vars
+	self.m = Utils.md5()
+	self.sig_vars()
+	var_sig = self.m.digest()
+
+	# implicit deps / scanner results
+	self.m = Utils.md5()
+	if self.scan:
+		try:
+			self.sig_implicit_deps()
+		except Errors.TaskRescan:
+			return self.signature()
+	impl_sig = self.m.digest()
+
+	ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
+	return ret
+
+
+Task.Task.signature = signature
+
+old = Task.Task.runnable_status
+def runnable_status(self):
+	ret = old(self)
+	if ret == Task.RUN_ME:
+		try:
+			old_sigs = self.generator.bld.task_sigs[self.uid()]
+		except (KeyError, AttributeError):
+			Logs.debug("task: task must run as no previous signature exists")
+		else:
+			new_sigs = self.cache_sig
+			def v(x):
+				return Utils.to_hex(x)
+
+			Logs.debug('Task %r', self)
+			msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
+			tmp = 'task: -> %s: %s %s'
+			for x in range(len(msgs)):
+				l = len(Utils.SIG_NIL)
+				a = new_sigs[x*l : (x+1)*l]
+				b = old_sigs[x*l : (x+1)*l]
+				if (a != b):
+					Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b))
+	return ret
+Task.Task.runnable_status = runnable_status
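+# Note, not upstream code: with md5 signatures each chunk compared above is 16
+# bytes (len(Utils.SIG_NIL)), so new_sigs[0:16] is the implicit/scanner hash,
+# [16:32] the task code hash, [32:48] the explicit-dependency hash and
+# [48:64] the variable hash, matching the concatenation order in signature().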
+
diff --git a/third_party/waf/waflib/extras/win32_opts.py b/third_party/waf/waflib/extras/win32_opts.py
new file mode 100644
index 0000000..9f7443c
--- /dev/null
+++ b/third_party/waf/waflib/extras/win32_opts.py
@@ -0,0 +1,170 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Windows-specific optimizations
+
+This module can help reduce the overhead of listing files on windows
+(more than 10000 files). Note that Python 3.5 already provides the listdir
+optimization.
+"""
+
+import os
+from waflib import Utils, Build, Node, Logs
+
+try:
+	TP = '%s\\*'.decode('ascii')
+except AttributeError:
+	TP = '%s\\*'
+
+if Utils.is_win32:
+	from waflib.Tools import md5_tstamp
+	import ctypes, ctypes.wintypes
+
+	FindFirstFile        = ctypes.windll.kernel32.FindFirstFileW
+	FindNextFile         = ctypes.windll.kernel32.FindNextFileW
+	FindClose            = ctypes.windll.kernel32.FindClose
+	FILE_ATTRIBUTE_DIRECTORY = 0x10
+	INVALID_HANDLE_VALUE = -1
+	UPPER_FOLDERS = ('.', '..')
+	try:
+		UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
+	except NameError:
+		pass
+
+	def cached_hash_file(self):
+		try:
+			cache = self.ctx.cache_listdir_cache_hash_file
+		except AttributeError:
+			cache = self.ctx.cache_listdir_cache_hash_file = {}
+
+		if id(self.parent) in cache:
+			try:
+				t = cache[id(self.parent)][self.name]
+			except KeyError:
+				raise IOError('Not a file')
+		else:
+			# an opportunity to list the files and the timestamps at once
+			findData = ctypes.wintypes.WIN32_FIND_DATAW()
+			find     = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
+
+			if find == INVALID_HANDLE_VALUE:
+				cache[id(self.parent)] = {}
+				raise IOError('Not a file')
+
+			cache[id(self.parent)] = lst_files = {}
+			try:
+				while True:
+					if findData.cFileName not in UPPER_FOLDERS:
+						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
+						if not thatsadir:
+							ts = findData.ftLastWriteTime
+							d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime
+							lst_files[str(findData.cFileName)] = d
+					if not FindNextFile(find, ctypes.byref(findData)):
+						break
+			except Exception:
+				cache[id(self.parent)] = {}
+				raise IOError('Not a file')
+			finally:
+				FindClose(find)
+			t = lst_files[self.name]
+
+		fname = self.abspath()
+		if fname in Build.hashes_md5_tstamp:
+			if Build.hashes_md5_tstamp[fname][0] == t:
+				return Build.hashes_md5_tstamp[fname][1]
+
+		try:
+			fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
+		except OSError:
+			raise IOError('Cannot read from %r' % fname)
+		f = os.fdopen(fd, 'rb')
+		m = Utils.md5()
+		rb = 1
+		try:
+			while rb:
+				rb = f.read(200000)
+				m.update(rb)
+		finally:
+			f.close()
+
+		# ensure that the cache is overwritten
+		Build.hashes_md5_tstamp[fname] = (t, m.digest())
+		return m.digest()
+	Node.Node.cached_hash_file = cached_hash_file
+
+	def get_bld_sig_win32(self):
+		try:
+			return self.ctx.hash_cache[id(self)]
+		except KeyError:
+			pass
+		except AttributeError:
+			self.ctx.hash_cache = {}
+		self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
+		return ret
+	Node.Node.get_bld_sig = get_bld_sig_win32
+
+	def isfile_cached(self):
+		# optimize for nt.stat calls, assuming there are many files for few folders
+		try:
+			cache = self.__class__.cache_isfile_cache
+		except AttributeError:
+			cache = self.__class__.cache_isfile_cache = {}
+
+		try:
+			c1 = cache[id(self.parent)]
+		except KeyError:
+			c1 = cache[id(self.parent)] = []
+
+			curpath = self.parent.abspath()
+			findData = ctypes.wintypes.WIN32_FIND_DATAW()
+			find     = FindFirstFile(TP % curpath, ctypes.byref(findData))
+
+			if find == INVALID_HANDLE_VALUE:
+				Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
+				return os.path.isfile(self.abspath())
+
+			try:
+				while True:
+					if findData.cFileName not in UPPER_FOLDERS:
+						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
+						if not thatsadir:
+							c1.append(str(findData.cFileName))
+					if not FindNextFile(find, ctypes.byref(findData)):
+						break
+			except Exception as e:
+				Logs.error('exception while listing a folder %r %r', self.abspath(), e)
+				return os.path.isfile(self.abspath())
+			finally:
+				FindClose(find)
+		return self.name in c1
+	Node.Node.isfile_cached = isfile_cached
+
+	def find_or_declare_win32(self, lst):
+		# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		node = self.get_bld().search_node(lst)
+		if node:
+			if not node.isfile_cached():
+				try:
+					node.parent.mkdir()
+				except OSError:
+					pass
+			return node
+		self = self.get_src()
+		node = self.find_node(lst)
+		if node:
+			if not node.isfile_cached():
+				try:
+					node.parent.mkdir()
+				except OSError:
+					pass
+			return node
+		node = self.get_bld().make_node(lst)
+		node.parent.mkdir()
+		return node
+	Node.Node.find_or_declare = find_or_declare_win32
+
diff --git a/third_party/waf/waflib/extras/wix.py b/third_party/waf/waflib/extras/wix.py
new file mode 100644
index 0000000..d87bfbb
--- /dev/null
+++ b/third_party/waf/waflib/extras/wix.py
@@ -0,0 +1,87 @@
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Windows Installer XML Tool (WiX)
+
+.wxs --- candle ---> .wxobj --- light ---> .msi
+
+bld(features='wix', source='some.wxs', gen='some.msi', candleflags=[..], lightflags=[..])
+
+bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
+"""
+
+import os, copy
+from waflib import TaskGen
+from waflib import Task
+from waflib.Utils import winreg
+
+class candle(Task.Task):
+	run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}'
+
+class light(Task.Task):
+	run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
+
+@TaskGen.feature('wix')
+@TaskGen.before_method('process_source')
+def wix(self):
+	#X.wxs -> ${SRC} for CANDLE
+	#X.wxobj -> ${SRC} for LIGHT
+	#X.dll -> -ext X in ${LIGHTFLAGS}
+	#X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
+	wxobj = []
+	wxs = []
+	exts = []
+	wxl = []
+	rest = []
+	for x in self.source:
+		if x.endswith('.wxobj'):
+			wxobj.append(x)
+		elif x.endswith('.wxs'):
+			wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
+			wxs.append(x)
+		elif x.endswith('.dll'):
+			exts.append(x[:-4])
+		elif '.' not in x:
+			exts.append(x)
+		elif x.endswith('.wxl'):
+			wxl.append(x)
+		else:
+			rest.append(x)
+	self.source = self.to_nodes(rest) #.wxs
+
+	cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
+	lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
+
+	cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
+	lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
+
+	for x in wxl:
+		lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
+		lght.env.append_value('LIGHTFLAGS','-loc')
+		lght.env.append_value('LIGHTFLAGS',x)
+	for x in exts:
+		cndl.env.append_value('CANDLEFLAGS','-ext')
+		cndl.env.append_value('CANDLEFLAGS',x)
+		lght.env.append_value('LIGHTFLAGS','-ext')
+		lght.env.append_value('LIGHTFLAGS',x)
+
+#wix_bin_path()
+def wix_bin_path():
+	basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
+	query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
+	cnt=winreg.QueryInfoKey(query)[0]
+	thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
+	for i in range(cnt-1,-1,-1):
+		thiskey = winreg.EnumKey(query,i)
+		if 'WiX' in thiskey:
+			break
+	winreg.CloseKey(query)
+	return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
+
+def configure(ctx):
+	path_list=[wix_bin_path()]
+	ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
+	ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
+
diff --git a/third_party/waf/waflib/extras/xcode6.py b/third_party/waf/waflib/extras/xcode6.py
new file mode 100644
index 0000000..c5b3091
--- /dev/null
+++ b/third_party/waf/waflib/extras/xcode6.py
@@ -0,0 +1,727 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
+# Based on work by Nicolas Mercier 2011
+# Extended by Simon Warg 2015, https://github.com/mimon
+# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html
+
+"""
+See playground/xcode6/ for usage examples.
+
+"""
+
+from waflib import Context, TaskGen, Build, Utils, Errors, Logs
+import os, sys
+
+# FIXME too few extensions
+XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+MAP_EXT = {
+	'': "folder",
+	'.h' :  "sourcecode.c.h",
+
+	'.hh':  "sourcecode.cpp.h",
+	'.inl': "sourcecode.cpp.h",
+	'.hpp': "sourcecode.cpp.h",
+
+	'.c':   "sourcecode.c.c",
+
+	'.m':   "sourcecode.c.objc",
+
+	'.mm':  "sourcecode.cpp.objcpp",
+
+	'.cc':  "sourcecode.cpp.cpp",
+
+	'.cpp': "sourcecode.cpp.cpp",
+	'.C':   "sourcecode.cpp.cpp",
+	'.cxx': "sourcecode.cpp.cpp",
+	'.c++': "sourcecode.cpp.cpp",
+
+	'.l':   "sourcecode.lex", # luthor
+	'.ll':  "sourcecode.lex",
+
+	'.y':   "sourcecode.yacc",
+	'.yy':  "sourcecode.yacc",
+
+	'.plist': "text.plist.xml",
+	".nib":   "wrapper.nib",
+	".xib":   "text.xib",
+}
+
+# Used in PBXNativeTarget elements
+PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
+PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
+PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
+PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
+PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
+PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
+PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
+
+# Used in PBXFileReference elements
+FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
+FILE_TYPE_FRAMEWORK = 'wrapper.framework'
+FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
+FILE_TYPE_LIB_STATIC = 'archive.ar'
+FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
+
+# Tuple packs of the above
+TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
+TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
+TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
+TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
+TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
+
+# Maps target type string to its data
+TARGET_TYPES = {
+	'framework': TARGET_TYPE_FRAMEWORK,
+	'app': TARGET_TYPE_APPLICATION,
+	'dylib': TARGET_TYPE_DYNAMIC_LIB,
+	'stlib': TARGET_TYPE_STATIC_LIB,
+	'exe' :TARGET_TYPE_EXECUTABLE,
+}
+
+def delete_invalid_values(dct):
+	""" Deletes entries that are dictionaries or sets """
+	for k, v in list(dct.items()):
+		if isinstance(v, dict) or isinstance(v, set):
+			del dct[k]
+	return dct
+
+"""
+Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
+which is a dictionary of configuration name and buildsettings pair.
+E.g.:
+env.PROJ_CONFIGURATION = {
+	'Debug': {
+		'ARCHS': 'x86',
+		...
+	},
+	'Release': {
+		'ARCHS': 'x86_64',
+		...
+	}
+}
+The user can define a completely customized dictionary in the configure() stage. Otherwise a default Debug/Release pair will be created
+based on the env variables.
+"""
+def configure(self):
+	if not self.env.PROJ_CONFIGURATION:
+		self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
+
+	# Check for any added config files added by the tool 'c_config'.
+	if 'cfg_files' in self.env:
+		self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
+
+	# Create default project configuration?
+	if 'PROJ_CONFIGURATION' not in self.env:
+		defaults = delete_invalid_values(self.env.get_merged_dict())
+		self.env.PROJ_CONFIGURATION = {
+			"Debug": defaults,
+			"Release": defaults,
+		}
+
+	# Some build settings are required to be present by XCode. We will supply default values
+	# if user hasn't defined any.
+	defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
+	for cfgname,settings in self.env.PROJ_CONFIGURATION.items():
+		for default_var, default_val in defaults_required:
+			if default_var not in settings:
+				settings[default_var] = default_val
+
+	# Error check customization
+	if not isinstance(self.env.PROJ_CONFIGURATION, dict):
+		raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
+
+part1 = 0
+part2 = 10000
+part3 = 0
+id = 562000999
+def newid():
+	global id
+	id += 1
+	return "%04X%04X%04X%012d" % (0, 10000, 0, id)
+
+"""
+Represents a tree node in the XCode project plist file format.
+When written to a file, all attributes of an XCodeNode are stringified together with
+their values. However, attributes starting with an underscore _ are ignored
+during that process, which allows you to store arbitrary values that are not supposed
+to be written out.
+"""
+class XCodeNode(object):
+	def __init__(self):
+		self._id = newid()
+		self._been_written = False
+
+	def tostring(self, value):
+		if isinstance(value, dict):
+			result = "{\n"
+			for k,v in value.items():
+				result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
+			result = result + "\t\t}"
+			return result
+		elif isinstance(value, str):
+			return '"%s"' % value.replace('"', '\\\\\\"')
+		elif isinstance(value, list):
+			result = "(\n"
+			for i in value:
+				result = result + "\t\t\t\t%s,\n" % self.tostring(i)
+			result = result + "\t\t\t)"
+			return result
+		elif isinstance(value, XCodeNode):
+			return value._id
+		else:
+			return str(value)
+
+	def write_recursive(self, value, file):
+		if isinstance(value, dict):
+			for k,v in value.items():
+				self.write_recursive(v, file)
+		elif isinstance(value, list):
+			for i in value:
+				self.write_recursive(i, file)
+		elif isinstance(value, XCodeNode):
+			value.write(file)
+
+	def write(self, file):
+		if not self._been_written:
+			self._been_written = True
+			for attribute,value in self.__dict__.items():
+				if attribute[0] != '_':
+					self.write_recursive(value, file)
+			w = file.write
+			w("\t%s = {\n" % self._id)
+			w("\t\tisa = %s;\n" % self.__class__.__name__)
+			for attribute,value in self.__dict__.items():
+				if attribute[0] != '_':
+					w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
+			w("\t};\n\n")
+
+# Configurations
+class XCBuildConfiguration(XCodeNode):
+	def __init__(self, name, settings = {}, env=None):
+		XCodeNode.__init__(self)
+		self.baseConfigurationReference = ""
+		self.buildSettings = settings
+		self.name = name
+		if env and env.ARCH:
+			settings['ARCHS'] = " ".join(env.ARCH)
+
+
+class XCConfigurationList(XCodeNode):
+	def __init__(self, configlst):
+		""" :param configlst: list of XCBuildConfiguration objects """
+		XCodeNode.__init__(self)
+		self.buildConfigurations = configlst
+		self.defaultConfigurationIsVisible = 0
+		self.defaultConfigurationName = configlst and configlst[0].name or ""
+
+# Group/Files
+class PBXFileReference(XCodeNode):
+	def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
+
+		XCodeNode.__init__(self)
+		self.fileEncoding = 4
+		if not filetype:
+			_, ext = os.path.splitext(name)
+			filetype = MAP_EXT.get(ext, 'text')
+		self.lastKnownFileType = filetype
+		self.explicitFileType = filetype
+		self.name = name
+		self.path = path
+		self.sourceTree = sourcetree
+
+	def __hash__(self):
+		return (self.path+self.name).__hash__()
+
+	def __eq__(self, other):
+		return (self.path, self.name) == (other.path, other.name)
+
+class PBXBuildFile(XCodeNode):
+	""" This element indicates a file reference that is used in a PBXBuildPhase (either as an include or resource). """
+	def __init__(self, fileRef, settings={}):
+		XCodeNode.__init__(self)
+
+		# fileRef is a reference to a PBXFileReference object
+		self.fileRef = fileRef
+
+		# A map of key/value pairs for additional settings.
+		self.settings = settings
+
+	def __hash__(self):
+		return (self.fileRef).__hash__()
+
+	def __eq__(self, other):
+		return self.fileRef == other.fileRef
+
+class PBXGroup(XCodeNode):
+	def __init__(self, name, sourcetree = 'SOURCE_TREE'):
+		XCodeNode.__init__(self)
+		self.children = []
+		self.name = name
+		self.sourceTree = sourcetree
+
+		# Maintain a lookup table for all PBXFileReferences
+		# that are contained in this group.
+		self._filerefs = {}
+
+	def add(self, sources):
+		"""
+		Add a list of PBXFileReferences to this group
+
+		:param sources: list of PBXFileReferences objects
+		"""
+		self._filerefs.update(dict(zip(sources, sources)))
+		self.children.extend(sources)
+
+	def get_sub_groups(self):
+		"""
+		Returns all child PBXGroup objects contained in this group
+		"""
+		return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
+
+	def find_fileref(self, fileref):
+		"""
+		Recursively search this group for an existing PBXFileReference. Returns None
+		if none were found.
+
+		The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy;
+		if a reference isn't, certain UI features like 'Reveal in Finder'
+		stop working.
+		"""
+		if fileref in self._filerefs:
+			return self._filerefs[fileref]
+		elif self.children:
+			for childgroup in self.get_sub_groups():
+				f = childgroup.find_fileref(fileref)
+				if f:
+					return f
+		return None
+
+class PBXContainerItemProxy(XCodeNode):
+	""" This is the element used to decorate a target item. """
+	def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
+		XCodeNode.__init__(self)
+		self.containerPortal = containerPortal # PBXProject
+		self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
+		self.remoteInfo = remoteInfo # Target name
+		self.proxyType = proxyType
+
+class PBXTargetDependency(XCodeNode):
+	""" This is the element for referencing other targets through content proxies. """
+	def __init__(self, native_target, proxy):
+		XCodeNode.__init__(self)
+		self.target = native_target
+		self.targetProxy = proxy
+
+class PBXFrameworksBuildPhase(XCodeNode):
+	""" This is the element for the framework link build phase, i.e. linking to frameworks """
+	def __init__(self, pbxbuildfiles):
+		XCodeNode.__init__(self)
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXHeadersBuildPhase(XCodeNode):
+	""" This is the element for adding header files to be packaged into the .framework """
+	def __init__(self, pbxbuildfiles):
+		XCodeNode.__init__(self)
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXCopyFilesBuildPhase(XCodeNode):
+	"""
+	Represents the PBXCopyFilesBuildPhase section. PBXBuildFile objects
+	can be added to this node to copy files after the build is done.
+	"""
+	def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
+		XCodeNode.__init__(self)
+		self.files = pbxbuildfiles
+		self.dstPath = dstpath
+		self.dstSubfolderSpec = dstSubpathSpec
+
+class PBXSourcesBuildPhase(XCodeNode):
+	""" Represents the 'Compile Sources' build phase in an Xcode target """
+	def __init__(self, buildfiles):
+		XCodeNode.__init__(self)
+		self.files = buildfiles # List of PBXBuildFile objects
+
+class PBXLegacyTarget(XCodeNode):
+	def __init__(self, action, target=''):
+		XCodeNode.__init__(self)
+		self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
+		if not target:
+			self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
+		else:
+			self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
+		self.buildPhases = []
+		self.buildToolPath = sys.executable
+		self.buildWorkingDirectory = ""
+		self.dependencies = []
+		self.name = target or action
+		self.productName = target or action
+		self.passBuildSettingsInEnvironment = 0
+
+class PBXShellScriptBuildPhase(XCodeNode):
+	def __init__(self, action, target):
+		XCodeNode.__init__(self)
+		self.buildActionMask = 2147483647
+		self.files = []
+		self.inputPaths = []
+		self.outputPaths = []
+		self.runOnlyForDeploymentPostProcessing = 0
+		self.shellPath = "/bin/sh"
+		self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
+
+class PBXNativeTarget(XCodeNode):
+	""" Represents a target in XCode, e.g. App, DyLib, Framework etc. """
+	def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]):
+		XCodeNode.__init__(self)
+		product_type = target_type[0]
+		file_type = target_type[1]
+
+		self.buildConfigurationList = XCConfigurationList(configlist)
+		self.buildPhases = buildphases
+		self.buildRules = []
+		self.dependencies = []
+		self.name = target
+		self.productName = target
+		self.productType = product_type # See TARGET_TYPE_ tuples constants
+		self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
+
+	def add_configuration(self, cf):
+		""" :type cf: XCBuildConfiguration """
+		self.buildConfigurationList.buildConfigurations.append(cf)
+
+	def add_build_phase(self, phase):
+		# Some build phase types may appear only once. If a phase type already exists, then merge them.
+		if ( (phase.__class__ == PBXFrameworksBuildPhase)
+			or (phase.__class__ == PBXSourcesBuildPhase) ):
+			for b in self.buildPhases:
+				if b.__class__ == phase.__class__:
+					b.files.extend(phase.files)
+					return
+		self.buildPhases.append(phase)
+
+	def add_dependency(self, depnd):
+		self.dependencies.append(depnd)
+
+# Root project object
+class PBXProject(XCodeNode):
+	def __init__(self, name, version, env):
+		XCodeNode.__init__(self)
+
+		if not isinstance(env.PROJ_CONFIGURATION, dict):
+			raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
+
+		# Retrieve project configuration
+		configurations = []
+		for config_name, settings in env.PROJ_CONFIGURATION.items():
+			cf = XCBuildConfiguration(config_name, settings)
+			configurations.append(cf)
+
+		self.buildConfigurationList = XCConfigurationList(configurations)
+		self.compatibilityVersion = version[0]
+		self.hasScannedForEncodings = 1
+		self.mainGroup = PBXGroup(name)
+		self.projectRoot = ""
+		self.projectDirPath = ""
+		self.targets = []
+		self._objectVersion = version[1]
+
+	def create_target_dependency(self, target, name):
+		""" :param target: PBXNativeTarget """
+		proxy = PBXContainerItemProxy(self, target, name)
+		dependency = PBXTargetDependency(target, proxy)
+		return dependency
+
+	def write(self, file):
+
+		# Make sure this is written only once
+		if self._been_written:
+			return
+
+		w = file.write
+		w("// !$*UTF8*$!\n")
+		w("{\n")
+		w("\tarchiveVersion = 1;\n")
+		w("\tclasses = {\n")
+		w("\t};\n")
+		w("\tobjectVersion = %d;\n" % self._objectVersion)
+		w("\tobjects = {\n\n")
+
+		XCodeNode.write(self, file)
+
+		w("\t};\n")
+		w("\trootObject = %s;\n" % self._id)
+		w("}\n")
+
+	def add_target(self, target):
+		self.targets.append(target)
+
+	def get_target(self, name):
+		""" Get a reference to PBXNativeTarget if it exists """
+		for t in self.targets:
+			if t.name == name:
+				return t
+		return None
+
+@TaskGen.feature('c', 'cxx')
+@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
+def process_xcode(self):
+	bld = self.bld
+	try:
+		p = bld.project
+	except AttributeError:
+		return
+
+	if not hasattr(self, 'target_type'):
+		return
+
+	products_group = bld.products_group
+
+	target_group = PBXGroup(self.name)
+	p.mainGroup.children.append(target_group)
+
+	# Determine what type to build - framework, app bundle etc.
+	target_type = getattr(self, 'target_type', 'app')
+	if target_type not in TARGET_TYPES:
+		raise Errors.WafError("Target type '%s' does not exist. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
+	else:
+		target_type = TARGET_TYPES[target_type]
+	file_ext = target_type[2]
+
+	# Create the output node
+	target_node = self.path.find_or_declare(self.name+file_ext)
+	target = PBXNativeTarget(self.name, target_node, target_type, [], [])
+
+	products_group.children.append(target.productReference)
+
+	# Pull source files from the 'source' attribute and assign them to a UI group.
+	# Use a default UI group named 'Source' unless the user
+	# provides a 'group_files' dictionary to customize the UI grouping.
+	sources = getattr(self, 'source', [])
+	if hasattr(self, 'group_files'):
+		group_files = getattr(self, 'group_files', [])
+		for grpname,files in group_files.items():
+			group = bld.create_group(grpname, files)
+			target_group.children.append(group)
+	else:
+		group = bld.create_group('Source', sources)
+		target_group.children.append(group)
+
+	# Create a PBXFileReference for each source file.
+	# If the source file already exists as a PBXFileReference in any of the UI groups, then
+	# reuse that PBXFileReference object (XCode does not like it if we don't reuse)
+	for idx, path in enumerate(sources):
+		fileref = PBXFileReference(path.name, path.abspath())
+		existing_fileref = target_group.find_fileref(fileref)
+		if existing_fileref:
+			sources[idx] = existing_fileref
+		else:
+			sources[idx] = fileref
+
+	# If the 'source' attribute contains any file extension that XCode can't work with,
+	# then remove it. The allowed file extensions are defined in XCODE_EXTS.
+	is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
+	sources = list(filter(is_valid_file_extension, sources))
+
+	buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
+	target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
+
+	# Check if any framework to link against is some other target we've made
+	libs = getattr(self, 'tmp_use_seen', [])
+	for lib in libs:
+		use_target = p.get_target(lib)
+		if use_target:
+			# Create an XCode dependency so that XCode knows to build the other target before this target
+			dependency = p.create_target_dependency(use_target, use_target.name)
+			target.add_dependency(dependency)
+
+			buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
+			target.add_build_phase(buildphase)
+			if lib in self.env.LIB:
+				self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
+
+	# If 'export_headers' is present, add files to the Headers build phase in xcode.
+	# These are files that'll get packed into the Framework for instance.
+	exp_hdrs = getattr(self, 'export_headers', [])
+	hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
+	files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
+	files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
+	buildphase = PBXHeadersBuildPhase(files)
+	target.add_build_phase(buildphase)
+
+	# Merge frameworks and libs into one list, and prefix the frameworks
+	frameworks = Utils.to_list(self.env.FRAMEWORK)
+	frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
+
+	libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
+	libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
+
+	# Override target specific build settings
+	bldsettings = {
+		'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
+		'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR),
+		'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
+		'OTHER_LDFLAGS': libs + ' ' + frameworks + ' ' + ' '.join(bld.env['LINKFLAGS']),
+		'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
+		'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
+		'INSTALL_PATH': [],
+		'GCC_PREPROCESSOR_DEFINITIONS': self.env['DEFINES']
+	}
+
+	# Install path
+	installpaths = Utils.to_list(getattr(self, 'install', []))
+	prodbuildfile = PBXBuildFile(target.productReference)
+	for instpath in installpaths:
+		bldsettings['INSTALL_PATH'].append(instpath)
+		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
+
+	if not bldsettings['INSTALL_PATH']:
+		del bldsettings['INSTALL_PATH']
+
+	# Create build settings which can override the project settings. Defaults to none if user
+	# did not pass argument. This will be filled up with target specific
+	# search paths, libs to link etc.
+	settings = getattr(self, 'settings', {})
+
+	# The keys represents different build configuration, e.g. Debug, Release and so on..
+	# Insert our generated build settings to all configuration names
+	keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
+	for k in keys:
+		if k in settings:
+			settings[k].update(bldsettings)
+		else:
+			settings[k] = bldsettings
+
+	for k,v in settings.items():
+		target.add_configuration(XCBuildConfiguration(k, v))
+
+	p.add_target(target)
+
+
+class xcode(Build.BuildContext):
+	cmd = 'xcode6'
+	fun = 'build'
+
+	def as_nodes(self, files):
+		""" Returns a list of waflib.Nodes from a list of file path strings """
+		nodes = []
+		for x in files:
+			if not isinstance(x, str):
+				d = x
+			else:
+				d = self.srcnode.find_node(x)
+				if not d:
+					raise Errors.WafError('File \'%s\' was not found' % x)
+			nodes.append(d)
+		return nodes
+
+	def create_group(self, name, files):
+		"""
+		Returns a new PBXGroup containing the files (paths) passed in the files arg
+		:type files: string
+		"""
+		group = PBXGroup(name)
+		"""
+		Do not use unique file references here, since XCode seems to allow only one file reference
+		to be referenced by a group.
+		"""
+		files_ = []
+		for d in self.as_nodes(Utils.to_list(files)):
+			fileref = PBXFileReference(d.name, d.abspath())
+			files_.append(fileref)
+		group.add(files_)
+		return group
+
+	def unique_buildfile(self, buildfile):
+		"""
+		Returns a unique buildfile, possibly an existing one.
+		Use this after you've constructed a PBXBuildFile to make sure there is
+		only one PBXBuildFile for the same file in the same project.
+		"""
+		try:
+			build_files = self.build_files
+		except AttributeError:
+			build_files = self.build_files = {}
+
+		if buildfile not in build_files:
+			build_files[buildfile] = buildfile
+		return build_files[buildfile]
+
+	def execute(self):
+		"""
+		Entry point
+		"""
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
+
+		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+
+		p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
+
+		# If we don't create a Products group, then
+		# XCode will create one, which entails that
+		# we'll start to see duplicate files in the UI
+		# for some reason.
+		products_group = PBXGroup('Products')
+		p.mainGroup.children.append(products_group)
+
+		self.project = p
+		self.products_group = products_group
+
+		# post all task generators
+		# the process_xcode method above will be called for each target
+		if self.targets and self.targets != '*':
+			(self._min_grp, self._exact_tg) = self.get_targets()
+
+		self.current_group = 0
+		while self.current_group < len(self.groups):
+			self.post_group()
+			self.current_group += 1
+
+		node = self.bldnode.make_node('%s.xcodeproj' % appname)
+		node.mkdir()
+		node = node.make_node('project.pbxproj')
+		with open(node.abspath(), 'w') as f:
+			p.write(f)
+		Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
+
+def bind_fun(tgtype):
+	def fun(self, *k, **kw):
+		tgtype = fun.__name__
+		if tgtype == 'shlib' or tgtype == 'dylib':
+			features = 'cxx cxxshlib'
+			tgtype = 'dylib'
+		elif tgtype == 'framework':
+			features = 'cxx cxxshlib'
+			tgtype = 'framework'
+		elif tgtype == 'program':
+			features = 'cxx cxxprogram'
+			tgtype = 'exe'
+		elif tgtype == 'app':
+			features = 'cxx cxxprogram'
+			tgtype = 'app'
+		elif tgtype == 'stlib':
+			features = 'cxx cxxstlib'
+			tgtype = 'stlib'
+		lst = kw['features'] = Utils.to_list(kw.get('features', []))
+		for x in features.split():
+			if not x in kw['features']:
+				lst.append(x)
+
+		kw['target_type'] = tgtype
+		return self(*k, **kw)
+	fun.__name__ = tgtype
+	setattr(Build.BuildContext, tgtype, fun)
+	return fun
+
+for xx in 'app framework dylib shlib stlib program'.split():
+	bind_fun(xx)
+
diff --git a/third_party/waf/waflib/fixpy2.py b/third_party/waf/waflib/fixpy2.py
new file mode 100644
index 0000000..c99bff4
--- /dev/null
+++ b/third_party/waf/waflib/fixpy2.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2018 (ita)
+
+from __future__ import with_statement
+
+import os
+
+all_modifs = {}
+
+def fixdir(dir):
+	"""Call all substitution functions on Waf folders"""
+	for k in all_modifs:
+		for v in all_modifs[k]:
+			modif(os.path.join(dir, 'waflib'), k, v)
+
+def modif(dir, name, fun):
+	"""Call a substitution function"""
+	if name == '*':
+		lst = []
+		for y in '. Tools extras'.split():
+			for x in os.listdir(os.path.join(dir, y)):
+				if x.endswith('.py'):
+					lst.append(y + os.sep + x)
+		for x in lst:
+			modif(dir, x, fun)
+		return
+
+	filename = os.path.join(dir, name)
+	with open(filename, 'r') as f:
+		txt = f.read()
+
+	txt = fun(txt)
+
+	with open(filename, 'w') as f:
+		f.write(txt)
+
+def subst(*k):
+	"""register a substitution function"""
+	def do_subst(fun):
+		for x in k:
+			try:
+				all_modifs[x].append(fun)
+			except KeyError:
+				all_modifs[x] = [fun]
+		return fun
+	return do_subst
+
+@subst('*')
+def r1(code):
+	"exception syntax and utf-8 fixes for python < 2.6"
+	code = code.replace('as e:', ',e:')
+	code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
+	return code.replace('.encode()', '')
+
+@subst('Runner.py')
+def r4(code):
+	"generator syntax"
+	return code.replace('next(self.biter)', 'self.biter.next()').replace('self.daemon = True', 'self.setDaemon(1)')
+
+@subst('Context.py')
+def r5(code):
+	return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
+
diff --git a/third_party/waf/waflib/processor.py b/third_party/waf/waflib/processor.py
new file mode 100755
index 0000000..eff2e69
--- /dev/null
+++ b/third_party/waf/waflib/processor.py
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016-2018 (ita)
+
+import os, sys, traceback, base64, signal
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+
+try:
+	import subprocess32 as subprocess
+except ImportError:
+	import subprocess
+
+try:
+	TimeoutExpired = subprocess.TimeoutExpired
+except AttributeError:
+	class TimeoutExpired(Exception):
+		pass
+
+def run():
+	txt = sys.stdin.readline().strip()
+	if not txt:
+		# parent process probably ended
+		sys.exit(1)
+	[cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt))
+	cargs = cargs or {}
+
+	if 'close_fds' not in kwargs:
+		# workers have no fds
+		kwargs['close_fds'] = False
+
+	ret = 1
+	out, err, ex, trace = (None, None, None, None)
+	try:
+		proc = subprocess.Popen(cmd, **kwargs)
+		try:
+			out, err = proc.communicate(**cargs)
+		except TimeoutExpired:
+			if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
+				os.killpg(proc.pid, signal.SIGKILL)
+			else:
+				proc.kill()
+			out, err = proc.communicate()
+			exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
+			exc.stderr = err
+			raise exc
+		ret = proc.returncode
+	except Exception as e:
+		exc_type, exc_value, tb = sys.exc_info()
+		exc_lines = traceback.format_exception(exc_type, exc_value, tb)
+		trace = str(cmd) + '\n' + ''.join(exc_lines)
+		ex = e.__class__.__name__
+
+	# it is just text so maybe we do not need to pickle()
+	tmp = [ret, out, err, ex, trace]
+	obj = base64.b64encode(cPickle.dumps(tmp))
+	sys.stdout.write(obj.decode())
+	sys.stdout.write('\n')
+	sys.stdout.flush()
+
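+# Illustrative note, not upstream documentation: each request line is
+# base64(pickle([cmd, kwargs, cargs])) and each reply line is
+# base64(pickle([ret, out, err, ex, trace])); for example (values
+# hypothetical) cmd=['gcc', '-c', 'main.c'], kwargs={'cwd': '/tmp/build'},
+# cargs={}.
+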
+while 1:
+	try:
+		run()
+	except KeyboardInterrupt:
+		break
+
-- 
cgit v1.2.3